Mirror of https://github.com/robweber/xbmcbackup.git (synced 2026-01-07 00:04:44 +01:00)

Compare commits: krypton_cu...matrix-1.6 (49 commits)
Commits in this range (SHA1 only):

16e13c7d80, dd5b99c978, 6c99667afa, 6514b3db02, 88341d9e1f, 95649c2b3f, 3e9de429dd, db18c6a7b4, 35e05acaf2, 92ec8bf25c, 0c79aef4e7, fea7dca500, f7665c8ddd, bbbfc3dd84, 0b03914175, 51553f7720, 294683fb43, b74c1af704, edd4002d3f, 3aa912ca4c, 5c3e1712f6, 82bdc955b5, 4f1e5060e9, 7d895a6028, 7ede17fbbd, d32620ea18, def99767e8, c7a9a8512d, 8d07310980, 048d016e0e, c50c5245fc, e91037208b, c0b0fa82cb, 6ac1d3559b, d93589ecad, aa94060cfe, b9e0424ea5, 495ecb1048, a1c0c0bbfe, 9f570233d9, b38aff2a8e, 456ebe9374, 30f8b93629, 94f872fb81, 8f8402ae8a, db93e40f59, 72c77fb33a, 1f0e262c5b, b75487bb2a
.settings/org.eclipse.core.resources.prefs (new file, 2 lines)

```diff
@@ -0,0 +1,2 @@
+eclipse.preferences.version=1
+encoding//resources/lib/croniter.py=utf-8
```
.travis.yml

```diff
@@ -3,11 +3,12 @@ language: python
 python: 3.7
 
 install:
-- pip install kodi-addon-checker
+- pip install flake8 kodi-addon-checker
 
 before_script:
 - git config core.quotepath false
 
 # command to run our tests
 script:
-- kodi-addon-checker --branch=krypton --allow-folder-id-mismatch
+- flake8 ./ --statistics --show-source --ignore=E501,E722 --exclude=croniter.py,relativedelta.py,*/dropbox/* # check python structure against flake8 tests, ignore long lines
+- kodi-addon-checker --branch=matrix --allow-folder-id-mismatch
```
README.md (badge images were stripped by the mirror; only the link targets survive)

```diff
@@ -1,5 +1,5 @@
 # Backup Addon
-[](https://travis-ci.org/robweber/xbmcbackup) [](https://github.com/robweber/xbmcbackup/blob/master/LICENSE.txt)
+[](https://travis-ci.org/robweber/xbmcbackup) [](https://github.com/robweber/xbmcbackup/blob/master/LICENSE.txt) [](https://www.python.org/dev/peps/pep-0008/)
 
 ## About
 
```
addon.xml (18 lines changed)

```diff
@@ -1,20 +1,18 @@
 <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
 <addon id="script.xbmcbackup"
-       name="Backup" version="1.5.2" provider-name="robweber">
+       name="Backup" version="1.6.0" provider-name="robweber">
   <requires>
-    <!-- jarvis -->
-    <import addon="xbmc.python" version="2.25.0"/>
-    <import addon="script.module.httplib2" version="0.8.0" />
-    <import addon="script.module.oauth2client" version="4.1.2" />
-    <import addon="script.module.uritemplate" version="0.6" />
-    <import addon="script.module.yaml" version="3.11"/>
-    <import addon="script.module.googleapi" version="1.6.4" />
-    <import addon="script.module.dropbox" version="8.4.2"/>
+    <import addon="xbmc.python" version="3.0.0"/>
+    <import addon="script.module.dateutil" version="2.8.0" />
+    <import addon="script.module.future" version="0.16.0.4"/>
+    <!-- for dropbox -->
+    <import addon="script.module.requests" version="2.18.4"/>
+    <import addon="script.module.six" version="1.11.0"/>
   </requires>
   <extension point="xbmc.python.script" library="default.py">
     <provides>executable</provides>
   </extension>
-  <extension point="xbmc.service" library="scheduler.py" start="startup" />
+  <extension point="xbmc.service" library="scheduler.py" />
   <extension point="xbmc.addon.metadata">
     <summary lang="ar_SA">إنسخ إحتياطياً قاعده بيانات إكس بى إم سى وملفات اﻹعدادات فى حاله وقوع إنهيار مع إمكانيه اﻹسترجاع</summary>
     <summary lang="be_BY">Backup and restore your Kodi database and configuration files in the event of a crash or file corruption.</summary>
```
changelog.md (23 lines changed)

```diff
@@ -4,11 +4,30 @@ All notable changes to this project will be documented in this file.
 
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
 
-## [Unreleased](https://github.com/robweber/xbmcbackup/compare/krypton-1.5.2...HEAD)
+## [Version 1.6.0](https://github.com/robweber/xbmcbackup/compare/krypton-1.5.2...robweber:matrix-1.6.0) - 2019-11-26
 
 ### Added
 
 - added new badges for Kodi Version, TravisCI and license information from shields.io
+- dependency on script.module.dateutil for relativedelta.py class
+- add Dropbox library back in v 9.4.0, for Python 3 compatibility
+
+### Changed
+
+- addon.xml updated to use Leia specific syntax and library imports
+- removed specific encode() calls per Python2/3 compatibility
+- call isdigit() method on the string directly instead of str.isdigit() (results in unicode error)
+- added flake8 testing to travis-ci
+- updated code to make python3 compatible
+- updated code for pep8 styling
+- use setArt() to set ListItem icons as the icon= constructor is deprecated
+
+### Removed
+
+- removed need for urlparse library
+- Removed GoogleDrive support - issues with python 3 compatibility
+- removed script.module.dropbox dependency, need version 9+ and it isn't in the Kodi repo yet
+- removed relativedelta.py, use the dateutil module for this
 
 ## [Version 1.5.2](https://github.com/robweber/xbmcbackup/compare/krypton-1.5.1...robweber:krypton-1.5.2) - 2019-09-30
 
```
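A note on the isdigit() entry above: under Python 2 the backup names read from the remote came back as unicode, and the unbound call `str.isdigit(name)` raises a TypeError for unicode input, while the bound method works on both str and unicode. A minimal illustration (the sample value is invented):

```python
# Minimal illustration of the isdigit() change; the sample value is invented.
folderName = u'201911260000'  # a zip-style backup name

# Old call style - under Python 2 this raises:
#   TypeError: descriptor 'isdigit' requires a 'str' object but received a 'unicode'
# valid = str.isdigit(folderName)

# New call style - works on Python 2 unicode and Python 3 str alike:
valid = folderName.isdigit()
print(valid)  # True
```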
default.py (67 lines changed)

```diff
@@ -1,21 +1,27 @@
-import sys, urlparse
-import xbmc, xbmcgui
+import sys
+import xbmc
+import xbmcgui
 import resources.lib.utils as utils
 from resources.lib.backup import XbmcBackup
 
 
 def get_params():
     param = {}
-    if(len(sys.argv) > 1):
+    try:
         for i in sys.argv:
             args = i
-            if(args.startswith('?')):
-                args = args[1:]
-            param.update(dict(urlparse.parse_qsl(args)))
+            if('=' in args):
+                if(args.startswith('?')):
+                    args = args[1:]  # legacy in case of url params
+                splitString = args.split('=')
+                param[splitString[0]] = splitString[1]
+    except:
+        pass
 
     return param
 
-#the program mode
+
+# the program mode
 mode = -1
 params = get_params()
 
@@ -26,37 +26,38 @@ if("mode" in params):
     elif(params['mode'] == 'restore'):
         mode = 1
 
-#if mode wasn't passed in as arg, get from user
+
+# if mode wasn't passed in as arg, get from user
 if(mode == -1):
-    #by default, Backup,Restore,Open Settings
-    options = [utils.getString(30016),utils.getString(30017),utils.getString(30099)]
+    # by default, Backup,Restore,Open Settings
+    options = [utils.getString(30016), utils.getString(30017), utils.getString(30099)]
 
-    #find out if we're using the advanced editor
+    # find out if we're using the advanced editor
     if(int(utils.getSetting('backup_selection_type')) == 1):
         options.append(utils.getString(30125))
 
-    #figure out if this is a backup or a restore from the user
-    mode = xbmcgui.Dialog().select(utils.getString(30010) + " - " + utils.getString(30023),options)
+    # figure out if this is a backup or a restore from the user
+    mode = xbmcgui.Dialog().select(utils.getString(30010) + " - " + utils.getString(30023), options)
 
-#check if program should be run
+# check if program should be run
 if(mode != -1):
-    #run the profile backup
+    # run the profile backup
     backup = XbmcBackup()
 
     if(mode == 2):
-        #open the settings dialog
+        # open the settings dialog
         utils.openSettings()
     elif(mode == 3 and int(utils.getSetting('backup_selection_type')) == 1):
-        #open the advanced editor
-        xbmc.executebuiltin('RunScript(special://home/addons/script.xbmcbackup/launcher.py,action=advanced_editor)')
+        # open the advanced editor
+        xbmc.executebuiltin('RunScript(special://home/addons/script.xbmcbackup/launcher.py, action=advanced_editor)')
     elif(backup.remoteConfigured()):
 
         if(mode == backup.Restore):
-            #get list of valid restore points
+            # get list of valid restore points
             restorePoints = backup.listBackups()
             pointNames = []
             folderNames = []
 
             for aDir in restorePoints:
                 pointNames.append(aDir[1])
                 folderNames.append(aDir[0])
@@ -64,21 +71,21 @@ if(mode != -1):
             selectedRestore = -1
 
             if("archive" in params):
-                #check that the user give archive exists
+                # check that the user give archive exists
                 if(params['archive'] in folderNames):
-                    #set the index
+                    # set the index
                     selectedRestore = folderNames.index(params['archive'])
                     utils.log(str(selectedRestore) + " : " + params['archive'])
                 else:
                     utils.showNotification(utils.getString(30045))
                     utils.log(params['archive'] + ' is not a valid restore point')
             else:
-                #allow user to select the backup to restore from
-                selectedRestore = xbmcgui.Dialog().select(utils.getString(30010) + " - " + utils.getString(30021),pointNames)
+                # allow user to select the backup to restore from
+                selectedRestore = xbmcgui.Dialog().select(utils.getString(30010) + " - " + utils.getString(30021), pointNames)
 
             if(selectedRestore != -1):
                 backup.selectRestore(restorePoints[selectedRestore][0])
 
                 if('sets' in params):
                     backup.restore(selectedSets=params['sets'].split('|'))
                 else:
@@ -86,6 +93,6 @@ if(mode != -1):
         else:
             backup.backup()
     else:
-        #can't go any further
-        xbmcgui.Dialog().ok(utils.getString(30010),utils.getString(30045))
+        # can't go any further
+        xbmcgui.Dialog().ok(utils.getString(30010), utils.getString(30045))
         utils.openSettings()
```
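Both default.py (above) and launcher.py (below) now share this hand-rolled key=value parsing in place of urlparse. A standalone sketch of the new logic — the helper name and sample argv values here are illustrative, not part of the repo, but they mirror how Kodi hands RunScript() arguments to a script:

```python
# Standalone sketch of the rewritten get_params() logic; the sample argv
# values are invented, mirroring how Kodi passes RunScript() arguments.
def parse_params(argv):
    param = {}
    try:
        for args in argv:
            if('=' in args):
                if(args.startswith('?')):
                    args = args[1:]  # legacy url-style prefix

                splitString = args.split('=')
                param[splitString[0]] = splitString[1]
    except:
        pass

    return param


print(parse_params(['default.py', 'mode=backup']))    # {'mode': 'backup'}
print(parse_params(['default.py', '?mode=restore']))  # {'mode': 'restore'}
```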
launcher.py (43 lines changed)

```diff
@@ -1,55 +1,50 @@
+# launcher for various helpful functions found in the settings.xml area
 import sys
-import urlparse
 import xbmc
 import xbmcgui
 import xbmcvfs
 import resources.lib.utils as utils
-from resources.lib.authorizers import DropboxAuthorizer,GoogleDriveAuthorizer
+from resources.lib.authorizers import DropboxAuthorizer
 from resources.lib.advanced_editor import AdvancedBackupEditor
 
 
-#launcher for various helpful functions found in the settings.xml area
-
 def authorize_cloud(cloudProvider):
-    #drobpox
+    # drobpox
     if(cloudProvider == 'dropbox'):
         authorizer = DropboxAuthorizer()
 
         if(authorizer.authorize()):
-            xbmcgui.Dialog().ok(utils.getString(30010),utils.getString(30027) + ' ' + utils.getString(30106))
+            xbmcgui.Dialog().ok(utils.getString(30010), utils.getString(30027) + ' ' + utils.getString(30106))
         else:
-            xbmcgui.Dialog().ok(utils.getString(30010),utils.getString(30107) + ' ' + utils.getString(30027))
+            xbmcgui.Dialog().ok(utils.getString(30010), utils.getString(30107) + ' ' + utils.getString(30027))
 
-    #google drive
-    elif(cloudProvider == 'google_drive'):
-        authorizer = GoogleDriveAuthorizer()
-
-        if(authorizer.authorize()):
-            xbmcgui.Dialog().ok("Backup",utils.getString(30098) + ' ' + utils.getString(30106))
-        else:
-            xbmcgui.Dialog().ok("Backup",utils.getString(30107) + ' ' + utils.getString(30098))
-
+
 def remove_auth():
-    #triggered from settings.xml - asks if user wants to delete OAuth token information
-    shouldDelete = xbmcgui.Dialog().yesno(utils.getString(30093),utils.getString(30094),utils.getString(30095),autoclose=7000)
+    # triggered from settings.xml - asks if user wants to delete OAuth token information
+    shouldDelete = xbmcgui.Dialog().yesno(utils.getString(30093), utils.getString(30094), utils.getString(30095), autoclose=7000)
 
     if(shouldDelete):
-        #delete any of the known token file types
-        xbmcvfs.delete(xbmc.translatePath(utils.data_dir() + "tokens.txt")) #dropbox
-        xbmcvfs.delete(xbmc.translatePath(utils.data_dir() + "google_drive.dat")) #google drive
+        # delete any of the known token file types
+        xbmcvfs.delete(xbmc.translatePath(utils.data_dir() + "tokens.txt"))  # dropbox
+        xbmcvfs.delete(xbmc.translatePath(utils.data_dir() + "google_drive.dat"))  # google drive
 
 
 def get_params():
     param = {}
     try:
         for i in sys.argv:
             args = i
-            if(args.startswith('?')):
-                args = args[1:]
-            param.update(dict(urlparse.parse_qsl(args)))
+            if('=' in args):
+                if(args.startswith('?')):
+                    args = args[1:]  # legacy in case of url params
+                splitString = args.split('=')
+                param[splitString[0]] = splitString[1]
     except:
         pass
 
     return param
 
 
 params = get_params()
 
 if(params['action'] == 'authorize_cloud'):
```
resources/lib/advanced_editor.py (filename inferred from the launcher.py import above; the mirror dropped this file header)

```diff
@@ -1,229 +1,232 @@
 import json
-import xbmcvfs
 import xbmc
 import xbmcgui
+import xbmcvfs
 from . import utils as utils
 
 
 class BackupSetManager:
     jsonFile = xbmc.translatePath(utils.data_dir() + "custom_paths.json")
     paths = None
 
     def __init__(self):
         self.paths = {}
 
-        #try and read in the custom file
+        # try and read in the custom file
         self._readFile()
 
-    def addSet(self,aSet):
-        self.paths[aSet['name']] = {'root':aSet['root'],'dirs':[{"type":"include","path":aSet['root'],'recurse':True}]}
+    def addSet(self, aSet):
+        self.paths[aSet['name']] = {'root': aSet['root'], 'dirs': [{"type": "include", "path": aSet['root'], 'recurse': True}]}
 
-        #save the file
+        # save the file
         self._writeFile()
 
-    def updateSet(self,name,aSet):
+    def updateSet(self, name, aSet):
         self.paths[name] = aSet
 
-        #save the file
+        # save the file
         self._writeFile()
 
-    def deleteSet(self,index):
-        #match the index to a key
+    def deleteSet(self, index):
+        # match the index to a key
         keys = self.getSets()
 
-        #delete this set
+        # delete this set
         del self.paths[keys[index]]
 
-        #save the file
+        # save the file
         self._writeFile()
 
     def getSets(self):
-        #list all current sets by name
+        # list all current sets by name
         keys = list(self.paths.keys())
         keys.sort()
 
         return keys
 
-    def getSet(self,index):
-        keys = self.getSets();
+    def getSet(self, index):
+        keys = self.getSets()
 
-        #return the set at this index
-        return {'name':keys[index],'set':self.paths[keys[index]]}
+        # return the set at this index
+        return {'name': keys[index], 'set': self.paths[keys[index]]}
 
-    def validateSetName(self,name):
+    def validateSetName(self, name):
         return (name not in self.getSets())
 
     def _writeFile(self):
-        #create the custom file
-        aFile = xbmcvfs.File(self.jsonFile,'w')
+        # create the custom file
+        aFile = xbmcvfs.File(self.jsonFile, 'w')
         aFile.write(json.dumps(self.paths))
         aFile.close()
 
     def _readFile(self):
 
         if(xbmcvfs.exists(self.jsonFile)):
 
-            #read in the custom file
+            # read in the custom file
             aFile = xbmcvfs.File(self.jsonFile)
 
-            #load custom dirs
+            # load custom dirs
             self.paths = json.loads(aFile.read())
             aFile.close()
         else:
-            #write a blank file
+            # write a blank file
             self._writeFile()
 
 
 class AdvancedBackupEditor:
     dialog = None
 
     def __init__(self):
         self.dialog = xbmcgui.Dialog()
 
-    def _cleanPath(self,root,path):
-        return path[len(root)-1:]
+    def _cleanPath(self, root, path):
+        return path[len(root) - 1:]
 
-    def _validatePath(self,root,path):
+    def _validatePath(self, root, path):
         return path.startswith(root)
 
     def createSet(self):
         backupSet = None
 
-        name = self.dialog.input(utils.getString(30110),defaultt='Backup Set')
+        name = self.dialog.input(utils.getString(30110), defaultt='Backup Set')
 
-        if(name != None):
+        if(name is not None):
 
-            #give a choice to start in home or enter a root path
-            enterHome = self.dialog.yesno(utils.getString(30111),line1=utils.getString(30112) + " - " + utils.getString(30114),line2=utils.getString(30113) + " - " + utils.getString(30115),nolabel=utils.getString(30112),yeslabel=utils.getString(30113))
+            # give a choice to start in home or enter a root path
+            enterHome = self.dialog.yesno(utils.getString(30111), line1=utils.getString(30112) + " - " + utils.getString(30114), line2=utils.getString(30113) + " - " + utils.getString(30115), nolabel=utils.getString(30112), yeslabel=utils.getString(30113))
 
             rootFolder = 'special://home'
             if(enterHome):
-                rootFolder = self.dialog.input(utils.getString(30116),defaultt=rootFolder)
+                rootFolder = self.dialog.input(utils.getString(30116), defaultt=rootFolder)
 
-                #direcotry has to end in slash
+                # direcotry has to end in slash
                 if(rootFolder[:-1] != '/'):
                     rootFolder = rootFolder + '/'
 
-                #check that this path even exists
+                # check that this path even exists
                 if(not xbmcvfs.exists(xbmc.translatePath(rootFolder))):
-                    self.dialog.ok(utils.getString(30117),utils.getString(30118),rootFolder)
+                    self.dialog.ok(utils.getString(30117), utils.getString(30118), rootFolder)
                     return None
             else:
-                #select path to start set
-                rootFolder = self.dialog.browse(type=0,heading=utils.getString(30119),shares='files',defaultt=rootFolder)
+                # select path to start set
+                rootFolder = self.dialog.browse(type=0, heading=utils.getString(30119), shares='files', defaultt=rootFolder)
 
-            backupSet = {'name':name,'root':rootFolder}
+            backupSet = {'name': name, 'root': rootFolder}
 
         return backupSet
 
-    def editSet(self,name,backupSet):
+    def editSet(self, name, backupSet):
         optionSelected = ''
         rootPath = backupSet['root']
-        utils.log(rootPath)
         while(optionSelected != -1):
-            options = [xbmcgui.ListItem(utils.getString(30120),"Exclude a specific folder from this backup set"),xbmcgui.ListItem(utils.getString(30135),"Include a specific folder to this backup set"),xbmcgui.ListItem(rootPath,utils.getString(30121))]
+            options = [xbmcgui.ListItem(utils.getString(30120), "Exclude a specific folder from this backup set"), xbmcgui.ListItem(utils.getString(30135), "Include a specific folder to this backup set"), xbmcgui.ListItem(rootPath, utils.getString(30121))]
 
             for aDir in backupSet['dirs']:
                 if(aDir['type'] == 'exclude'):
-                    options.append(xbmcgui.ListItem(self._cleanPath(rootPath,aDir['path']),"%s: %s" % ("Type",utils.getString(30129))))
+                    options.append(xbmcgui.ListItem(self._cleanPath(rootPath, aDir['path']), "%s: %s" % ("Type", utils.getString(30129))))
                 elif(aDir['type'] == 'include'):
-                    options.append(xbmcgui.ListItem(self._cleanPath(rootPath,aDir['path']),"%s: %s | %s: %s" % ("Type",utils.getString(30134),"Include Sub Folders",str(aDir['recurse']))))
+                    options.append(xbmcgui.ListItem(self._cleanPath(rootPath, aDir['path']), "%s: %s | %s: %s" % ("Type", utils.getString(30134), "Include Sub Folders", str(aDir['recurse']))))
 
-            optionSelected = self.dialog.select(utils.getString(30122) + ' ' + name,options,useDetails=True)
+            optionSelected = self.dialog.select(utils.getString(30122) + ' ' + name, options, useDetails=True)
 
             if(optionSelected == 0 or optionSelected == 1):
-                #add a folder, will equal root if cancel is hit
-                addFolder = self.dialog.browse(type=0,heading=utils.getString(30120),shares='files',defaultt=backupSet['root'])
+                # add a folder, will equal root if cancel is hit
+                addFolder = self.dialog.browse(type=0, heading=utils.getString(30120), shares='files', defaultt=backupSet['root'])
 
                 if(addFolder.startswith(rootPath)):
 
                     if(not any(addFolder == aDir['path'] for aDir in backupSet['dirs'])):
-                        #cannot add root as an exclusion
+                        # cannot add root as an exclusion
                         if(optionSelected == 0 and addFolder != backupSet['root']):
-                            backupSet['dirs'].append({"path":addFolder,"type":"exclude"})
+                            backupSet['dirs'].append({"path": addFolder, "type": "exclude"})
                         elif(optionSelected == 1):
-                            #can add root as inclusion
-                            backupSet['dirs'].append({"path":addFolder,"type":"include","recurse":True})
+                            # can add root as inclusion
+                            backupSet['dirs'].append({"path": addFolder, "type": "include", "recurse": True})
                     else:
-                        #this path is already part of another include/exclude rule
-                        self.dialog.ok(utils.getString(30117),utils.getString(30137),addFolder)
+                        # this path is already part of another include/exclude rule
+                        self.dialog.ok(utils.getString(30117), utils.getString(30137), addFolder)
                 else:
-                    #folder must be under root folder
-                    self.dialog.ok(utils.getString(30117), utils.getString(30136),rootPath)
+                    # folder must be under root folder
+                    self.dialog.ok(utils.getString(30117), utils.getString(30136), rootPath)
             elif(optionSelected == 2):
-                self.dialog.ok(utils.getString(30121),utils.getString(30130),backupSet['root'])
+                self.dialog.ok(utils.getString(30121), utils.getString(30130), backupSet['root'])
             elif(optionSelected > 2):
 
                 cOptions = ['Delete']
                 if(backupSet['dirs'][optionSelected - 3]['type'] == 'include'):
                     cOptions.append('Toggle Sub Folders')
 
                 contextOption = self.dialog.contextmenu(cOptions)
 
                 if(contextOption == 0):
-                    if(self.dialog.yesno(heading=utils.getString(30123),line1=utils.getString(30128))):
-                        #remove folder
+                    if(self.dialog.yesno(heading=utils.getString(30123), line1=utils.getString(30128))):
+                        # remove folder
                         del backupSet['dirs'][optionSelected - 3]
                 elif(contextOption == 1 and backupSet['dirs'][optionSelected - 3]['type'] == 'include'):
-                    #toggle if this folder should be recursive
+                    # toggle if this folder should be recursive
                     backupSet['dirs'][optionSelected - 3]['recurse'] = not backupSet['dirs'][optionSelected - 3]['recurse']
 
         return backupSet
 
     def showMainScreen(self):
         exitCondition = ""
         customPaths = BackupSetManager()
 
-        #show this every time
-        self.dialog.ok(utils.getString(30036),utils.getString(30037))
+        # show this every time
+        self.dialog.ok(utils.getString(30036), utils.getString(30037))
 
         while(exitCondition != -1):
-            #load the custom paths
-            options = [xbmcgui.ListItem(utils.getString(30126),'',utils.addon_dir() + '/resources/images/plus-icon.png')]
+            # load the custom paths
+            listItem = xbmcgui.ListItem(utils.getString(30126), '')
+            listItem.setArt({'icon': utils.addon_dir() + 'resources/images/plus-icon.png'})
+            options = [listItem]
 
-            for index in range(0,len(customPaths.getSets())):
+            for index in range(0, len(customPaths.getSets())):
                 aSet = customPaths.getSet(index)
-                options.append(xbmcgui.ListItem(aSet['name'],utils.getString(30121) + ': ' + aSet['set']['root'],utils.addon_dir() + '/resources/images/folder-icon.png'))
+                listItem = xbmcgui.ListItem(aSet['name'], utils.getString(30121) + ': ' + aSet['set']['root'])
+                listItem.setArt({'icon': utils.addon_dir() + 'resources/images/folder-icon.png'})
+                options.append(listItem)
 
-            #show the gui
-            exitCondition = self.dialog.select(utils.getString(30125),options,useDetails=True)
+            # show the gui
+            exitCondition = self.dialog.select(utils.getString(30125), options, useDetails=True)
 
             if(exitCondition >= 0):
                 if(exitCondition == 0):
                     newSet = self.createSet()
 
-                    #check that the name is unique
+                    # check that the name is unique
                     if(customPaths.validateSetName(newSet['name'])):
                         customPaths.addSet(newSet)
                     else:
-                        self.dialog.ok(utils.getString(30117), utils.getString(30138),newSet['name'])
+                        self.dialog.ok(utils.getString(30117), utils.getString(30138), newSet['name'])
                 else:
-                    #bring up a context menu
-                    menuOption = self.dialog.contextmenu([utils.getString(30122),utils.getString(30123)])
+                    # bring up a context menu
+                    menuOption = self.dialog.contextmenu([utils.getString(30122), utils.getString(30123)])
 
                     if(menuOption == 0):
-                        #get the set
-                        aSet = customPaths.getSet(exitCondition -1)
+                        # get the set
+                        aSet = customPaths.getSet(exitCondition - 1)
 
-                        #edit the set
-                        updatedSet = self.editSet(aSet['name'],aSet['set'])
+                        # edit the set
+                        updatedSet = self.editSet(aSet['name'], aSet['set'])
 
-                        #save it
-                        customPaths.updateSet(aSet['name'],updatedSet)
+                        # save it
+                        customPaths.updateSet(aSet['name'], updatedSet)
 
                     elif(menuOption == 1):
-                        if(self.dialog.yesno(heading=utils.getString(30127),line1=utils.getString(30128))):
-                            #delete this path - subtract one because of "add" item
-                            customPaths.deleteSet(exitCondition -1)
+                        if(self.dialog.yesno(heading=utils.getString(30127), line1=utils.getString(30128))):
+                            # delete this path - subtract one because of "add" item
+                            customPaths.deleteSet(exitCondition - 1)
 
     def copySimpleConfig(self):
-        #disclaimer in case the user hit this on accident
-        shouldContinue = self.dialog.yesno(utils.getString(30139),utils.getString(30140),utils.getString(30141))
+        # disclaimer in case the user hit this on accident
+        shouldContinue = self.dialog.yesno(utils.getString(30139), utils.getString(30140), utils.getString(30141))
 
         if(shouldContinue):
             source = xbmc.translatePath(utils.addon_dir() + "/resources/data/default_files.json")
             dest = xbmc.translatePath(utils.data_dir() + "/custom_paths.json")
 
-            xbmcvfs.copy(source,dest)
+            xbmcvfs.copy(source, dest)
```
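The BackupSetManager in this diff persists backup sets to custom_paths.json keyed by set name, and addSet() seeds each new set with a single recursive include rule at its root. A sketch of the resulting JSON, built the same way addSet() does — the set name and root path here are invented examples:

```python
# Sketch of what BackupSetManager writes to custom_paths.json; the set name
# and root path are invented, the structure mirrors addSet() above.
import json

aSet = {'name': 'Game Saves', 'root': 'special://home/game_saves/'}
paths = {}
paths[aSet['name']] = {'root': aSet['root'], 'dirs': [{"type": "include", "path": aSet['root'], 'recurse': True}]}

print(json.dumps(paths, indent=2))
# {
#   "Game Saves": {
#     "root": "special://home/game_saves/",
#     "dirs": [
#       {"type": "include", "path": "special://home/game_saves/", "recurse": true}
#     ]
#   }
# }
```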
resources/lib/authorizers.py (filename inferred from the launcher.py import above; the mirror dropped this file header)

```diff
@@ -4,65 +4,60 @@ import xbmcvfs
 import resources.lib.tinyurl as tinyurl
 import resources.lib.utils as utils
 
-#don't die on import error yet, these might not even get used
+# don't die on import error yet, these might not even get used
 try:
-    import dropbox
+    from . import dropbox
 except ImportError:
     pass
 
-try:
-    from resources.lib.pydrive.auth import GoogleAuth
-    from resources.lib.pydrive.drive import GoogleDrive
-except ImportError:
-    pass
-
+
 class DropboxAuthorizer:
     APP_KEY = ""
     APP_SECRET = ""
 
     def __init__(self):
         self.APP_KEY = utils.getSetting('dropbox_key')
         self.APP_SECRET = utils.getSetting('dropbox_secret')
 
     def setup(self):
         result = True
 
         if(self.APP_KEY == '' and self.APP_SECRET == ''):
-            #we can't go any farther, need these for sure
-            xbmcgui.Dialog().ok(utils.getString(30010),utils.getString(30027) + ' ' + utils.getString(30058),utils.getString(30059))
+            # we can't go any farther, need these for sure
+            xbmcgui.Dialog().ok(utils.getString(30010), utils.getString(30027) + ' ' + utils.getString(30058), utils.getString(30059))
 
             result = False
 
         return result
 
     def isAuthorized(self):
         user_token = self._getToken()
 
         return user_token != ''
 
     def authorize(self):
         result = True
 
         if(not self.setup()):
             return False
 
         if(self.isAuthorized()):
-            #delete the token to start over
+            # delete the token to start over
             self._deleteToken()
 
-        #copied flow from http://dropbox-sdk-python.readthedocs.io/en/latest/moduledoc.html#dropbox.oauth.DropboxOAuth2FlowNoRedirect
-        flow = dropbox.oauth.DropboxOAuth2FlowNoRedirect(self.APP_KEY,self.APP_SECRET)
+        # copied flow from http://dropbox-sdk-python.readthedocs.io/en/latest/moduledoc.html#dropbox.oauth.DropboxOAuth2FlowNoRedirect
+        flow = dropbox.oauth.DropboxOAuth2FlowNoRedirect(self.APP_KEY, self.APP_SECRET)
 
         url = flow.start()
 
-        #print url in log
+        # print url in log
         utils.log("Authorize URL: " + url)
-        xbmcgui.Dialog().ok(utils.getString(30010),utils.getString(30056),utils.getString(30057),tinyurl.shorten(url))
+        xbmcgui.Dialog().ok(utils.getString(30010), utils.getString(30056), utils.getString(30057), tinyurl.shorten(url))
 
-        #get the auth code
+        # get the auth code
         code = xbmcgui.Dialog().input(utils.getString(30027) + ' ' + utils.getString(30103))
 
-        #if user authorized this will work
+        # if user authorized this will work
 
         try:
             user_token = flow.finish(code)
@@ -70,36 +65,36 @@ class DropboxAuthorizer:
         except Exception as e:
             utils.log("Error: %s" % (e,))
             result = False
 
-        return result;
+        return result
 
-    #return the DropboxClient, or None if can't be created
+    # return the DropboxClient, or None if can't be created
     def getClient(self):
         result = None
 
         user_token = self._getToken()
 
         if(user_token != ''):
-            #create the client
+            # create the client
             result = dropbox.Dropbox(user_token)
 
             try:
                 result.users_get_current_account()
             except:
-                #this didn't work, delete the token file
+                # this didn't work, delete the token file
                 self._deleteToken()
                 result = None
 
         return result
 
-    def _setToken(self,token):
-        #write the token files
-        token_file = open(xbmc.translatePath(utils.data_dir() + "tokens.txt"),'w')
+    def _setToken(self, token):
+        # write the token files
+        token_file = open(xbmc.translatePath(utils.data_dir() + "tokens.txt"), 'w')
         token_file.write(token)
         token_file.close()
 
     def _getToken(self):
-        #get token, if it exists
+        # get token, if it exists
         if(xbmcvfs.exists(xbmc.translatePath(utils.data_dir() + "tokens.txt"))):
             token_file = open(xbmc.translatePath(utils.data_dir() + "tokens.txt"))
             token = token_file.read()
@@ -108,66 +103,7 @@
             return token
         else:
             return ""
 
     def _deleteToken(self):
         if(xbmcvfs.exists(xbmc.translatePath(utils.data_dir() + "tokens.txt"))):
             xbmcvfs.delete(xbmc.translatePath(utils.data_dir() + "tokens.txt"))
-
-
-class GoogleDriveAuthorizer:
-    CLIENT_ID = ''
-    CLIENT_SECRET = ''
-
-    def __init__(self):
-        self.CLIENT_ID = utils.getSetting('google_drive_id')
-        self.CLIENT_SECRET = utils.getSetting('google_drive_secret')
-
-    def setup(self):
-        result = True
-
-        if(self.CLIENT_ID == '' and self.CLIENT_SECRET == ''):
-            #we can't go any farther, need these for sure
-            xbmcgui.Dialog().ok(utils.getString(30010),utils.getString(30098) + ' ' + utils.getString(30058),utils.getString(30108))
-            result = False
-
-        return result
-
-    def isAuthorized(self):
-        return xbmcvfs.exists(xbmc.translatePath(utils.data_dir() + "google_drive.dat"))
-
-    def authorize(self):
-        result = True
-
-        if(not self.setup()):
-            return False
-
-        #create authorization helper and load default settings
-        gauth = GoogleAuth(xbmc.validatePath(xbmc.translatePath(utils.addon_dir() + '/resources/lib/pydrive/settings.yaml')))
-        gauth.LoadClientConfigSettings()
-
-        settings = {"client_id":self.CLIENT_ID,'client_secret':self.CLIENT_SECRET}
-
-        drive_url = gauth.GetAuthUrl(settings)
-
-        utils.log("Google Drive Authorize URL: " + drive_url)
-
-        xbmcgui.Dialog().ok(utils.getString(30010),utils.getString(30056),utils.getString(30102),tinyurl.shorten(drive_url))
-        code = xbmcgui.Dialog().input(utils.getString(30098) + ' ' + utils.getString(30103))
-
-        gauth.Auth(code)
-        gauth.SaveCredentialsFile(xbmc.validatePath(xbmc.translatePath(utils.data_dir() + 'google_drive.dat')))
-
-        return result
-
-    def getClient(self):
-        #create authorization helper and load default settings
-        gauth = GoogleAuth(xbmc.validatePath(xbmc.translatePath(utils.addon_dir() + '/resources/lib/pydrive/settings.yaml')))
-        gauth.LoadClientConfigSettings()
-
-        gauth.LoadCredentialsFile(xbmc.validatePath(xbmc.translatePath(utils.data_dir() + 'google_drive.dat')))
-
-        result = GoogleDrive(gauth)
-
-        return result
```
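For context on the flow this file keeps: DropboxOAuth2FlowNoRedirect is the Dropbox Python SDK's documented no-redirect OAuth2 flow — start() returns the authorization URL and finish(code) exchanges the user-supplied code for a token. A minimal sketch runnable outside Kodi, assuming the dropbox package is installed; the app key and secret below are placeholders, not real credentials:

```python
# Minimal sketch of the DropboxOAuth2FlowNoRedirect flow used by
# DropboxAuthorizer, runnable outside Kodi; the key/secret are placeholders.
import dropbox

flow = dropbox.oauth.DropboxOAuth2FlowNoRedirect('YOUR_APP_KEY', 'YOUR_APP_SECRET')
print("Visit and authorize: " + flow.start())

code = input("Paste the authorization code: ").strip()
user_token = flow.finish(code).access_token  # finish() returns a result carrying the token
client = dropbox.Dropbox(user_token)
print(client.users_get_current_account())    # raises if the token is bad
```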
resources/lib/backup.py (filename inferred from the default.py import above; the mirror dropped this file header and truncates mid-hunk at the end)

```diff
@@ -1,48 +1,50 @@
+from __future__ import unicode_literals
+import time
+import json
 import xbmc
 import xbmcgui
 import xbmcvfs
-import time
-import json
 from . import utils as utils
 from datetime import datetime
-from .vfs import XBMCFileSystem,DropboxFileSystem,ZipFileSystem,GoogleDriveFilesystem
-from .progressbar import BackupProgressBar
+from . vfs import XBMCFileSystem, DropboxFileSystem, ZipFileSystem
+from . progressbar import BackupProgressBar
 from resources.lib.guisettings import GuiSettingsManager
 from resources.lib.extractor import ZipExtractor
 
 
 def folderSort(aKey):
     result = aKey[0]
 
     if(len(result) < 8):
         result = result + "0000"
 
     return result
 
 
 class XbmcBackup:
-    #constants for initiating a back or restore
+    # constants for initiating a back or restore
     Backup = 0
     Restore = 1
 
-    #list of dirs for the "simple" file selection
-    simple_directory_list = ['addons','addon_data','database','game_saves','playlists','profiles','thumbnails','config']
+    # list of dirs for the "simple" file selection
+    simple_directory_list = ['addons', 'addon_data', 'database', 'game_saves', 'playlists', 'profiles', 'thumbnails', 'config']
 
-    #file systems
+    # file systems
    
     xbmc_vfs = None
     remote_vfs = None
     saved_remote_vfs = None
 
     restoreFile = None
     remote_base_path = None
 
-    #for the progress bar
+    # for the progress bar
     progressBar = None
     filesLeft = 0
     filesTotal = 1
 
     restore_point = None
-    skip_advanced = False #if we should check for the existance of advancedsettings in the restore
+    skip_advanced = False  # if we should check for the existance of advancedsettings in the restore
 
     def __init__(self):
         self.xbmc_vfs = XBMCFileSystem(xbmc.translatePath('special://home'))
@@ -51,18 +53,15 @@ class XbmcBackup:
 
     def configureRemote(self):
         if(utils.getSetting('remote_selection') == '1'):
-            self.remote_base_path = utils.getSetting('remote_path_2');
+            self.remote_base_path = utils.getSetting('remote_path_2')
             self.remote_vfs = XBMCFileSystem(utils.getSetting('remote_path_2'))
-            utils.setSetting("remote_path","")
+            utils.setSetting("remote_path", "")
         elif(utils.getSetting('remote_selection') == '0'):
-            self.remote_base_path = utils.getSetting('remote_path');
+            self.remote_base_path = utils.getSetting('remote_path')
             self.remote_vfs = XBMCFileSystem(utils.getSetting("remote_path"))
         elif(utils.getSetting('remote_selection') == '2'):
             self.remote_base_path = "/"
             self.remote_vfs = DropboxFileSystem("/")
-        elif(utils.getSetting('remote_selection') == '3'):
-            self.remote_base_path = '/Kodi Backup/'
-            self.remote_vfs = GoogleDriveFilesystem('/Kodi Backup/')
 
     def remoteConfigured(self):
         result = True
@@ -72,54 +71,53 @@ class XbmcBackup:
 
         return result
 
-    #reverse - should reverse the resulting, default is true - newest to oldest
-    def listBackups(self,reverse=True):
+    # reverse - should reverse the resulting, default is true - newest to oldest
+    def listBackups(self, reverse=True):
         result = []
 
-        #get all the folders in the current root path
-        dirs,files = self.remote_vfs.listdir(self.remote_base_path)
+        # get all the folders in the current root path
+        dirs, files = self.remote_vfs.listdir(self.remote_base_path)
 
         for aDir in dirs:
             if(self.remote_vfs.exists(self.remote_base_path + aDir + "/xbmcbackup.val")):
 
-                #format the name according to regional settings
+                # format the name according to regional settings
                 folderName = self._dateFormat(aDir)
 
-                result.append((aDir,folderName))
+                result.append((aDir, folderName))
 
         for aFile in files:
             file_ext = aFile.split('.')[-1]
-            folderName = utils.encode(aFile.split('.')[0])
+            folderName = aFile.split('.')[0]
 
-            if(file_ext == 'zip' and len(folderName) == 12 and str.isdigit(folderName)):
+            if(file_ext == 'zip' and len(folderName) == 12 and folderName.isdigit()):
 
-                #format the name according to regional settings
+                # format the name according to regional settings
                 folderName = self._dateFormat(folderName)
 
-                result.append((aFile ,folderName))
+                result.append((aFile, folderName))
 
-        result.sort(key=folderSort,reverse=reverse)
+        result.sort(key=folderSort, reverse=reverse)
 
         return result
 
-    def selectRestore(self,restore_point):
+    def selectRestore(self, restore_point):
         self.restore_point = restore_point
 
     def skipAdvanced(self):
         self.skip_advanced = True
 
-    def backup(self,progressOverride=False):
-        shouldContinue = self._setupVFS(self.Backup,progressOverride)
+    def backup(self, progressOverride=False):
+        shouldContinue = self._setupVFS(self.Backup, progressOverride)
 
         if(shouldContinue):
             utils.log(utils.getString(30023) + " - " + utils.getString(30016))
-            #check if remote path exists
+            # check if remote path exists
             if(self.remote_vfs.exists(self.remote_vfs.root_path)):
-                #may be data in here already
+                # may be data in here already
                 utils.log(utils.getString(30050))
             else:
-                #make the remote directory
+                # make the remote directory
                 self.remote_vfs.mkdir(self.remote_vfs.root_path)
 
                 utils.log(utils.getString(30051))
@@ -127,247 +125,245 @@ class XbmcBackup:
             allFiles = []
 
             if(int(utils.getSetting('backup_selection_type')) == 0):
-                #read in a list of the directories to backup
+                # read in a list of the directories to backup
                 selectedDirs = self._readBackupConfig(utils.addon_dir() + "/resources/data/default_files.json")
 
-                #simple mode - get file listings for all enabled directories
+                # simple mode - get file listings for all enabled directories
                 for aDir in self.simple_directory_list:
-                    #if this dir enabled
+                    # if this dir enabled
                     if(utils.getSetting('backup_' + aDir) == 'true'):
-                        #get a file listing and append it to the allfiles array
-                        allFiles.append(self._addBackupDir(aDir,selectedDirs[aDir]['root'],selectedDirs[aDir]['dirs']))
+                        # get a file listing and append it to the allfiles array
+                        allFiles.append(self._addBackupDir(aDir, selectedDirs[aDir]['root'], selectedDirs[aDir]['dirs']))
             else:
-                #advanced mode - load custom paths
+                # advanced mode - load custom paths
                 selectedDirs = self._readBackupConfig(utils.data_dir() + "/custom_paths.json")
 
-                #get the set names
+                # get the set names
                 keys = list(selectedDirs.keys())
 
-                #go through the custom sets
+                # go through the custom sets
                 for aKey in keys:
-                    #get the set
+                    # get the set
                     aSet = selectedDirs[aKey]
 
-                    #get file listing and append
-                    allFiles.append(self._addBackupDir(aKey,aSet['root'],aSet['dirs']))
+                    # get file listing and append
+                    allFiles.append(self._addBackupDir(aKey, aSet['root'], aSet['dirs']))
 
-            #create a validation file for backup rotation
+            # create a validation file for backup rotation
             writeCheck = self._createValidationFile(allFiles)
 
             if(not writeCheck):
-                #we may not be able to write to this destination for some reason
-                shouldContinue = xbmcgui.Dialog().yesno(utils.getString(30089),utils.getString(30090), utils.getString(30044),autoclose=25000)
+                # we may not be able to write to this destination for some reason
+                shouldContinue = xbmcgui.Dialog().yesno(utils.getString(30089), utils.getString(30090), utils.getString(30044), autoclose=25000)
 
                 if(not shouldContinue):
                     return
 
             orig_base_path = self.remote_vfs.root_path
 
-            #backup all the files
+            # backup all the files
             self.filesLeft = self.filesTotal
             for fileGroup in allFiles:
                 self.xbmc_vfs.set_root(xbmc.translatePath(fileGroup['source']))
                 self.remote_vfs.set_root(fileGroup['dest'] + fileGroup['name'])
-                filesCopied = self._copyFiles(fileGroup['files'],self.xbmc_vfs,self.remote_vfs)
+                filesCopied = self._copyFiles(fileGroup['files'], self.xbmc_vfs, self.remote_vfs)
 
                 if(not filesCopied):
                     utils.showNotification(utils.getString(30092))
                     utils.log(utils.getString(30092))
 
-            #reset remote and xbmc vfs
+            # reset remote and xbmc vfs
             self.xbmc_vfs.set_root("special://home/")
             self.remote_vfs.set_root(orig_base_path)
 
             if(utils.getSetting("compress_backups") == 'true'):
                 fileManager = FileManager(self.xbmc_vfs)
 
-                #send the zip file to the real remote vfs
+                # send the zip file to the real remote vfs
                 zip_name = self.remote_vfs.root_path[:-1] + ".zip"
                 self.remote_vfs.cleanup()
                 self.xbmc_vfs.rename(xbmc.translatePath("special://temp/xbmc_backup_temp.zip"), xbmc.translatePath("special://temp/" + zip_name))
                 fileManager.addFile(xbmc.translatePath("special://temp/" + zip_name))
 
-                #set root to data dir home
+                # set root to data dir home
                 self.xbmc_vfs.set_root(xbmc.translatePath("special://temp/"))
 
                 self.remote_vfs = self.saved_remote_vfs
                 self.progressBar.updateProgress(98, utils.getString(30088))
-                fileCopied = self._copyFiles(fileManager.getFiles(),self.xbmc_vfs, self.remote_vfs)
+                fileCopied = self._copyFiles(fileManager.getFiles(), self.xbmc_vfs, self.remote_vfs)
 
                 if(not fileCopied):
-                    #zip archive copy filed, inform the user
-                    shouldContinue = xbmcgui.Dialog().ok(utils.getString(30089),utils.getString(30090), utils.getString(30091))
+                    # zip archive copy filed, inform the user
+                    shouldContinue = xbmcgui.Dialog().ok(utils.getString(30089), utils.getString(30090), utils.getString(30091))
 
-                #delete the temp zip file
+                # delete the temp zip file
                 self.xbmc_vfs.rmfile(xbmc.translatePath("special://temp/" + zip_name))
 
-            #remove old backups
+            # remove old backups
             self._rotateBackups()
 
-            #close any files
+            # close any files
             self._closeVFS()
 
-    def restore(self,progressOverride=False,selectedSets=None):
+    def restore(self, progressOverride=False, selectedSets=None):
         shouldContinue = self._setupVFS(self.Restore, progressOverride)
 
         if(shouldContinue):
             utils.log(utils.getString(30023) + " - " + utils.getString(30017))
 
-            #catch for if the restore point is actually a zip file
+            # catch for if the restore point is actually a zip file
             if(self.restore_point.split('.')[-1] == 'zip'):
                 self.progressBar.updateProgress(2, utils.getString(30088))
                 utils.log("copying zip file: " + self.restore_point)
 
-                #set root to data dir home
+                # set root to data dir home
                 self.xbmc_vfs.set_root(xbmc.translatePath("special://temp/"))
 
                 if(not self.xbmc_vfs.exists(xbmc.translatePath("special://temp/" + self.restore_point))):
-                    #copy just this file from the remote vfs
+                    # copy just this file from the remote vfs
                     zipFile = []
                     zipFile.append(self.remote_base_path + self.restore_point)
 
-                    self._copyFiles(zipFile,self.remote_vfs, self.xbmc_vfs)
+                    self._copyFiles(zipFile, self.remote_vfs, self.xbmc_vfs)
                 else:
                     utils.log("zip file exists already")
 
-                #extract the zip file
-                zip_vfs = ZipFileSystem(xbmc.translatePath("special://temp/"+ self.restore_point),'r')
+                # extract the zip file
+                zip_vfs = ZipFileSystem(xbmc.translatePath("special://temp/" + self.restore_point), 'r')
                 extractor = ZipExtractor()
 
                 if(not extractor.extract(zip_vfs, xbmc.translatePath("special://temp/"), self.progressBar)):
-                    #we had a problem extracting the archive, delete everything
+                    # we had a problem extracting the archive, delete everything
                     zip_vfs.cleanup()
                     self.xbmc_vfs.rmfile(xbmc.translatePath("special://temp/" + self.restore_point))
 
-                    xbmcgui.Dialog().ok(utils.getString(30010),utils.getString(30101))
+                    xbmcgui.Dialog().ok(utils.getString(30010), utils.getString(30101))
                     return
 
                 zip_vfs.cleanup()
 
-                self.progressBar.updateProgress(0,utils.getString(30049) + "......")
-                #set the new remote vfs and fix xbmc path
+                self.progressBar.updateProgress(0, utils.getString(30049) + "......")
+                # set the new remote vfs and fix xbmc path
                 self.remote_vfs = XBMCFileSystem(xbmc.translatePath("special://temp/" + self.restore_point.split(".")[0] + "/"))
                 self.xbmc_vfs.set_root(xbmc.translatePath("special://home/"))
 
-            #for restores remote path must exist
+            # for restores remote path must exist
             if(not self.remote_vfs.exists(self.remote_vfs.root_path)):
-                xbmcgui.Dialog().ok(utils.getString(30010),utils.getString(30045),self.remote_vfs.root_path)
+                xbmcgui.Dialog().ok(utils.getString(30010), utils.getString(30045), self.remote_vfs.root_path)
```
|
||||||
return
|
return
|
||||||
|
|
||||||
valFile = self._checkValidationFile(self.remote_vfs.root_path)
|
valFile = self._checkValidationFile(self.remote_vfs.root_path)
|
||||||
if(valFile == None):
|
if(valFile is None):
|
||||||
#don't continue
|
# don't continue
|
||||||
return
|
return
|
||||||
|
|
||||||
utils.log(utils.getString(30051))
|
utils.log(utils.getString(30051))
|
||||||
allFiles = []
|
allFiles = []
|
||||||
fileManager = FileManager(self.remote_vfs)
|
fileManager = FileManager(self.remote_vfs)
|
||||||
|
|
||||||
#check for the existance of an advancedsettings file
|
# check for the existance of an advancedsettings file
|
||||||
if(self.remote_vfs.exists(self.remote_vfs.root_path + "config/advancedsettings.xml") and not self.skip_advanced):
|
if(self.remote_vfs.exists(self.remote_vfs.root_path + "config/advancedsettings.xml") and not self.skip_advanced):
|
||||||
#let the user know there is an advanced settings file present
|
# let the user know there is an advanced settings file present
|
||||||
restartXbmc = xbmcgui.Dialog().yesno(utils.getString(30038),utils.getString(30039),utils.getString(30040), utils.getString(30041))
|
restartXbmc = xbmcgui.Dialog().yesno(utils.getString(30038), utils.getString(30039), utils.getString(30040), utils.getString(30041))
|
||||||
|
|
||||||
if(restartXbmc):
|
if(restartXbmc):
|
||||||
#add only this file to the file list
|
# add only this file to the file list
|
||||||
fileManager.addFile(self.remote_vfs.root_path + "config/advancedsettings.xml")
|
fileManager.addFile(self.remote_vfs.root_path + "config/advancedsettings.xml")
|
||||||
self._copyFiles(fileManager.getFiles(),self.remote_vfs,self.xbmc_vfs)
|
self._copyFiles(fileManager.getFiles(), self.remote_vfs, self.xbmc_vfs)
|
||||||
|
|
||||||
#let the service know to resume this backup on startup
|
# let the service know to resume this backup on startup
|
||||||
self._createResumeBackupFile()
|
self._createResumeBackupFile()
|
||||||
|
|
||||||
#do not continue running
|
# do not continue running
|
||||||
xbmcgui.Dialog().ok(utils.getString(30077),utils.getString(30078))
|
xbmcgui.Dialog().ok(utils.getString(30077), utils.getString(30078))
|
||||||
return
|
return
|
||||||
|
|
||||||
#use a multiselect dialog to select sets to restore
|
# use a multiselect dialog to select sets to restore
|
||||||
restoreSets = [n['name'] for n in valFile['directories']]
|
restoreSets = [n['name'] for n in valFile['directories']]
|
||||||
|
|
||||||
#if passed in list, skip selection
|
# if passed in list, skip selection
|
||||||
if(selectedSets == None):
|
if(selectedSets is None):
|
||||||
selectedSets = xbmcgui.Dialog().multiselect(utils.getString(30131),restoreSets)
|
selectedSets = xbmcgui.Dialog().multiselect(utils.getString(30131), restoreSets)
|
||||||
else:
|
else:
|
||||||
selectedSets = [restoreSets.index(n) for n in selectedSets if n in restoreSets] #if set name not found just skip it
|
selectedSets = [restoreSets.index(n) for n in selectedSets if n in restoreSets] # if set name not found just skip it
|
||||||
|
|
||||||
if(selectedSets != None):
|
if(selectedSets is not None):
|
||||||
#go through each of the directories in the backup and write them to the correct location
|
# go through each of the directories in the backup and write them to the correct location
|
||||||
for index in selectedSets:
|
for index in selectedSets:
|
||||||
|
|
||||||
#add this directory
|
# add this directory
|
||||||
aDir = valFile['directories'][index]
|
aDir = valFile['directories'][index]
|
||||||
|
|
||||||
self.xbmc_vfs.set_root(xbmc.translatePath(aDir['path']))
|
self.xbmc_vfs.set_root(xbmc.translatePath(aDir['path']))
|
||||||
if(self.remote_vfs.exists(self.remote_vfs.root_path + aDir['name'] + '/')):
|
if(self.remote_vfs.exists(self.remote_vfs.root_path + aDir['name'] + '/')):
|
||||||
#walk the directory
|
# walk the directory
|
||||||
fileManager.walkTree(self.remote_vfs.root_path + aDir['name'] + '/')
|
fileManager.walkTree(self.remote_vfs.root_path + aDir['name'] + '/')
|
||||||
self.filesTotal = self.filesTotal + fileManager.size()
|
self.filesTotal = self.filesTotal + fileManager.size()
|
||||||
allFiles.append({"source":self.remote_vfs.root_path + aDir['name'],"dest":self.xbmc_vfs.root_path,"files":fileManager.getFiles()})
|
allFiles.append({"source": self.remote_vfs.root_path + aDir['name'], "dest": self.xbmc_vfs.root_path, "files": fileManager.getFiles()})
|
||||||
else:
|
else:
|
||||||
utils.log("error path not found: " + self.remote_vfs.root_path + aDir['name'])
|
utils.log("error path not found: " + self.remote_vfs.root_path + aDir['name'])
|
||||||
xbmcgui.Dialog().ok(utils.getString(30010),utils.getString(30045),self.remote_vfs.root_path + aDir['name'])
|
xbmcgui.Dialog().ok(utils.getString(30010), utils.getString(30045), self.remote_vfs.root_path + aDir['name'])
|
||||||
|
|
||||||
#restore all the files
|
# restore all the files
|
||||||
self.filesLeft = self.filesTotal
|
self.filesLeft = self.filesTotal
|
||||||
for fileGroup in allFiles:
|
for fileGroup in allFiles:
|
||||||
self.remote_vfs.set_root(fileGroup['source'])
|
self.remote_vfs.set_root(fileGroup['source'])
|
||||||
self.xbmc_vfs.set_root(fileGroup['dest'])
|
self.xbmc_vfs.set_root(fileGroup['dest'])
|
||||||
self._copyFiles(fileGroup['files'],self.remote_vfs,self.xbmc_vfs)
|
self._copyFiles(fileGroup['files'], self.remote_vfs, self.xbmc_vfs)
|
||||||
|
|
||||||
self.progressBar.updateProgress(99,"Clean up operations .....")
|
self.progressBar.updateProgress(99, "Clean up operations .....")
|
||||||
|
|
||||||
if(self.restore_point.split('.')[-1] == 'zip'):
|
if(self.restore_point.split('.')[-1] == 'zip'):
|
||||||
#delete the zip file and the extracted directory
|
# delete the zip file and the extracted directory
|
||||||
self.xbmc_vfs.rmfile(xbmc.translatePath("special://temp/" + self.restore_point))
|
self.xbmc_vfs.rmfile(xbmc.translatePath("special://temp/" + self.restore_point))
|
||||||
self.xbmc_vfs.rmdir(self.remote_vfs.root_path)
|
self.xbmc_vfs.rmdir(self.remote_vfs.root_path)
|
||||||
|
|
||||||
|
# update the guisettings information (or what we can from it)
|
||||||
#update the guisettings information (or what we can from it)
|
|
||||||
gui_settings = GuiSettingsManager()
|
gui_settings = GuiSettingsManager()
|
||||||
gui_settings.run()
|
gui_settings.run()
|
||||||
|
|
||||||
#call update addons to refresh everything
|
# call update addons to refresh everything
|
||||||
xbmc.executebuiltin('UpdateLocalAddons')
|
xbmc.executebuiltin('UpdateLocalAddons')
|
||||||
|
|
||||||
def _setupVFS(self,mode=-1,progressOverride=False):
|
def _setupVFS(self, mode=-1, progressOverride=False):
|
||||||
#set windows setting to true
|
# set windows setting to true
|
||||||
window = xbmcgui.Window(10000)
|
window = xbmcgui.Window(10000)
|
||||||
window.setProperty(utils.__addon_id__ + ".running","true")
|
window.setProperty(utils.__addon_id__ + ".running", "true")
|
||||||
|
|
||||||
#append backup folder name
|
# append backup folder name
|
||||||
progressBarTitle = utils.getString(30010) + " - "
|
progressBarTitle = utils.getString(30010) + " - "
|
||||||
if(mode == self.Backup and self.remote_vfs.root_path != ''):
|
if(mode == self.Backup and self.remote_vfs.root_path != ''):
|
||||||
if(utils.getSetting("compress_backups") == 'true'):
|
if(utils.getSetting("compress_backups") == 'true'):
|
||||||
#delete old temp file
|
# delete old temp file
|
||||||
if(self.xbmc_vfs.exists(xbmc.translatePath('special://temp/xbmc_backup_temp.zip'))):
|
if(self.xbmc_vfs.exists(xbmc.translatePath('special://temp/xbmc_backup_temp.zip'))):
|
||||||
if(not self.xbmc_vfs.rmfile(xbmc.translatePath('special://temp/xbmc_backup_temp.zip'))):
|
if(not self.xbmc_vfs.rmfile(xbmc.translatePath('special://temp/xbmc_backup_temp.zip'))):
|
||||||
#we had some kind of error deleting the old file
|
# we had some kind of error deleting the old file
|
||||||
xbmcgui.Dialog().ok(utils.getString(30010),utils.getString(30096),utils.getString(30097))
|
xbmcgui.Dialog().ok(utils.getString(30010), utils.getString(30096), utils.getString(30097))
|
||||||
return False
|
return False
|
||||||
|
|
||||||
#save the remote file system and use the zip vfs
|
# save the remote file system and use the zip vfs
|
||||||
self.saved_remote_vfs = self.remote_vfs
|
self.saved_remote_vfs = self.remote_vfs
|
||||||
self.remote_vfs = ZipFileSystem(xbmc.translatePath("special://temp/xbmc_backup_temp.zip"),"w")
|
self.remote_vfs = ZipFileSystem(xbmc.translatePath("special://temp/xbmc_backup_temp.zip"), "w")
|
||||||
|
|
||||||
self.remote_vfs.set_root(self.remote_vfs.root_path + time.strftime("%Y%m%d%H%M") + "/")
|
self.remote_vfs.set_root(self.remote_vfs.root_path + time.strftime("%Y%m%d%H%M") + "/")
|
||||||
progressBarTitle = progressBarTitle + utils.getString(30023) + ": " + utils.getString(30016)
|
progressBarTitle = progressBarTitle + utils.getString(30023) + ": " + utils.getString(30016)
|
||||||
elif(mode == self.Restore and self.restore_point != None and self.remote_vfs.root_path != ''):
|
elif(mode == self.Restore and self.restore_point is not None and self.remote_vfs.root_path != ''):
|
||||||
if(self.restore_point.split('.')[-1] != 'zip'):
|
if(self.restore_point.split('.')[-1] != 'zip'):
|
||||||
self.remote_vfs.set_root(self.remote_vfs.root_path + self.restore_point + "/")
|
self.remote_vfs.set_root(self.remote_vfs.root_path + self.restore_point + "/")
|
||||||
progressBarTitle = progressBarTitle + utils.getString(30023) + ": " + utils.getString(30017)
|
progressBarTitle = progressBarTitle + utils.getString(30023) + ": " + utils.getString(30017)
|
||||||
else:
|
else:
|
||||||
#kill the program here
|
# kill the program here
|
||||||
self.remote_vfs = None
|
self.remote_vfs = None
|
||||||
return False
|
return False
|
||||||
|
|
||||||
utils.log(utils.getString(30047) + ": " + self.xbmc_vfs.root_path)
|
utils.log(utils.getString(30047) + ": " + self.xbmc_vfs.root_path)
|
||||||
utils.log(utils.getString(30048) + ": " + self.remote_vfs.root_path)
|
utils.log(utils.getString(30048) + ": " + self.remote_vfs.root_path)
|
||||||
|
|
||||||
|
# setup the progress bar
|
||||||
#setup the progress bar
|
|
||||||
self.progressBar = BackupProgressBar(progressOverride)
|
self.progressBar = BackupProgressBar(progressOverride)
|
||||||
self.progressBar.create(progressBarTitle,utils.getString(30049) + "......")
|
self.progressBar.create(progressBarTitle, utils.getString(30049) + "......")
|
||||||
|
|
||||||
#if we made it this far we're good
|
# if we made it this far we're good
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def _closeVFS(self):
|
def _closeVFS(self):
|
||||||
@@ -375,247 +371,238 @@ class XbmcBackup:
|
|||||||
self.remote_vfs.cleanup()
|
self.remote_vfs.cleanup()
|
||||||
self.progressBar.close()
|
self.progressBar.close()
|
||||||
|
|
||||||
#reset the window setting
|
# reset the window setting
|
||||||
window = xbmcgui.Window(10000)
|
window = xbmcgui.Window(10000)
|
||||||
window.setProperty(utils.__addon_id__ + ".running","")
|
window.setProperty(utils.__addon_id__ + ".running", "")
|
||||||
|
|
||||||
def _copyFiles(self,fileList,source,dest):
|
def _copyFiles(self, fileList, source, dest):
|
||||||
result = True
|
result = True
|
||||||
|
|
||||||
utils.log("Source: " + source.root_path)
|
utils.log("Source: " + source.root_path)
|
||||||
utils.log("Desintation: " + dest.root_path)
|
utils.log("Desintation: " + dest.root_path)
|
||||||
|
|
||||||
#make sure the dest folder exists - can cause write errors if the full path doesn't exist
|
# make sure the dest folder exists - can cause write errors if the full path doesn't exist
|
||||||
if(not dest.exists(dest.root_path)):
|
if(not dest.exists(dest.root_path)):
|
||||||
dest.mkdir(dest.root_path)
|
dest.mkdir(dest.root_path)
|
||||||
|
|
||||||
for aFile in fileList:
|
for aFile in fileList:
|
||||||
if(not self.progressBar.checkCancel()):
|
if(not self.progressBar.checkCancel()):
|
||||||
utils.log('Writing file: ' + aFile,xbmc.LOGDEBUG)
|
utils.log('Writing file: ' + aFile, xbmc.LOGDEBUG)
|
||||||
if(aFile.startswith("-")):
|
if(aFile.startswith("-")):
|
||||||
self._updateProgress(aFile[len(source.root_path) + 1:])
|
self._updateProgress(aFile[len(source.root_path) + 1:])
|
||||||
dest.mkdir(dest.root_path + aFile[len(source.root_path) + 1:])
|
dest.mkdir(dest.root_path + aFile[len(source.root_path) + 1:])
|
||||||
else:
|
else:
|
||||||
self._updateProgress()
|
self._updateProgress()
|
||||||
|
|
||||||
wroteFile = True
|
wroteFile = True
|
||||||
destFile = dest.root_path + aFile[len(source.root_path):]
|
destFile = dest.root_path + aFile[len(source.root_path):]
|
||||||
if(isinstance(source,DropboxFileSystem) or isinstance(source,GoogleDriveFilesystem)):
|
if(isinstance(source, DropboxFileSystem)):
|
||||||
#if copying from cloud storage we need the file handle, use get_file
|
# if copying from cloud storage we need the file handle, use get_file
|
||||||
wroteFile = source.get_file(aFile,destFile)
|
wroteFile = source.get_file(aFile, destFile)
|
||||||
else:
|
else:
|
||||||
#copy using normal method
|
# copy using normal method
|
||||||
wroteFile = dest.put(aFile,destFile)
|
wroteFile = dest.put(aFile, destFile)
|
||||||
|
|
||||||
#if result is still true but this file failed
|
# if result is still true but this file failed
|
||||||
if(not wroteFile and result):
|
if(not wroteFile and result):
|
||||||
result = False
|
result = False
|
||||||
|
|
||||||
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
def _addBackupDir(self,folder_name,root_path,dirList):
|
def _addBackupDir(self, folder_name, root_path, dirList):
|
||||||
utils.log('Backup set: ' + folder_name)
|
utils.log('Backup set: ' + folder_name)
|
||||||
fileManager = FileManager(self.xbmc_vfs)
|
fileManager = FileManager(self.xbmc_vfs)
|
||||||
|
|
||||||
self.xbmc_vfs.set_root(xbmc.translatePath(root_path))
|
self.xbmc_vfs.set_root(xbmc.translatePath(root_path))
|
||||||
for aDir in dirList:
|
for aDir in dirList:
|
||||||
fileManager.addDir(aDir)
|
fileManager.addDir(aDir)
|
||||||
|
|
||||||
#walk all the root trees
|
# walk all the root trees
|
||||||
fileManager.walk()
|
fileManager.walk()
|
||||||
#update total files
|
# update total files
|
||||||
self.filesTotal = self.filesTotal + fileManager.size()
|
self.filesTotal = self.filesTotal + fileManager.size()
|
||||||
|
|
||||||
return {"name":folder_name,"source":root_path,"dest":self.remote_vfs.root_path,"files":fileManager.getFiles()}
|
|
||||||
|
|
||||||
def _dateFormat(self,dirName):
|
return {"name": folder_name, "source": root_path, "dest": self.remote_vfs.root_path, "files": fileManager.getFiles()}
|
||||||
#create date_time object from foldername YYYYMMDDHHmm
|
|
||||||
date_time = datetime(int(dirName[0:4]),int(dirName[4:6]),int(dirName[6:8]),int(dirName[8:10]),int(dirName[10:12]))
|
def _dateFormat(self, dirName):
|
||||||
|
# create date_time object from foldername YYYYMMDDHHmm
|
||||||
#format the string based on region settings
|
date_time = datetime(int(dirName[0:4]), int(dirName[4:6]), int(dirName[6:8]), int(dirName[8:10]), int(dirName[10:12]))
|
||||||
result = utils.getRegionalTimestamp(date_time, ['dateshort','time'])
|
|
||||||
|
# format the string based on region settings
|
||||||
|
result = utils.getRegionalTimestamp(date_time, ['dateshort', 'time'])
|
||||||
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
def _updateProgress(self,message=None):
|
def _updateProgress(self, message=None):
|
||||||
self.filesLeft = self.filesLeft - 1
|
self.filesLeft = self.filesLeft - 1
|
||||||
self.progressBar.updateProgress(int((float(self.filesTotal - self.filesLeft)/float(self.filesTotal)) * 100),message)
|
self.progressBar.updateProgress(int((float(self.filesTotal - self.filesLeft) / float(self.filesTotal)) * 100), message)
|
||||||
|
|
||||||
def _rotateBackups(self):
|
def _rotateBackups(self):
|
||||||
total_backups = int(utils.getSetting('backup_rotation'))
|
total_backups = int(utils.getSetting('backup_rotation'))
|
||||||
|
|
||||||
if(total_backups > 0):
|
if(total_backups > 0):
|
||||||
#get a list of valid backup folders
|
# get a list of valid backup folders
|
||||||
dirs = self.listBackups(reverse=False)
|
dirs = self.listBackups(reverse=False)
|
||||||
|
|
||||||
if(len(dirs) > total_backups):
|
if(len(dirs) > total_backups):
|
||||||
#remove backups to equal total wanted
|
# remove backups to equal total wanted
|
||||||
remove_num = 0
|
remove_num = 0
|
||||||
self.filesTotal = self.filesTotal + remove_num + 1
|
self.filesTotal = self.filesTotal + remove_num + 1
|
||||||
|
|
||||||
#update the progress bar if it is available
|
# update the progress bar if it is available
|
||||||
while(remove_num < (len(dirs) - total_backups) and not self.progressBar.checkCancel()):
|
while(remove_num < (len(dirs) - total_backups) and not self.progressBar.checkCancel()):
|
||||||
self._updateProgress(utils.getString(30054) + " " + dirs[remove_num][1])
|
self._updateProgress(utils.getString(30054) + " " + dirs[remove_num][1])
|
||||||
utils.log("Removing backup " + dirs[remove_num][0])
|
utils.log("Removing backup " + dirs[remove_num][0])
|
||||||
|
|
||||||
if(dirs[remove_num][0].split('.')[-1] == 'zip'):
|
if(dirs[remove_num][0].split('.')[-1] == 'zip'):
|
||||||
#this is a file, remove it that way
|
# this is a file, remove it that way
|
||||||
self.remote_vfs.rmfile(self.remote_base_path + dirs[remove_num][0])
|
self.remote_vfs.rmfile(self.remote_base_path + dirs[remove_num][0])
|
||||||
else:
|
else:
|
||||||
self.remote_vfs.rmdir(self.remote_base_path + dirs[remove_num][0] + "/")
|
self.remote_vfs.rmdir(self.remote_base_path + dirs[remove_num][0] + "/")
|
||||||
|
|
||||||
remove_num = remove_num + 1
|
remove_num = remove_num + 1
|
||||||
|
|
||||||
def _createValidationFile(self,dirList):
|
def _createValidationFile(self, dirList):
|
||||||
valInfo = {"name":"XBMC Backup Validation File","xbmc_version":xbmc.getInfoLabel('System.BuildVersion'),"type":0}
|
valInfo = {"name": "XBMC Backup Validation File", "xbmc_version": xbmc.getInfoLabel('System.BuildVersion'), "type": 0}
|
||||||
valDirs = []
|
valDirs = []
|
||||||
|
|
||||||
for aDir in dirList:
|
for aDir in dirList:
|
||||||
valDirs.append({"name":aDir['name'],"path":aDir['source']})
|
valDirs.append({"name": aDir['name'], "path": aDir['source']})
|
||||||
valInfo['directories'] = valDirs
|
valInfo['directories'] = valDirs
|
||||||
|
|
||||||
vFile = xbmcvfs.File(xbmc.translatePath(utils.data_dir() + "xbmcbackup.val"),'w')
|
vFile = xbmcvfs.File(xbmc.translatePath(utils.data_dir() + "xbmcbackup.val"), 'w')
|
||||||
vFile.write(json.dumps(valInfo))
|
vFile.write(json.dumps(valInfo))
|
||||||
vFile.write("")
|
vFile.write("")
|
||||||
vFile.close()
|
vFile.close()
|
||||||
|
|
||||||
success = self.remote_vfs.put(xbmc.translatePath(utils.data_dir() + "xbmcbackup.val"),self.remote_vfs.root_path + "xbmcbackup.val")
|
success = self.remote_vfs.put(xbmc.translatePath(utils.data_dir() + "xbmcbackup.val"), self.remote_vfs.root_path + "xbmcbackup.val")
|
||||||
|
|
||||||
#remove the validation file
|
# remove the validation file
|
||||||
xbmcvfs.delete(xbmc.translatePath(utils.data_dir() + "xbmcbackup.val"))
|
xbmcvfs.delete(xbmc.translatePath(utils.data_dir() + "xbmcbackup.val"))
|
||||||
|
|
||||||
if(success):
|
if(success):
|
||||||
#android requires a .nomedia file to not index the directory as media
|
# android requires a .nomedia file to not index the directory as media
|
||||||
if(not xbmcvfs.exists(xbmc.translatePath(utils.data_dir() + ".nomedia"))):
|
if(not xbmcvfs.exists(xbmc.translatePath(utils.data_dir() + ".nomedia"))):
|
||||||
nmFile = xbmcvfs.File(xbmc.translatePath(utils.data_dir() + ".nomedia"),'w')
|
nmFile = xbmcvfs.File(xbmc.translatePath(utils.data_dir() + ".nomedia"), 'w')
|
||||||
nmFile.close()
|
nmFile.close()
|
||||||
|
|
||||||
success = self.remote_vfs.put(xbmc.translatePath(utils.data_dir() + ".nomedia"),self.remote_vfs.root_path + ".nomedia")
|
success = self.remote_vfs.put(xbmc.translatePath(utils.data_dir() + ".nomedia"), self.remote_vfs.root_path + ".nomedia")
|
||||||
|
|
||||||
return success
|
return success
|
||||||
|
|
||||||
def _checkValidationFile(self,path):
|
def _checkValidationFile(self, path):
|
||||||
result = None
|
result = None
|
||||||
|
|
||||||
#copy the file and open it
|
# copy the file and open it
|
||||||
if(isinstance(self.remote_vfs,DropboxFileSystem) or isinstance(self.remote_vfs,GoogleDriveFilesystem)):
|
if(isinstance(self.remote_vfs, DropboxFileSystem)):
|
||||||
self.remote_vfs.get_file(path + "xbmcbackup.val", xbmc.translatePath(utils.data_dir() + "xbmcbackup_restore.val"))
|
self.remote_vfs.get_file(path + "xbmcbackup.val", xbmc.translatePath(utils.data_dir() + "xbmcbackup_restore.val"))
|
||||||
else:
|
else:
|
||||||
self.xbmc_vfs.put(path + "xbmcbackup.val",xbmc.translatePath(utils.data_dir() + "xbmcbackup_restore.val"))
|
self.xbmc_vfs.put(path + "xbmcbackup.val", xbmc.translatePath(utils.data_dir() + "xbmcbackup_restore.val"))
|
||||||
|
|
||||||
vFile = xbmcvfs.File(xbmc.translatePath(utils.data_dir() + "xbmcbackup_restore.val"),'r')
|
vFile = xbmcvfs.File(xbmc.translatePath(utils.data_dir() + "xbmcbackup_restore.val"), 'r')
|
||||||
jsonString = vFile.read()
|
jsonString = vFile.read()
|
||||||
vFile.close()
|
vFile.close()
|
||||||
|
|
||||||
#delete after checking
|
# delete after checking
|
||||||
xbmcvfs.delete(xbmc.translatePath(utils.data_dir() + "xbmcbackup_restore.val"))
|
xbmcvfs.delete(xbmc.translatePath(utils.data_dir() + "xbmcbackup_restore.val"))
|
||||||
|
|
||||||
try:
|
try:
|
||||||
result = json.loads(jsonString)
|
result = json.loads(jsonString)
|
||||||
|
|
||||||
if(xbmc.getInfoLabel('System.BuildVersion') != result['xbmc_version']):
|
if(xbmc.getInfoLabel('System.BuildVersion') != result['xbmc_version']):
|
||||||
shouldContinue = xbmcgui.Dialog().yesno(utils.getString(30085),utils.getString(30086),utils.getString(30044))
|
shouldContinue = xbmcgui.Dialog().yesno(utils.getString(30085), utils.getString(30086), utils.getString(30044))
|
||||||
|
|
||||||
if(not shouldContinue):
|
if(not shouldContinue):
|
||||||
result = None
|
result = None
|
||||||
|
|
||||||
except ValueError:
|
except ValueError:
|
||||||
#may fail on older archives
|
# may fail on older archives
|
||||||
result = None
|
result = None
|
||||||
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
def _createResumeBackupFile(self):
|
def _createResumeBackupFile(self):
|
||||||
rFile = xbmcvfs.File(xbmc.translatePath(utils.data_dir() + "resume.txt"),'w')
|
rFile = xbmcvfs.File(xbmc.translatePath(utils.data_dir() + "resume.txt"), 'w')
|
||||||
rFile.write(self.restore_point)
|
rFile.write(self.restore_point)
|
||||||
rFile.close()
|
rFile.close()
|
||||||
|
|
||||||
def _readBackupConfig(self,aFile):
|
def _readBackupConfig(self, aFile):
|
||||||
jFile = xbmcvfs.File(xbmc.translatePath(aFile),'r')
|
jFile = xbmcvfs.File(xbmc.translatePath(aFile), 'r')
|
||||||
jsonString = jFile.read()
|
jsonString = jFile.read()
|
||||||
jFile.close()
|
jFile.close()
|
||||||
return json.loads(jsonString)
|
return json.loads(jsonString)
|
||||||
|
|
||||||
|
|
||||||
class FileManager:
|
class FileManager:
|
||||||
not_dir = ['.zip','.xsp','.rar']
|
not_dir = ['.zip', '.xsp', '.rar']
|
||||||
exclude_dir = []
|
exclude_dir = []
|
||||||
root_dirs = []
|
root_dirs = []
|
||||||
pathSep = '/'
|
pathSep = '/'
|
||||||
|
|
||||||
def __init__(self,vfs):
|
def __init__(self, vfs):
|
||||||
self.vfs = vfs
|
self.vfs = vfs
|
||||||
self.fileArray = []
|
self.fileArray = []
|
||||||
self.exclude_dir = []
|
self.exclude_dir = []
|
||||||
self.root_dirs = []
|
self.root_dirs = []
|
||||||
|
|
||||||
def walk(self):
|
def walk(self):
|
||||||
|
|
||||||
for aDir in self.root_dirs:
|
for aDir in self.root_dirs:
|
||||||
self.addFile('-' + xbmc.translatePath(aDir['path']))
|
self.addFile('-' + xbmc.translatePath(aDir['path']))
|
||||||
self.walkTree(xbmc.translatePath(aDir['path']),aDir['recurse'])
|
self.walkTree(xbmc.translatePath(aDir['path']), aDir['recurse'])
|
||||||
|
|
||||||
def walkTree(self,directory,recurse=True):
|
def walkTree(self, directory, recurse=True):
|
||||||
utils.log('walking ' + directory + ', recurse: ' + str(recurse))
|
utils.log('walking ' + directory + ', recurse: ' + str(recurse))
|
||||||
if(directory[-1:] == '/' or directory[-1:] == '\\'):
|
if(directory[-1:] == '/' or directory[-1:] == '\\'):
|
||||||
directory = directory[:-1]
|
directory = directory[:-1]
|
||||||
|
|
||||||
if(self.vfs.exists(directory + self.pathSep)):
|
if(self.vfs.exists(directory + self.pathSep)):
|
||||||
dirs,files = self.vfs.listdir(directory)
|
dirs, files = self.vfs.listdir(directory)
|
||||||
|
|
||||||
if(recurse):
|
if(recurse):
|
||||||
#create all the subdirs first
|
# create all the subdirs first
|
||||||
for aDir in dirs:
|
for aDir in dirs:
|
||||||
dirPath = xbmc.validatePath(xbmc.translatePath(directory + self.pathSep + aDir))
|
dirPath = xbmc.validatePath(xbmc.translatePath(directory + self.pathSep + aDir))
|
||||||
file_ext = aDir.split('.')[-1]
|
file_ext = aDir.split('.')[-1]
|
||||||
|
|
||||||
#check if directory is excluded
|
# check if directory is excluded
|
||||||
if(not any(dirPath.startswith(exDir) for exDir in self.exclude_dir)):
|
if(not any(dirPath.startswith(exDir) for exDir in self.exclude_dir)):
|
||||||
|
|
||||||
self.addFile("-" + dirPath)
|
self.addFile("-" + dirPath)
|
||||||
|
|
||||||
#catch for "non directory" type files
|
# catch for "non directory" type files
|
||||||
shouldWalk = True
|
shouldWalk = True
|
||||||
|
|
||||||
for s in file_ext:
|
for s in file_ext:
|
||||||
if(s in self.not_dir):
|
if(s in self.not_dir):
|
||||||
shouldWalk = False
|
shouldWalk = False
|
||||||
|
|
||||||
if(shouldWalk):
|
if(shouldWalk):
|
||||||
self.walkTree(dirPath)
|
self.walkTree(dirPath)
|
||||||
|
|
||||||
#copy all the files
|
# copy all the files
|
||||||
for aFile in files:
|
for aFile in files:
|
||||||
filePath = xbmc.translatePath(directory + self.pathSep + aFile)
|
filePath = xbmc.translatePath(directory + self.pathSep + aFile)
|
||||||
self.addFile(filePath)
|
self.addFile(filePath)
|
||||||
|
|
||||||
def addDir(self,dirMeta):
|
def addDir(self, dirMeta):
|
||||||
if(dirMeta['type'] == 'include'):
|
if(dirMeta['type'] == 'include'):
|
||||||
self.root_dirs.append({'path':dirMeta['path'],'recurse':dirMeta['recurse']})
|
self.root_dirs.append({'path': dirMeta['path'], 'recurse': dirMeta['recurse']})
|
||||||
else:
|
else:
|
||||||
self.excludeFile(xbmc.translatePath(dirMeta['path']))
|
self.excludeFile(xbmc.translatePath(dirMeta['path']))
|
||||||
|
|
||||||
def addFile(self,filename):
|
def addFile(self, filename):
|
||||||
try:
|
# write the full remote path name of this file
|
||||||
filename = filename.decode('UTF-8')
|
utils.log("Add File: " + filename)
|
||||||
except UnicodeDecodeError:
|
|
||||||
filename = filename.decode('ISO-8859-2')
|
|
||||||
|
|
||||||
#write the full remote path name of this file
|
|
||||||
utils.log("Add File: " + filename,xbmc.LOGDEBUG)
|
|
||||||
self.fileArray.append(filename)
|
self.fileArray.append(filename)
|
||||||
|
|
||||||
def excludeFile(self,filename):
|
def excludeFile(self, filename):
|
||||||
try:
|
|
||||||
filename = filename.decode('UTF-8')
|
# remove trailing slash
|
||||||
except UnicodeDecodeError:
|
|
||||||
filename = filename.decode('ISO-8859-2')
|
|
||||||
|
|
||||||
#remove trailing slash
|
|
||||||
if(filename[-1] == '/' or filename[-1] == '\\'):
|
if(filename[-1] == '/' or filename[-1] == '\\'):
|
||||||
filename = filename[:-1]
|
filename = filename[:-1]
|
||||||
|
|
||||||
#write the full remote path name of this file
|
# write the full remote path name of this file
|
||||||
utils.log("Exclude File: " + filename)
|
utils.log("Exclude File: " + filename)
|
||||||
self.exclude_dir.append(filename)
|
self.exclude_dir.append(filename)
|
||||||
|
|
||||||
@@ -624,7 +611,7 @@ class FileManager:
|
|||||||
self.fileArray = []
|
self.fileArray = []
|
||||||
self.root_dirs = []
|
self.root_dirs = []
|
||||||
self.exclude_dir = []
|
self.exclude_dir = []
|
||||||
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
def size(self):
|
def size(self):
|
||||||
|
|||||||
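The hunks above are dominated by three mechanical pycodestyle fixes: a space after every comma (E231), identity comparison with None instead of == / != (E711), and block comments that start with '# ' (E265). A minimal runnable sketch of the before/after pattern, using hypothetical names:

# Sketch of the recurring flake8/pycodestyle fixes; fmt() and val are hypothetical names.

def fmt(a, b):  # E231 fix: whitespace after each comma
    return str(a) + str(b)

val = None
if val is None:  # E711 fix: 'is None' / 'is not None' rather than '== None' / '!= None'
    val = fmt(1, 2)

# E265 fix: block comments start with '# ', not '#'
print(val)  # prints 12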
@@ -3,8 +3,8 @@

import re
from time import time, mktime
-from datetime import datetime, date
+from datetime import datetime
-from .relativedelta import relativedelta
+from dateutil.relativedelta import relativedelta

search_re = re.compile(r'^([^-]+)-([^-/]+)(/(.*))?$')
only_int_re = re.compile(r'^\d+$')
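The hunk above (from the bundled croniter module, judging by its regexes) replaces the vendored relativedelta import with the python-dateutil implementation. relativedelta does calendar-aware arithmetic that a plain timedelta cannot; a small sketch, assuming python-dateutil is available:

from datetime import datetime

from dateutil.relativedelta import relativedelta

# month arithmetic is calendar-aware and clamps to the last valid day
print(datetime(2020, 1, 31) + relativedelta(months=1))  # 2020-02-29 00:00:00 (leap year)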
resources/lib/dropbox/__init__.py (new file, 4 lines)
@@ -0,0 +1,4 @@
+from __future__ import absolute_import
+
+from .dropbox import __version__, Dropbox, DropboxTeam, create_session  # noqa: F401
+from .oauth import DropboxOAuth2Flow, DropboxOAuth2FlowNoRedirect  # noqa: F401
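The package __init__ above re-exports the client entry points so callers can import them from the package root. A minimal sketch; the token string is a placeholder, and constructing the client makes no network call:

from dropbox import Dropbox

dbx = Dropbox('ACCESS_TOKEN')  # placeholder OAuth2 token, not a real credential
print(type(dbx).__name__)  # Dropbox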
resources/lib/dropbox/async.py (new file, 7 lines)
@@ -0,0 +1,7 @@
+# -*- coding: utf-8 -*-
+# Auto-generated by Stone, do not modify.
+# @generated
+# flake8: noqa
+# pylint: skip-file
+# If you have issues importing this module because Python recognizes it as a keyword, use async_ instead.
+from .async_ import *
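Because async became a reserved word in Python 3.7, "import dropbox.async" is a SyntaxError on modern interpreters; the stub above keeps the old module name importable on older Pythons while the real definitions live in async_.py. A sketch of the Python 3 spelling:

from dropbox import async_ as dbx_async  # the trailing-underscore name avoids the keyword

arg = dbx_async.PollArg(async_job_id='job-123')  # hypothetical job id
print(arg)  # PollArg(async_job_id='job-123')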
resources/lib/dropbox/async_.py (new file, 332 lines)
@@ -0,0 +1,332 @@
+# -*- coding: utf-8 -*-
+# Auto-generated by Stone, do not modify.
+# @generated
+# flake8: noqa
+# pylint: skip-file
+try:
+from . import stone_validators as bv
+from . import stone_base as bb
+except (ImportError, SystemError, ValueError):
+# Catch errors raised when importing a relative module when not in a package.
+# This makes testing this file directly (outside of a package) easier.
+import stone_validators as bv
+import stone_base as bb
+
+class LaunchResultBase(bb.Union):
+"""
+Result returned by methods that launch an asynchronous job. A method who may
+either launch an asynchronous job, or complete the request synchronously,
+can use this union by extending it, and adding a 'complete' field with the
+type of the synchronous response. See :class:`LaunchEmptyResult` for an
+example.
+
+This class acts as a tagged union. Only one of the ``is_*`` methods will
+return true. To get the associated value of a tag (if one exists), use the
+corresponding ``get_*`` method.
+
+:ivar str async.LaunchResultBase.async_job_id: This response indicates that
+the processing is asynchronous. The string is an id that can be used to
+obtain the status of the asynchronous job.
+"""
+
+_catch_all = None
+
+@classmethod
+def async_job_id(cls, val):
+"""
+Create an instance of this class set to the ``async_job_id`` tag with
+value ``val``.
+
+:param str val:
+:rtype: LaunchResultBase
+"""
+return cls('async_job_id', val)
+
+def is_async_job_id(self):
+"""
+Check if the union tag is ``async_job_id``.
+
+:rtype: bool
+"""
+return self._tag == 'async_job_id'
+
+def get_async_job_id(self):
+"""
+This response indicates that the processing is asynchronous. The string
+is an id that can be used to obtain the status of the asynchronous job.
+
+Only call this if :meth:`is_async_job_id` is true.
+
+:rtype: str
+"""
+if not self.is_async_job_id():
+raise AttributeError("tag 'async_job_id' not set")
+return self._value
+
+def _process_custom_annotations(self, annotation_type, field_path, processor):
+super(LaunchResultBase, self)._process_custom_annotations(annotation_type, field_path, processor)
+
+def __repr__(self):
+return 'LaunchResultBase(%r, %r)' % (self._tag, self._value)
+
+LaunchResultBase_validator = bv.Union(LaunchResultBase)
+
+class LaunchEmptyResult(LaunchResultBase):
+"""
+Result returned by methods that may either launch an asynchronous job or
+complete synchronously. Upon synchronous completion of the job, no
+additional information is returned.
+
+This class acts as a tagged union. Only one of the ``is_*`` methods will
+return true. To get the associated value of a tag (if one exists), use the
+corresponding ``get_*`` method.
+
+:ivar async.LaunchEmptyResult.complete: The job finished synchronously and
+successfully.
+"""
+
+# Attribute is overwritten below the class definition
+complete = None
+
+def is_complete(self):
+"""
+Check if the union tag is ``complete``.
+
+:rtype: bool
+"""
+return self._tag == 'complete'
+
+def _process_custom_annotations(self, annotation_type, field_path, processor):
+super(LaunchEmptyResult, self)._process_custom_annotations(annotation_type, field_path, processor)
+
+def __repr__(self):
+return 'LaunchEmptyResult(%r, %r)' % (self._tag, self._value)
+
+LaunchEmptyResult_validator = bv.Union(LaunchEmptyResult)
+
+class PollArg(bb.Struct):
+"""
+Arguments for methods that poll the status of an asynchronous job.
+
+:ivar async.PollArg.async_job_id: Id of the asynchronous job. This is the
+value of a response returned from the method that launched the job.
+"""
+
+__slots__ = [
+'_async_job_id_value',
+'_async_job_id_present',
+]
+
+_has_required_fields = True
+
+def __init__(self,
+async_job_id=None):
+self._async_job_id_value = None
+self._async_job_id_present = False
+if async_job_id is not None:
+self.async_job_id = async_job_id
+
+@property
+def async_job_id(self):
+"""
+Id of the asynchronous job. This is the value of a response returned
+from the method that launched the job.
+
+:rtype: str
+"""
+if self._async_job_id_present:
+return self._async_job_id_value
+else:
+raise AttributeError("missing required field 'async_job_id'")
+
+@async_job_id.setter
+def async_job_id(self, val):
+val = self._async_job_id_validator.validate(val)
+self._async_job_id_value = val
+self._async_job_id_present = True
+
+@async_job_id.deleter
+def async_job_id(self):
+self._async_job_id_value = None
+self._async_job_id_present = False
+
+def _process_custom_annotations(self, annotation_type, field_path, processor):
+super(PollArg, self)._process_custom_annotations(annotation_type, field_path, processor)
+
+def __repr__(self):
+return 'PollArg(async_job_id={!r})'.format(
+self._async_job_id_value,
+)
+
+PollArg_validator = bv.Struct(PollArg)
+
+class PollResultBase(bb.Union):
+"""
+Result returned by methods that poll for the status of an asynchronous job.
+Unions that extend this union should add a 'complete' field with a type of
+the information returned upon job completion. See :class:`PollEmptyResult`
+for an example.
+
+This class acts as a tagged union. Only one of the ``is_*`` methods will
+return true. To get the associated value of a tag (if one exists), use the
+corresponding ``get_*`` method.
+
+:ivar async.PollResultBase.in_progress: The asynchronous job is still in
+progress.
+"""
+
+_catch_all = None
+# Attribute is overwritten below the class definition
+in_progress = None
+
+def is_in_progress(self):
+"""
+Check if the union tag is ``in_progress``.
+
+:rtype: bool
+"""
+return self._tag == 'in_progress'
+
+def _process_custom_annotations(self, annotation_type, field_path, processor):
+super(PollResultBase, self)._process_custom_annotations(annotation_type, field_path, processor)
+
+def __repr__(self):
+return 'PollResultBase(%r, %r)' % (self._tag, self._value)
+
+PollResultBase_validator = bv.Union(PollResultBase)
+
+class PollEmptyResult(PollResultBase):
+"""
+Result returned by methods that poll for the status of an asynchronous job.
+Upon completion of the job, no additional information is returned.
+
+This class acts as a tagged union. Only one of the ``is_*`` methods will
+return true. To get the associated value of a tag (if one exists), use the
+corresponding ``get_*`` method.
+
+:ivar async.PollEmptyResult.complete: The asynchronous job has completed
+successfully.
+"""
+
+# Attribute is overwritten below the class definition
+complete = None
+
+def is_complete(self):
+"""
+Check if the union tag is ``complete``.
+
+:rtype: bool
+"""
+return self._tag == 'complete'
+
+def _process_custom_annotations(self, annotation_type, field_path, processor):
+super(PollEmptyResult, self)._process_custom_annotations(annotation_type, field_path, processor)
+
+def __repr__(self):
+return 'PollEmptyResult(%r, %r)' % (self._tag, self._value)
+
+PollEmptyResult_validator = bv.Union(PollEmptyResult)
+
+class PollError(bb.Union):
+"""
+Error returned by methods for polling the status of asynchronous job.
+
+This class acts as a tagged union. Only one of the ``is_*`` methods will
+return true. To get the associated value of a tag (if one exists), use the
+corresponding ``get_*`` method.
+
+:ivar async.PollError.invalid_async_job_id: The job ID is invalid.
+:ivar async.PollError.internal_error: Something went wrong with the job on
+Dropbox's end. You'll need to verify that the action you were taking
+succeeded, and if not, try again. This should happen very rarely.
+"""
+
+_catch_all = 'other'
+# Attribute is overwritten below the class definition
+invalid_async_job_id = None
+# Attribute is overwritten below the class definition
+internal_error = None
+# Attribute is overwritten below the class definition
+other = None
+
+def is_invalid_async_job_id(self):
+"""
+Check if the union tag is ``invalid_async_job_id``.
+
+:rtype: bool
+"""
+return self._tag == 'invalid_async_job_id'
+
+def is_internal_error(self):
+"""
+Check if the union tag is ``internal_error``.
+
+:rtype: bool
+"""
+return self._tag == 'internal_error'
+
+def is_other(self):
+"""
+Check if the union tag is ``other``.
+
+:rtype: bool
+"""
+return self._tag == 'other'
+
+def _process_custom_annotations(self, annotation_type, field_path, processor):
+super(PollError, self)._process_custom_annotations(annotation_type, field_path, processor)
+
+def __repr__(self):
+return 'PollError(%r, %r)' % (self._tag, self._value)
+
+PollError_validator = bv.Union(PollError)
+
+AsyncJobId_validator = bv.String(min_length=1)
+LaunchResultBase._async_job_id_validator = AsyncJobId_validator
+LaunchResultBase._tagmap = {
+'async_job_id': LaunchResultBase._async_job_id_validator,
+}
+
+LaunchEmptyResult._complete_validator = bv.Void()
+LaunchEmptyResult._tagmap = {
+'complete': LaunchEmptyResult._complete_validator,
+}
+LaunchEmptyResult._tagmap.update(LaunchResultBase._tagmap)
+
+LaunchEmptyResult.complete = LaunchEmptyResult('complete')
+
+PollArg._async_job_id_validator = AsyncJobId_validator
+PollArg._all_field_names_ = set(['async_job_id'])
+PollArg._all_fields_ = [('async_job_id', PollArg._async_job_id_validator)]
+
+PollResultBase._in_progress_validator = bv.Void()
+PollResultBase._tagmap = {
+'in_progress': PollResultBase._in_progress_validator,
+}
+
+PollResultBase.in_progress = PollResultBase('in_progress')
+
+PollEmptyResult._complete_validator = bv.Void()
+PollEmptyResult._tagmap = {
+'complete': PollEmptyResult._complete_validator,
+}
+PollEmptyResult._tagmap.update(PollResultBase._tagmap)
+
+PollEmptyResult.complete = PollEmptyResult('complete')
+
+PollError._invalid_async_job_id_validator = bv.Void()
+PollError._internal_error_validator = bv.Void()
+PollError._other_validator = bv.Void()
+PollError._tagmap = {
+'invalid_async_job_id': PollError._invalid_async_job_id_validator,
+'internal_error': PollError._internal_error_validator,
+'other': PollError._other_validator,
+}
+
+PollError.invalid_async_job_id = PollError('invalid_async_job_id')
+PollError.internal_error = PollError('internal_error')
+PollError.other = PollError('other')
+
+ROUTES = {
+}
+
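The Stone-generated classes above all follow one pattern: a union instance carries a tag (plus an optional value), which callers check with the is_* methods and unwrap with the matching get_* method. A short consumption sketch using the definitions in this file; the job id is hypothetical:

from dropbox.async_ import LaunchResultBase, PollError

result = LaunchResultBase.async_job_id('job-123')
if result.is_async_job_id():
    print('poll using id:', result.get_async_job_id())

err = PollError.other  # preset singleton tags are attached below the class bodies
print(err.is_internal_error(), err.is_other())  # False True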
862
resources/lib/dropbox/auth.py
Normal file
862
resources/lib/dropbox/auth.py
Normal file
@@ -0,0 +1,862 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# Auto-generated by Stone, do not modify.
|
||||||
|
# @generated
|
||||||
|
# flake8: noqa
|
||||||
|
# pylint: skip-file
|
||||||
|
try:
|
||||||
|
from . import stone_validators as bv
|
||||||
|
from . import stone_base as bb
|
||||||
|
except (ImportError, SystemError, ValueError):
|
||||||
|
# Catch errors raised when importing a relative module when not in a package.
|
||||||
|
# This makes testing this file directly (outside of a package) easier.
|
||||||
|
import stone_validators as bv
|
||||||
|
import stone_base as bb
|
||||||
|
|
||||||
|
class AccessError(bb.Union):
|
||||||
|
"""
|
||||||
|
Error occurred because the account doesn't have permission to access the
|
||||||
|
resource.
|
||||||
|
|
||||||
|
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
||||||
|
return true. To get the associated value of a tag (if one exists), use the
|
||||||
|
corresponding ``get_*`` method.
|
||||||
|
|
||||||
|
:ivar InvalidAccountTypeError AccessError.invalid_account_type: Current
|
||||||
|
account type cannot access the resource.
|
||||||
|
:ivar PaperAccessError AccessError.paper_access_denied: Current account
|
||||||
|
cannot access Paper.
|
||||||
|
"""
|
||||||
|
|
||||||
|
_catch_all = 'other'
|
||||||
|
# Attribute is overwritten below the class definition
|
||||||
|
other = None
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def invalid_account_type(cls, val):
|
||||||
|
"""
|
||||||
|
Create an instance of this class set to the ``invalid_account_type`` tag
|
||||||
|
with value ``val``.
|
||||||
|
|
||||||
|
:param InvalidAccountTypeError val:
|
||||||
|
:rtype: AccessError
|
||||||
|
"""
|
||||||
|
return cls('invalid_account_type', val)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def paper_access_denied(cls, val):
|
||||||
|
"""
|
||||||
|
Create an instance of this class set to the ``paper_access_denied`` tag
|
||||||
|
with value ``val``.
|
||||||
|
|
||||||
|
:param PaperAccessError val:
|
||||||
|
:rtype: AccessError
|
||||||
|
"""
|
||||||
|
return cls('paper_access_denied', val)
|
||||||
|
|
||||||
|
def is_invalid_account_type(self):
|
||||||
|
"""
|
||||||
|
Check if the union tag is ``invalid_account_type``.
|
||||||
|
|
||||||
|
:rtype: bool
|
||||||
|
"""
|
||||||
|
return self._tag == 'invalid_account_type'
|
||||||
|
|
||||||
|
def is_paper_access_denied(self):
|
||||||
|
"""
|
||||||
|
Check if the union tag is ``paper_access_denied``.
|
||||||
|
|
||||||
|
:rtype: bool
|
||||||
|
"""
|
||||||
|
return self._tag == 'paper_access_denied'
|
||||||
|
|
||||||
|
def is_other(self):
|
||||||
|
"""
|
||||||
|
Check if the union tag is ``other``.
|
||||||
|
|
||||||
|
:rtype: bool
|
||||||
|
"""
|
||||||
|
return self._tag == 'other'
|
||||||
|
|
||||||
|
def get_invalid_account_type(self):
|
||||||
|
"""
|
||||||
|
Current account type cannot access the resource.
|
||||||
|
|
||||||
|
Only call this if :meth:`is_invalid_account_type` is true.
|
||||||
|
|
||||||
|
:rtype: InvalidAccountTypeError
|
||||||
|
"""
|
||||||
|
if not self.is_invalid_account_type():
|
||||||
|
raise AttributeError("tag 'invalid_account_type' not set")
|
||||||
|
return self._value
|
||||||
|
|
||||||
|
def get_paper_access_denied(self):
|
||||||
|
"""
|
||||||
|
Current account cannot access Paper.
|
||||||
|
|
||||||
|
Only call this if :meth:`is_paper_access_denied` is true.
|
||||||
|
|
||||||
|
:rtype: PaperAccessError
|
||||||
|
"""
|
||||||
|
if not self.is_paper_access_denied():
|
||||||
|
raise AttributeError("tag 'paper_access_denied' not set")
|
||||||
|
return self._value
|
||||||
|
|
||||||
|
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||||
|
super(AccessError, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return 'AccessError(%r, %r)' % (self._tag, self._value)
|
||||||
|
|
||||||
|
AccessError_validator = bv.Union(AccessError)
|
||||||
|
|
||||||
|
class AuthError(bb.Union):
|
||||||
|
"""
|
||||||
|
Errors occurred during authentication.
|
||||||
|
|
||||||
|
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
||||||
|
return true. To get the associated value of a tag (if one exists), use the
|
||||||
|
corresponding ``get_*`` method.
|
||||||
|
|
||||||
|
:ivar auth.AuthError.invalid_access_token: The access token is invalid.
|
||||||
|
:ivar auth.AuthError.invalid_select_user: The user specified in
|
||||||
|
'Dropbox-API-Select-User' is no longer on the team.
|
||||||
|
:ivar auth.AuthError.invalid_select_admin: The user specified in
|
||||||
|
'Dropbox-API-Select-Admin' is not a Dropbox Business team admin.
|
||||||
|
:ivar auth.AuthError.user_suspended: The user has been suspended.
|
||||||
|
:ivar auth.AuthError.expired_access_token: The access token has expired.
|
||||||
|
:ivar TokenScopeError AuthError.missing_scope: The access token does not
|
||||||
|
have the required scope to access the route.
|
||||||
|
"""
|
||||||
|
|
||||||
|
_catch_all = 'other'
|
||||||
|
# Attribute is overwritten below the class definition
|
||||||
|
invalid_access_token = None
|
||||||
|
# Attribute is overwritten below the class definition
|
||||||
|
invalid_select_user = None
|
||||||
|
# Attribute is overwritten below the class definition
|
||||||
|
invalid_select_admin = None
|
||||||
|
# Attribute is overwritten below the class definition
|
||||||
|
user_suspended = None
|
||||||
|
# Attribute is overwritten below the class definition
|
||||||
|
expired_access_token = None
|
||||||
|
# Attribute is overwritten below the class definition
|
||||||
|
other = None
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def missing_scope(cls, val):
|
||||||
|
"""
|
||||||
|
Create an instance of this class set to the ``missing_scope`` tag with
|
||||||
|
value ``val``.
|
||||||
|
|
||||||
|
:param TokenScopeError val:
|
||||||
|
:rtype: AuthError
|
||||||
|
"""
|
||||||
|
return cls('missing_scope', val)
|
||||||
|
|
||||||
|
def is_invalid_access_token(self):
|
||||||
|
"""
|
||||||
|
Check if the union tag is ``invalid_access_token``.
|
||||||
|
|
||||||
|
:rtype: bool
|
||||||
|
"""
|
||||||
|
return self._tag == 'invalid_access_token'
|
||||||
|
|
||||||
|
def is_invalid_select_user(self):
|
||||||
|
"""
|
||||||
|
Check if the union tag is ``invalid_select_user``.
|
||||||
|
|
||||||
|
:rtype: bool
|
||||||
|
"""
|
||||||
|
return self._tag == 'invalid_select_user'
|
||||||
|
|
||||||
|
def is_invalid_select_admin(self):
|
||||||
|
"""
|
||||||
|
Check if the union tag is ``invalid_select_admin``.
|
||||||
|
|
||||||
|
:rtype: bool
|
||||||
|
"""
|
||||||
|
return self._tag == 'invalid_select_admin'
|
||||||
|
|
||||||
|
def is_user_suspended(self):
|
||||||
|
"""
|
||||||
|
Check if the union tag is ``user_suspended``.
|
||||||
|
|
||||||
|
:rtype: bool
|
||||||
|
"""
|
||||||
|
return self._tag == 'user_suspended'
|
||||||
|
|
||||||
|
def is_expired_access_token(self):
|
||||||
|
"""
|
||||||
|
Check if the union tag is ``expired_access_token``.
|
||||||
|
|
||||||
|
:rtype: bool
|
||||||
|
"""
|
||||||
|
return self._tag == 'expired_access_token'
|
||||||
|
|
||||||
|
def is_missing_scope(self):
|
||||||
|
"""
|
||||||
|
Check if the union tag is ``missing_scope``.
|
||||||
|
|
||||||
|
:rtype: bool
|
||||||
|
"""
|
||||||
|
return self._tag == 'missing_scope'
|
||||||
|
|
||||||
|
def is_other(self):
|
||||||
|
"""
|
||||||
|
Check if the union tag is ``other``.
|
||||||
|
|
||||||
|
:rtype: bool
|
||||||
|
"""
|
||||||
|
return self._tag == 'other'
|
||||||
|
|
||||||
|
def get_missing_scope(self):
|
||||||
|
"""
|
||||||
|
The access token does not have the required scope to access the route.
|
||||||
|
|
||||||
|
Only call this if :meth:`is_missing_scope` is true.
|
||||||
|
|
||||||
|
:rtype: TokenScopeError
|
||||||
|
"""
|
||||||
|
if not self.is_missing_scope():
|
||||||
|
raise AttributeError("tag 'missing_scope' not set")
|
||||||
|
return self._value
|
||||||
|
|
||||||
|
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||||
|
super(AuthError, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return 'AuthError(%r, %r)' % (self._tag, self._value)
|
||||||
|
|
||||||
|
AuthError_validator = bv.Union(AuthError)
|
||||||
|
|
||||||
|
class InvalidAccountTypeError(bb.Union):
    """
    This class acts as a tagged union. Only one of the ``is_*`` methods will
    return true. To get the associated value of a tag (if one exists), use the
    corresponding ``get_*`` method.

    :ivar auth.InvalidAccountTypeError.endpoint: Current account type doesn't
        have permission to access this route endpoint.
    :ivar auth.InvalidAccountTypeError.feature: Current account type doesn't
        have permission to access this feature.
    """

    _catch_all = 'other'
    # Attribute is overwritten below the class definition
    endpoint = None
    # Attribute is overwritten below the class definition
    feature = None
    # Attribute is overwritten below the class definition
    other = None

    def is_endpoint(self):
        """
        Check if the union tag is ``endpoint``.

        :rtype: bool
        """
        return self._tag == 'endpoint'

    def is_feature(self):
        """
        Check if the union tag is ``feature``.

        :rtype: bool
        """
        return self._tag == 'feature'

    def is_other(self):
        """
        Check if the union tag is ``other``.

        :rtype: bool
        """
        return self._tag == 'other'

    def _process_custom_annotations(self, annotation_type, field_path, processor):
        super(InvalidAccountTypeError, self)._process_custom_annotations(annotation_type, field_path, processor)

    def __repr__(self):
        return 'InvalidAccountTypeError(%r, %r)' % (self._tag, self._value)

InvalidAccountTypeError_validator = bv.Union(InvalidAccountTypeError)
class PaperAccessError(bb.Union):
    """
    This class acts as a tagged union. Only one of the ``is_*`` methods will
    return true. To get the associated value of a tag (if one exists), use the
    corresponding ``get_*`` method.

    :ivar auth.PaperAccessError.paper_disabled: Paper is disabled.
    :ivar auth.PaperAccessError.not_paper_user: The provided user has not used
        Paper yet.
    """

    _catch_all = 'other'
    # Attribute is overwritten below the class definition
    paper_disabled = None
    # Attribute is overwritten below the class definition
    not_paper_user = None
    # Attribute is overwritten below the class definition
    other = None

    def is_paper_disabled(self):
        """
        Check if the union tag is ``paper_disabled``.

        :rtype: bool
        """
        return self._tag == 'paper_disabled'

    def is_not_paper_user(self):
        """
        Check if the union tag is ``not_paper_user``.

        :rtype: bool
        """
        return self._tag == 'not_paper_user'

    def is_other(self):
        """
        Check if the union tag is ``other``.

        :rtype: bool
        """
        return self._tag == 'other'

    def _process_custom_annotations(self, annotation_type, field_path, processor):
        super(PaperAccessError, self)._process_custom_annotations(annotation_type, field_path, processor)

    def __repr__(self):
        return 'PaperAccessError(%r, %r)' % (self._tag, self._value)

PaperAccessError_validator = bv.Union(PaperAccessError)
class RateLimitError(bb.Struct):
    """
    Error occurred because the app is being rate limited.

    :ivar auth.RateLimitError.reason: The reason why the app is being rate
        limited.
    :ivar auth.RateLimitError.retry_after: The number of seconds that the app
        should wait before making another request.
    """

    __slots__ = [
        '_reason_value',
        '_reason_present',
        '_retry_after_value',
        '_retry_after_present',
    ]

    _has_required_fields = True

    def __init__(self,
                 reason=None,
                 retry_after=None):
        self._reason_value = None
        self._reason_present = False
        self._retry_after_value = None
        self._retry_after_present = False
        if reason is not None:
            self.reason = reason
        if retry_after is not None:
            self.retry_after = retry_after

    @property
    def reason(self):
        """
        The reason why the app is being rate limited.

        :rtype: RateLimitReason
        """
        if self._reason_present:
            return self._reason_value
        else:
            raise AttributeError("missing required field 'reason'")

    @reason.setter
    def reason(self, val):
        self._reason_validator.validate_type_only(val)
        self._reason_value = val
        self._reason_present = True

    @reason.deleter
    def reason(self):
        self._reason_value = None
        self._reason_present = False

    @property
    def retry_after(self):
        """
        The number of seconds that the app should wait before making another
        request.

        :rtype: int
        """
        if self._retry_after_present:
            return self._retry_after_value
        else:
            return 1

    @retry_after.setter
    def retry_after(self, val):
        val = self._retry_after_validator.validate(val)
        self._retry_after_value = val
        self._retry_after_present = True

    @retry_after.deleter
    def retry_after(self):
        self._retry_after_value = None
        self._retry_after_present = False

    def _process_custom_annotations(self, annotation_type, field_path, processor):
        super(RateLimitError, self)._process_custom_annotations(annotation_type, field_path, processor)

    def __repr__(self):
        return 'RateLimitError(reason={!r}, retry_after={!r})'.format(
            self._reason_value,
            self._retry_after_value,
        )

RateLimitError_validator = bv.Struct(RateLimitError)
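
# --- Editor's sketch (not part of the generated module): honouring
# RateLimitError.retry_after before retrying. `time` is imported locally so
# the sketch stays self-contained; RateLimitReason is defined just below.
def sleep_for_rate_limit(err):
    """Sleep for the server-suggested interval from a RateLimitError struct."""
    import time
    time.sleep(err.retry_after)  # retry_after falls back to 1 when unset
    return err.reason  # callers can log why they were throttled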
class RateLimitReason(bb.Union):
    """
    This class acts as a tagged union. Only one of the ``is_*`` methods will
    return true. To get the associated value of a tag (if one exists), use the
    corresponding ``get_*`` method.

    :ivar auth.RateLimitReason.too_many_requests: You are making too many
        requests in the past few minutes.
    :ivar auth.RateLimitReason.too_many_write_operations: There are currently
        too many write operations happening in the user's Dropbox.
    """

    _catch_all = 'other'
    # Attribute is overwritten below the class definition
    too_many_requests = None
    # Attribute is overwritten below the class definition
    too_many_write_operations = None
    # Attribute is overwritten below the class definition
    other = None

    def is_too_many_requests(self):
        """
        Check if the union tag is ``too_many_requests``.

        :rtype: bool
        """
        return self._tag == 'too_many_requests'

    def is_too_many_write_operations(self):
        """
        Check if the union tag is ``too_many_write_operations``.

        :rtype: bool
        """
        return self._tag == 'too_many_write_operations'

    def is_other(self):
        """
        Check if the union tag is ``other``.

        :rtype: bool
        """
        return self._tag == 'other'

    def _process_custom_annotations(self, annotation_type, field_path, processor):
        super(RateLimitReason, self)._process_custom_annotations(annotation_type, field_path, processor)

    def __repr__(self):
        return 'RateLimitReason(%r, %r)' % (self._tag, self._value)

RateLimitReason_validator = bv.Union(RateLimitReason)
class TokenFromOAuth1Arg(bb.Struct):
    """
    :ivar auth.TokenFromOAuth1Arg.oauth1_token: The supplied OAuth 1.0 access
        token.
    :ivar auth.TokenFromOAuth1Arg.oauth1_token_secret: The token secret
        associated with the supplied access token.
    """

    __slots__ = [
        '_oauth1_token_value',
        '_oauth1_token_present',
        '_oauth1_token_secret_value',
        '_oauth1_token_secret_present',
    ]

    _has_required_fields = True

    def __init__(self,
                 oauth1_token=None,
                 oauth1_token_secret=None):
        self._oauth1_token_value = None
        self._oauth1_token_present = False
        self._oauth1_token_secret_value = None
        self._oauth1_token_secret_present = False
        if oauth1_token is not None:
            self.oauth1_token = oauth1_token
        if oauth1_token_secret is not None:
            self.oauth1_token_secret = oauth1_token_secret

    @property
    def oauth1_token(self):
        """
        The supplied OAuth 1.0 access token.

        :rtype: str
        """
        if self._oauth1_token_present:
            return self._oauth1_token_value
        else:
            raise AttributeError("missing required field 'oauth1_token'")

    @oauth1_token.setter
    def oauth1_token(self, val):
        val = self._oauth1_token_validator.validate(val)
        self._oauth1_token_value = val
        self._oauth1_token_present = True

    @oauth1_token.deleter
    def oauth1_token(self):
        self._oauth1_token_value = None
        self._oauth1_token_present = False

    @property
    def oauth1_token_secret(self):
        """
        The token secret associated with the supplied access token.

        :rtype: str
        """
        if self._oauth1_token_secret_present:
            return self._oauth1_token_secret_value
        else:
            raise AttributeError("missing required field 'oauth1_token_secret'")

    @oauth1_token_secret.setter
    def oauth1_token_secret(self, val):
        val = self._oauth1_token_secret_validator.validate(val)
        self._oauth1_token_secret_value = val
        self._oauth1_token_secret_present = True

    @oauth1_token_secret.deleter
    def oauth1_token_secret(self):
        self._oauth1_token_secret_value = None
        self._oauth1_token_secret_present = False

    def _process_custom_annotations(self, annotation_type, field_path, processor):
        super(TokenFromOAuth1Arg, self)._process_custom_annotations(annotation_type, field_path, processor)

    def __repr__(self):
        return 'TokenFromOAuth1Arg(oauth1_token={!r}, oauth1_token_secret={!r})'.format(
            self._oauth1_token_value,
            self._oauth1_token_secret_value,
        )

TokenFromOAuth1Arg_validator = bv.Struct(TokenFromOAuth1Arg)
class TokenFromOAuth1Error(bb.Union):
    """
    This class acts as a tagged union. Only one of the ``is_*`` methods will
    return true. To get the associated value of a tag (if one exists), use the
    corresponding ``get_*`` method.

    :ivar auth.TokenFromOAuth1Error.invalid_oauth1_token_info: Part or all of
        the OAuth 1.0 access token info is invalid.
    :ivar auth.TokenFromOAuth1Error.app_id_mismatch: The authorized app does not
        match the app associated with the supplied access token.
    """

    _catch_all = 'other'
    # Attribute is overwritten below the class definition
    invalid_oauth1_token_info = None
    # Attribute is overwritten below the class definition
    app_id_mismatch = None
    # Attribute is overwritten below the class definition
    other = None

    def is_invalid_oauth1_token_info(self):
        """
        Check if the union tag is ``invalid_oauth1_token_info``.

        :rtype: bool
        """
        return self._tag == 'invalid_oauth1_token_info'

    def is_app_id_mismatch(self):
        """
        Check if the union tag is ``app_id_mismatch``.

        :rtype: bool
        """
        return self._tag == 'app_id_mismatch'

    def is_other(self):
        """
        Check if the union tag is ``other``.

        :rtype: bool
        """
        return self._tag == 'other'

    def _process_custom_annotations(self, annotation_type, field_path, processor):
        super(TokenFromOAuth1Error, self)._process_custom_annotations(annotation_type, field_path, processor)

    def __repr__(self):
        return 'TokenFromOAuth1Error(%r, %r)' % (self._tag, self._value)

TokenFromOAuth1Error_validator = bv.Union(TokenFromOAuth1Error)
class TokenFromOAuth1Result(bb.Struct):
    """
    :ivar auth.TokenFromOAuth1Result.oauth2_token: The OAuth 2.0 token generated
        from the supplied OAuth 1.0 token.
    """

    __slots__ = [
        '_oauth2_token_value',
        '_oauth2_token_present',
    ]

    _has_required_fields = True

    def __init__(self,
                 oauth2_token=None):
        self._oauth2_token_value = None
        self._oauth2_token_present = False
        if oauth2_token is not None:
            self.oauth2_token = oauth2_token

    @property
    def oauth2_token(self):
        """
        The OAuth 2.0 token generated from the supplied OAuth 1.0 token.

        :rtype: str
        """
        if self._oauth2_token_present:
            return self._oauth2_token_value
        else:
            raise AttributeError("missing required field 'oauth2_token'")

    @oauth2_token.setter
    def oauth2_token(self, val):
        val = self._oauth2_token_validator.validate(val)
        self._oauth2_token_value = val
        self._oauth2_token_present = True

    @oauth2_token.deleter
    def oauth2_token(self):
        self._oauth2_token_value = None
        self._oauth2_token_present = False

    def _process_custom_annotations(self, annotation_type, field_path, processor):
        super(TokenFromOAuth1Result, self)._process_custom_annotations(annotation_type, field_path, processor)

    def __repr__(self):
        return 'TokenFromOAuth1Result(oauth2_token={!r})'.format(
            self._oauth2_token_value,
        )

TokenFromOAuth1Result_validator = bv.Struct(TokenFromOAuth1Result)
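
# --- Editor's sketch (not part of the generated module): upgrading a legacy
# OAuth 1.0 token pair. It assumes the generated client exposes the
# token/from_oauth1 route (defined below) as Dropbox.auth_token_from_oauth1.
def upgrade_oauth1_token(dbx, oauth1_token, oauth1_token_secret):
    """Exchange an OAuth 1.0 token pair for an OAuth 2.0 token string."""
    result = dbx.auth_token_from_oauth1(oauth1_token, oauth1_token_secret)
    return result.oauth2_token  # a TokenFromOAuth1Result field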
class TokenScopeError(bb.Struct):
    """
    :ivar auth.TokenScopeError.required_scope: The required scope to access the
        route.
    """

    __slots__ = [
        '_required_scope_value',
        '_required_scope_present',
    ]

    _has_required_fields = True

    def __init__(self,
                 required_scope=None):
        self._required_scope_value = None
        self._required_scope_present = False
        if required_scope is not None:
            self.required_scope = required_scope

    @property
    def required_scope(self):
        """
        The required scope to access the route.

        :rtype: str
        """
        if self._required_scope_present:
            return self._required_scope_value
        else:
            raise AttributeError("missing required field 'required_scope'")

    @required_scope.setter
    def required_scope(self, val):
        val = self._required_scope_validator.validate(val)
        self._required_scope_value = val
        self._required_scope_present = True

    @required_scope.deleter
    def required_scope(self):
        self._required_scope_value = None
        self._required_scope_present = False

    def _process_custom_annotations(self, annotation_type, field_path, processor):
        super(TokenScopeError, self)._process_custom_annotations(annotation_type, field_path, processor)

    def __repr__(self):
        return 'TokenScopeError(required_scope={!r})'.format(
            self._required_scope_value,
        )

TokenScopeError_validator = bv.Struct(TokenScopeError)
AccessError._invalid_account_type_validator = InvalidAccountTypeError_validator
AccessError._paper_access_denied_validator = PaperAccessError_validator
AccessError._other_validator = bv.Void()
AccessError._tagmap = {
    'invalid_account_type': AccessError._invalid_account_type_validator,
    'paper_access_denied': AccessError._paper_access_denied_validator,
    'other': AccessError._other_validator,
}

AccessError.other = AccessError('other')

AuthError._invalid_access_token_validator = bv.Void()
AuthError._invalid_select_user_validator = bv.Void()
AuthError._invalid_select_admin_validator = bv.Void()
AuthError._user_suspended_validator = bv.Void()
AuthError._expired_access_token_validator = bv.Void()
AuthError._missing_scope_validator = TokenScopeError_validator
AuthError._other_validator = bv.Void()
AuthError._tagmap = {
    'invalid_access_token': AuthError._invalid_access_token_validator,
    'invalid_select_user': AuthError._invalid_select_user_validator,
    'invalid_select_admin': AuthError._invalid_select_admin_validator,
    'user_suspended': AuthError._user_suspended_validator,
    'expired_access_token': AuthError._expired_access_token_validator,
    'missing_scope': AuthError._missing_scope_validator,
    'other': AuthError._other_validator,
}

AuthError.invalid_access_token = AuthError('invalid_access_token')
AuthError.invalid_select_user = AuthError('invalid_select_user')
AuthError.invalid_select_admin = AuthError('invalid_select_admin')
AuthError.user_suspended = AuthError('user_suspended')
AuthError.expired_access_token = AuthError('expired_access_token')
AuthError.other = AuthError('other')

InvalidAccountTypeError._endpoint_validator = bv.Void()
InvalidAccountTypeError._feature_validator = bv.Void()
InvalidAccountTypeError._other_validator = bv.Void()
InvalidAccountTypeError._tagmap = {
    'endpoint': InvalidAccountTypeError._endpoint_validator,
    'feature': InvalidAccountTypeError._feature_validator,
    'other': InvalidAccountTypeError._other_validator,
}

InvalidAccountTypeError.endpoint = InvalidAccountTypeError('endpoint')
InvalidAccountTypeError.feature = InvalidAccountTypeError('feature')
InvalidAccountTypeError.other = InvalidAccountTypeError('other')

PaperAccessError._paper_disabled_validator = bv.Void()
PaperAccessError._not_paper_user_validator = bv.Void()
PaperAccessError._other_validator = bv.Void()
PaperAccessError._tagmap = {
    'paper_disabled': PaperAccessError._paper_disabled_validator,
    'not_paper_user': PaperAccessError._not_paper_user_validator,
    'other': PaperAccessError._other_validator,
}

PaperAccessError.paper_disabled = PaperAccessError('paper_disabled')
PaperAccessError.not_paper_user = PaperAccessError('not_paper_user')
PaperAccessError.other = PaperAccessError('other')

RateLimitError._reason_validator = RateLimitReason_validator
RateLimitError._retry_after_validator = bv.UInt64()
RateLimitError._all_field_names_ = set([
    'reason',
    'retry_after',
])
RateLimitError._all_fields_ = [
    ('reason', RateLimitError._reason_validator),
    ('retry_after', RateLimitError._retry_after_validator),
]

RateLimitReason._too_many_requests_validator = bv.Void()
RateLimitReason._too_many_write_operations_validator = bv.Void()
RateLimitReason._other_validator = bv.Void()
RateLimitReason._tagmap = {
    'too_many_requests': RateLimitReason._too_many_requests_validator,
    'too_many_write_operations': RateLimitReason._too_many_write_operations_validator,
    'other': RateLimitReason._other_validator,
}

RateLimitReason.too_many_requests = RateLimitReason('too_many_requests')
RateLimitReason.too_many_write_operations = RateLimitReason('too_many_write_operations')
RateLimitReason.other = RateLimitReason('other')

TokenFromOAuth1Arg._oauth1_token_validator = bv.String(min_length=1)
TokenFromOAuth1Arg._oauth1_token_secret_validator = bv.String(min_length=1)
TokenFromOAuth1Arg._all_field_names_ = set([
    'oauth1_token',
    'oauth1_token_secret',
])
TokenFromOAuth1Arg._all_fields_ = [
    ('oauth1_token', TokenFromOAuth1Arg._oauth1_token_validator),
    ('oauth1_token_secret', TokenFromOAuth1Arg._oauth1_token_secret_validator),
]

TokenFromOAuth1Error._invalid_oauth1_token_info_validator = bv.Void()
TokenFromOAuth1Error._app_id_mismatch_validator = bv.Void()
TokenFromOAuth1Error._other_validator = bv.Void()
TokenFromOAuth1Error._tagmap = {
    'invalid_oauth1_token_info': TokenFromOAuth1Error._invalid_oauth1_token_info_validator,
    'app_id_mismatch': TokenFromOAuth1Error._app_id_mismatch_validator,
    'other': TokenFromOAuth1Error._other_validator,
}

TokenFromOAuth1Error.invalid_oauth1_token_info = TokenFromOAuth1Error('invalid_oauth1_token_info')
TokenFromOAuth1Error.app_id_mismatch = TokenFromOAuth1Error('app_id_mismatch')
TokenFromOAuth1Error.other = TokenFromOAuth1Error('other')

TokenFromOAuth1Result._oauth2_token_validator = bv.String(min_length=1)
TokenFromOAuth1Result._all_field_names_ = set(['oauth2_token'])
TokenFromOAuth1Result._all_fields_ = [('oauth2_token', TokenFromOAuth1Result._oauth2_token_validator)]

TokenScopeError._required_scope_validator = bv.String()
TokenScopeError._all_field_names_ = set(['required_scope'])
TokenScopeError._all_fields_ = [('required_scope', TokenScopeError._required_scope_validator)]

token_from_oauth1 = bb.Route(
    'token/from_oauth1',
    1,
    False,
    TokenFromOAuth1Arg_validator,
    TokenFromOAuth1Result_validator,
    TokenFromOAuth1Error_validator,
    {'host': u'api',
     'style': u'rpc'},
)
token_revoke = bb.Route(
    'token/revoke',
    1,
    False,
    bv.Void(),
    bv.Void(),
    bv.Void(),
    {'host': u'api',
     'style': u'rpc'},
)

ROUTES = {
    'token/from_oauth1': token_from_oauth1,
    'token/revoke': token_revoke,
}
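
# --- Editor's sketch (not part of the generated module): token/revoke takes a
# void argument and returns nothing; it is assumed to surface on the client as
# Dropbox.auth_token_revoke().
def revoke_current_token(dbx):
    """Invalidate the access token the client was constructed with."""
    dbx.auth_token_revoke()  # subsequent calls should fail with AuthError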
resources/lib/dropbox/base.py (new file, 4526 lines)
File diff suppressed because it is too large
resources/lib/dropbox/base_team.py (new file, 1999 lines)
File diff suppressed because it is too large
resources/lib/dropbox/common.py (new file, 458 lines)
@@ -0,0 +1,458 @@
# -*- coding: utf-8 -*-
# Auto-generated by Stone, do not modify.
# @generated
# flake8: noqa
# pylint: skip-file
try:
    from . import stone_validators as bv
    from . import stone_base as bb
except (ImportError, SystemError, ValueError):
    # Catch errors raised when importing a relative module when not in a package.
    # This makes testing this file directly (outside of a package) easier.
    import stone_validators as bv
    import stone_base as bb

class PathRoot(bb.Union):
    """
    This class acts as a tagged union. Only one of the ``is_*`` methods will
    return true. To get the associated value of a tag (if one exists), use the
    corresponding ``get_*`` method.

    :ivar common.PathRoot.home: Paths are relative to the authenticating user's
        home namespace, whether or not that user belongs to a team.
    :ivar str common.PathRoot.root: Paths are relative to the authenticating
        user's root namespace (This results in
        :field:`PathRootError.invalid_root` if the user's root namespace has
        changed.).
    :ivar str common.PathRoot.namespace_id: Paths are relative to given
        namespace id (This results in :field:`PathRootError.no_permission` if
        you don't have access to this namespace.).
    """

    _catch_all = 'other'
    # Attribute is overwritten below the class definition
    home = None
    # Attribute is overwritten below the class definition
    other = None

    @classmethod
    def root(cls, val):
        """
        Create an instance of this class set to the ``root`` tag with value
        ``val``.

        :param str val:
        :rtype: PathRoot
        """
        return cls('root', val)

    @classmethod
    def namespace_id(cls, val):
        """
        Create an instance of this class set to the ``namespace_id`` tag with
        value ``val``.

        :param str val:
        :rtype: PathRoot
        """
        return cls('namespace_id', val)

    def is_home(self):
        """
        Check if the union tag is ``home``.

        :rtype: bool
        """
        return self._tag == 'home'

    def is_root(self):
        """
        Check if the union tag is ``root``.

        :rtype: bool
        """
        return self._tag == 'root'

    def is_namespace_id(self):
        """
        Check if the union tag is ``namespace_id``.

        :rtype: bool
        """
        return self._tag == 'namespace_id'

    def is_other(self):
        """
        Check if the union tag is ``other``.

        :rtype: bool
        """
        return self._tag == 'other'

    def get_root(self):
        """
        Paths are relative to the authenticating user's root namespace (This
        results in ``PathRootError.invalid_root`` if the user's root namespace
        has changed.).

        Only call this if :meth:`is_root` is true.

        :rtype: str
        """
        if not self.is_root():
            raise AttributeError("tag 'root' not set")
        return self._value

    def get_namespace_id(self):
        """
        Paths are relative to given namespace id (This results in
        ``PathRootError.no_permission`` if you don't have access to this
        namespace.).

        Only call this if :meth:`is_namespace_id` is true.

        :rtype: str
        """
        if not self.is_namespace_id():
            raise AttributeError("tag 'namespace_id' not set")
        return self._value

    def _process_custom_annotations(self, annotation_type, field_path, processor):
        super(PathRoot, self)._process_custom_annotations(annotation_type, field_path, processor)

    def __repr__(self):
        return 'PathRoot(%r, %r)' % (self._tag, self._value)

PathRoot_validator = bv.Union(PathRoot)
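
# --- Editor's sketch (not part of the generated module): PathRoot values are
# serialized into the Dropbox-API-Path-Root header. This assumes the client
# exposes Dropbox.with_path_root(), as in the dropbox.py shipped alongside.
def client_for_user_root(dbx):
    """Return a client whose relative paths resolve against the user's root."""
    ns_id = dbx.users_get_current_account().root_info.root_namespace_id
    return dbx.with_path_root(PathRoot.root(ns_id))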
class PathRootError(bb.Union):
    """
    This class acts as a tagged union. Only one of the ``is_*`` methods will
    return true. To get the associated value of a tag (if one exists), use the
    corresponding ``get_*`` method.

    :ivar RootInfo PathRootError.invalid_root: The root namespace id in
        Dropbox-API-Path-Root header is not valid. The value of this error is
        the user's latest root info.
    :ivar common.PathRootError.no_permission: You don't have permission to
        access the namespace id in Dropbox-API-Path-Root header.
    """

    _catch_all = 'other'
    # Attribute is overwritten below the class definition
    no_permission = None
    # Attribute is overwritten below the class definition
    other = None

    @classmethod
    def invalid_root(cls, val):
        """
        Create an instance of this class set to the ``invalid_root`` tag with
        value ``val``.

        :param RootInfo val:
        :rtype: PathRootError
        """
        return cls('invalid_root', val)

    def is_invalid_root(self):
        """
        Check if the union tag is ``invalid_root``.

        :rtype: bool
        """
        return self._tag == 'invalid_root'

    def is_no_permission(self):
        """
        Check if the union tag is ``no_permission``.

        :rtype: bool
        """
        return self._tag == 'no_permission'

    def is_other(self):
        """
        Check if the union tag is ``other``.

        :rtype: bool
        """
        return self._tag == 'other'

    def get_invalid_root(self):
        """
        The root namespace id in Dropbox-API-Path-Root header is not valid. The
        value of this error is the user's latest root info.

        Only call this if :meth:`is_invalid_root` is true.

        :rtype: RootInfo
        """
        if not self.is_invalid_root():
            raise AttributeError("tag 'invalid_root' not set")
        return self._value

    def _process_custom_annotations(self, annotation_type, field_path, processor):
        super(PathRootError, self)._process_custom_annotations(annotation_type, field_path, processor)

    def __repr__(self):
        return 'PathRootError(%r, %r)' % (self._tag, self._value)

PathRootError_validator = bv.Union(PathRootError)
class RootInfo(bb.Struct):
    """
    Information about current user's root.

    :ivar common.RootInfo.root_namespace_id: The namespace ID for user's root
        namespace. It will be the namespace ID of the shared team root if the
        user is member of a team with a separate team root. Otherwise it will be
        same as ``RootInfo.home_namespace_id``.
    :ivar common.RootInfo.home_namespace_id: The namespace ID for user's home
        namespace.
    """

    __slots__ = [
        '_root_namespace_id_value',
        '_root_namespace_id_present',
        '_home_namespace_id_value',
        '_home_namespace_id_present',
    ]

    _has_required_fields = True

    def __init__(self,
                 root_namespace_id=None,
                 home_namespace_id=None):
        self._root_namespace_id_value = None
        self._root_namespace_id_present = False
        self._home_namespace_id_value = None
        self._home_namespace_id_present = False
        if root_namespace_id is not None:
            self.root_namespace_id = root_namespace_id
        if home_namespace_id is not None:
            self.home_namespace_id = home_namespace_id

    @property
    def root_namespace_id(self):
        """
        The namespace ID for user's root namespace. It will be the namespace ID
        of the shared team root if the user is member of a team with a separate
        team root. Otherwise it will be same as ``RootInfo.home_namespace_id``.

        :rtype: str
        """
        if self._root_namespace_id_present:
            return self._root_namespace_id_value
        else:
            raise AttributeError("missing required field 'root_namespace_id'")

    @root_namespace_id.setter
    def root_namespace_id(self, val):
        val = self._root_namespace_id_validator.validate(val)
        self._root_namespace_id_value = val
        self._root_namespace_id_present = True

    @root_namespace_id.deleter
    def root_namespace_id(self):
        self._root_namespace_id_value = None
        self._root_namespace_id_present = False

    @property
    def home_namespace_id(self):
        """
        The namespace ID for user's home namespace.

        :rtype: str
        """
        if self._home_namespace_id_present:
            return self._home_namespace_id_value
        else:
            raise AttributeError("missing required field 'home_namespace_id'")

    @home_namespace_id.setter
    def home_namespace_id(self, val):
        val = self._home_namespace_id_validator.validate(val)
        self._home_namespace_id_value = val
        self._home_namespace_id_present = True

    @home_namespace_id.deleter
    def home_namespace_id(self):
        self._home_namespace_id_value = None
        self._home_namespace_id_present = False

    def _process_custom_annotations(self, annotation_type, field_path, processor):
        super(RootInfo, self)._process_custom_annotations(annotation_type, field_path, processor)

    def __repr__(self):
        return 'RootInfo(root_namespace_id={!r}, home_namespace_id={!r})'.format(
            self._root_namespace_id_value,
            self._home_namespace_id_value,
        )

RootInfo_validator = bv.StructTree(RootInfo)
class TeamRootInfo(RootInfo):
    """
    Root info when user is member of a team with a separate root namespace ID.

    :ivar common.TeamRootInfo.home_path: The path for user's home directory
        under the shared team root.
    """

    __slots__ = [
        '_home_path_value',
        '_home_path_present',
    ]

    _has_required_fields = True

    def __init__(self,
                 root_namespace_id=None,
                 home_namespace_id=None,
                 home_path=None):
        super(TeamRootInfo, self).__init__(root_namespace_id,
                                           home_namespace_id)
        self._home_path_value = None
        self._home_path_present = False
        if home_path is not None:
            self.home_path = home_path

    @property
    def home_path(self):
        """
        The path for user's home directory under the shared team root.

        :rtype: str
        """
        if self._home_path_present:
            return self._home_path_value
        else:
            raise AttributeError("missing required field 'home_path'")

    @home_path.setter
    def home_path(self, val):
        val = self._home_path_validator.validate(val)
        self._home_path_value = val
        self._home_path_present = True

    @home_path.deleter
    def home_path(self):
        self._home_path_value = None
        self._home_path_present = False

    def _process_custom_annotations(self, annotation_type, field_path, processor):
        super(TeamRootInfo, self)._process_custom_annotations(annotation_type, field_path, processor)

    def __repr__(self):
        return 'TeamRootInfo(root_namespace_id={!r}, home_namespace_id={!r}, home_path={!r})'.format(
            self._root_namespace_id_value,
            self._home_namespace_id_value,
            self._home_path_value,
        )

TeamRootInfo_validator = bv.Struct(TeamRootInfo)
class UserRootInfo(RootInfo):
    """
    Root info when user is not member of a team or the user is a member of a
    team and the team does not have a separate root namespace.
    """

    __slots__ = [
    ]

    _has_required_fields = True

    def __init__(self,
                 root_namespace_id=None,
                 home_namespace_id=None):
        super(UserRootInfo, self).__init__(root_namespace_id,
                                           home_namespace_id)

    def _process_custom_annotations(self, annotation_type, field_path, processor):
        super(UserRootInfo, self)._process_custom_annotations(annotation_type, field_path, processor)

    def __repr__(self):
        return 'UserRootInfo(root_namespace_id={!r}, home_namespace_id={!r})'.format(
            self._root_namespace_id_value,
            self._home_namespace_id_value,
        )

UserRootInfo_validator = bv.Struct(UserRootInfo)
Date_validator = bv.Timestamp(u'%Y-%m-%d')
DisplayName_validator = bv.String(min_length=1, pattern=u'[^/:?*<>"|]*')
DisplayNameLegacy_validator = bv.String()
DropboxTimestamp_validator = bv.Timestamp(u'%Y-%m-%dT%H:%M:%SZ')
EmailAddress_validator = bv.String(max_length=255, pattern=u"^['&A-Za-z0-9._%+-]+@[A-Za-z0-9-][A-Za-z0-9.-]*\\.[A-Za-z]{2,15}$")
# An ISO 639-1 language code.
LanguageCode_validator = bv.String(min_length=2)
NamePart_validator = bv.String(min_length=1, max_length=100, pattern=u'[^/:?*<>"|]*')
NamespaceId_validator = bv.String(pattern=u'[-_0-9a-zA-Z:]+')
OptionalNamePart_validator = bv.String(max_length=100, pattern=u'[^/:?*<>"|]*')
SessionId_validator = bv.String()
SharedFolderId_validator = NamespaceId_validator
PathRoot._home_validator = bv.Void()
PathRoot._root_validator = NamespaceId_validator
PathRoot._namespace_id_validator = NamespaceId_validator
PathRoot._other_validator = bv.Void()
PathRoot._tagmap = {
    'home': PathRoot._home_validator,
    'root': PathRoot._root_validator,
    'namespace_id': PathRoot._namespace_id_validator,
    'other': PathRoot._other_validator,
}

PathRoot.home = PathRoot('home')
PathRoot.other = PathRoot('other')

PathRootError._invalid_root_validator = RootInfo_validator
PathRootError._no_permission_validator = bv.Void()
PathRootError._other_validator = bv.Void()
PathRootError._tagmap = {
    'invalid_root': PathRootError._invalid_root_validator,
    'no_permission': PathRootError._no_permission_validator,
    'other': PathRootError._other_validator,
}

PathRootError.no_permission = PathRootError('no_permission')
PathRootError.other = PathRootError('other')

RootInfo._root_namespace_id_validator = NamespaceId_validator
RootInfo._home_namespace_id_validator = NamespaceId_validator
RootInfo._field_names_ = set([
    'root_namespace_id',
    'home_namespace_id',
])
RootInfo._all_field_names_ = RootInfo._field_names_
RootInfo._fields_ = [
    ('root_namespace_id', RootInfo._root_namespace_id_validator),
    ('home_namespace_id', RootInfo._home_namespace_id_validator),
]
RootInfo._all_fields_ = RootInfo._fields_

RootInfo._tag_to_subtype_ = {
    (u'team',): TeamRootInfo_validator,
    (u'user',): UserRootInfo_validator,
}
RootInfo._pytype_to_tag_and_subtype_ = {
    TeamRootInfo: ((u'team',), TeamRootInfo_validator),
    UserRootInfo: ((u'user',), UserRootInfo_validator),
}
RootInfo._is_catch_all_ = True

TeamRootInfo._home_path_validator = bv.String()
TeamRootInfo._field_names_ = set(['home_path'])
TeamRootInfo._all_field_names_ = RootInfo._all_field_names_.union(TeamRootInfo._field_names_)
TeamRootInfo._fields_ = [('home_path', TeamRootInfo._home_path_validator)]
TeamRootInfo._all_fields_ = RootInfo._all_fields_ + TeamRootInfo._fields_

UserRootInfo._field_names_ = set([])
UserRootInfo._all_field_names_ = RootInfo._all_field_names_.union(UserRootInfo._field_names_)
UserRootInfo._fields_ = []
UserRootInfo._all_fields_ = RootInfo._all_fields_ + UserRootInfo._fields_

ROUTES = {
}
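
# --- Editor's sketch (not part of the generated module): RootInfo is a struct
# tree, so the server hands back either UserRootInfo or TeamRootInfo. A small
# helper showing how callers can branch on the concrete subtype.
def describe_root(root_info):
    """Summarize a RootInfo value, e.g. from users/get_current_account."""
    if isinstance(root_info, TeamRootInfo):
        return 'team root %s (home at %r)' % (root_info.root_namespace_id,
                                              root_info.home_path)
    return 'user root %s' % root_info.root_namespace_id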
resources/lib/dropbox/contacts.py (new file, 176 lines)
@@ -0,0 +1,176 @@
# -*- coding: utf-8 -*-
# Auto-generated by Stone, do not modify.
# @generated
# flake8: noqa
# pylint: skip-file
try:
    from . import stone_validators as bv
    from . import stone_base as bb
except (ImportError, SystemError, ValueError):
    # Catch errors raised when importing a relative module when not in a package.
    # This makes testing this file directly (outside of a package) easier.
    import stone_validators as bv
    import stone_base as bb

try:
    from . import (
        common,
    )
except (ImportError, SystemError, ValueError):
    import common

class DeleteManualContactsArg(bb.Struct):
    """
    :ivar contacts.DeleteManualContactsArg.email_addresses: List of manually
        added contacts to be deleted.
    """

    __slots__ = [
        '_email_addresses_value',
        '_email_addresses_present',
    ]

    _has_required_fields = True

    def __init__(self,
                 email_addresses=None):
        self._email_addresses_value = None
        self._email_addresses_present = False
        if email_addresses is not None:
            self.email_addresses = email_addresses

    @property
    def email_addresses(self):
        """
        List of manually added contacts to be deleted.

        :rtype: list of [str]
        """
        if self._email_addresses_present:
            return self._email_addresses_value
        else:
            raise AttributeError("missing required field 'email_addresses'")

    @email_addresses.setter
    def email_addresses(self, val):
        val = self._email_addresses_validator.validate(val)
        self._email_addresses_value = val
        self._email_addresses_present = True

    @email_addresses.deleter
    def email_addresses(self):
        self._email_addresses_value = None
        self._email_addresses_present = False

    def _process_custom_annotations(self, annotation_type, field_path, processor):
        super(DeleteManualContactsArg, self)._process_custom_annotations(annotation_type, field_path, processor)

    def __repr__(self):
        return 'DeleteManualContactsArg(email_addresses={!r})'.format(
            self._email_addresses_value,
        )

DeleteManualContactsArg_validator = bv.Struct(DeleteManualContactsArg)
class DeleteManualContactsError(bb.Union):
    """
    This class acts as a tagged union. Only one of the ``is_*`` methods will
    return true. To get the associated value of a tag (if one exists), use the
    corresponding ``get_*`` method.

    :ivar list of [str] contacts.DeleteManualContactsError.contacts_not_found:
        Can't delete contacts from this list. Make sure the list only has
        manually added contacts. The deletion was cancelled.
    """

    _catch_all = 'other'
    # Attribute is overwritten below the class definition
    other = None

    @classmethod
    def contacts_not_found(cls, val):
        """
        Create an instance of this class set to the ``contacts_not_found`` tag
        with value ``val``.

        :param list of [str] val:
        :rtype: DeleteManualContactsError
        """
        return cls('contacts_not_found', val)

    def is_contacts_not_found(self):
        """
        Check if the union tag is ``contacts_not_found``.

        :rtype: bool
        """
        return self._tag == 'contacts_not_found'

    def is_other(self):
        """
        Check if the union tag is ``other``.

        :rtype: bool
        """
        return self._tag == 'other'

    def get_contacts_not_found(self):
        """
        Can't delete contacts from this list. Make sure the list only has
        manually added contacts. The deletion was cancelled.

        Only call this if :meth:`is_contacts_not_found` is true.

        :rtype: list of [str]
        """
        if not self.is_contacts_not_found():
            raise AttributeError("tag 'contacts_not_found' not set")
        return self._value

    def _process_custom_annotations(self, annotation_type, field_path, processor):
        super(DeleteManualContactsError, self)._process_custom_annotations(annotation_type, field_path, processor)

    def __repr__(self):
        return 'DeleteManualContactsError(%r, %r)' % (self._tag, self._value)

DeleteManualContactsError_validator = bv.Union(DeleteManualContactsError)
DeleteManualContactsArg._email_addresses_validator = bv.List(common.EmailAddress_validator)
DeleteManualContactsArg._all_field_names_ = set(['email_addresses'])
DeleteManualContactsArg._all_fields_ = [('email_addresses', DeleteManualContactsArg._email_addresses_validator)]

DeleteManualContactsError._contacts_not_found_validator = bv.List(common.EmailAddress_validator)
DeleteManualContactsError._other_validator = bv.Void()
DeleteManualContactsError._tagmap = {
    'contacts_not_found': DeleteManualContactsError._contacts_not_found_validator,
    'other': DeleteManualContactsError._other_validator,
}

DeleteManualContactsError.other = DeleteManualContactsError('other')

delete_manual_contacts = bb.Route(
    'delete_manual_contacts',
    1,
    False,
    bv.Void(),
    bv.Void(),
    bv.Void(),
    {'host': u'api',
     'style': u'rpc'},
)
delete_manual_contacts_batch = bb.Route(
    'delete_manual_contacts_batch',
    1,
    False,
    DeleteManualContactsArg_validator,
    bv.Void(),
    DeleteManualContactsError_validator,
    {'host': u'api',
     'style': u'rpc'},
)

ROUTES = {
    'delete_manual_contacts': delete_manual_contacts,
    'delete_manual_contacts_batch': delete_manual_contacts_batch,
}
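
# --- Editor's sketch (not part of the generated module): bulk-deleting manual
# contacts. It assumes the route surfaces on the client as
# Dropbox.contacts_delete_manual_contacts_batch and that exceptions.ApiError
# wraps the union defined above.
def delete_manual_contacts(dbx, emails):
    """Delete manually added contacts; return any addresses that were not found."""
    from .exceptions import ApiError
    try:
        dbx.contacts_delete_manual_contacts_batch(emails)
    except ApiError as err:
        if err.error.is_contacts_not_found():
            return err.error.get_contacts_not_found()  # the whole batch was cancelled
        raise
    return []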
resources/lib/dropbox/dropbox.py (new file, 609 lines)
@@ -0,0 +1,609 @@
__all__ = [
    'Dropbox',
    'DropboxTeam',
    'create_session',
]

# This should always be 0.0.0 in master. Only update this after tagging
# before release.
__version__ = '9.4.0'

import contextlib
import json
import logging
import random
import time

import requests
import six

from . import files, stone_serializers
from .auth import (
    AuthError_validator,
    RateLimitError_validator,
)
from .common import (
    PathRoot,
    PathRoot_validator,
    PathRootError_validator
)
from .base import DropboxBase
from .base_team import DropboxTeamBase
from .exceptions import (
    ApiError,
    AuthError,
    BadInputError,
    HttpError,
    PathRootError,
    InternalServerError,
    RateLimitError,
)
from .session import (
    API_HOST,
    API_CONTENT_HOST,
    API_NOTIFICATION_HOST,
    HOST_API,
    HOST_CONTENT,
    HOST_NOTIFY,
    pinned_session,
)

PATH_ROOT_HEADER = 'Dropbox-API-Path-Root'
HTTP_STATUS_INVALID_PATH_ROOT = 422
class RouteResult(object):
    """The successful result of a call to a route."""

    def __init__(self, obj_result, http_resp=None):
        """
        :param str obj_result: The result of a route not including the binary
            payload portion, if one exists. Must be serialized JSON.
        :param requests.models.Response http_resp: A raw HTTP response. It will
            be used to stream the binary-body payload of the response.
        """
        assert isinstance(obj_result, six.string_types), \
            'obj_result: expected string, got %r' % type(obj_result)
        if http_resp is not None:
            assert isinstance(http_resp, requests.models.Response), \
                'http_resp: expected requests.models.Response, got %r' % \
                type(http_resp)
        self.obj_result = obj_result
        self.http_resp = http_resp

class RouteErrorResult(object):
    """The error result of a call to a route."""

    def __init__(self, request_id, obj_result):
        """
        :param str request_id: A request_id can be shared with Dropbox Support
            to pinpoint the exact request that returns an error.
        :param str obj_result: The result of a route not including the binary
            payload portion, if one exists.
        """
        self.request_id = request_id
        self.obj_result = obj_result
def create_session(max_connections=8, proxies=None):
    """
    Creates a session object that can be used by multiple :class:`Dropbox` and
    :class:`DropboxTeam` instances. This lets you share a connection pool
    amongst them, as well as proxy parameters.

    :param int max_connections: Maximum connection pool size.
    :param dict proxies: See the `requests module
        <http://docs.python-requests.org/en/latest/user/advanced/#proxies>`_
        for more details.
    :rtype: :class:`requests.sessions.Session`. `See the requests module
        <http://docs.python-requests.org/en/latest/user/advanced/#session-objects>`_
        for more details.
    """
    # We only need as many pool_connections as we have unique hostnames.
    session = pinned_session(pool_maxsize=max_connections)
    if proxies:
        session.proxies = proxies
    return session
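
# --- Editor's sketch (not part of this file's API): two clients sharing one
# connection pool via create_session(). The tokens are placeholders, and
# Dropbox is the client class defined later in this module.
def paired_clients(token_a, token_b):
    """Build two Dropbox clients that reuse a single requests session."""
    shared = create_session(max_connections=4)
    return (Dropbox(token_a, session=shared),
            Dropbox(token_b, session=shared))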
class _DropboxTransport(object):
    """
    Responsible for implementing the wire protocol for making requests to the
    Dropbox API.
    """

    _API_VERSION = '2'

    # Download style means that the route argument goes in a Dropbox-API-Arg
    # header, and the result comes back in a Dropbox-API-Result header. The
    # HTTP response body contains a binary payload.
    _ROUTE_STYLE_DOWNLOAD = 'download'

    # Upload style means that the route argument goes in a Dropbox-API-Arg
    # header. The HTTP request body contains a binary payload. The result
    # comes back in a Dropbox-API-Result header.
    _ROUTE_STYLE_UPLOAD = 'upload'

    # RPC style means that the argument and result of a route are contained in
    # the HTTP body.
    _ROUTE_STYLE_RPC = 'rpc'

    # This is the default longest time we'll block on receiving data from the server
    _DEFAULT_TIMEOUT = 30

    def __init__(self,
                 oauth2_access_token,
                 max_retries_on_error=4,
                 max_retries_on_rate_limit=None,
                 user_agent=None,
                 session=None,
                 headers=None,
                 timeout=_DEFAULT_TIMEOUT):
        """
        :param str oauth2_access_token: OAuth2 access token for making client
            requests.

        :param int max_retries_on_error: On 5xx errors, the number of times to
            retry.
        :param Optional[int] max_retries_on_rate_limit: On 429 errors, the
            number of times to retry. If `None`, always retries.
        :param str user_agent: The user agent to use when making requests. This
            helps us identify requests coming from your application. We
            recommend you use the format "AppName/Version". If set, we append
            "/OfficialDropboxPythonSDKv2/__version__" to the user_agent.
        :param session: If not provided, a new session (connection pool) is
            created. To share a session across multiple clients, use
            :func:`create_session`.
        :type session: :class:`requests.sessions.Session`
        :param dict headers: Additional headers to add to requests.
        :param Optional[float] timeout: Maximum duration in seconds that
            client will wait for any single packet from the
            server. After the timeout the client will give up on
            connection. If `None`, client will wait forever. Defaults
            to 30 seconds.
        """
        assert len(oauth2_access_token) > 0, \
            'OAuth2 access token cannot be empty.'
        assert headers is None or isinstance(headers, dict), \
            'Expected dict, got %r' % headers
        self._oauth2_access_token = oauth2_access_token

        self._max_retries_on_error = max_retries_on_error
        self._max_retries_on_rate_limit = max_retries_on_rate_limit
        if session:
            assert isinstance(session, requests.sessions.Session), \
                'Expected requests.sessions.Session, got %r' % session
            self._session = session
        else:
            self._session = create_session()
        self._headers = headers

        base_user_agent = 'OfficialDropboxPythonSDKv2/' + __version__
        if user_agent:
            self._raw_user_agent = user_agent
            self._user_agent = '{}/{}'.format(user_agent, base_user_agent)
        else:
            self._raw_user_agent = None
            self._user_agent = base_user_agent

        self._logger = logging.getLogger('dropbox')

        self._host_map = {HOST_API: API_HOST,
                          HOST_CONTENT: API_CONTENT_HOST,
                          HOST_NOTIFY: API_NOTIFICATION_HOST}

        self._timeout = timeout

    def clone(
            self,
            oauth2_access_token=None,
            max_retries_on_error=None,
            max_retries_on_rate_limit=None,
            user_agent=None,
            session=None,
            headers=None,
            timeout=None):
        """
        Creates a new copy of the Dropbox client with the same defaults unless
        modified by arguments to clone().

        See constructor for original parameter descriptions.

        :return: New instance of Dropbox client
        :rtype: Dropbox
        """

        return self.__class__(
            oauth2_access_token or self._oauth2_access_token,
            max_retries_on_error or self._max_retries_on_error,
            max_retries_on_rate_limit or self._max_retries_on_rate_limit,
            user_agent or self._user_agent,
            session or self._session,
            headers or self._headers,
            timeout or self._timeout
        )

    def request(self,
                route,
                namespace,
                request_arg,
                request_binary,
                timeout=None):
        """
        Makes a request to the Dropbox API and in the process validates that
        the route argument and result are the expected data types. The
        request_arg is converted to JSON based on the arg_data_type. Likewise,
        the response is deserialized from JSON and converted to an object based
        on the {result,error}_data_type.

        :param host: The Dropbox API host to connect to.
        :param route: The route to make the request to.
        :type route: :class:`.datatypes.stone_base.Route`
        :param request_arg: Argument for the route that conforms to the
            validator specified by route.arg_type.
        :param request_binary: String or file pointer representing the binary
            payload. Use None if there is no binary payload.
        :param Optional[float] timeout: Maximum duration in seconds
            that client will wait for any single packet from the
            server. After the timeout the client will give up on
            connection. If `None`, will use default timeout set on
            Dropbox object. Defaults to `None`.
        :return: The route's result.
        """
        host = route.attrs['host'] or 'api'
        route_name = namespace + '/' + route.name
        if route.version > 1:
            route_name += '_v{}'.format(route.version)
        route_style = route.attrs['style'] or 'rpc'
        serialized_arg = stone_serializers.json_encode(route.arg_type,
                                                       request_arg)

        if (timeout is None and
                route == files.list_folder_longpoll):
            # The client normally sends a timeout value to the
            # longpoll route. The server will respond after
            # <timeout> + random(0, 90) seconds. We increase the
            # socket timeout to the longpoll timeout value plus 90
            # seconds so that we don't cut the server response short
            # due to a shorter socket timeout.
            # NB: This is done here because base.py is auto-generated
            timeout = request_arg.timeout + 90

        res = self.request_json_string_with_retry(host,
                                                  route_name,
                                                  route_style,
                                                  serialized_arg,
                                                  request_binary,
                                                  timeout=timeout)
        decoded_obj_result = json.loads(res.obj_result)
        if isinstance(res, RouteResult):
            returned_data_type = route.result_type
            obj = decoded_obj_result
        elif isinstance(res, RouteErrorResult):
            returned_data_type = route.error_type
            obj = decoded_obj_result['error']
            user_message = decoded_obj_result.get('user_message')
            user_message_text = user_message and user_message.get('text')
            user_message_locale = user_message and user_message.get('locale')
        else:
            raise AssertionError('Expected RouteResult or RouteErrorResult, '
                                 'but res is %s' % type(res))

        deserialized_result = stone_serializers.json_compat_obj_decode(
            returned_data_type, obj, strict=False)

        if isinstance(res, RouteErrorResult):
            raise ApiError(res.request_id,
|
||||||
|
deserialized_result,
|
||||||
|
user_message_text,
|
||||||
|
user_message_locale)
|
||||||
|
elif route_style == self._ROUTE_STYLE_DOWNLOAD:
|
||||||
|
return (deserialized_result, res.http_resp)
|
||||||
|
else:
|
||||||
|
return deserialized_result
|
||||||
|
|
||||||
|
def request_json_object(self,
|
||||||
|
host,
|
||||||
|
route_name,
|
||||||
|
route_style,
|
||||||
|
request_arg,
|
||||||
|
request_binary,
|
||||||
|
timeout=None):
|
||||||
|
"""
|
||||||
|
Makes a request to the Dropbox API, taking a JSON-serializable Python
|
||||||
|
object as an argument, and returning one as a response.
|
||||||
|
|
||||||
|
:param host: The Dropbox API host to connect to.
|
||||||
|
:param route_name: The name of the route to invoke.
|
||||||
|
:param route_style: The style of the route.
|
||||||
|
:param str request_arg: A JSON-serializable Python object representing
|
||||||
|
the argument for the route.
|
||||||
|
:param Optional[bytes] request_binary: Bytes representing the binary
|
||||||
|
payload. Use None if there is no binary payload.
|
||||||
|
:param Optional[float] timeout: Maximum duration in seconds
|
||||||
|
that client will wait for any single packet from the
|
||||||
|
server. After the timeout the client will give up on
|
||||||
|
connection. If `None`, will use default timeout set on
|
||||||
|
Dropbox object. Defaults to `None`.
|
||||||
|
:return: The route's result as a JSON-serializable Python object.
|
||||||
|
"""
|
||||||
|
serialized_arg = json.dumps(request_arg)
|
||||||
|
res = self.request_json_string_with_retry(host,
|
||||||
|
route_name,
|
||||||
|
route_style,
|
||||||
|
serialized_arg,
|
||||||
|
request_binary,
|
||||||
|
timeout=timeout)
|
||||||
|
# This can throw a ValueError if the result is not deserializable,
|
||||||
|
# but that would be completely unexpected.
|
||||||
|
deserialized_result = json.loads(res.obj_result)
|
||||||
|
if isinstance(res, RouteResult) and res.http_resp is not None:
|
||||||
|
return (deserialized_result, res.http_resp)
|
||||||
|
else:
|
||||||
|
return deserialized_result
|
||||||
|
|
||||||
|
def request_json_string_with_retry(self,
|
||||||
|
host,
|
||||||
|
route_name,
|
||||||
|
route_style,
|
||||||
|
request_json_arg,
|
||||||
|
request_binary,
|
||||||
|
timeout=None):
|
||||||
|
"""
|
||||||
|
See :meth:`request_json_object` for description of parameters.
|
||||||
|
|
||||||
|
:param request_json_arg: A string representing the serialized JSON
|
||||||
|
argument to the route.
|
||||||
|
"""
|
||||||
|
attempt = 0
|
||||||
|
rate_limit_errors = 0
|
||||||
|
while True:
|
||||||
|
self._logger.info('Request to %s', route_name)
|
||||||
|
try:
|
||||||
|
return self.request_json_string(host,
|
||||||
|
route_name,
|
||||||
|
route_style,
|
||||||
|
request_json_arg,
|
||||||
|
request_binary,
|
||||||
|
timeout=timeout)
|
||||||
|
except InternalServerError as e:
|
||||||
|
attempt += 1
|
||||||
|
if attempt <= self._max_retries_on_error:
|
||||||
|
# Use exponential backoff
|
||||||
|
backoff = 2**attempt * random.random()
|
||||||
|
self._logger.info(
|
||||||
|
'HttpError status_code=%s: Retrying in %.1f seconds',
|
||||||
|
e.status_code, backoff)
|
||||||
|
time.sleep(backoff)
|
||||||
|
else:
|
||||||
|
raise
|
||||||
|
except RateLimitError as e:
|
||||||
|
rate_limit_errors += 1
|
||||||
|
if (self._max_retries_on_rate_limit is None or
|
||||||
|
self._max_retries_on_rate_limit >= rate_limit_errors):
|
||||||
|
# Set default backoff to 5 seconds.
|
||||||
|
backoff = e.backoff if e.backoff is not None else 5.0
|
||||||
|
self._logger.info(
|
||||||
|
'Ratelimit: Retrying in %.1f seconds.', backoff)
|
||||||
|
time.sleep(backoff)
|
||||||
|
else:
|
||||||
|
raise
|
||||||
|
|
||||||
|
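    # Note (illustrative; not part of the SDK source): the 5xx retry above
    # sleeps 2**attempt * random.random() seconds, a jittered exponential
    # backoff drawn uniformly from [0, 2**attempt) -- roughly up to 2 s after
    # the first failure, 4 s after the second, 8 s after the third.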
    def request_json_string(self,
                            host,
                            func_name,
                            route_style,
                            request_json_arg,
                            request_binary,
                            timeout=None):
        """
        See :meth:`request_json_string_with_retry` for description of
        parameters.
        """
        if host not in self._host_map:
            raise ValueError('Unknown value for host: %r' % host)

        if not isinstance(request_binary, (six.binary_type, type(None))):
            # Disallow streams and file-like objects even though the underlying
            # requests library supports them. This is to prevent incorrect
            # behavior when a non-rewindable stream is read from, but the
            # request fails and needs to be re-tried at a later time.
            raise TypeError('expected request_binary as binary type, got %s' %
                            type(request_binary))

        # Fully qualified hostname
        fq_hostname = self._host_map[host]
        url = self._get_route_url(fq_hostname, func_name)

        headers = {'User-Agent': self._user_agent}
        if host != HOST_NOTIFY:
            headers['Authorization'] = 'Bearer %s' % self._oauth2_access_token
            if self._headers:
                headers.update(self._headers)

        # The contents of the body of the HTTP request
        body = None
        # Whether the response should be streamed incrementally, or buffered
        # entirely. If stream is True, the caller is responsible for closing
        # the HTTP response.
        stream = False

        if route_style == self._ROUTE_STYLE_RPC:
            headers['Content-Type'] = 'application/json'
            body = request_json_arg
        elif route_style == self._ROUTE_STYLE_DOWNLOAD:
            headers['Dropbox-API-Arg'] = request_json_arg
            stream = True
        elif route_style == self._ROUTE_STYLE_UPLOAD:
            headers['Content-Type'] = 'application/octet-stream'
            headers['Dropbox-API-Arg'] = request_json_arg
            body = request_binary
        else:
            raise ValueError('Unknown operation style: %r' % route_style)

        if timeout is None:
            timeout = self._timeout

        r = self._session.post(url,
                               headers=headers,
                               data=body,
                               stream=stream,
                               verify=True,
                               timeout=timeout,
                               )

        request_id = r.headers.get('x-dropbox-request-id')
        if r.status_code >= 500:
            raise InternalServerError(request_id, r.status_code, r.text)
        elif r.status_code == 400:
            raise BadInputError(request_id, r.text)
        elif r.status_code == 401:
            assert r.headers.get('content-type') == 'application/json', (
                'Expected content-type to be application/json, got %r' %
                r.headers.get('content-type'))
            err = stone_serializers.json_compat_obj_decode(
                AuthError_validator, r.json()['error'])
            raise AuthError(request_id, err)
        elif r.status_code == HTTP_STATUS_INVALID_PATH_ROOT:
            err = stone_serializers.json_compat_obj_decode(
                PathRootError_validator, r.json()['error'])
            raise PathRootError(request_id, err)
        elif r.status_code == 429:
            err = None
            if r.headers.get('content-type') == 'application/json':
                err = stone_serializers.json_compat_obj_decode(
                    RateLimitError_validator, r.json()['error'])
                retry_after = err.retry_after
            else:
                retry_after_str = r.headers.get('retry-after')
                if retry_after_str is not None:
                    retry_after = int(retry_after_str)
                else:
                    retry_after = None
            raise RateLimitError(request_id, err, retry_after)
        elif 200 <= r.status_code <= 299:
            if route_style == self._ROUTE_STYLE_DOWNLOAD:
                raw_resp = r.headers['dropbox-api-result']
            else:
                assert r.headers.get('content-type') == 'application/json', (
                    'Expected content-type to be application/json, got %r' %
                    r.headers.get('content-type'))
                raw_resp = r.content.decode('utf-8')
            if route_style == self._ROUTE_STYLE_DOWNLOAD:
                return RouteResult(raw_resp, r)
            else:
                return RouteResult(raw_resp)
        elif r.status_code in (403, 404, 409):
            raw_resp = r.content.decode('utf-8')
            return RouteErrorResult(request_id, raw_resp)
        else:
            raise HttpError(request_id, r.status_code, r.text)

    def _get_route_url(self, hostname, route_name):
        """Returns the URL of the route.

        :param str hostname: Hostname to make the request to.
        :param str route_name: Name of the route.
        :rtype: str
        """
        return 'https://{hostname}/{version}/{route_name}'.format(
            hostname=hostname,
            version=Dropbox._API_VERSION,
            route_name=route_name,
        )

    def _save_body_to_file(self, download_path, http_resp, chunksize=2**16):
        """
        Saves the body of an HTTP response to a file.

        :param str download_path: Local path to save data to.
        :param http_resp: The HTTP response whose body will be saved.
        :type http_resp: :class:`requests.models.Response`
        :rtype: None
        """
        with open(download_path, 'wb') as f:
            with contextlib.closing(http_resp):
                for c in http_resp.iter_content(chunksize):
                    f.write(c)

    def with_path_root(self, path_root):
        """
        Creates a clone of the Dropbox instance with the Dropbox-API-Path-Root header
        as the appropriate serialized instance of PathRoot.

        For more information, see
        https://www.dropbox.com/developers/reference/namespace-guide#pathrootmodes

        :param PathRoot path_root: instance of PathRoot to serialize into the headers field
        :return: A :class:`Dropbox`
        :rtype: Dropbox
        """

        if not isinstance(path_root, PathRoot):
            raise ValueError("path_root must be an instance of PathRoot")

        return self.clone(
            headers={
                PATH_ROOT_HEADER: stone_serializers.json_encode(PathRoot_validator, path_root)
            }
        )


class Dropbox(_DropboxTransport, DropboxBase):
    """
    Use this class to make requests to the Dropbox API using a user's access
    token. Methods of this class are meant to act on the corresponding user's
    Dropbox.
    """
    pass


class DropboxTeam(_DropboxTransport, DropboxTeamBase):
    """
    Use this class to make requests to the Dropbox API using a team's access
    token. Methods of this class are meant to act on the team, but there is
    also an :meth:`as_user` method for assuming a team member's identity.
    """
    def as_admin(self, team_member_id):
        """
        Allows a team credential to assume the identity of an administrator on the team
        and perform operations on any team-owned content.

        :param str team_member_id: team member id of administrator to perform actions with
        :return: A :class:`Dropbox` object that can be used to query on behalf
            of this admin of the team.
        :rtype: Dropbox
        """
        return self._get_dropbox_client_with_select_header('Dropbox-API-Select-Admin',
                                                           team_member_id)

    def as_user(self, team_member_id):
        """
        Allows a team credential to assume the identity of a member of the
        team.

        :param str team_member_id: team member id of team member to perform actions with
        :return: A :class:`Dropbox` object that can be used to query on behalf
            of this member of the team.
        :rtype: Dropbox
        """
        return self._get_dropbox_client_with_select_header('Dropbox-API-Select-User',
                                                           team_member_id)

    def _get_dropbox_client_with_select_header(self, select_header_name, team_member_id):
        """
        Get Dropbox client with modified headers

        :param str select_header_name: Header name used to select users
        :param str team_member_id: team member id of team member to perform actions with
        :return: A :class:`Dropbox` object that can be used to query on behalf
            of a member or admin of the team
        :rtype: Dropbox
        """

        new_headers = self._headers.copy() if self._headers else {}
        new_headers[select_header_name] = team_member_id
        return Dropbox(
            self._oauth2_access_token,
            max_retries_on_error=self._max_retries_on_error,
            max_retries_on_rate_limit=self._max_retries_on_rate_limit,
            timeout=self._timeout,
            user_agent=self._raw_user_agent,
            session=self._session,
            headers=new_headers,
        )
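For orientation, a minimal construction sketch based on the constructor above (illustrative only, not part of the diff; the token and user agent strings are placeholders):

import dropbox

# Retry 5xx errors up to 4 times, retry 429s indefinitely, and wait at
# most 60 seconds for any single packet from the server.
dbx = dropbox.Dropbox('MY_ACCESS_TOKEN',
                      max_retries_on_error=4,
                      max_retries_on_rate_limit=None,
                      user_agent='MyApp/1.0',
                      timeout=60.0)

quick = dbx.clone(timeout=5.0)  # same token and session, shorter timeout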
100
resources/lib/dropbox/exceptions.py
Normal file
@@ -0,0 +1,100 @@
class DropboxException(Exception):
    """All errors related to making an API request extend this."""

    def __init__(self, request_id, *args, **kwargs):
        # A request_id can be shared with Dropbox Support to pinpoint the exact
        # request that returns an error.
        super(DropboxException, self).__init__(request_id, *args, **kwargs)
        self.request_id = request_id

    def __str__(self):
        return repr(self)


class ApiError(DropboxException):
    """Errors produced by the Dropbox API."""

    def __init__(self, request_id, error, user_message_text, user_message_locale):
        """
        :param (str) request_id: A request_id can be shared with Dropbox
            Support to pinpoint the exact request that returns an error.
        :param error: An instance of the error data type for the route.
        :param (str) user_message_text: A human-readable message that can be
            displayed to the end user. Is None, if unavailable.
        :param (str) user_message_locale: The locale of ``user_message_text``,
            if present.
        """
        super(ApiError, self).__init__(request_id, error)
        self.error = error
        self.user_message_text = user_message_text
        self.user_message_locale = user_message_locale

    def __repr__(self):
        return 'ApiError({!r}, {})'.format(self.request_id, self.error)


class HttpError(DropboxException):
    """Errors produced at the HTTP layer."""

    def __init__(self, request_id, status_code, body):
        super(HttpError, self).__init__(request_id, status_code, body)
        self.status_code = status_code
        self.body = body

    def __repr__(self):
        return 'HttpError({!r}, {}, {!r})'.format(self.request_id,
                                                  self.status_code, self.body)


class PathRootError(HttpError):
    """Error caused by an invalid path root."""

    def __init__(self, request_id, error=None):
        super(PathRootError, self).__init__(request_id, 422, None)
        self.error = error

    def __repr__(self):
        return 'PathRootError({!r}, {!r})'.format(self.request_id, self.error)


class BadInputError(HttpError):
    """Errors due to bad input parameters to an API Operation."""

    def __init__(self, request_id, message):
        super(BadInputError, self).__init__(request_id, 400, message)
        self.message = message

    def __repr__(self):
        return 'BadInputError({!r}, {!r})'.format(self.request_id, self.message)


class AuthError(HttpError):
    """Errors due to invalid authentication credentials."""

    def __init__(self, request_id, error):
        super(AuthError, self).__init__(request_id, 401, None)
        self.error = error

    def __repr__(self):
        return 'AuthError({!r}, {!r})'.format(self.request_id, self.error)


class RateLimitError(HttpError):
    """Error caused by rate limiting."""

    def __init__(self, request_id, error=None, backoff=None):
        super(RateLimitError, self).__init__(request_id, 429, None)
        self.error = error
        self.backoff = backoff

    def __repr__(self):
        return 'RateLimitError({!r}, {!r}, {!r})'.format(
            self.request_id, self.error, self.backoff)


class InternalServerError(HttpError):
    """Errors due to a problem on Dropbox."""

    def __repr__(self):
        return 'InternalServerError({!r}, {}, {!r})'.format(
            self.request_id, self.status_code, self.body)
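A minimal sketch of how a caller would guard an API call with these exception types (illustrative only; `dbx` is assumed to be an authenticated client and `files_upload` a generated route method):

import time

def upload_with_guard(dbx, data, remote_path):
    try:
        dbx.files_upload(data, remote_path)
    except RateLimitError as e:
        # backoff is the server-suggested wait in seconds, when provided
        time.sleep(e.backoff if e.backoff is not None else 5.0)
    except ApiError as e:
        # request_id can be shared with Dropbox support for debugging
        print('API error (request %s): %s' % (e.request_id, e.error))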
3433
resources/lib/dropbox/file_properties.py
Normal file
File diff suppressed because it is too large

2172
resources/lib/dropbox/file_requests.py
Normal file
File diff suppressed because it is too large

14449
resources/lib/dropbox/files.py
Normal file
File diff suppressed because it is too large

515
resources/lib/dropbox/oauth.py
Normal file
@@ -0,0 +1,515 @@
__all__ = [
    'BadRequestException',
    'BadStateException',
    'CsrfException',
    'DropboxOAuth2Flow',
    'DropboxOAuth2FlowNoRedirect',
    'NotApprovedException',
    'OAuth2FlowNoRedirectResult',
    'OAuth2FlowResult',
    'ProviderException',
]

import base64
import os
import six
import urllib

from .session import (
    API_HOST,
    WEB_HOST,
    pinned_session,
)

if six.PY3:
    url_path_quote = urllib.parse.quote  # pylint: disable=no-member,useless-suppression
    url_encode = urllib.parse.urlencode  # pylint: disable=no-member,useless-suppression
else:
    url_path_quote = urllib.quote  # pylint: disable=no-member,useless-suppression
    url_encode = urllib.urlencode  # pylint: disable=no-member,useless-suppression


class OAuth2FlowNoRedirectResult(object):
    """
    Authorization information for an OAuth2Flow performed with no redirect.
    """

    def __init__(self, access_token, account_id, user_id):
        """
        Args:
            access_token (str): Token to be used to authenticate later
                requests.
            account_id (str): The Dropbox user's account ID.
            user_id (str): Deprecated (use account_id instead).
        """
        self.access_token = access_token
        self.account_id = account_id
        self.user_id = user_id

    def __repr__(self):
        return 'OAuth2FlowNoRedirectResult(%r, %r, %r)' % (
            self.access_token,
            self.account_id,
            self.user_id,
        )


class OAuth2FlowResult(OAuth2FlowNoRedirectResult):
    """
    Authorization information for an OAuth2Flow with redirect.
    """

    def __init__(self, access_token, account_id, user_id, url_state):
        """
        Same as OAuth2FlowNoRedirectResult but with url_state.

        Args:
            url_state (str): The url state that was set by
                :meth:`DropboxOAuth2Flow.start`.
        """
        super(OAuth2FlowResult, self).__init__(
            access_token, account_id, user_id)
        self.url_state = url_state

    @classmethod
    def from_no_redirect_result(cls, result, url_state):
        assert isinstance(result, OAuth2FlowNoRedirectResult)
        return cls(
            result.access_token, result.account_id, result.user_id, url_state)

    def __repr__(self):
        return 'OAuth2FlowResult(%r, %r, %r, %r)' % (
            self.access_token,
            self.account_id,
            self.user_id,
            self.url_state,
        )


class DropboxOAuth2FlowBase(object):

    def __init__(self, consumer_key, consumer_secret, locale=None):
        self.consumer_key = consumer_key
        self.consumer_secret = consumer_secret
        self.locale = locale
        self.requests_session = pinned_session()

    def _get_authorize_url(self, redirect_uri, state):
        params = dict(response_type='code',
                      client_id=self.consumer_key)
        if redirect_uri is not None:
            params['redirect_uri'] = redirect_uri
        if state is not None:
            params['state'] = state

        return self.build_url('/oauth2/authorize', params, WEB_HOST)

    def _finish(self, code, redirect_uri):
        url = self.build_url('/oauth2/token')
        params = {'grant_type': 'authorization_code',
                  'code': code,
                  'client_id': self.consumer_key,
                  'client_secret': self.consumer_secret,
                  }
        if self.locale is not None:
            params['locale'] = self.locale
        if redirect_uri is not None:
            params['redirect_uri'] = redirect_uri

        resp = self.requests_session.post(url, data=params)
        resp.raise_for_status()

        d = resp.json()

        if 'team_id' in d:
            account_id = d['team_id']
        else:
            account_id = d['account_id']

        access_token = d['access_token']
        uid = d['uid']

        return OAuth2FlowNoRedirectResult(
            access_token,
            account_id,
            uid)

    def build_path(self, target, params=None):
        """Build the path component for an API URL.

        This method urlencodes the parameters, adds them
        to the end of the target url, and puts a marker for the API
        version in front.

        :param str target: A target url (e.g. '/files') to build upon.
        :param dict params: Optional dictionary of parameters (name to value).
        :return: The path and parameters components of an API URL.
        :rtype: str
        """
        if six.PY2 and isinstance(target, six.text_type):
            target = target.encode('utf8')

        target_path = url_path_quote(target)

        params = params or {}
        params = params.copy()

        if self.locale:
            params['locale'] = self.locale

        if params:
            query_string = _params_to_urlencoded(params)
            return "%s?%s" % (target_path, query_string)
        else:
            return target_path

    def build_url(self, target, params=None, host=API_HOST):
        """Build an API URL.

        This method adds scheme and hostname to the path
        returned from build_path.

        :param str target: A target url (e.g. '/files') to build upon.
        :param dict params: Optional dictionary of parameters (name to value).
        :return: The full API URL.
        :rtype: str
        """
        return "https://%s%s" % (host, self.build_path(target, params))


class DropboxOAuth2FlowNoRedirect(DropboxOAuth2FlowBase):
    """
    OAuth 2 authorization helper for apps that can't provide a redirect URI
    (such as the command-line example apps).

    Example::

        from dropbox import DropboxOAuth2FlowNoRedirect

        auth_flow = DropboxOAuth2FlowNoRedirect(APP_KEY, APP_SECRET)

        authorize_url = auth_flow.start()
        print "1. Go to: " + authorize_url
        print "2. Click \\"Allow\\" (you might have to log in first)."
        print "3. Copy the authorization code."
        auth_code = raw_input("Enter the authorization code here: ").strip()

        try:
            oauth_result = auth_flow.finish(auth_code)
        except Exception, e:
            print('Error: %s' % (e,))
            return

        dbx = Dropbox(oauth_result.access_token)
    """

    def __init__(self, consumer_key, consumer_secret, locale=None):  # noqa: E501; pylint: disable=useless-super-delegation
        """
        Construct an instance.

        Parameters
        :param str consumer_key: Your API app's "app key".
        :param str consumer_secret: Your API app's "app secret".
        :param str locale: The locale of the user of your application. For
            example "en" or "en_US". Some API calls return localized data and
            error messages; this setting tells the server which locale to use.
            By default, the server uses "en_US".
        """
        # pylint: disable=useless-super-delegation
        super(DropboxOAuth2FlowNoRedirect, self).__init__(
            consumer_key,
            consumer_secret,
            locale,
        )

    def start(self):
        """
        Starts the OAuth 2 authorization process.

        :return: The URL for a page on Dropbox's website. This page will let
            the user "approve" your app, which gives your app permission to
            access the user's Dropbox account. Tell the user to visit this URL
            and approve your app.
        """
        return self._get_authorize_url(None, None)

    def finish(self, code):
        """
        If the user approves your app, they will be presented with an
        "authorization code". Have the user copy/paste that authorization code
        into your app and then call this method to get an access token.

        :param str code: The authorization code shown to the user when they
            approved your app.
        :rtype: OAuth2FlowNoRedirectResult
        :raises: The same exceptions as :meth:`DropboxOAuth2Flow.finish()`.
        """
        return self._finish(code, None)


class DropboxOAuth2Flow(DropboxOAuth2FlowBase):
    """
    OAuth 2 authorization helper. Use this for web apps.

    OAuth 2 has a two-step authorization process. The first step is having the
    user authorize your app. The second involves getting an OAuth 2 access
    token from Dropbox.

    Example::

        from dropbox import DropboxOAuth2Flow

        def get_dropbox_auth_flow(web_app_session):
            redirect_uri = "https://my-web-server.org/dropbox-auth-finish"
            return DropboxOAuth2Flow(
                APP_KEY, APP_SECRET, redirect_uri, web_app_session,
                "dropbox-auth-csrf-token")

        # URL handler for /dropbox-auth-start
        def dropbox_auth_start(web_app_session, request):
            authorize_url = get_dropbox_auth_flow(web_app_session).start()
            redirect_to(authorize_url)

        # URL handler for /dropbox-auth-finish
        def dropbox_auth_finish(web_app_session, request):
            try:
                oauth_result = \\
                    get_dropbox_auth_flow(web_app_session).finish(
                        request.query_params)
            except BadRequestException, e:
                http_status(400)
            except BadStateException, e:
                # Start the auth flow again.
                redirect_to("/dropbox-auth-start")
            except CsrfException, e:
                http_status(403)
            except NotApprovedException, e:
                flash('Not approved? Why not?')
                return redirect_to("/home")
            except ProviderException, e:
                logger.log("Auth error: %s" % (e,))
                http_status(403)

    """

    def __init__(self, consumer_key, consumer_secret, redirect_uri, session,
                 csrf_token_session_key, locale=None):
        """
        Construct an instance.

        :param str consumer_key: Your API app's "app key".
        :param str consumer_secret: Your API app's "app secret".
        :param str redirect_uri: The URI that the Dropbox server will redirect
            the user to after the user finishes authorizing your app. This URI
            must be HTTPS-based and pre-registered with the Dropbox servers,
            though localhost URIs are allowed without pre-registration and can
            be either HTTP or HTTPS.
        :param dict session: A dict-like object that represents the current
            user's web session (will be used to save the CSRF token).
        :param str csrf_token_session_key: The key to use when storing the CSRF
            token in the session (for example: "dropbox-auth-csrf-token").
        :param str locale: The locale of the user of your application. For
            example "en" or "en_US". Some API calls return localized data and
            error messages; this setting tells the server which locale to use.
            By default, the server uses "en_US".
        """
        super(DropboxOAuth2Flow, self).__init__(consumer_key, consumer_secret, locale)
        self.redirect_uri = redirect_uri
        self.session = session
        self.csrf_token_session_key = csrf_token_session_key

    def start(self, url_state=None):
        """
        Starts the OAuth 2 authorization process.

        This function builds an "authorization URL". You should redirect your
        user's browser to this URL, which will give them an opportunity to
        grant your app access to their Dropbox account. When the user
        completes this process, they will be automatically redirected to the
        ``redirect_uri`` you passed in to the constructor.

        This function will also save a CSRF token to
        ``session[csrf_token_session_key]`` (as provided to the constructor).
        This CSRF token will be checked on :meth:`finish()` to prevent request
        forgery.

        :param str url_state: Any data that you would like to keep in the URL
            through the authorization process. This exact value will be
            returned to you by :meth:`finish()`.
        :return: The URL for a page on Dropbox's website. This page will let
            the user "approve" your app, which gives your app permission to
            access the user's Dropbox account. Tell the user to visit this URL
            and approve your app.
        """
        csrf_token = base64.urlsafe_b64encode(os.urandom(16)).decode('ascii')
        state = csrf_token
        if url_state is not None:
            state += "|" + url_state
        self.session[self.csrf_token_session_key] = csrf_token

        return self._get_authorize_url(self.redirect_uri, state)

    def finish(self, query_params):
        """
        Call this after the user has visited the authorize URL (see
        :meth:`start()`), approved your app and was redirected to your redirect
        URI.

        :param dict query_params: The query parameters on the GET request to
            your redirect URI.
        :rtype: OAuth2FlowResult
        :raises: :class:`BadRequestException` If the redirect URL was missing
            parameters or if the given parameters were not valid.
        :raises: :class:`BadStateException` If there's no CSRF token in the
            session.
        :raises: :class:`CsrfException` If the ``state`` query parameter
            doesn't contain the CSRF token from the user's session.
        :raises: :class:`NotApprovedException` If the user chose not to
            approve your app.
        :raises: :class:`ProviderException` If Dropbox redirected to your
            redirect URI with some unexpected error identifier and error message.
        """
        # Check well-formedness of request.

        state = query_params.get('state')
        if state is None:
            raise BadRequestException("Missing query parameter 'state'.")

        error = query_params.get('error')
        error_description = query_params.get('error_description')
        code = query_params.get('code')

        if error is not None and code is not None:
            raise BadRequestException(
                "Query parameters 'code' and 'error' are both set; "
                "only one must be set.")
        if error is None and code is None:
            raise BadRequestException(
                "Neither query parameter 'code' or 'error' is set.")

        # Check CSRF token

        if self.csrf_token_session_key not in self.session:
            raise BadStateException('Missing CSRF token in session.')
        csrf_token_from_session = self.session[self.csrf_token_session_key]
        if len(csrf_token_from_session) <= 20:
            raise AssertionError('CSRF token unexpectedly short: %r' %
                                 csrf_token_from_session)

        split_pos = state.find('|')
        if split_pos < 0:
            given_csrf_token = state
            url_state = None
        else:
            given_csrf_token = state[0:split_pos]
            url_state = state[split_pos + 1:]

        if not _safe_equals(csrf_token_from_session, given_csrf_token):
            raise CsrfException('expected %r, got %r' %
                                (csrf_token_from_session, given_csrf_token))

        del self.session[self.csrf_token_session_key]

        # Check for error identifier

        if error is not None:
            if error == 'access_denied':
                # The user clicked "Deny"
                if error_description is None:
                    raise NotApprovedException(
                        'No additional description from Dropbox')
                else:
                    raise NotApprovedException(
                        'Additional description from Dropbox: %s' %
                        error_description)
            else:
                # All other errors
                full_message = error
                if error_description is not None:
                    full_message += ": " + error_description
                raise ProviderException(full_message)

        # If everything went ok, make the network call to get an access token.

        no_redirect_result = self._finish(code, self.redirect_uri)
        return OAuth2FlowResult.from_no_redirect_result(
            no_redirect_result, url_state)


class BadRequestException(Exception):
    """
    Thrown if the redirect URL was missing parameters or if the
    given parameters were not valid.

    The recommended action is to show an HTTP 400 error page.
    """
    pass


class BadStateException(Exception):
    """
    Thrown if all the parameters are correct, but there's no CSRF token in the
    session. This probably means that the session expired.

    The recommended action is to redirect the user's browser to try the
    approval process again.
    """
    pass


class CsrfException(Exception):
    """
    Thrown if the given 'state' parameter doesn't contain the CSRF token from
    the user's session. This is blocked to prevent CSRF attacks.

    The recommended action is to respond with an HTTP 403 error page.
    """
    pass


class NotApprovedException(Exception):
    """
    The user chose not to approve your app.
    """
    pass


class ProviderException(Exception):
    """
    Dropbox redirected to your redirect URI with some unexpected error
    identifier and error message.

    The recommended action is to log the error, tell the user something went
    wrong, and let them try again.
    """
    pass


def _safe_equals(a, b):
    if len(a) != len(b):
        return False
    res = 0
    for ca, cb in zip(a, b):
        res |= ord(ca) ^ ord(cb)
    return res == 0


def _params_to_urlencoded(params):
    """
    Returns a application/x-www-form-urlencoded ``str`` representing the
    key/value pairs in ``params``.

    Keys and values are ``str()``'d before calling ``urllib.urlencode``, with
    the exception of unicode objects which are utf8-encoded.
    """
    def encode(o):
        if isinstance(o, six.binary_type):
            return o
        else:
            if isinstance(o, six.text_type):
                return o.encode('utf-8')
            else:
                return str(o).encode('utf-8')

    utf8_params = {encode(k): encode(v) for k, v in six.iteritems(params)}
    return url_encode(utf8_params)
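The docstring example above is Python 2; a Python 3 rendition for reference (a sketch only; APP_KEY and APP_SECRET are placeholders, and the import paths are shown as in the upstream SDK):

from dropbox import Dropbox
from dropbox.oauth import DropboxOAuth2FlowNoRedirect

auth_flow = DropboxOAuth2FlowNoRedirect(APP_KEY, APP_SECRET)
print("1. Go to: " + auth_flow.start())
print('2. Click "Allow" (you might have to log in first).')
print("3. Copy the authorization code.")
auth_code = input("Enter the authorization code here: ").strip()

oauth_result = auth_flow.finish(auth_code)
dbx = Dropbox(oauth_result.access_token)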
4575
resources/lib/dropbox/paper.py
Normal file
File diff suppressed because it is too large

2
resources/lib/dropbox/pkg_resources.py
Normal file
@@ -0,0 +1,2 @@
def resource_filename(package_or_requirement, resource_name):
    return resource_name
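This two-line stub appears to stand in for setuptools' `pkg_resources.resource_filename`, presumably so the vendored SDK needs no setuptools at runtime inside Kodi; it simply hands back the resource name, which session.py below then uses as the certificate path:

# Illustrative: with the stub, the certificate path is just the bare file name.
pkg_resources.resource_filename('dropbox.session', 'trusted-certs.crt')
# -> 'trusted-certs.crt'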
158
resources/lib/dropbox/seen_state.py
Normal file
@@ -0,0 +1,158 @@
# -*- coding: utf-8 -*-
# Auto-generated by Stone, do not modify.
# @generated
# flake8: noqa
# pylint: skip-file
try:
    from . import stone_validators as bv
    from . import stone_base as bb
except (ImportError, SystemError, ValueError):
    # Catch errors raised when importing a relative module when not in a package.
    # This makes testing this file directly (outside of a package) easier.
    import stone_validators as bv
    import stone_base as bb


class PlatformType(bb.Union):
    """
    Possible platforms on which a user may view content.

    This class acts as a tagged union. Only one of the ``is_*`` methods will
    return true. To get the associated value of a tag (if one exists), use the
    corresponding ``get_*`` method.

    :ivar seen_state.PlatformType.web: The content was viewed on the web.
    :ivar seen_state.PlatformType.desktop: The content was viewed on a desktop
        client.
    :ivar seen_state.PlatformType.mobile_ios: The content was viewed on a mobile
        iOS client.
    :ivar seen_state.PlatformType.mobile_android: The content was viewed on a
        mobile android client.
    :ivar seen_state.PlatformType.api: The content was viewed from an API
        client.
    :ivar seen_state.PlatformType.unknown: The content was viewed on an unknown
        platform.
    :ivar seen_state.PlatformType.mobile: The content was viewed on a mobile
        client. DEPRECATED: Use mobile_ios or mobile_android instead.
    """

    _catch_all = 'other'
    # Attribute is overwritten below the class definition
    web = None
    # Attribute is overwritten below the class definition
    desktop = None
    # Attribute is overwritten below the class definition
    mobile_ios = None
    # Attribute is overwritten below the class definition
    mobile_android = None
    # Attribute is overwritten below the class definition
    api = None
    # Attribute is overwritten below the class definition
    unknown = None
    # Attribute is overwritten below the class definition
    mobile = None
    # Attribute is overwritten below the class definition
    other = None

    def is_web(self):
        """
        Check if the union tag is ``web``.

        :rtype: bool
        """
        return self._tag == 'web'

    def is_desktop(self):
        """
        Check if the union tag is ``desktop``.

        :rtype: bool
        """
        return self._tag == 'desktop'

    def is_mobile_ios(self):
        """
        Check if the union tag is ``mobile_ios``.

        :rtype: bool
        """
        return self._tag == 'mobile_ios'

    def is_mobile_android(self):
        """
        Check if the union tag is ``mobile_android``.

        :rtype: bool
        """
        return self._tag == 'mobile_android'

    def is_api(self):
        """
        Check if the union tag is ``api``.

        :rtype: bool
        """
        return self._tag == 'api'

    def is_unknown(self):
        """
        Check if the union tag is ``unknown``.

        :rtype: bool
        """
        return self._tag == 'unknown'

    def is_mobile(self):
        """
        Check if the union tag is ``mobile``.

        :rtype: bool
        """
        return self._tag == 'mobile'

    def is_other(self):
        """
        Check if the union tag is ``other``.

        :rtype: bool
        """
        return self._tag == 'other'

    def _process_custom_annotations(self, annotation_type, field_path, processor):
        super(PlatformType, self)._process_custom_annotations(annotation_type, field_path, processor)

    def __repr__(self):
        return 'PlatformType(%r, %r)' % (self._tag, self._value)

PlatformType_validator = bv.Union(PlatformType)

PlatformType._web_validator = bv.Void()
PlatformType._desktop_validator = bv.Void()
PlatformType._mobile_ios_validator = bv.Void()
PlatformType._mobile_android_validator = bv.Void()
PlatformType._api_validator = bv.Void()
PlatformType._unknown_validator = bv.Void()
PlatformType._mobile_validator = bv.Void()
PlatformType._other_validator = bv.Void()
PlatformType._tagmap = {
    'web': PlatformType._web_validator,
    'desktop': PlatformType._desktop_validator,
    'mobile_ios': PlatformType._mobile_ios_validator,
    'mobile_android': PlatformType._mobile_android_validator,
    'api': PlatformType._api_validator,
    'unknown': PlatformType._unknown_validator,
    'mobile': PlatformType._mobile_validator,
    'other': PlatformType._other_validator,
}

PlatformType.web = PlatformType('web')
PlatformType.desktop = PlatformType('desktop')
PlatformType.mobile_ios = PlatformType('mobile_ios')
PlatformType.mobile_android = PlatformType('mobile_android')
PlatformType.api = PlatformType('api')
PlatformType.unknown = PlatformType('unknown')
PlatformType.mobile = PlatformType('mobile')
PlatformType.other = PlatformType('other')

ROUTES = {
}
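A short sketch of how generated tagged unions like this are consumed (illustrative only):

pt = PlatformType('desktop')    # construct by tag; void tags carry no value
assert pt.is_desktop() and not pt.is_web()
print(PlatformType.mobile_ios)  # predefined instances, one per void tag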
51
resources/lib/dropbox/session.py
Normal file
@@ -0,0 +1,51 @@
from . import pkg_resources
import os
import ssl

import requests
from requests.adapters import HTTPAdapter
from urllib3.poolmanager import PoolManager

API_DOMAIN = os.environ.get('DROPBOX_API_DOMAIN',
                            os.environ.get('DROPBOX_DOMAIN', '.dropboxapi.com'))

WEB_DOMAIN = os.environ.get('DROPBOX_WEB_DOMAIN',
                            os.environ.get('DROPBOX_DOMAIN', '.dropbox.com'))

# Default short hostname for RPC-style routes.
HOST_API = 'api'

# Default short hostname for upload and download-style routes.
HOST_CONTENT = 'content'

# Default short hostname for longpoll routes.
HOST_NOTIFY = 'notify'

# Default short hostname for the Dropbox website.
HOST_WWW = 'www'

API_HOST = os.environ.get('DROPBOX_API_HOST', HOST_API + API_DOMAIN)
API_CONTENT_HOST = os.environ.get('DROPBOX_API_CONTENT_HOST', HOST_CONTENT + API_DOMAIN)
API_NOTIFICATION_HOST = os.environ.get('DROPBOX_API_NOTIFY_HOST', HOST_NOTIFY + API_DOMAIN)
WEB_HOST = os.environ.get('DROPBOX_WEB_HOST', HOST_WWW + WEB_DOMAIN)

_TRUSTED_CERT_FILE = pkg_resources.resource_filename(__name__, 'trusted-certs.crt')


# TODO(kelkabany): We probably only want to instantiate this once so that even
# if multiple Dropbox objects are instantiated, they all share the same pool.
class _SSLAdapter(HTTPAdapter):
    def init_poolmanager(self, connections, maxsize, block=False, **_):
        self.poolmanager = PoolManager(
            num_pools=connections,
            maxsize=maxsize,
            block=block,
            cert_reqs=ssl.CERT_REQUIRED,
            ca_certs=_TRUSTED_CERT_FILE,
        )


def pinned_session(pool_maxsize=8):
    http_adapter = _SSLAdapter(pool_connections=4, pool_maxsize=pool_maxsize)
    _session = requests.session()
    _session.mount('https://', http_adapter)

    return _session
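Usage sketch (illustrative only): requests made through the pinned session are verified against the SDK's bundled `trusted-certs.crt` rather than the system CA store.

session = pinned_session(pool_maxsize=16)
# Any HTTPS request through this session uses the pinned CA bundle:
resp = session.get('https://www.dropbox.com')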
19911
resources/lib/dropbox/sharing.py
Normal file
File diff suppressed because it is too large

152
resources/lib/dropbox/stone_base.py
Normal file
@@ -0,0 +1,152 @@
"""
|
||||||
|
Helpers for representing Stone data types in Python.
|
||||||
|
|
||||||
|
This module should be dropped into a project that requires the use of Stone. In
|
||||||
|
the future, this could be imported from a pre-installed Python package, rather
|
||||||
|
than being added to a project.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import absolute_import, unicode_literals
|
||||||
|
|
||||||
|
import functools
|
||||||
|
|
||||||
|
try:
|
||||||
|
from . import stone_validators as bv
|
||||||
|
except (ImportError, SystemError, ValueError):
|
||||||
|
# Catch errors raised when importing a relative module when not in a package.
|
||||||
|
# This makes testing this file directly (outside of a package) easier.
|
||||||
|
import stone_validators as bv # type: ignore
|
||||||
|
|
||||||
|
_MYPY = False
|
||||||
|
if _MYPY:
|
||||||
|
import typing # noqa: F401 # pylint: disable=import-error,unused-import,useless-suppression
|
||||||
|
|
||||||
|
class AnnotationType(object):
|
||||||
|
# This is a base class for all annotation types.
|
||||||
|
pass
|
||||||
|
|
||||||
|
if _MYPY:
|
||||||
|
T = typing.TypeVar('T', bound=AnnotationType)
|
||||||
|
U = typing.TypeVar('U')
|
||||||
|
|
||||||
|
class Struct(object):
|
||||||
|
# This is a base class for all classes representing Stone structs.
|
||||||
|
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||||
|
# type: (typing.Type[T], typing.Text, typing.Callable[[T, U], U]) -> None
|
||||||
|
pass
|
||||||
|
|
||||||
|
class Union(object):
|
||||||
|
# TODO(kelkabany): Possible optimization is to remove _value if a
|
||||||
|
# union is composed of only symbols.
|
||||||
|
__slots__ = ['_tag', '_value']
|
||||||
|
_tagmap = {} # type: typing.Dict[typing.Text, bv.Validator]
|
||||||
|
_permissioned_tagmaps = set() # type: typing.Set[typing.Text]
|
||||||
|
|
||||||
|
def __init__(self, tag, value=None):
|
||||||
|
validator = None
|
||||||
|
tagmap_names = ['_{}_tagmap'.format(map_name) for map_name in self._permissioned_tagmaps]
|
||||||
|
for tagmap_name in ['_tagmap'] + tagmap_names:
|
||||||
|
if tag in getattr(self, tagmap_name):
|
||||||
|
validator = getattr(self, tagmap_name)[tag]
|
||||||
|
assert validator is not None, 'Invalid tag %r.' % tag
|
||||||
|
if isinstance(validator, bv.Void):
|
||||||
|
assert value is None, 'Void type union member must have None value.'
|
||||||
|
        elif isinstance(validator, (bv.Struct, bv.Union)):
            validator.validate_type_only(value)
        else:
            validator.validate(value)
        self._tag = tag
        self._value = value

    def __eq__(self, other):
        # Also need to check if one class is a subclass of another. If one union extends another,
        # the common fields should be able to be compared to each other.
        return (
            isinstance(other, Union) and
            (isinstance(self, other.__class__) or isinstance(other, self.__class__)) and
            self._tag == other._tag and self._value == other._value
        )

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        return hash((self._tag, self._value))

    def _process_custom_annotations(self, annotation_type, field_path, processor):
        # type: (typing.Type[T], typing.Text, typing.Callable[[T, U], U]) -> None
        pass

    @classmethod
    def _is_tag_present(cls, tag, caller_permissions):
        assert tag, 'tag value should not be None'

        if tag in cls._tagmap:
            return True

        for extra_permission in caller_permissions.permissions:
            tagmap_name = '_{}_tagmap'.format(extra_permission)
            if hasattr(cls, tagmap_name) and tag in getattr(cls, tagmap_name):
                return True

        return False

    @classmethod
    def _get_val_data_type(cls, tag, caller_permissions):
        assert tag, 'tag value should not be None'

        for extra_permission in caller_permissions.permissions:
            tagmap_name = '_{}_tagmap'.format(extra_permission)
            if hasattr(cls, tagmap_name) and tag in getattr(cls, tagmap_name):
                return getattr(cls, tagmap_name)[tag]

        return cls._tagmap[tag]


class Route(object):

    def __init__(self, name, version, deprecated, arg_type, result_type, error_type, attrs):
        self.name = name
        self.version = version
        self.deprecated = deprecated
        self.arg_type = arg_type
        self.result_type = result_type
        self.error_type = error_type
        assert isinstance(attrs, dict), 'Expected dict, got %r' % attrs
        self.attrs = attrs

    def __repr__(self):
        return 'Route({!r}, {!r}, {!r}, {!r}, {!r}, {!r}, {!r})'.format(
            self.name,
            self.version,
            self.deprecated,
            self.arg_type,
            self.result_type,
            self.error_type,
            self.attrs)


# helper functions used when constructing custom annotation processors

# put this here so that every other file doesn't need to import functools
partially_apply = functools.partial


def make_struct_annotation_processor(annotation_type, processor):
    def g(field_path, struct):
        if struct is None:
            return struct
        struct._process_custom_annotations(annotation_type, field_path, processor)
        return struct
    return g


def make_list_annotation_processor(processor):
    def g(field_path, list_):
        if list_ is None:
            return list_
        return [processor('{}[{}]'.format(field_path, idx), x) for idx, x in enumerate(list_)]
    return g


def make_map_value_annotation_processor(processor):
    def g(field_path, map_):
        if map_ is None:
            return map_
        return {k: processor('{}[{}]'.format(field_path, repr(k)), v) for k, v in map_.items()}
    return g
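For orientation, a minimal sketch (not from this library) of how these helpers compose. ExampleStruct and the 'Sensitive' annotation name are hypothetical stand-ins for Stone-generated types:

class ExampleStruct(object):
    # hypothetical stand-in for a generated struct
    def __init__(self, name):
        self.name = name

    def _process_custom_annotations(self, annotation_type, field_path, processor):
        # a real generated struct dispatches the processor per annotated field
        self.name = processor('Sensitive', self.name)

def redact(annotation, value):
    # processor callback: receives the annotation and the field value
    return '***' if annotation == 'Sensitive' else value

process_one = make_struct_annotation_processor('Sensitive', redact)
process_many = make_list_annotation_processor(process_one)
structs = process_many('items', [ExampleStruct('alice'), ExampleStruct('bob')])
print([s.name for s in structs])  # ['***', '***']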
1081  resources/lib/dropbox/stone_serializers.py  (new file; diff suppressed because it is too large)

673  resources/lib/dropbox/stone_validators.py  (new file)
@@ -0,0 +1,673 @@
"""
Defines classes to represent each Stone type in Python. These classes should
be used to validate Python objects and normalize them for a given type.

The data types defined here should not be specific to an RPC or serialization
format.

This module should be dropped into a project that requires the use of Stone. In
the future, this could be imported from a pre-installed Python package, rather
than being added to a project.
"""

from __future__ import absolute_import, unicode_literals

from abc import ABCMeta, abstractmethod
import datetime
import hashlib
import math
import numbers
import re
import six

_MYPY = False
if _MYPY:
    import typing  # noqa: F401 # pylint: disable=import-error,unused-import,useless-suppression

# See <http://python3porting.com/differences.html#buffer>
if six.PY3:
    _binary_types = (bytes, memoryview)  # noqa: E501,F821 # pylint: disable=undefined-variable,useless-suppression
else:
    _binary_types = (bytes, buffer)  # noqa: E501,F821 # pylint: disable=undefined-variable,useless-suppression


class ValidationError(Exception):
    """Raised when a value doesn't pass validation by its validator."""

    def __init__(self, message, parent=None):
        """
        Args:
            message (str): Error message detailing validation failure.
            parent (str): Adds the parent as the closest reference point for
                the error. Use :meth:`add_parent` to add more.
        """
        super(ValidationError, self).__init__(message)
        self.message = message
        self._parents = []
        if parent:
            self._parents.append(parent)

    def add_parent(self, parent):
        """
        Args:
            parent (str): Adds the parent to the top of the tree of references
                that lead to the validator that failed.
        """
        self._parents.append(parent)

    def __str__(self):
        """
        Returns:
            str: A descriptive message of the validation error that may also
            include the path to the validator that failed.
        """
        if self._parents:
            return '{}: {}'.format('.'.join(self._parents[::-1]), self.message)
        else:
            return self.message

    def __repr__(self):
        # Not a perfect repr, but includes the error location information.
        return 'ValidationError(%r)' % six.text_type(self)


def generic_type_name(v):
    """Return a descriptive type name that isn't Python specific. For example,
    an int value will return 'integer' rather than 'int'."""
    if isinstance(v, bool):
        # Must come before any numbers checks since booleans are integers too
        return 'boolean'
    elif isinstance(v, numbers.Integral):
        # Must come before real numbers check since integrals are reals too
        return 'integer'
    elif isinstance(v, numbers.Real):
        return 'float'
    elif isinstance(v, (tuple, list)):
        return 'list'
    elif isinstance(v, six.string_types):
        return 'string'
    elif v is None:
        return 'null'
    else:
        return type(v).__name__


class Validator(object):
    """All primitive and composite data types should be a subclass of this."""
    __metaclass__ = ABCMeta

    @abstractmethod
    def validate(self, val):
        """Validates that val is of this data type.

        Returns: A normalized value if validation succeeds.
        Raises: ValidationError
        """
        pass

    def has_default(self):
        return False

    def get_default(self):
        raise AssertionError('No default available.')


class Primitive(Validator):
    """A basic type that is defined by Stone."""
    # pylint: disable=abstract-method
    pass


class Boolean(Primitive):

    def validate(self, val):
        if not isinstance(val, bool):
            raise ValidationError('%r is not a valid boolean' % val)
        return val


class Integer(Primitive):
    """
    Do not use this class directly. Extend it and specify a 'minimum' and
    'maximum' value as class variables for a more restrictive integer range.
    """
    minimum = None  # type: typing.Optional[int]
    maximum = None  # type: typing.Optional[int]

    def __init__(self, min_value=None, max_value=None):
        """
        A more restrictive minimum or maximum value can be specified than the
        range inherent to the defined type.
        """
        if min_value is not None:
            assert isinstance(min_value, numbers.Integral), \
                'min_value must be an integral number'
            assert min_value >= self.minimum, \
                'min_value cannot be less than the minimum value for this ' \
                'type (%d < %d)' % (min_value, self.minimum)
            self.minimum = min_value
        if max_value is not None:
            assert isinstance(max_value, numbers.Integral), \
                'max_value must be an integral number'
            assert max_value <= self.maximum, \
                'max_value cannot be greater than the maximum value for ' \
                'this type (%d < %d)' % (max_value, self.maximum)
            self.maximum = max_value

    def validate(self, val):
        if not isinstance(val, numbers.Integral):
            raise ValidationError('expected integer, got %s'
                                  % generic_type_name(val))
        elif not (self.minimum <= val <= self.maximum):
            raise ValidationError('%d is not within range [%d, %d]'
                                  % (val, self.minimum, self.maximum))
        return val

    def __repr__(self):
        return '%s()' % self.__class__.__name__


class Int32(Integer):
    minimum = -2**31
    maximum = 2**31 - 1


class UInt32(Integer):
    minimum = 0
    maximum = 2**32 - 1


class Int64(Integer):
    minimum = -2**63
    maximum = 2**63 - 1


class UInt64(Integer):
    minimum = 0
    maximum = 2**64 - 1


class Real(Primitive):
    """
    Do not use this class directly. Extend it and optionally set a 'minimum'
    and 'maximum' value to enforce a range that's a subset of the Python float
    implementation. Python floats are doubles.
    """
    minimum = None  # type: typing.Optional[float]
    maximum = None  # type: typing.Optional[float]

    def __init__(self, min_value=None, max_value=None):
        """
        A more restrictive minimum or maximum value can be specified than the
        range inherent to the defined type.
        """
        if min_value is not None:
            assert isinstance(min_value, numbers.Real), \
                'min_value must be a real number'
            if not isinstance(min_value, float):
                try:
                    min_value = float(min_value)
                except OverflowError:
                    raise AssertionError('min_value is too small for a float')
            if self.minimum is not None and min_value < self.minimum:
                raise AssertionError('min_value cannot be less than the '
                                     'minimum value for this type (%f < %f)' %
                                     (min_value, self.minimum))
            self.minimum = min_value
        if max_value is not None:
            assert isinstance(max_value, numbers.Real), \
                'max_value must be a real number'
            if not isinstance(max_value, float):
                try:
                    max_value = float(max_value)
                except OverflowError:
                    raise AssertionError('max_value is too large for a float')
            if self.maximum is not None and max_value > self.maximum:
                raise AssertionError('max_value cannot be greater than the '
                                     'maximum value for this type (%f < %f)' %
                                     (max_value, self.maximum))
            self.maximum = max_value

    def validate(self, val):
        if not isinstance(val, numbers.Real):
            raise ValidationError('expected real number, got %s' %
                                  generic_type_name(val))
        if not isinstance(val, float):
            # This checks for the case where a number is passed in with a
            # magnitude larger than supported by float64.
            try:
                val = float(val)
            except OverflowError:
                raise ValidationError('too large for float')
        if math.isnan(val) or math.isinf(val):
            raise ValidationError('%f values are not supported' % val)
        if self.minimum is not None and val < self.minimum:
            raise ValidationError('%f is not greater than %f' %
                                  (val, self.minimum))
        if self.maximum is not None and val > self.maximum:
            raise ValidationError('%f is not less than %f' %
                                  (val, self.maximum))
        return val

    def __repr__(self):
        return '%s()' % self.__class__.__name__


class Float32(Real):
    # Maximum and minimums from the IEEE 754-1985 standard
    minimum = -3.40282 * 10**38
    maximum = 3.40282 * 10**38


class Float64(Real):
    pass


class String(Primitive):
    """Represents a unicode string."""

    def __init__(self, min_length=None, max_length=None, pattern=None):
        if min_length is not None:
            assert isinstance(min_length, numbers.Integral), \
                'min_length must be an integral number'
            assert min_length >= 0, 'min_length must be >= 0'
        if max_length is not None:
            assert isinstance(max_length, numbers.Integral), \
                'max_length must be an integral number'
            assert max_length > 0, 'max_length must be > 0'
        if min_length and max_length:
            assert max_length >= min_length, 'max_length must be >= min_length'
        if pattern is not None:
            assert isinstance(pattern, six.string_types), \
                'pattern must be a string'

        self.min_length = min_length
        self.max_length = max_length
        self.pattern = pattern
        self.pattern_re = None

        if pattern:
            try:
                self.pattern_re = re.compile(r"\A(?:" + pattern + r")\Z")
            except re.error as e:
                raise AssertionError('Regex {!r} failed: {}'.format(
                    pattern, e.args[0]))

    def validate(self, val):
        """
        A unicode string of the correct length and pattern will pass validation.
        In PY2, we enforce that a str type must be valid utf-8, and a unicode
        string will be returned.
        """
        if not isinstance(val, six.string_types):
            raise ValidationError("'%s' expected to be a string, got %s"
                                  % (val, generic_type_name(val)))
        if not six.PY3 and isinstance(val, str):
            try:
                val = val.decode('utf-8')
            except UnicodeDecodeError:
                raise ValidationError("'%s' was not valid utf-8")

        if self.max_length is not None and len(val) > self.max_length:
            raise ValidationError("'%s' must be at most %d characters, got %d"
                                  % (val, self.max_length, len(val)))
        if self.min_length is not None and len(val) < self.min_length:
            raise ValidationError("'%s' must be at least %d characters, got %d"
                                  % (val, self.min_length, len(val)))

        if self.pattern and not self.pattern_re.match(val):
            raise ValidationError("'%s' did not match pattern '%s'"
                                  % (val, self.pattern))
        return val


class Bytes(Primitive):

    def __init__(self, min_length=None, max_length=None):
        if min_length is not None:
            assert isinstance(min_length, numbers.Integral), \
                'min_length must be an integral number'
            assert min_length >= 0, 'min_length must be >= 0'
        if max_length is not None:
            assert isinstance(max_length, numbers.Integral), \
                'max_length must be an integral number'
            assert max_length > 0, 'max_length must be > 0'
        if min_length is not None and max_length is not None:
            assert max_length >= min_length, 'max_length must be >= min_length'

        self.min_length = min_length
        self.max_length = max_length

    def validate(self, val):
        if not isinstance(val, _binary_types):
            raise ValidationError("expected bytes type, got %s"
                                  % generic_type_name(val))
        elif self.max_length is not None and len(val) > self.max_length:
            raise ValidationError("'%s' must have at most %d bytes, got %d"
                                  % (val, self.max_length, len(val)))
        elif self.min_length is not None and len(val) < self.min_length:
            raise ValidationError("'%s' has fewer than %d bytes, got %d"
                                  % (val, self.min_length, len(val)))
        return val


class Timestamp(Primitive):
    """Note that while a format is specified, it isn't used in validation
    since a native Python datetime object is preferred. The format, however,
    can and should be used by serializers."""

    def __init__(self, fmt):
        """fmt must be composed of format codes that the C standard (1989)
        supports, most notably in its strftime() function."""
        assert isinstance(fmt, six.text_type), 'format must be a string'
        self.format = fmt

    def validate(self, val):
        if not isinstance(val, datetime.datetime):
            raise ValidationError('expected timestamp, got %s'
                                  % generic_type_name(val))
        elif val.tzinfo is not None and \
                val.tzinfo.utcoffset(val).total_seconds() != 0:
            raise ValidationError('timestamp should have either a UTC '
                                  'timezone or none set at all')
        return val


class Composite(Validator):
    """Validator for a type that builds on other primitive and composite
    types."""
    # pylint: disable=abstract-method
    pass


class List(Composite):
    """Assumes list contents are homogeneous with respect to types."""

    def __init__(self, item_validator, min_items=None, max_items=None):
        """Every list item will be validated with item_validator."""
        self.item_validator = item_validator
        if min_items is not None:
            assert isinstance(min_items, numbers.Integral), \
                'min_items must be an integral number'
            assert min_items >= 0, 'min_items must be >= 0'
        if max_items is not None:
            assert isinstance(max_items, numbers.Integral), \
                'max_items must be an integral number'
            assert max_items > 0, 'max_items must be > 0'
        if min_items is not None and max_items is not None:
            assert max_items >= min_items, 'max_items must be >= min_items'

        self.min_items = min_items
        self.max_items = max_items

    def validate(self, val):
        if not isinstance(val, (tuple, list)):
            raise ValidationError('%r is not a valid list' % val)
        elif self.max_items is not None and len(val) > self.max_items:
            raise ValidationError('%r has more than %s items'
                                  % (val, self.max_items))
        elif self.min_items is not None and len(val) < self.min_items:
            raise ValidationError('%r has fewer than %s items'
                                  % (val, self.min_items))
        return [self.item_validator.validate(item) for item in val]


class Map(Composite):
    """Assumes map keys and values are homogeneous with respect to types."""

    def __init__(self, key_validator, value_validator):
        """
        Every Map key/value pair will be validated with item_validator.
        key validators must be a subclass of a String validator
        """
        self.key_validator = key_validator
        self.value_validator = value_validator

    def validate(self, val):
        if not isinstance(val, dict):
            raise ValidationError('%r is not a valid dict' % val)
        return {
            self.key_validator.validate(key):
            self.value_validator.validate(value) for key, value in val.items()
        }


class Struct(Composite):

    def __init__(self, definition):
        """
        Args:
            definition (class): A generated class representing a Stone struct
                from a spec. Must have a _fields_ attribute with the following
                structure:

                _fields_ = [(field_name, validator), ...]

                where
                    field_name: Name of the field (str).
                    validator: Validator object.
        """
        super(Struct, self).__init__()
        self.definition = definition

    def validate(self, val):
        """
        For a val to pass validation, val must be of the correct type and have
        all required fields present.
        """
        self.validate_type_only(val)
        self.validate_fields_only(val)
        return val

    def validate_with_permissions(self, val, caller_permissions):
        """
        For a val to pass validation, val must be of the correct type and have
        all required permissioned fields present. Should only be called
        for callers with extra permissions.
        """
        self.validate(val)
        self.validate_fields_only_with_permissions(val, caller_permissions)
        return val

    def validate_fields_only(self, val):
        """
        To pass field validation, no required field should be missing.

        This method assumes that the contents of each field have already been
        validated on assignment, so it's merely a presence check.

        FIXME(kelkabany): Since the definition object does not maintain a list
        of which fields are required, all fields are scanned.
        """
        for field_name in self.definition._all_field_names_:
            if not hasattr(val, field_name):
                raise ValidationError("missing required field '%s'" %
                                      field_name)

    def validate_fields_only_with_permissions(self, val, caller_permissions):
        """
        To pass field validation, no required field should be missing.
        This method assumes that the contents of each field have already been
        validated on assignment, so it's merely a presence check.
        Should only be called for callers with extra permissions.
        """
        self.validate_fields_only(val)

        # check if type has been patched
        for extra_permission in caller_permissions.permissions:
            all_field_names = '_all_{}_field_names_'.format(extra_permission)
            for field_name in getattr(self.definition, all_field_names, set()):
                if not hasattr(val, field_name):
                    raise ValidationError("missing required field '%s'" % field_name)

    def validate_type_only(self, val):
        """
        Use this when you only want to validate that the type of an object
        is correct, but not yet validate each field.
        """
        # Since the definition maintains the list of fields for serialization,
        # we're okay with a subclass that might have extra information. This
        # makes it easier to return one subclass for two routes, one of which
        # relies on the parent class.
        if not isinstance(val, self.definition):
            raise ValidationError('expected type %s, got %s' %
                                  (self.definition.__name__, generic_type_name(val)))

    def has_default(self):
        return not self.definition._has_required_fields

    def get_default(self):
        assert not self.definition._has_required_fields, 'No default available.'
        return self.definition()


class StructTree(Struct):
    """Validator for structs with enumerated subtypes.

    NOTE: validate_fields_only() validates the fields known to this base
    struct, but does not do any validation specific to the subtype.
    """

    # See PyCQA/pylint#1043 for why this is disabled; this should show up
    # as a useless-suppression (and can be removed) once a fix is released
    def __init__(self, definition):  # pylint: disable=useless-super-delegation
        super(StructTree, self).__init__(definition)


class Union(Composite):

    def __init__(self, definition):
        """
        Args:
            definition (class): A generated class representing a Stone union
                from a spec. Must have a _tagmap attribute with the following
                structure:

                _tagmap = {field_name: validator, ...}

                where
                    field_name (str): Tag name.
                    validator (Validator): Tag value validator.
        """
        self.definition = definition

    def validate(self, val):
        """
        For a val to pass validation, it must have a _tag set. This assumes
        that the object validated that _tag is a valid tag, and that any
        associated value has also been validated.
        """
        self.validate_type_only(val)
        if not hasattr(val, '_tag') or val._tag is None:
            raise ValidationError('no tag set')
        return val

    def validate_type_only(self, val):
        """
        Use this when you only want to validate that the type of an object
        is correct, but not yet validate each field.

        We check whether val is a Python parent class of the definition. This
        is because Union subtyping works in the opposite direction of Python
        inheritance. For example, if a union U2 extends U1 in Python, this
        validator will accept U1 in places where U2 is expected.
        """
        if not issubclass(self.definition, type(val)):
            raise ValidationError('expected type %s or subtype, got %s' %
                                  (self.definition.__name__, generic_type_name(val)))


class Void(Primitive):

    def validate(self, val):
        if val is not None:
            raise ValidationError('expected NoneType, got %s' %
                                  generic_type_name(val))

    def has_default(self):
        return True

    def get_default(self):
        return None


class Nullable(Validator):

    def __init__(self, validator):
        assert isinstance(validator, (Primitive, Composite)), \
            'validator must be for a primitive or composite type'
        assert not isinstance(validator, Nullable), \
            'nullables cannot be stacked'
        assert not isinstance(validator, Void), \
            'void cannot be made nullable'
        self.validator = validator

    def validate(self, val):
        if val is None:
            return
        else:
            return self.validator.validate(val)

    def validate_type_only(self, val):
        """Use this only if Nullable is wrapping a Composite."""
        if val is None:
            return
        else:
            return self.validator.validate_type_only(val)

    def has_default(self):
        return True

    def get_default(self):
        return None


class Redactor(object):
    def __init__(self, regex):
        """
        Args:
            regex: What parts of the field to redact.
        """
        self.regex = regex

    @abstractmethod
    def apply(self, val):
        """Redacts information from annotated field.
        Returns: A redacted version of the string provided.
        """
        pass

    def _get_matches(self, val):
        if not self.regex:
            return None
        try:
            return re.search(self.regex, val)
        except TypeError:
            return None


class HashRedactor(Redactor):
    def apply(self, val):
        matches = self._get_matches(val)

        val_to_hash = str(val) if isinstance(val, int) or isinstance(val, float) else val

        try:
            # add string literal to ensure unicode
            hashed = hashlib.md5(val_to_hash.encode('utf-8')).hexdigest() + ''
        except (AttributeError, ValueError):
            hashed = None

        if matches:
            blotted = '***'.join(matches.groups())
            if hashed:
                return '{} ({})'.format(hashed, blotted)
            return blotted
        return hashed


class BlotRedactor(Redactor):
    def apply(self, val):
        matches = self._get_matches(val)
        if matches:
            return '***'.join(matches.groups())
        return '********'
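As a rough usage sketch (assuming the bundled module is importable, e.g. as dropbox.stone_validators), these validators normalize a value on success and raise ValidationError on failure:

from dropbox import stone_validators as bv

name_v = bv.String(min_length=1, max_length=10)
print(name_v.validate('backup'))   # passes, returns 'backup'

list_v = bv.List(bv.UInt32(), max_items=3)
print(list_v.validate([1, 2, 3]))  # each item checked by UInt32, list returned

opt_v = bv.Nullable(bv.Boolean())
print(opt_v.validate(None))        # None is allowed once wrapped in Nullable

try:
    name_v.validate('far too long a string')
except bv.ValidationError as err:
    print(err)                     # message names the offending value and limit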
19709  resources/lib/dropbox/team.py  (new file; diff suppressed because it is too large)

572  resources/lib/dropbox/team_common.py  (new file)
@@ -0,0 +1,572 @@
# -*- coding: utf-8 -*-
# Auto-generated by Stone, do not modify.
# @generated
# flake8: noqa
# pylint: skip-file
try:
    from . import stone_validators as bv
    from . import stone_base as bb
except (ImportError, SystemError, ValueError):
    # Catch errors raised when importing a relative module when not in a package.
    # This makes testing this file directly (outside of a package) easier.
    import stone_validators as bv
    import stone_base as bb

try:
    from . import (
        common,
    )
except (ImportError, SystemError, ValueError):
    import common


class GroupManagementType(bb.Union):
    """
    The group type determines how a group is managed.

    This class acts as a tagged union. Only one of the ``is_*`` methods will
    return true. To get the associated value of a tag (if one exists), use the
    corresponding ``get_*`` method.

    :ivar team_common.GroupManagementType.user_managed: A group which is managed
        by selected users.
    :ivar team_common.GroupManagementType.company_managed: A group which is
        managed by team admins only.
    :ivar team_common.GroupManagementType.system_managed: A group which is
        managed automatically by Dropbox.
    """

    _catch_all = 'other'
    # Attribute is overwritten below the class definition
    user_managed = None
    # Attribute is overwritten below the class definition
    company_managed = None
    # Attribute is overwritten below the class definition
    system_managed = None
    # Attribute is overwritten below the class definition
    other = None

    def is_user_managed(self):
        """
        Check if the union tag is ``user_managed``.

        :rtype: bool
        """
        return self._tag == 'user_managed'

    def is_company_managed(self):
        """
        Check if the union tag is ``company_managed``.

        :rtype: bool
        """
        return self._tag == 'company_managed'

    def is_system_managed(self):
        """
        Check if the union tag is ``system_managed``.

        :rtype: bool
        """
        return self._tag == 'system_managed'

    def is_other(self):
        """
        Check if the union tag is ``other``.

        :rtype: bool
        """
        return self._tag == 'other'

    def _process_custom_annotations(self, annotation_type, field_path, processor):
        super(GroupManagementType, self)._process_custom_annotations(annotation_type, field_path, processor)

    def __repr__(self):
        return 'GroupManagementType(%r, %r)' % (self._tag, self._value)

GroupManagementType_validator = bv.Union(GroupManagementType)


class GroupSummary(bb.Struct):
    """
    Information about a group.

    :ivar team_common.GroupSummary.group_external_id: External ID of group. This
        is an arbitrary ID that an admin can attach to a group.
    :ivar team_common.GroupSummary.member_count: The number of members in the
        group.
    :ivar team_common.GroupSummary.group_management_type: Who is allowed to
        manage the group.
    """

    __slots__ = [
        '_group_name_value',
        '_group_name_present',
        '_group_id_value',
        '_group_id_present',
        '_group_external_id_value',
        '_group_external_id_present',
        '_member_count_value',
        '_member_count_present',
        '_group_management_type_value',
        '_group_management_type_present',
    ]

    _has_required_fields = True

    def __init__(self,
                 group_name=None,
                 group_id=None,
                 group_management_type=None,
                 group_external_id=None,
                 member_count=None):
        self._group_name_value = None
        self._group_name_present = False
        self._group_id_value = None
        self._group_id_present = False
        self._group_external_id_value = None
        self._group_external_id_present = False
        self._member_count_value = None
        self._member_count_present = False
        self._group_management_type_value = None
        self._group_management_type_present = False
        if group_name is not None:
            self.group_name = group_name
        if group_id is not None:
            self.group_id = group_id
        if group_external_id is not None:
            self.group_external_id = group_external_id
        if member_count is not None:
            self.member_count = member_count
        if group_management_type is not None:
            self.group_management_type = group_management_type

    @property
    def group_name(self):
        """
        :rtype: str
        """
        if self._group_name_present:
            return self._group_name_value
        else:
            raise AttributeError("missing required field 'group_name'")

    @group_name.setter
    def group_name(self, val):
        val = self._group_name_validator.validate(val)
        self._group_name_value = val
        self._group_name_present = True

    @group_name.deleter
    def group_name(self):
        self._group_name_value = None
        self._group_name_present = False

    @property
    def group_id(self):
        """
        :rtype: str
        """
        if self._group_id_present:
            return self._group_id_value
        else:
            raise AttributeError("missing required field 'group_id'")

    @group_id.setter
    def group_id(self, val):
        val = self._group_id_validator.validate(val)
        self._group_id_value = val
        self._group_id_present = True

    @group_id.deleter
    def group_id(self):
        self._group_id_value = None
        self._group_id_present = False

    @property
    def group_external_id(self):
        """
        External ID of group. This is an arbitrary ID that an admin can attach
        to a group.

        :rtype: str
        """
        if self._group_external_id_present:
            return self._group_external_id_value
        else:
            return None

    @group_external_id.setter
    def group_external_id(self, val):
        if val is None:
            del self.group_external_id
            return
        val = self._group_external_id_validator.validate(val)
        self._group_external_id_value = val
        self._group_external_id_present = True

    @group_external_id.deleter
    def group_external_id(self):
        self._group_external_id_value = None
        self._group_external_id_present = False

    @property
    def member_count(self):
        """
        The number of members in the group.

        :rtype: int
        """
        if self._member_count_present:
            return self._member_count_value
        else:
            return None

    @member_count.setter
    def member_count(self, val):
        if val is None:
            del self.member_count
            return
        val = self._member_count_validator.validate(val)
        self._member_count_value = val
        self._member_count_present = True

    @member_count.deleter
    def member_count(self):
        self._member_count_value = None
        self._member_count_present = False

    @property
    def group_management_type(self):
        """
        Who is allowed to manage the group.

        :rtype: GroupManagementType
        """
        if self._group_management_type_present:
            return self._group_management_type_value
        else:
            raise AttributeError("missing required field 'group_management_type'")

    @group_management_type.setter
    def group_management_type(self, val):
        self._group_management_type_validator.validate_type_only(val)
        self._group_management_type_value = val
        self._group_management_type_present = True

    @group_management_type.deleter
    def group_management_type(self):
        self._group_management_type_value = None
        self._group_management_type_present = False

    def _process_custom_annotations(self, annotation_type, field_path, processor):
        super(GroupSummary, self)._process_custom_annotations(annotation_type, field_path, processor)

    def __repr__(self):
        return 'GroupSummary(group_name={!r}, group_id={!r}, group_management_type={!r}, group_external_id={!r}, member_count={!r})'.format(
            self._group_name_value,
            self._group_id_value,
            self._group_management_type_value,
            self._group_external_id_value,
            self._member_count_value,
        )

GroupSummary_validator = bv.Struct(GroupSummary)


class GroupType(bb.Union):
    """
    The group type determines how a group is created and managed.

    This class acts as a tagged union. Only one of the ``is_*`` methods will
    return true. To get the associated value of a tag (if one exists), use the
    corresponding ``get_*`` method.

    :ivar team_common.GroupType.team: A group to which team members are
        automatically added. Applicable to `team folders
        <https://www.dropbox.com/help/986>`_ only.
    :ivar team_common.GroupType.user_managed: A group is created and managed by
        a user.
    """

    _catch_all = 'other'
    # Attribute is overwritten below the class definition
    team = None
    # Attribute is overwritten below the class definition
    user_managed = None
    # Attribute is overwritten below the class definition
    other = None

    def is_team(self):
        """
        Check if the union tag is ``team``.

        :rtype: bool
        """
        return self._tag == 'team'

    def is_user_managed(self):
        """
        Check if the union tag is ``user_managed``.

        :rtype: bool
        """
        return self._tag == 'user_managed'

    def is_other(self):
        """
        Check if the union tag is ``other``.

        :rtype: bool
        """
        return self._tag == 'other'

    def _process_custom_annotations(self, annotation_type, field_path, processor):
        super(GroupType, self)._process_custom_annotations(annotation_type, field_path, processor)

    def __repr__(self):
        return 'GroupType(%r, %r)' % (self._tag, self._value)

GroupType_validator = bv.Union(GroupType)


class MemberSpaceLimitType(bb.Union):
    """
    The type of the space limit imposed on a team member.

    This class acts as a tagged union. Only one of the ``is_*`` methods will
    return true. To get the associated value of a tag (if one exists), use the
    corresponding ``get_*`` method.

    :ivar team_common.MemberSpaceLimitType.off: The team member does not have
        imposed space limit.
    :ivar team_common.MemberSpaceLimitType.alert_only: The team member has soft
        imposed space limit - the limit is used for display and for
        notifications.
    :ivar team_common.MemberSpaceLimitType.stop_sync: The team member has hard
        imposed space limit - Dropbox file sync will stop after the limit is
        reached.
    """

    _catch_all = 'other'
    # Attribute is overwritten below the class definition
    off = None
    # Attribute is overwritten below the class definition
    alert_only = None
    # Attribute is overwritten below the class definition
    stop_sync = None
    # Attribute is overwritten below the class definition
    other = None

    def is_off(self):
        """
        Check if the union tag is ``off``.

        :rtype: bool
        """
        return self._tag == 'off'

    def is_alert_only(self):
        """
        Check if the union tag is ``alert_only``.

        :rtype: bool
        """
        return self._tag == 'alert_only'

    def is_stop_sync(self):
        """
        Check if the union tag is ``stop_sync``.

        :rtype: bool
        """
        return self._tag == 'stop_sync'

    def is_other(self):
        """
        Check if the union tag is ``other``.

        :rtype: bool
        """
        return self._tag == 'other'

    def _process_custom_annotations(self, annotation_type, field_path, processor):
        super(MemberSpaceLimitType, self)._process_custom_annotations(annotation_type, field_path, processor)

    def __repr__(self):
        return 'MemberSpaceLimitType(%r, %r)' % (self._tag, self._value)

MemberSpaceLimitType_validator = bv.Union(MemberSpaceLimitType)


class TimeRange(bb.Struct):
    """
    Time range.

    :ivar team_common.TimeRange.start_time: Optional starting time (inclusive).
    :ivar team_common.TimeRange.end_time: Optional ending time (exclusive).
    """

    __slots__ = [
        '_start_time_value',
        '_start_time_present',
        '_end_time_value',
        '_end_time_present',
    ]

    _has_required_fields = False

    def __init__(self,
                 start_time=None,
                 end_time=None):
        self._start_time_value = None
        self._start_time_present = False
        self._end_time_value = None
        self._end_time_present = False
        if start_time is not None:
            self.start_time = start_time
        if end_time is not None:
            self.end_time = end_time

    @property
    def start_time(self):
        """
        Optional starting time (inclusive).

        :rtype: datetime.datetime
        """
        if self._start_time_present:
            return self._start_time_value
        else:
            return None

    @start_time.setter
    def start_time(self, val):
        if val is None:
            del self.start_time
            return
        val = self._start_time_validator.validate(val)
        self._start_time_value = val
        self._start_time_present = True

    @start_time.deleter
    def start_time(self):
        self._start_time_value = None
        self._start_time_present = False

    @property
    def end_time(self):
        """
        Optional ending time (exclusive).

        :rtype: datetime.datetime
        """
        if self._end_time_present:
            return self._end_time_value
        else:
            return None

    @end_time.setter
    def end_time(self, val):
        if val is None:
            del self.end_time
            return
        val = self._end_time_validator.validate(val)
        self._end_time_value = val
        self._end_time_present = True

    @end_time.deleter
    def end_time(self):
        self._end_time_value = None
        self._end_time_present = False

    def _process_custom_annotations(self, annotation_type, field_path, processor):
        super(TimeRange, self)._process_custom_annotations(annotation_type, field_path, processor)

    def __repr__(self):
        return 'TimeRange(start_time={!r}, end_time={!r})'.format(
            self._start_time_value,
            self._end_time_value,
        )

TimeRange_validator = bv.Struct(TimeRange)

GroupExternalId_validator = bv.String()
GroupId_validator = bv.String()
MemberExternalId_validator = bv.String(max_length=64)
ResellerId_validator = bv.String()
TeamMemberId_validator = bv.String()
GroupManagementType._user_managed_validator = bv.Void()
GroupManagementType._company_managed_validator = bv.Void()
GroupManagementType._system_managed_validator = bv.Void()
GroupManagementType._other_validator = bv.Void()
GroupManagementType._tagmap = {
    'user_managed': GroupManagementType._user_managed_validator,
    'company_managed': GroupManagementType._company_managed_validator,
    'system_managed': GroupManagementType._system_managed_validator,
    'other': GroupManagementType._other_validator,
}

GroupManagementType.user_managed = GroupManagementType('user_managed')
GroupManagementType.company_managed = GroupManagementType('company_managed')
GroupManagementType.system_managed = GroupManagementType('system_managed')
GroupManagementType.other = GroupManagementType('other')

GroupSummary._group_name_validator = bv.String()
GroupSummary._group_id_validator = GroupId_validator
GroupSummary._group_external_id_validator = bv.Nullable(GroupExternalId_validator)
GroupSummary._member_count_validator = bv.Nullable(bv.UInt32())
GroupSummary._group_management_type_validator = GroupManagementType_validator
GroupSummary._all_field_names_ = set([
    'group_name',
    'group_id',
    'group_external_id',
    'member_count',
    'group_management_type',
])
GroupSummary._all_fields_ = [
    ('group_name', GroupSummary._group_name_validator),
    ('group_id', GroupSummary._group_id_validator),
    ('group_external_id', GroupSummary._group_external_id_validator),
    ('member_count', GroupSummary._member_count_validator),
    ('group_management_type', GroupSummary._group_management_type_validator),
]

GroupType._team_validator = bv.Void()
GroupType._user_managed_validator = bv.Void()
GroupType._other_validator = bv.Void()
GroupType._tagmap = {
    'team': GroupType._team_validator,
    'user_managed': GroupType._user_managed_validator,
    'other': GroupType._other_validator,
}

GroupType.team = GroupType('team')
GroupType.user_managed = GroupType('user_managed')
GroupType.other = GroupType('other')

MemberSpaceLimitType._off_validator = bv.Void()
MemberSpaceLimitType._alert_only_validator = bv.Void()
MemberSpaceLimitType._stop_sync_validator = bv.Void()
MemberSpaceLimitType._other_validator = bv.Void()
MemberSpaceLimitType._tagmap = {
    'off': MemberSpaceLimitType._off_validator,
    'alert_only': MemberSpaceLimitType._alert_only_validator,
    'stop_sync': MemberSpaceLimitType._stop_sync_validator,
    'other': MemberSpaceLimitType._other_validator,
}

MemberSpaceLimitType.off = MemberSpaceLimitType('off')
MemberSpaceLimitType.alert_only = MemberSpaceLimitType('alert_only')
MemberSpaceLimitType.stop_sync = MemberSpaceLimitType('stop_sync')
MemberSpaceLimitType.other = MemberSpaceLimitType('other')

TimeRange._start_time_validator = bv.Nullable(common.DropboxTimestamp_validator)
TimeRange._end_time_validator = bv.Nullable(common.DropboxTimestamp_validator)
TimeRange._all_field_names_ = set([
    'start_time',
    'end_time',
])
TimeRange._all_fields_ = [
    ('start_time', TimeRange._start_time_validator),
    ('end_time', TimeRange._end_time_validator),
]

ROUTES = {
}
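A short sketch of how the generated types above behave (assuming the dropbox package bundled here is on sys.path):

from dropbox import team_common

mgmt = team_common.GroupManagementType.company_managed
print(mgmt.is_company_managed())  # True: exactly one is_* method returns True
print(mgmt.is_user_managed())     # False

group = team_common.GroupSummary(group_name='Editors',
                                 group_id='g:123',
                                 group_management_type=mgmt)
print(group.group_name)           # 'Editors', validated on assignment
print(group.member_count)         # None: optional field was never set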
78874  resources/lib/dropbox/team_log.py  (new file; diff suppressed because it is too large)
1661  resources/lib/dropbox/team_policies.py  (new file; diff suppressed because it is too large)
1396  resources/lib/dropbox/trusted-certs.crt  (new file; diff suppressed because it is too large)
1881  resources/lib/dropbox/users.py  (new file; diff suppressed because it is too large)

88  resources/lib/dropbox/users_common.py  (new file)
@@ -0,0 +1,88 @@
# -*- coding: utf-8 -*-
# Auto-generated by Stone, do not modify.
# @generated
# flake8: noqa
# pylint: skip-file
"""
This namespace contains common data types used within the users namespace.
"""

try:
    from . import stone_validators as bv
    from . import stone_base as bb
except (ImportError, SystemError, ValueError):
    # Catch errors raised when importing a relative module when not in a package.
    # This makes testing this file directly (outside of a package) easier.
    import stone_validators as bv
    import stone_base as bb


class AccountType(bb.Union):
    """
    What type of account this user has.

    This class acts as a tagged union. Only one of the ``is_*`` methods will
    return true. To get the associated value of a tag (if one exists), use the
    corresponding ``get_*`` method.

    :ivar users_common.AccountType.basic: The basic account type.
    :ivar users_common.AccountType.pro: The Dropbox Pro account type.
    :ivar users_common.AccountType.business: The Dropbox Business account type.
    """

    _catch_all = None
    # Attribute is overwritten below the class definition
    basic = None
    # Attribute is overwritten below the class definition
    pro = None
    # Attribute is overwritten below the class definition
    business = None

    def is_basic(self):
        """
        Check if the union tag is ``basic``.

        :rtype: bool
        """
        return self._tag == 'basic'

    def is_pro(self):
        """
        Check if the union tag is ``pro``.

        :rtype: bool
        """
        return self._tag == 'pro'

    def is_business(self):
        """
        Check if the union tag is ``business``.

        :rtype: bool
        """
        return self._tag == 'business'

    def _process_custom_annotations(self, annotation_type, field_path, processor):
        super(AccountType, self)._process_custom_annotations(annotation_type, field_path, processor)

    def __repr__(self):
        return 'AccountType(%r, %r)' % (self._tag, self._value)

AccountType_validator = bv.Union(AccountType)

AccountId_validator = bv.String(min_length=40, max_length=40)
AccountType._basic_validator = bv.Void()
AccountType._pro_validator = bv.Void()
AccountType._business_validator = bv.Void()
AccountType._tagmap = {
    'basic': AccountType._basic_validator,
    'pro': AccountType._pro_validator,
    'business': AccountType._business_validator,
}

AccountType.basic = AccountType('basic')
AccountType.pro = AccountType('pro')
AccountType.business = AccountType('business')

ROUTES = {
}
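The same pattern holds for the users namespace; note that AccountId_validator enforces the fixed 40-character account ID length (again a sketch, assuming the bundled package is importable):

from dropbox import users_common
from dropbox import stone_validators as bv

acct = users_common.AccountType.pro
print(acct.is_pro())       # True
print(acct.is_business())  # False

try:
    users_common.AccountId_validator.validate('too-short')
except bv.ValidationError as err:
    print(err)             # rejected: account IDs must be exactly 40 characters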
@@ -1,31 +1,31 @@
 from . import utils as utils
 
 
 class ZipExtractor:
 
-    def extract(self,zipFile,outLoc,progressBar):
+    def extract(self, zipFile, outLoc, progressBar):
         utils.log("extracting zip archive")
 
-        result = True #result is true unless we fail
+        result = True  # result is true unless we fail
 
-        #update the progress bar
-        progressBar.updateProgress(0,utils.getString(30100))
+        # update the progress bar
+        progressBar.updateProgress(0, utils.getString(30100))
 
-        #list the files
+        # list the files
         fileCount = float(len(zipFile.listFiles()))
         currentFile = 0
 
         try:
             for aFile in zipFile.listFiles():
-                #update the progress bar
+                # update the progress bar
                 currentFile += 1
-                progressBar.updateProgress(int((currentFile/fileCount) * 100),utils.getString(30100))
+                progressBar.updateProgress(int((currentFile / fileCount) * 100), utils.getString(30100))
 
-                #extract the file
-                zipFile.extract(aFile,outLoc)
+                # extract the file
+                zipFile.extract(aFile, outLoc)
 
-        except Exception as e:
+        except Exception:
             utils.log("Error extracting file")
             result = False
 
         return result
@@ -1,5 +1,6 @@
 import json
-import xbmc,xbmcvfs
+import xbmc
+import xbmcvfs
 from . import utils as utils
 from xml.dom import minidom
 from xml.parsers.expat import ExpatError
@@ -7,67 +8,65 @@ from xml.parsers.expat import ExpatError

 class GuiSettingsManager:
     doc = None

     def __init__(self):
-        #first make a copy of the file
+        # first make a copy of the file
         xbmcvfs.copy(xbmc.translatePath('special://home/userdata/guisettings.xml'), xbmc.translatePath("special://home/userdata/guisettings.xml.restored"))

-        #read in the copy
+        # read in the copy
         self._readFile(xbmc.translatePath('special://home/userdata/guisettings.xml.restored'))

     def run(self):
-        #get a list of all the settings we can manipulate via json
+        # get a list of all the settings we can manipulate via json
         json_response = json.loads(xbmc.executeJSONRPC('{"jsonrpc":"2.0", "id":1, "method":"Settings.GetSettings","params":{"level":"advanced"}}'))

         settings = json_response['result']['settings']
         currentSettings = {}

         for aSetting in settings:
             if('value' in aSetting):
                 currentSettings[aSetting['id']] = aSetting['value']

-        #parse the existing xml file and get all the settings we need to restore
+        # parse the existing xml file and get all the settings we need to restore
         restoreSettings = self.__parseNodes(self.doc.getElementsByTagName('setting'))

-        #get a list where the restore setting value != the current value
+        # get a list where the restore setting value != the current value
         updateSettings = {k: v for k, v in list(restoreSettings.items()) if (k in currentSettings and currentSettings[k] != v)}

-        #go through all the found settings and update them
-        jsonObj = {"jsonrpc":"2.0","id":1,"method":"Settings.SetSettingValue","params":{"setting":"","value":""}}
+        # go through all the found settings and update them
+        jsonObj = {"jsonrpc": "2.0", "id": 1, "method": "Settings.SetSettingValue", "params": {"setting": "", "value": ""}}
         for anId, aValue in list(updateSettings.items()):
             utils.log("updating: " + anId + ", value: " + str(aValue))

             jsonObj['params']['setting'] = anId
             jsonObj['params']['value'] = aValue

             xbmc.executeJSONRPC(json.dumps(jsonObj))

-    def __parseNodes(self,nodeList):
+    def __parseNodes(self, nodeList):
         result = {}

         for node in nodeList:
             nodeValue = ''
-            if(node.firstChild != None):
+            if(node.firstChild is not None):
                 nodeValue = node.firstChild.nodeValue

-            #check for numbers and booleans
+            # check for numbers and booleans
             if(nodeValue.isdigit()):
                 nodeValue = int(nodeValue)
             elif(nodeValue == 'true'):
                 nodeValue = True
             elif(nodeValue == 'false'):
                 nodeValue = False

             result[node.getAttribute('id')] = nodeValue

         return result

-    def _readFile(self,fileLoc):
+    def _readFile(self, fileLoc):

         if(xbmcvfs.exists(fileLoc)):
             try:
                 self.doc = minidom.parse(fileLoc)
             except ExpatError:
                 utils.log("Can't read " + fileLoc)
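
For reference, the restore loop above is just Kodi's JSON-RPC settings API used twice. A minimal sketch of that round trip, assuming it runs inside a Kodi Python interpreter (`locale.language` is only an illustrative setting id):

```python
import json

import xbmc

# read one GUI setting, the same way run() snapshots current values
query = {"jsonrpc": "2.0", "id": 1,
         "method": "Settings.GetSettingValue",
         "params": {"setting": "locale.language"}}
current = json.loads(xbmc.executeJSONRPC(json.dumps(query)))['result']['value']

# write it back, the same way the update loop does
update = {"jsonrpc": "2.0", "id": 1,
          "method": "Settings.SetSettingValue",
          "params": {"setting": "locale.language", "value": current}}
xbmc.executeJSONRPC(json.dumps(update))
```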
@@ -1,6 +1,7 @@
 import xbmcgui
 from . import utils as utils

+
 class BackupProgressBar:
     NONE = 2
     DIALOG = 0
@@ -9,13 +10,13 @@ class BackupProgressBar:
     mode = 2
     progressBar = None
     override = False

-    def __init__(self,progressOverride):
+    def __init__(self, progressOverride):
         self.override = progressOverride

-        #check if we should use the progress bar
+        # check if we should use the progress bar
         if(int(utils.getSetting('progress_mode')) != 2):
-            #check if background or normal
+            # check if background or normal
             if(int(utils.getSetting('progress_mode')) == 0 and not self.override):
                 self.mode = self.DIALOG
                 self.progressBar = xbmcgui.DialogProgress()
@@ -23,20 +24,20 @@ class BackupProgressBar:
                 self.mode = self.BACKGROUND
                 self.progressBar = xbmcgui.DialogProgressBG()

-    def create(self,heading,message):
+    def create(self, heading, message):
         if(self.mode != self.NONE):
-            self.progressBar.create(heading,message)
+            self.progressBar.create(heading, message)

-    def updateProgress(self,percent,message=None):
+    def updateProgress(self, percent, message=None):

-        #update the progress bar
+        # update the progress bar
         if(self.mode != self.NONE):
-            if(message != None):
-                #need different calls for dialog and background bars
+            if(message is not None):
+                # need different calls for dialog and background bars
                 if(self.mode == self.DIALOG):
-                    self.progressBar.update(percent,message)
+                    self.progressBar.update(percent, message)
                 else:
-                    self.progressBar.update(percent,message=message)
+                    self.progressBar.update(percent, message=message)
             else:
                 self.progressBar.update(percent)
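
A hypothetical driver for the class above, using only the `__init__`, `create`, and `updateProgress` methods shown in this diff; the module path and the file list are illustrative assumptions:

```python
from resources.lib.progressbar import BackupProgressBar  # assumed module path

progress = BackupProgressBar(False)  # False = honor the configured progress_mode
progress.create('Backup', 'Starting backup')

fileList = ['guisettings.xml', 'sources.xml', 'favourites.xml']
for index, name in enumerate(fileList, start=1):
    # percent must be an int; the message argument is optional
    progress.updateProgress(int(index / float(len(fileList)) * 100), name)
```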
@@ -1,185 +0,0 @@
-Copyright 2013 Google Inc. All Rights Reserved.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-                              Apache License
-                        Version 2.0, January 2004
-                     http://www.apache.org/licenses/
-
-[remainder of the deleted file: the unmodified standard Apache License, Version 2.0 terms and conditions, Sections 1 through 9]
@@ -1,174 +0,0 @@
-class ApiAttribute(object):
-  """A data descriptor that sets and returns values."""
-
-  def __init__(self, name):
-    """Create an instance of ApiAttribute.
-
-    :param name: name of this attribute.
-    :type name: str.
-    """
-    self.name = name
-
-  def __get__(self, obj, type=None):
-    """Accesses value of this attribute."""
-    return obj.attr.get(self.name)
-
-  def __set__(self, obj, value):
-    """Write value of this attribute."""
-    obj.attr[self.name] = value
-    if obj.dirty.get(self.name) is not None:
-      obj.dirty[self.name] = True
-
-  def __del__(self, obj=None):
-    """Delete value of this attribute."""
-    if(obj != None):
-      del obj.attr[self.name]
-      if obj.dirty.get(self.name) is not None:
-        del obj.dirty[self.name]
-
-
-class ApiAttributeMixin(object):
-  """Mixin to initialize required global variables to use ApiAttribute."""
-
-  def __init__(self):
-    self.attr = {}
-    self.dirty = {}
-
-
-class ApiResource(dict):
-  """Super class of all api resources.
-
-  Inherits and behaves as a python dictionary to handle api resources.
-  Save clean copy of metadata in self.metadata as a dictionary.
-  Provides changed metadata elements to efficiently update api resources.
-  """
-  auth = ApiAttribute('auth')
-
-  def __init__(self, *args, **kwargs):
-    """Create an instance of ApiResource."""
-    self.update(*args, **kwargs)
-
-  def __getitem__(self, key):
-    """Overwritten method of dictionary.
-
-    :param key: key of the query.
-    :type key: str.
-    :returns: value of the query.
-    """
-    return dict.__getitem__(self, key)
-
-  def __setitem__(self, key, val):
-    """Overwritten method of dictionary.
-
-    :param key: key of the query.
-    :type key: str.
-    :param val: value of the query.
-    """
-    dict.__setitem__(self, key, val)
-
-  def __repr__(self):
-    """Overwritten method of dictionary."""
-    dictrepr = dict.__repr__(self)
-    return '%s(%s)' % (type(self).__name__, dictrepr)
-
-  def update(self, *args, **kwargs):
-    """Overwritten method of dictionary."""
-    for k, v in dict(*args, **kwargs).iteritems():
-      self[k] = v
-
-  def UpdateMetadata(self, metadata=None):
-    """Update metadata and mark all of them to be clean."""
-    if metadata:
-      self.update(metadata)
-    self.metadata = dict(self)
-
-  def GetChanges(self):
-    """Returns changed metadata elements to update api resources efficiently.
-
-    :returns: dict -- changed metadata elements.
-    """
-    dirty = {}
-    for key in self:
-      if self.metadata.get(key) is None:
-        dirty[key] = self[key]
-      elif self.metadata[key] != self[key]:
-        dirty[key] = self[key]
-    return dirty
-
-
-class ApiResourceList(ApiAttributeMixin, ApiResource):
-  """Abstract class of all api list resources.
-
-  Inherits ApiResource and builds iterator to list any API resource.
-  """
-  metadata = ApiAttribute('metadata')
-
-  def __init__(self, auth=None, metadata=None):
-    """Create an instance of ApiResourceList.
-
-    :param auth: authorized GoogleAuth instance.
-    :type auth: GoogleAuth.
-    :param metadata: parameter to send to list command.
-    :type metadata: dict.
-    """
-    ApiAttributeMixin.__init__(self)
-    ApiResource.__init__(self)
-    self.auth = auth
-    self.UpdateMetadata()
-    if metadata:
-      self.update(metadata)
-
-  def __iter__(self):
-    """Returns iterator object.
-
-    :returns: ApiResourceList -- self
-    """
-    return self
-
-  def next(self):
-    """Make API call to list resources and return them.
-
-    Auto updates 'pageToken' everytime it makes API call and
-    raises StopIteration when it reached the end of iteration.
-
-    :returns: list -- list of API resources.
-    :raises: StopIteration
-    """
-    if 'pageToken' in self and self['pageToken'] is None:
-      raise StopIteration
-    result = self._GetList()
-    self['pageToken'] = self.metadata.get('nextPageToken')
-    return result
-
-  def GetList(self):
-    """Get list of API resources.
-
-    If 'maxResults' is not specified, it will automatically iterate through
-    every resources available. Otherwise, it will make API call once and
-    update 'pageToken'.
-
-    :returns: list -- list of API resources.
-    """
-    if self.get('maxResults') is None:
-      self['maxResults'] = 1000
-      result = []
-      for x in self:
-        result.extend(x)
-      del self['maxResults']
-      return result
-    else:
-      return self.next()
-
-  def _GetList(self):
-    """Helper function which actually makes API call.
-
-    Should be overwritten.
-
-    :raises: NotImplementedError
-    """
-    raise NotImplementedError
-
-  def Reset(self):
-    """Resets current iteration"""
-    if 'pageToken' in self:
-      del self['pageToken']
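
The module deleted above is the vendored pydrive attribute layer. Its core trick, condensed here into a standalone, runnable illustration (names are mine, not the library's), is a data descriptor that proxies every read and write through per-instance `attr`/`dirty` dictionaries so that changed metadata can be diffed later:

```python
class TrackedAttribute(object):
    """Condensed stand-in for the deleted ApiAttribute descriptor."""

    def __init__(self, name):
        self.name = name

    def __get__(self, obj, type=None):
        return obj.attr.get(self.name)

    def __set__(self, obj, value):
        obj.attr[self.name] = value
        if obj.dirty.get(self.name) is not None:
            obj.dirty[self.name] = True  # remember that this field changed


class Resource(object):
    title = TrackedAttribute('title')

    def __init__(self):
        self.attr = {}                 # backing store for descriptor values
        self.dirty = {'title': False}  # fields eligible for change tracking


r = Resource()
r.title = 'backup.zip'
print(r.title, r.dirty)  # -> backup.zip {'title': True}
```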
@@ -1,415 +0,0 @@
-import socket
-import webbrowser
-import httplib2
-import oauth2client.clientsecrets as clientsecrets
-
-from googleapiclient.discovery import build
-from functools import wraps
-from oauth2client.client import FlowExchangeError
-from oauth2client.client import AccessTokenRefreshError
-from oauth2client.client import OAuth2WebServerFlow
-from oauth2client.client import OOB_CALLBACK_URN
-from oauth2client.file import Storage
-from oauth2client.tools import ClientRedirectHandler
-from oauth2client.tools import ClientRedirectServer
-from oauth2client._helpers import scopes_to_string
-from .apiattr import ApiAttribute
-from .apiattr import ApiAttributeMixin
-from .settings import LoadSettingsFile
-from .settings import ValidateSettings
-from .settings import SettingsError
-from .settings import InvalidConfigError
-
-
-class AuthError(Exception):
-  """Base error for authentication/authorization errors."""
-
-
-class InvalidCredentialsError(IOError):
-  """Error trying to read credentials file."""
-
-
-class AuthenticationRejected(AuthError):
-  """User rejected authentication."""
-
-
-class AuthenticationError(AuthError):
-  """General authentication error."""
-
-
-class RefreshError(AuthError):
-  """Access token refresh error."""
-
-def LoadAuth(decoratee):
-  """Decorator to check if the auth is valid and loads auth if not."""
-  @wraps(decoratee)
-  def _decorated(self, *args, **kwargs):
-    if self.auth is None:  # Initialize auth if needed.
-      self.auth = GoogleAuth()
-    if self.auth.access_token_expired:
-      self.auth.LocalWebserverAuth()
-    if self.auth.service is None:  # Check if drive api is built.
-      self.auth.Authorize()
-    return decoratee(self, *args, **kwargs)
-  return _decorated
-
-def CheckAuth(decoratee):
-  """Decorator to check if it requires OAuth2 flow request."""
-  @wraps(decoratee)
-  def _decorated(self, *args, **kwargs):
-    dirty = False
-    code = None
-    save_credentials = self.settings.get('save_credentials')
-    if self.credentials is None and save_credentials:
-      self.LoadCredentials()
-    if self.flow is None:
-      self.GetFlow()
-    if self.credentials is None:
-      code = decoratee(self, *args, **kwargs)
-      dirty = True
-    else:
-      if self.access_token_expired:
-        if self.credentials.refresh_token is not None:
-          self.Refresh()
-        else:
-          code = decoratee(self, *args, **kwargs)
-          dirty = True
-    if code is not None:
-      self.Auth(code)
-    if dirty and save_credentials:
-      self.SaveCredentials()
-  return _decorated
-
-
-class GoogleAuth(ApiAttributeMixin, object):
-  """Wrapper class for oauth2client library in google-api-python-client.
-
-  Loads all settings and credentials from one 'settings.yaml' file
-  and performs common OAuth2.0 related functionality such as authentication
-  and authorization.
-  """
-  DEFAULT_SETTINGS = {
-      'client_config_backend': 'file',
-      'client_config_file': 'client_secrets.json',
-      'save_credentials': False,
-      'oauth_scope': ['https://www.googleapis.com/auth/drive']
-  }
-  CLIENT_CONFIGS_LIST = ['client_id', 'client_secret', 'auth_uri',
-                         'token_uri', 'revoke_uri', 'redirect_uri']
-  settings = ApiAttribute('settings')
-  client_config = ApiAttribute('client_config')
-  flow = ApiAttribute('flow')
-  credentials = ApiAttribute('credentials')
-  http = ApiAttribute('http')
-  service = ApiAttribute('service')
-
-  def __init__(self, settings_file='settings.yaml'):
-    """Create an instance of GoogleAuth.
-
-    This constructor just sets the path of settings file.
-    It does not actually read the file.
-
-    :param settings_file: path of settings file. 'settings.yaml' by default.
-    :type settings_file: str.
-    """
-    ApiAttributeMixin.__init__(self)
-    self.client_config = {}
-    try:
-      self.settings = LoadSettingsFile(settings_file)
-    except SettingsError:
-      self.settings = self.DEFAULT_SETTINGS
-    else:
-      if self.settings is None:
-        self.settings = self.DEFAULT_SETTINGS
-      else:
-        ValidateSettings(self.settings)
-
-  @property
-  def access_token_expired(self):
-    """Checks if access token doesn't exist or is expired.
-
-    :returns: bool -- True if access token doesn't exist or is expired.
-    """
-    if self.credentials is None:
-      return True
-    return self.credentials.access_token_expired
-
-  @CheckAuth
-  def LocalWebserverAuth(self, host_name='localhost',
-                         port_numbers=[8080, 8090]):
-    """Authenticate and authorize from user by creating local webserver and
-    retrieving authentication code.
-
-    This function is not for webserver application. It creates local webserver
-    for user from standalone application.
-
-    :param host_name: host name of the local webserver.
-    :type host_name: str.
-    :param port_numbers: list of port numbers to be tried to used.
-    :type port_numbers: list.
-    :returns: str -- code returned from local webserver
-    :raises: AuthenticationRejected, AuthenticationError
-    """
-    success = False
-    port_number = 0
-    for port in port_numbers:
-      port_number = port
-      try:
-        httpd = ClientRedirectServer((host_name, port), ClientRedirectHandler)
-      except socket.error as e:
-        pass
-      else:
-        success = True
-        break
-    if success:
-      oauth_callback = 'http://%s:%s/' % (host_name, port_number)
-    else:
-      raise AuthenticationError()
-    self.flow.redirect_uri = oauth_callback
-    authorize_url = self.GetAuthUrl()
-    webbrowser.open(authorize_url, new=1, autoraise=True)
-    httpd.handle_request()
-    if 'error' in httpd.query_params:
-      raise AuthenticationRejected('User rejected authentication')
-    if 'code' in httpd.query_params:
-      return httpd.query_params['code']
-    else:
-      raise AuthenticationError('No code found in redirect')
-
-  @CheckAuth
-  def CommandLineAuth(self):
-    """Authenticate and authorize from user by printing authentication url
-    retrieving authentication code from command-line.
-
-    :returns: str -- code returned from commandline.
-    """
-    self.flow.redirect_uri = OOB_CALLBACK_URN
-    authorize_url = self.GetAuthUrl()
-    return raw_input('Enter verification code: ').strip()
-
-  def LoadCredentials(self, backend=None):
-    """Loads credentials or create empty credentials if it doesn't exist.
-
-    :param backend: target backend to save credential to.
-    :type backend: str.
-    :raises: InvalidConfigError
-    """
-    if backend is None:
-      backend = self.settings.get('save_credentials_backend')
-      if backend is None:
-        raise InvalidConfigError('Please specify credential backend')
-    if backend == 'file':
-      self.LoadCredentialsFile()
-    else:
-      raise InvalidConfigError('Unknown save_credentials_backend')
-
-  def LoadCredentialsFile(self, credentials_file=None):
-    """Loads credentials or create empty credentials if it doesn't exist.
-
-    Loads credentials file from path in settings if not specified.
-
-    :param credentials_file: path of credentials file to read.
-    :type credentials_file: str.
-    :raises: InvalidConfigError, InvalidCredentialsError
-    """
-    if credentials_file is None:
-      credentials_file = self.settings.get('save_credentials_file')
-      if credentials_file is None:
-        raise InvalidConfigError('Please specify credentials file to read')
-    try:
-      storage = Storage(credentials_file)
-      self.credentials = storage.get()
-    except IOError:
-      raise InvalidCredentialsError('Credentials file cannot be symbolic link')
-
-  def SaveCredentials(self, backend=None):
-    """Saves credentials according to specified backend.
-
-    If you have any specific credentials backend in mind, don't use this
-    function and use the corresponding function you want.
-
-    :param backend: backend to save credentials.
-    :type backend: str.
-    :raises: InvalidConfigError
-    """
-    if backend is None:
-      backend = self.settings.get('save_credentials_backend')
-      if backend is None:
-        raise InvalidConfigError('Please specify credential backend')
-    if backend == 'file':
-      self.SaveCredentialsFile()
-    else:
-      raise InvalidConfigError('Unknown save_credentials_backend')
-
-  def SaveCredentialsFile(self, credentials_file=None):
-    """Saves credentials to the file in JSON format.
-
-    :param credentials_file: destination to save file to.
-    :type credentials_file: str.
-    :raises: InvalidConfigError, InvalidCredentialsError
-    """
-    if self.credentials is None:
-      raise InvalidCredentialsError('No credentials to save')
-    if credentials_file is None:
-      credentials_file = self.settings.get('save_credentials_file')
-      if credentials_file is None:
-        raise InvalidConfigError('Please specify credentials file to read')
-    try:
-      storage = Storage(credentials_file)
-      storage.put(self.credentials)
-      self.credentials.set_store(storage)
-    except CredentialsFileSymbolicLinkError:
-      raise InvalidCredentialsError('Credentials file cannot be symbolic link')
-
-  def LoadClientConfig(self, backend=None):
-    """Loads client configuration according to specified backend.
-
-    If you have any specific backend to load client configuration from in mind,
-    don't use this function and use the corresponding function you want.
-
-    :param backend: backend to load client configuration from.
-    :type backend: str.
-    :raises: InvalidConfigError
-    """
-    if backend is None:
-      backend = self.settings.get('client_config_backend')
-      if backend is None:
-        raise InvalidConfigError('Please specify client config backend')
-    if backend == 'file':
-      self.LoadClientConfigFile()
-    elif backend == 'settings':
-      self.LoadClientConfigSettings()
-    else:
-      raise InvalidConfigError('Unknown client_config_backend')
-
-  def LoadClientConfigFile(self, client_config_file=None):
-    """Loads client configuration file downloaded from APIs console.
-
-    Loads client config file from path in settings if not specified.
-
-    :param client_config_file: path of client config file to read.
-    :type client_config_file: str.
-    :raises: InvalidConfigError
-    """
-    if client_config_file is None:
-      client_config_file = self.settings['client_config_file']
-    try:
-      client_type, client_info = clientsecrets.loadfile(client_config_file)
-    except clientsecrets.InvalidClientSecretsError as error:
-      raise InvalidConfigError('Invalid client secrets file %s' % error)
-    if not client_type in (clientsecrets.TYPE_WEB,
-                           clientsecrets.TYPE_INSTALLED):
-      raise InvalidConfigError('Unknown client_type of client config file')
-    try:
-      config_index = ['client_id', 'client_secret', 'auth_uri', 'token_uri']
-      for config in config_index:
-        self.client_config[config] = client_info[config]
-      self.client_config['revoke_uri'] = client_info.get('revoke_uri')
-      self.client_config['redirect_uri'] = client_info['redirect_uris'][0]
-    except KeyError:
-      raise InvalidConfigError('Insufficient client config in file')
-
-  def LoadClientConfigSettings(self):
-    """Loads client configuration from settings file.
-
-    :raises: InvalidConfigError
-    """
-
-    for config in self.CLIENT_CONFIGS_LIST:
-      try:
-        self.client_config[config] = self.settings['client_config'][config]
-
-      except KeyError:
-        raise InvalidConfigError('Insufficient client config in settings')
-
-  def GetFlow(self):
-    """Gets Flow object from client configuration.
-
-    :raises: InvalidConfigError
-    """
-    if not all(config in self.client_config \
-               for config in self.CLIENT_CONFIGS_LIST):
-      self.LoadClientConfig()
-    constructor_kwargs = {
-        'redirect_uri': self.client_config['redirect_uri'],
-        'auth_uri': self.client_config['auth_uri'],
-        'token_uri': self.client_config['token_uri'],
-    }
-    if self.client_config['revoke_uri'] is not None:
-      constructor_kwargs['revoke_uri'] = self.client_config['revoke_uri']
-    self.flow = OAuth2WebServerFlow(
-        self.client_config['client_id'],
-        self.client_config['client_secret'],
-        scopes_to_string(self.settings['oauth_scope']),
-        **constructor_kwargs)
-    if self.settings.get('get_refresh_token'):
-      self.flow.params.update({'access_type': 'offline'})
-
-  def Refresh(self):
-    """Refreshes the access_token.
-
-    :raises: RefreshError
-    """
-    if self.credentials is None:
-      raise RefreshError('No credential to refresh.')
-    if self.credentials.refresh_token is None:
-      raise RefreshError('No refresh_token found.'
-                         'Please set access_type of OAuth to offline.')
-    if self.http is None:
-      self.http = httplib2.Http()
-    try:
-      self.credentials.refresh(self.http)
-    except AccessTokenRefreshError as error:
-      raise RefreshError('Access token refresh failed: %s' % error)
-
-  def GetAuthUrl(self, keys = None):
-    """Creates authentication url where user visits to grant access.
-
-    :returns: str -- Authentication url.
-    """
-
-    if(keys != None):
-      #update some of the settings in the client_config dict
-      self.client_config['client_id'] = keys['client_id']
-      self.client_config['client_secret'] = keys['client_secret']
-
-    if self.flow is None:
-      self.GetFlow()
-
-    return self.flow.step1_get_authorize_url()
-
-  def Auth(self, code):
-    """Authenticate, authorize, and build service.
-
-    :param code: Code for authentication.
-    :type code: str.
-    :raises: AuthenticationError
-    """
-    self.Authenticate(code)
-    self.Authorize()
-
-  def Authenticate(self, code):
-    """Authenticates given authentication code back from user.
-
-    :param code: Code for authentication.
-    :type code: str.
-    :raises: AuthenticationError
-    """
-    if self.flow is None:
-      self.GetFlow()
-    try:
-      self.credentials = self.flow.step2_exchange(code)
-    except FlowExchangeError as e:
-      raise AuthenticationError('OAuth2 code exchange failed: %s' % e)
-
-  def Authorize(self):
-    """Authorizes and builds service.
-
-    :raises: AuthenticationError
-    """
-    if self.http is None:
-      self.http = httplib2.Http()
-    if self.access_token_expired:
-      raise AuthenticationError('No valid credentials provided to authorize')
-    self.http = self.credentials.authorize(self.http)
-    self.service = build('drive', 'v2', http=self.http)
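
A rough sketch of the OAuth handshake the deleted GoogleAuth class drove, using only methods defined above; the import path, the client keys, and the presence of a valid client config are assumptions:

```python
from resources.lib.pydrive.auth import GoogleAuth  # assumed vendored path being removed

gauth = GoogleAuth()
keys = {'client_id': 'YOUR_ID', 'client_secret': 'YOUR_SECRET'}  # placeholders

# step 1: send the user to Google's consent page
print(gauth.GetAuthUrl(keys))

# step 2: exchange the pasted verification code; Auth() runs
# Authenticate() + Authorize() and builds the Drive v2 service
code = input('Enter verification code: ').strip()
gauth.Auth(code)
service = gauth.service
```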
@@ -1,38 +0,0 @@
-from .apiattr import ApiAttributeMixin
-from .files import GoogleDriveFile
-from .files import GoogleDriveFileList
-
-
-class GoogleDrive(ApiAttributeMixin, object):
-  """Main Google Drive class."""
-
-  def __init__(self, auth=None):
-    """Create an instance of GoogleDrive.
-
-    :param auth: authorized GoogleAuth instance.
-    :type auth: pydrive.auth.GoogleAuth.
-    """
-    ApiAttributeMixin.__init__(self)
-    self.auth = auth
-
-  def CreateFile(self, metadata=None):
-    """Create an instance of GoogleDriveFile with auth of this instance.
-
-    This method would not upload a file to GoogleDrive.
-
-    :param metadata: file resource to initialize GoogleDriveFile with.
-    :type metadata: dict.
-    :returns: pydrive.files.GoogleDriveFile -- initialized with auth of this instance.
-    """
-    return GoogleDriveFile(auth=self.auth, metadata=metadata)
-
-  def ListFile(self, param=None):
-    """Create an instance of GoogleDriveFileList with auth of this instance.
-
-    This method will not fetch from Files.List().
-
-    :param param: parameter to be sent to Files.List().
-    :type param: dict.
-    :returns: pydrive.files.GoogleDriveFileList -- initialized with auth of this instance.
-    """
-    return GoogleDriveFileList(auth=self.auth, param=param)
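
The two factory methods above were typically used like this; a sketch that assumes an authorized `gauth` instance from the previous snippet and an illustrative local path:

```python
from resources.lib.pydrive.drive import GoogleDrive  # assumed vendored path being removed

drive = GoogleDrive(auth=gauth)  # gauth from the previous sketch

# upload: build a file object, attach local content, push it
backup = drive.CreateFile({'title': 'backup.zip'})
backup.SetContentFile('/tmp/backup.zip')  # hypothetical local path
backup.Upload()

# list: Drive v2 query syntax, fetched lazily through GoogleDriveFileList
for item in drive.ListFile({'q': "title contains 'backup'"}).GetList():
    print(item['title'], item['id'])
```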
@@ -1,322 +0,0 @@
-import io
-import mimetypes
-
-from googleapiclient import errors
-from googleapiclient.http import MediaIoBaseUpload
-from functools import wraps
-
-from .apiattr import ApiAttribute
-from .apiattr import ApiAttributeMixin
-from .apiattr import ApiResource
-from .apiattr import ApiResourceList
-from .auth import LoadAuth
-
-
-class FileNotUploadedError(RuntimeError):
-  """Error trying to access metadata of file that is not uploaded."""
-
-
-class ApiRequestError(IOError):
-  """Error while making any API requests."""
-
-
-class FileNotDownloadableError(RuntimeError):
-  """Error trying to download file that is not downloadable."""
-
-
-def LoadMetadata(decoratee):
-  """Decorator to check if the file has metadata and fetches it if not.
-
-  :raises: ApiRequestError, FileNotUploadedError
-  """
-  @wraps(decoratee)
-  def _decorated(self, *args, **kwargs):
-    if not self.uploaded:
-      self.FetchMetadata()
-    return decoratee(self, *args, **kwargs)
-  return _decorated
-
-
-class GoogleDriveFileList(ApiResourceList):
-  """Google Drive FileList instance.
-
-  Equivalent to Files.list() in Drive APIs.
-  """
-
-  def __init__(self, auth=None, param=None):
-    """Create an instance of GoogleDriveFileList."""
-    super(GoogleDriveFileList, self).__init__(auth=auth, metadata=param)
-
-  @LoadAuth
-  def _GetList(self):
-    """Overwritten method which actually makes API call to list files.
-
-    :returns: list -- list of pydrive.files.GoogleDriveFile.
-    """
-    self.metadata = self.auth.service.files().list(**dict(self)).execute()
-    result = []
-    for file_metadata in self.metadata['items']:
-      tmp_file = GoogleDriveFile(
-          auth=self.auth,
-          metadata=file_metadata,
-          uploaded=True)
-      result.append(tmp_file)
-    return result
-
-
-class GoogleDriveFile(ApiAttributeMixin, ApiResource):
-  """Google Drive File instance.
-
-  Inherits ApiResource which inherits dict.
-  Can access and modify metadata like dictionary.
-  """
-  content = ApiAttribute('content')
-  uploaded = ApiAttribute('uploaded')
-  metadata = ApiAttribute('metadata')
-
-  def __init__(self, auth=None, metadata=None, uploaded=False):
-    """Create an instance of GoogleDriveFile.
-
-    :param auth: authorized GoogleAuth instance.
-    :type auth: pydrive.auth.GoogleAuth
-    :param metadata: file resource to initialize GoogleDirveFile with.
-    :type metadata: dict.
-    :param uploaded: True if this file is confirmed to be uploaded.
-    :type uploaded: bool.
-    """
-    ApiAttributeMixin.__init__(self)
-    ApiResource.__init__(self)
-    self.metadata = {}
-    self.dirty = {'content': False}
-    self.auth = auth
-    self.uploaded = uploaded
-    if uploaded:
-      self.UpdateMetadata(metadata)
-    elif metadata:
-      self.update(metadata)
-
-  def __getitem__(self, key):
-    """Overwrites manner of accessing Files resource.
-
-    If this file instance is not uploaded and id is specified,
-    it will try to look for metadata with Files.get().
-
-    :param key: key of dictionary query.
-    :type key: str.
-    :returns: value of Files resource
-    :raises: KeyError, FileNotUploadedError
-    """
-    try:
-      return dict.__getitem__(self, key)
-    except KeyError as e:
-      if self.uploaded:
-        raise KeyError(e)
-      if self.get('id'):
-        self.FetchMetadata()
-        return dict.__getitem__(self, key)
-      else:
-        raise FileNotUploadedError()
-
-  def SetContentString(self, content):
-    """Set content of this file to be a string.
-
-    Creates io.BytesIO instance of utf-8 encoded string.
-    Sets mimeType to be 'text/plain' if not specified.
-
-    :param content: content of the file in string.
-    :type content: str.
-    """
-    self.content = io.BytesIO(content.encode('utf-8'))
-    if self.get('mimeType') is None:
-      self['mimeType'] = 'text/plain'
-
-  def SetContentFile(self, filename):
-    """Set content of this file from a file.
-
-    Opens the file specified by this method.
-    Will be read, uploaded, and closed by Upload() method.
-    Sets metadata 'title' and 'mimeType' automatically if not specified.
-
-    :param filename: name of the file to be uploaded.
-    :type filename: str.
-    """
-    self.content = open(filename, 'rb')
-
-    if self.get('title') is None:
-      self['title'] = filename
-    if self.get('mimeType') is None:
-      self['mimeType'] = mimetypes.guess_type(filename)[0]
-
-  def GetContentString(self):
-    """Get content of this file as a string.
-
-    :returns: str -- utf-8 decoded content of the file
-    :raises: ApiRequestError, FileNotUploadedError, FileNotDownloadableError
-    """
-    if self.content is None or type(self.content) is not io.BytesIO:
-      self.FetchContent()
-    return self.content.getvalue().decode('utf-8')
-
-  def GetContentFile(self, filename, mimetype=None):
-    """Save content of this file as a local file.
-
-    :param filename: name of the file to write to.
-    :type filename: str.
-    :raises: ApiRequestError, FileNotUploadedError, FileNotDownloadableError
-    """
-    if self.content is None or type(self.content) is not io.BytesIO:
-      self.FetchContent(mimetype)
-    f = open(filename, 'wb')
-    f.write(self.content.getvalue())
-    f.close()
-
-  @LoadAuth
-  def FetchMetadata(self):
-    """Download file's metadata from id using Files.get().
-
-    :raises: ApiRequestError, FileNotUploadedError
-    """
-    file_id = self.metadata.get('id') or self.get('id')
-    if file_id:
-      try:
-        metadata = self.auth.service.files().get(fileId=file_id).execute()
-      except errors.HttpError as error:
-        raise ApiRequestError(error)
-      else:
-        self.uploaded = True
-        self.UpdateMetadata(metadata)
-    else:
-      raise FileNotUploadedError()
-
-  @LoadMetadata
-  def FetchContent(self, mimetype=None):
-    """Download file's content from download_url.
-
-    :raises: ApiRequestError, FileNotUploadedError, FileNotDownloadableError
-    """
-    download_url = self.metadata.get('downloadUrl')
-    if download_url:
-      self.content = io.BytesIO(self._DownloadFromUrl(download_url))
-      self.dirty['content'] = False
-      return
-
-    export_links = self.metadata.get('exportLinks')
-    if export_links and export_links.get(mimetype):
-      self.content = io.BytesIO(
-          self._DownloadFromUrl(export_links.get(mimetype)))
-      self.dirty['content'] = False
-      return
-
-    raise FileNotDownloadableError(
-        'No downloadLink/exportLinks for mimetype found in metadata')
-
-  def Upload(self, param=None):
-    """Upload/update file by choosing the most efficient method.
-
-    :param param: additional parameter to upload file.
-    :type param: dict.
-    :raises: ApiRequestError
-    """
-    if self.uploaded or self.get('id') is not None:
-      if self.dirty['content']:
-        self._FilesUpdate(param=param)
-      else:
-        self._FilesPatch(param=param)
-    else:
-      self._FilesInsert(param=param)
-
-  def Delete(self):
-    if self.get('id') is not None:
-      self.auth.service.files().delete(fileId=self.get('id')).execute()
-
-  @LoadAuth
-  def _FilesInsert(self, param=None):
-    """Upload a new file using Files.insert().
-
-    :param param: additional parameter to upload file.
-    :type param: dict.
-    :raises: ApiRequestError
-    """
-    if param is None:
-      param = {}
-    param['body'] = self.GetChanges()
-    try:
-      if self.dirty['content']:
-        param['media_body'] = self._BuildMediaBody()
-      metadata = self.auth.service.files().insert(**param).execute()
-    except errors.HttpError as error:
-      raise ApiRequestError(error)
-    else:
-      self.uploaded = True
-      self.dirty['content'] = False
-      self.UpdateMetadata(metadata)
-
-  @LoadAuth
-  @LoadMetadata
-  def _FilesUpdate(self, param=None):
-    """Update metadata and/or content using Files.Update().
-
-    :param param: additional parameter to upload file.
-    :type param: dict.
-    :raises: ApiRequestError, FileNotUploadedError
-    """
-    if param is None:
-      param = {}
-    param['body'] = self.GetChanges()
-    param['fileId'] = self.metadata.get('id')
-    try:
-      if self.dirty['content']:
-        param['media_body'] = self._BuildMediaBody()
-      metadata = self.auth.service.files().update(**param).execute()
-    except errors.HttpError as error:
-      raise ApiRequestError(error)
-    else:
-      self.uploaded = True
-      self.dirty['content'] = False
-      self.UpdateMetadata(metadata)
-
-  @LoadAuth
-  @LoadMetadata
-  def _FilesPatch(self, param=None):
-    """Update metadata using Files.Patch().
-
-    :param param: additional parameter to upload file.
-    :type param: dict.
-    :raises: ApiRequestError, FileNotUploadedError
-    """
-    if param is None:
-      param = {}
-    param['body'] = self.GetChanges()
-    param['fileId'] = self.metadata.get('id')
-    try:
-      metadata = self.auth.service.files().patch(**param).execute()
-    except errors.HttpError as error:
-      raise ApiRequestError(error)
-    else:
-      self.UpdateMetadata(metadata)
-
-  def _BuildMediaBody(self):
-    """Build MediaIoBaseUpload to get prepared to upload content of the file.
-
-    Sets mimeType as 'application/octet-stream' if not specified.
-
-    :returns: MediaIoBaseUpload -- instance that will be used to upload content.
-    """
-    if self.get('mimeType') is None:
-      self['mimeType'] = 'application/octet-stream'
-
-    return MediaIoBaseUpload(self.content, self['mimeType'])
-
-  @LoadAuth
-  def _DownloadFromUrl(self, url):
-    """Download file from url using provided credential.
-
-    :param url: link of the file to download.
-    :type url: str.
-    :returns: str -- content of downloaded file in string.
-    :raises: ApiRequestError
-    """
-    resp, content = self.auth.service._http.request(url)
-    if resp.status != 200:
-      raise ApiRequestError('Cannot download file: %s' % resp)
-    return content
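
The download counterpart, continuing the previous sketch and again using only methods defined in the deleted module ('FILE_ID' is a placeholder):

```python
restore = drive.CreateFile({'id': 'FILE_ID'})  # drive from the previous sketch
restore.FetchMetadata()                        # Files.get() fills in downloadUrl
restore.GetContentFile('/tmp/restored-backup.zip')
```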
@@ -1,192 +0,0 @@
from yaml import load
from yaml import YAMLError
try:
    from yaml import CLoader as Loader
except ImportError:
    from yaml import Loader

SETTINGS_FILE = 'settings.yaml'
SETTINGS_STRUCT = {
    'client_config_backend': {
        'type': str,
        'required': True,
        'default': 'file',
        'dependency': [
            {
                'value': 'file',
                'attribute': ['client_config_file']
            },
            {
                'value': 'settings',
                'attribute': ['client_config']
            }
        ]
    },
    'save_credentials': {
        'type': bool,
        'required': True,
        'default': False,
        'dependency': [
            {
                'value': True,
                'attribute': ['save_credentials_backend']
            }
        ]
    },
    'get_refresh_token': {
        'type': bool,
        'required': False,
        'default': False
    },
    'client_config_file': {
        'type': str,
        'required': False,
        'default': 'client_secrets.json'
    },
    'save_credentials_backend': {
        'type': str,
        'required': False,
        'dependency': [
            {
                'value': 'file',
                'attribute': ['save_credentials_file']
            }
        ]
    },
    'client_config': {
        'type': dict,
        'required': False,
        'struct': {
            'client_id': {
                'type': str,
                'required': True,
                'default': 'blank'
            },
            'client_secret': {
                'type': str,
                'required': True,
                'default': 'blank'
            },
            'auth_uri': {
                'type': str,
                'required': True,
                'default': 'https://accounts.google.com/o/oauth2/auth'
            },
            'token_uri': {
                'type': str,
                'required': True,
                'default': 'https://accounts.google.com/o/oauth2/token'
            },
            'redirect_uri': {
                'type': str,
                'required': True,
                'default': 'urn:ietf:wg:oauth:2.0:oob'
            },
            'revoke_uri': {
                'type': str,
                'required': True,
                'default': None
            }
        }
    },
    'oauth_scope': {
        'type': list,
        'required': True,
        'struct': str,
        'default': ['https://www.googleapis.com/auth/drive']
    },
    'save_credentials_file': {
        'type': str,
        'required': False,
    }
}


class SettingsError(IOError):
    """Error while loading/saving settings"""


class InvalidConfigError(IOError):
    """Error trying to read client configuration."""


def LoadSettingsFile(filename=SETTINGS_FILE):
    """Loads settings file in yaml format given file name.

    :param filename: path for settings file. 'settings.yaml' by default.
    :type filename: str.
    :raises: SettingsError
    """
    try:
        stream = file(filename, 'r')
        data = load(stream, Loader=Loader)
    except (YAMLError, IOError) as e:
        raise SettingsError(e)
    return data


def ValidateSettings(data):
    """Validates if current settings is valid.

    :param data: dictionary containing all settings.
    :type data: dict.
    :raises: InvalidConfigError
    """
    _ValidateSettingsStruct(data, SETTINGS_STRUCT)


def _ValidateSettingsStruct(data, struct):
    """Validates if provided data fits provided structure.

    :param data: dictionary containing settings.
    :type data: dict.
    :param struct: dictionary containing structure information of settings.
    :type struct: dict.
    :raises: InvalidConfigError
    """
    # Validate required elements of the setting.
    for key in struct:
        if struct[key]['required']:
            _ValidateSettingsElement(data, struct, key)


def _ValidateSettingsElement(data, struct, key):
    """Validates if provided element of settings data fits provided structure.

    :param data: dictionary containing settings.
    :type data: dict.
    :param struct: dictionary containing structure information of settings.
    :type struct: dict.
    :param key: key of the settings element to validate.
    :type key: str.
    :raises: InvalidConfigError
    """
    # Check if data exists. If not, check if default value exists.
    value = data.get(key)
    data_type = struct[key]['type']
    if value is None:
        try:
            default = struct[key]['default']
        except KeyError:
            raise InvalidConfigError('Missing required setting %s' % key)
        else:
            data[key] = default
    # If data exists, check the type of the data.
    elif type(value) is not data_type:
        raise InvalidConfigError('Setting %s should be type %s' % (key, data_type))
    # If type of this data is dict, check if structure of the data is valid.
    if data_type is dict:
        _ValidateSettingsStruct(data[key], struct[key]['struct'])
    # If type of this data is list, check if all values in the list are valid.
    elif data_type is list:
        for element in data[key]:
            if type(element) is not struct[key]['struct']:
                raise InvalidConfigError('Setting %s should be list of %s' %
                                         (key, struct[key]['struct']))
    # Check dependency of this attribute.
    dependencies = struct[key].get('dependency')
    if dependencies:
        for dependency in dependencies:
            if value == dependency['value']:
                for reqkey in dependency['attribute']:
                    _ValidateSettingsElement(data, struct, reqkey)
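The validator above both type-checks and fills defaults in place. A minimal sketch of that behavior (assuming the deleted module is importable; the keys come straight from SETTINGS_STRUCT):

    data = {}
    ValidateSettings(data)
    # required keys now hold their declared defaults:
    # data['client_config_backend'] == 'file'
    # data['client_config_file'] == 'client_secrets.json'  (pulled in by the 'file' dependency)
    # data['save_credentials'] is False
    # data['oauth_scope'] == ['https://www.googleapis.com/auth/drive']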
@@ -1,7 +0,0 @@
client_config_backend: 'settings'
client_config:
  client_id: "blank"
  client_secret: "blank"
get_refresh_token: True
oauth_scope:
  - "https://www.googleapis.com/auth/drive.file"
@@ -1,430 +0,0 @@
"""
Copyright (c) 2003-2010 Gustavo Niemeyer <gustavo@niemeyer.net>

This module offers extensions to the standard python 2.3+
datetime module.
"""
__author__ = "Gustavo Niemeyer <gustavo@niemeyer.net>"
__license__ = "PSF License"

import datetime
import calendar

__all__ = ["relativedelta", "MO", "TU", "WE", "TH", "FR", "SA", "SU"]


class weekday(object):
    __slots__ = ["weekday", "n"]

    def __init__(self, weekday, n=None):
        self.weekday = weekday
        self.n = n

    def __call__(self, n):
        if n == self.n:
            return self
        else:
            return self.__class__(self.weekday, n)

    def __eq__(self, other):
        try:
            if self.weekday != other.weekday or self.n != other.n:
                return False
        except AttributeError:
            return False
        return True

    def __repr__(self):
        s = ("MO", "TU", "WE", "TH", "FR", "SA", "SU")[self.weekday]
        if not self.n:
            return s
        else:
            return "%s(%+d)" % (s, self.n)

MO, TU, WE, TH, FR, SA, SU = weekdays = tuple([weekday(x) for x in range(7)])


class relativedelta:
    """
    The relativedelta type is based on the specification of the excellent
    work done by M.-A. Lemburg in his mx.DateTime extension. However,
    notice that this type does *NOT* implement the same algorithm as
    his work. Do *NOT* expect it to behave like mx.DateTime's counterpart.

    There are two different ways to build a relativedelta instance. The
    first one is passing it two date/datetime classes:

        relativedelta(datetime1, datetime2)

    And the other way is to use the following keyword arguments:

    year, month, day, hour, minute, second, microsecond:
        Absolute information.

    years, months, weeks, days, hours, minutes, seconds, microseconds:
        Relative information, may be negative.

    weekday:
        One of the weekday instances (MO, TU, etc). These instances may
        receive a parameter N, specifying the Nth weekday, which could
        be positive or negative (like MO(+1) or MO(-2)). Not specifying
        it is the same as specifying +1. You can also use an integer,
        where 0=MO.

    leapdays:
        Will add given days to the date found, if year is a leap
        year, and the date found is post 28 of february.

    yearday, nlyearday:
        Set the yearday or the non-leap year day (jump leap days).
        These are converted to day/month/leapdays information.

    Here is the behavior of operations with relativedelta:

    1) Calculate the absolute year, using the 'year' argument, or the
       original datetime year, if the argument is not present.

    2) Add the relative 'years' argument to the absolute year.

    3) Do steps 1 and 2 for month/months.

    4) Calculate the absolute day, using the 'day' argument, or the
       original datetime day, if the argument is not present. Then,
       subtract from the day until it fits in the year and month
       found after their operations.

    5) Add the relative 'days' argument to the absolute day. Notice
       that the 'weeks' argument is multiplied by 7 and added to
       'days'.

    6) Do steps 1 and 2 for hour/hours, minute/minutes, second/seconds,
       microsecond/microseconds.

    7) If the 'weekday' argument is present, calculate the weekday,
       with the given (wday, nth) tuple. wday is the index of the
       weekday (0-6, 0=Mon), and nth is the number of weeks to add
       forward or backward, depending on its signal. Notice that if
       the calculated date is already Monday, for example, using
       (0, 1) or (0, -1) won't change the day.
    """

    def __init__(self, dt1=None, dt2=None,
                 years=0, months=0, days=0, leapdays=0, weeks=0,
                 hours=0, minutes=0, seconds=0, microseconds=0,
                 year=None, month=None, day=None, weekday=None,
                 yearday=None, nlyearday=None,
                 hour=None, minute=None, second=None, microsecond=None):
        if dt1 and dt2:
            if not isinstance(dt1, datetime.date) or \
               not isinstance(dt2, datetime.date):
                raise TypeError("relativedelta only diffs datetime/date")
            if type(dt1) is not type(dt2):
                if not isinstance(dt1, datetime.datetime):
                    dt1 = datetime.datetime.fromordinal(dt1.toordinal())
                elif not isinstance(dt2, datetime.datetime):
                    dt2 = datetime.datetime.fromordinal(dt2.toordinal())
            self.years = 0
            self.months = 0
            self.days = 0
            self.leapdays = 0
            self.hours = 0
            self.minutes = 0
            self.seconds = 0
            self.microseconds = 0
            self.year = None
            self.month = None
            self.day = None
            self.weekday = None
            self.hour = None
            self.minute = None
            self.second = None
            self.microsecond = None
            self._has_time = 0

            months = (dt1.year*12+dt1.month)-(dt2.year*12+dt2.month)
            self._set_months(months)
            dtm = self.__radd__(dt2)
            if dt1 < dt2:
                while dt1 > dtm:
                    months += 1
                    self._set_months(months)
                    dtm = self.__radd__(dt2)
            else:
                while dt1 < dtm:
                    months -= 1
                    self._set_months(months)
                    dtm = self.__radd__(dt2)
            delta = dt1 - dtm
            self.seconds = delta.seconds+delta.days*86400
            self.microseconds = delta.microseconds
        else:
            self.years = years
            self.months = months
            self.days = days+weeks*7
            self.leapdays = leapdays
            self.hours = hours
            self.minutes = minutes
            self.seconds = seconds
            self.microseconds = microseconds
            self.year = year
            self.month = month
            self.day = day
            self.hour = hour
            self.minute = minute
            self.second = second
            self.microsecond = microsecond

            if type(weekday) is int:
                self.weekday = weekdays[weekday]
            else:
                self.weekday = weekday

            yday = 0
            if nlyearday:
                yday = nlyearday
            elif yearday:
                yday = yearday
                if yearday > 59:
                    self.leapdays = -1
            if yday:
                ydayidx = [31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 366]
                for idx, ydays in enumerate(ydayidx):
                    if yday <= ydays:
                        self.month = idx+1
                        if idx == 0:
                            self.day = yday
                        else:
                            self.day = yday-ydayidx[idx-1]
                        break
                else:
                    raise ValueError("invalid year day (%d)" % yday)

        self._fix()

    def _fix(self):
        if abs(self.microseconds) > 999999:
            s = self.microseconds//abs(self.microseconds)
            div, mod = divmod(self.microseconds*s, 1000000)
            self.microseconds = mod*s
            self.seconds += div*s
        if abs(self.seconds) > 59:
            s = self.seconds//abs(self.seconds)
            div, mod = divmod(self.seconds*s, 60)
            self.seconds = mod*s
            self.minutes += div*s
        if abs(self.minutes) > 59:
            s = self.minutes//abs(self.minutes)
            div, mod = divmod(self.minutes*s, 60)
            self.minutes = mod*s
            self.hours += div*s
        if abs(self.hours) > 23:
            s = self.hours//abs(self.hours)
            div, mod = divmod(self.hours*s, 24)
            self.hours = mod*s
            self.days += div*s
        if abs(self.months) > 11:
            s = self.months//abs(self.months)
            div, mod = divmod(self.months*s, 12)
            self.months = mod*s
            self.years += div*s
        if (self.hours or self.minutes or self.seconds or self.microseconds or
                self.hour is not None or self.minute is not None or
                self.second is not None or self.microsecond is not None):
            self._has_time = 1
        else:
            self._has_time = 0

    def _set_months(self, months):
        self.months = months
        if abs(self.months) > 11:
            s = self.months//abs(self.months)
            div, mod = divmod(self.months*s, 12)
            self.months = mod*s
            self.years = div*s
        else:
            self.years = 0

    def __radd__(self, other):
        if not isinstance(other, datetime.date):
            raise TypeError("unsupported type for add operation")
        elif self._has_time and not isinstance(other, datetime.datetime):
            other = datetime.datetime.fromordinal(other.toordinal())
        year = (self.year or other.year)+self.years
        month = self.month or other.month
        if self.months:
            assert 1 <= abs(self.months) <= 12
            month += self.months
            if month > 12:
                year += 1
                month -= 12
            elif month < 1:
                year -= 1
                month += 12
        day = min(calendar.monthrange(year, month)[1],
                  self.day or other.day)
        repl = {"year": year, "month": month, "day": day}
        for attr in ["hour", "minute", "second", "microsecond"]:
            value = getattr(self, attr)
            if value is not None:
                repl[attr] = value
        days = self.days
        if self.leapdays and month > 2 and calendar.isleap(year):
            days += self.leapdays
        ret = (other.replace(**repl)
               + datetime.timedelta(days=days,
                                    hours=self.hours,
                                    minutes=self.minutes,
                                    seconds=self.seconds,
                                    microseconds=self.microseconds))
        if self.weekday:
            weekday, nth = self.weekday.weekday, self.weekday.n or 1
            jumpdays = (abs(nth)-1)*7
            if nth > 0:
                jumpdays += (7-ret.weekday()+weekday) % 7
            else:
                jumpdays += (ret.weekday()-weekday) % 7
                jumpdays *= -1
            ret += datetime.timedelta(days=jumpdays)
        return ret

    def __rsub__(self, other):
        return self.__neg__().__radd__(other)

    def __add__(self, other):
        if not isinstance(other, relativedelta):
            raise TypeError("unsupported type for add operation")
        return relativedelta(years=other.years+self.years,
                             months=other.months+self.months,
                             days=other.days+self.days,
                             hours=other.hours+self.hours,
                             minutes=other.minutes+self.minutes,
                             seconds=other.seconds+self.seconds,
                             microseconds=other.microseconds+self.microseconds,
                             leapdays=other.leapdays or self.leapdays,
                             year=other.year or self.year,
                             month=other.month or self.month,
                             day=other.day or self.day,
                             weekday=other.weekday or self.weekday,
                             hour=other.hour or self.hour,
                             minute=other.minute or self.minute,
                             second=other.second or self.second,
                             microsecond=other.microsecond or self.microsecond)

    def __sub__(self, other):
        if not isinstance(other, relativedelta):
            raise TypeError("unsupported type for sub operation")
        return relativedelta(years=other.years-self.years,
                             months=other.months-self.months,
                             days=other.days-self.days,
                             hours=other.hours-self.hours,
                             minutes=other.minutes-self.minutes,
                             seconds=other.seconds-self.seconds,
                             microseconds=other.microseconds-self.microseconds,
                             leapdays=other.leapdays or self.leapdays,
                             year=other.year or self.year,
                             month=other.month or self.month,
                             day=other.day or self.day,
                             weekday=other.weekday or self.weekday,
                             hour=other.hour or self.hour,
                             minute=other.minute or self.minute,
                             second=other.second or self.second,
                             microsecond=other.microsecond or self.microsecond)

    def __neg__(self):
        return relativedelta(years=-self.years,
                             months=-self.months,
                             days=-self.days,
                             hours=-self.hours,
                             minutes=-self.minutes,
                             seconds=-self.seconds,
                             microseconds=-self.microseconds,
                             leapdays=self.leapdays,
                             year=self.year,
                             month=self.month,
                             day=self.day,
                             weekday=self.weekday,
                             hour=self.hour,
                             minute=self.minute,
                             second=self.second,
                             microsecond=self.microsecond)

    def __nonzero__(self):
        return not (not self.years and
                    not self.months and
                    not self.days and
                    not self.hours and
                    not self.minutes and
                    not self.seconds and
                    not self.microseconds and
                    not self.leapdays and
                    self.year is None and
                    self.month is None and
                    self.day is None and
                    self.weekday is None and
                    self.hour is None and
                    self.minute is None and
                    self.second is None and
                    self.microsecond is None)

    def __mul__(self, other):
        f = float(other)
        return relativedelta(years=self.years*f,
                             months=self.months*f,
                             days=self.days*f,
                             hours=self.hours*f,
                             minutes=self.minutes*f,
                             seconds=self.seconds*f,
                             microseconds=self.microseconds*f,
                             leapdays=self.leapdays,
                             year=self.year,
                             month=self.month,
                             day=self.day,
                             weekday=self.weekday,
                             hour=self.hour,
                             minute=self.minute,
                             second=self.second,
                             microsecond=self.microsecond)

    def __eq__(self, other):
        if not isinstance(other, relativedelta):
            return False
        if self.weekday or other.weekday:
            if not self.weekday or not other.weekday:
                return False
            if self.weekday.weekday != other.weekday.weekday:
                return False
            n1, n2 = self.weekday.n, other.weekday.n
            if n1 != n2 and not ((not n1 or n1 == 1) and (not n2 or n2 == 1)):
                return False
        return (self.years == other.years and
                self.months == other.months and
                self.days == other.days and
                self.hours == other.hours and
                self.minutes == other.minutes and
                self.seconds == other.seconds and
                self.leapdays == other.leapdays and
                self.year == other.year and
                self.month == other.month and
                self.day == other.day and
                self.hour == other.hour and
                self.minute == other.minute and
                self.second == other.second and
                self.microsecond == other.microsecond)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __div__(self, other):
        return self.__mul__(1/float(other))

    def __repr__(self):
        l = []
        for attr in ["years", "months", "days", "leapdays",
                     "hours", "minutes", "seconds", "microseconds"]:
            value = getattr(self, attr)
            if value:
                l.append("%s=%+d" % (attr, value))
        for attr in ["year", "month", "day", "weekday",
                     "hour", "minute", "second", "microsecond"]:
            value = getattr(self, attr)
            if value is not None:
                l.append("%s=%s" % (attr, value))
        return "%s(%s)" % (self.__class__.__name__, ", ".join(l))
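The docstring's step 4 (day clamping) and step 7 (weekday jumps) are the subtle parts; two worked examples using only names defined in the module above:

    import datetime

    # step 4: the absolute day is clamped to the target month's length
    datetime.date(2011, 1, 31) + relativedelta(months=+1)
    # -> datetime.date(2011, 2, 28)

    # step 7: FR(+2) lands on the second Friday counted from the computed date
    datetime.date(2011, 10, 1) + relativedelta(weekday=FR(+2))
    # -> datetime.date(2011, 10, 14)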
@@ -1,11 +1,12 @@
-import urllib2
+# this is duplicated in snipppets of code from all over the web, credit to no one
+# in particular - to all those that have gone before me!
+from future.moves.urllib.request import urlopen
 
 
-#this is duplicated in snipppets of code from all over the web, credit to no one
-#in particular - to all those that have gone before me!
 def shorten(aUrl):
     tinyurl = 'http://tinyurl.com/api-create.php?url='
-    req = urllib2.urlopen(tinyurl + aUrl)
+    req = urlopen(tinyurl + aUrl)
     data = req.read()
 
-    #should be a tiny url
+    # should be a tiny url
     return str(data)
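Behavior is unchanged by the port to the future library; a hypothetical call (network access and the TinyURL service assumed available):

    short = shorten('https://github.com/robweber/xbmcbackup')
    # 'short' is the generated tinyurl.com alias as a str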
@@ -2,47 +2,46 @@ import xbmc
 import xbmcgui
 import xbmcaddon
 
-__addon_id__= 'script.xbmcbackup'
+__addon_id__ = 'script.xbmcbackup'
 __Addon = xbmcaddon.Addon(__addon_id__)
 
 
 def data_dir():
     return __Addon.getAddonInfo('profile')
 
 
 def addon_dir():
     return __Addon.getAddonInfo('path')
 
 
 def openSettings():
     __Addon.openSettings()
 
-def log(message,loglevel=xbmc.LOGDEBUG):
-    xbmc.log(encode(__addon_id__ + "-" + __Addon.getAddonInfo('version') + ": " + message),level=loglevel)
+
+def log(message, loglevel=xbmc.LOGDEBUG):
+    xbmc.log(__addon_id__ + "-" + __Addon.getAddonInfo('version') + ": " + message, level=loglevel)
 
 
 def showNotification(message):
-    xbmcgui.Dialog().notification(encode(getString(30010)),encode(message),time=4000,icon=xbmc.translatePath(__Addon.getAddonInfo('path') + "/resources/images/icon.png"))
+    xbmcgui.Dialog().notification(getString(30010), message, time=4000, icon=xbmc.translatePath(__Addon.getAddonInfo('path') + "/resources/images/icon.png"))
 
 
 def getSetting(name):
     return __Addon.getSetting(name)
 
-def setSetting(name,value):
-    __Addon.setSetting(name,value)
+
+def setSetting(name, value):
+    __Addon.setSetting(name, value)
 
 
 def getString(string_id):
     return __Addon.getLocalizedString(string_id)
 
-def getRegionalTimestamp(date_time,dateformat=['dateshort']):
+
+def getRegionalTimestamp(date_time, dateformat=['dateshort']):
     result = ''
 
     for aFormat in dateformat:
         result = result + ("%s " % date_time.strftime(xbmc.getRegion(aFormat)))
 
     return result.strip()
-
-def encode(string):
-    result = ''
-
-    try:
-        result = string.encode('UTF-8','replace')
-    except UnicodeDecodeError:
-        result = 'Unicode Error'
-
-    return result
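getRegionalTimestamp() simply concatenates each requested Kodi region format; a sketch with hypothetical region strings (real values come from the user's Kodi locale):

    from datetime import datetime

    # if xbmc.getRegion('dateshort') were '%d/%m/%Y' and xbmc.getRegion('time') '%H:%M:%S':
    getRegionalTimestamp(datetime(2020, 5, 1, 9, 30), ['dateshort', 'time'])
    # -> '01/05/2020 09:30:00'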
@@ -1,126 +1,131 @@
-import xbmc
-import xbmcvfs
-import xbmcgui
+from __future__ import unicode_literals
 import zipfile
 import os.path
 import sys
-import dropbox
+import xbmc
+import xbmcvfs
+import xbmcgui
+from . import dropbox
 from . import utils as utils
-from dropbox.files import WriteMode,CommitInfo,UploadSessionCursor
-from .authorizers import DropboxAuthorizer,GoogleDriveAuthorizer
+from .dropbox.files import WriteMode, CommitInfo, UploadSessionCursor
+from . authorizers import DropboxAuthorizer
 
 
 class Vfs:
     root_path = None
 
-    def __init__(self,rootString):
+    def __init__(self, rootString):
         self.set_root(rootString)
 
-    def set_root(self,rootString):
+    def set_root(self, rootString):
         old_root = self.root_path
         self.root_path = rootString
 
-        #fix slashes
-        self.root_path = self.root_path.replace("\\","/")
+        # fix slashes
+        self.root_path = self.root_path.replace("\\", "/")
 
-        #check if trailing slash is included
+        # check if trailing slash is included
         if(self.root_path[-1:] != "/"):
             self.root_path = self.root_path + "/"
 
-        #return the old root
+        # return the old root
         return old_root
 
-    def listdir(self,directory):
+    def listdir(self, directory):
         return {}
 
-    def mkdir(self,directory):
+    def mkdir(self, directory):
         return True
 
-    def put(self,source,dest):
+    def put(self, source, dest):
         return True
 
-    def rmdir(self,directory):
+    def rmdir(self, directory):
         return True
 
-    def rmfile(self,aFile):
+    def rmfile(self, aFile):
         return True
 
-    def exists(self,aFile):
+    def exists(self, aFile):
         return True
 
-    def rename(self,aFile,newName):
+    def rename(self, aFile, newName):
         return True
 
     def cleanup(self):
         return True
 
 
 class XBMCFileSystem(Vfs):
 
-    def listdir(self,directory):
+    def listdir(self, directory):
         return xbmcvfs.listdir(directory)
 
-    def mkdir(self,directory):
+    def mkdir(self, directory):
         return xbmcvfs.mkdir(xbmc.translatePath(directory))
 
-    def put(self,source,dest):
-        return xbmcvfs.copy(xbmc.translatePath(source),xbmc.translatePath(dest))
+    def put(self, source, dest):
+        return xbmcvfs.copy(xbmc.translatePath(source), xbmc.translatePath(dest))
 
-    def rmdir(self,directory):
-        return xbmcvfs.rmdir(directory,True)
+    def rmdir(self, directory):
+        return xbmcvfs.rmdir(directory, True)
 
-    def rmfile(self,aFile):
+    def rmfile(self, aFile):
         return xbmcvfs.delete(aFile)
 
-    def rename(self,aFile,newName):
+    def rename(self, aFile, newName):
         return xbmcvfs.rename(aFile, newName)
 
-    def exists(self,aFile):
+    def exists(self, aFile):
         return xbmcvfs.exists(aFile)
 
 
 class ZipFileSystem(Vfs):
     zip = None
 
-    def __init__(self,rootString,mode):
+    def __init__(self, rootString, mode):
         self.root_path = ""
-        self.zip = zipfile.ZipFile(rootString,mode=mode,compression=zipfile.ZIP_DEFLATED,allowZip64=True)
+        self.zip = zipfile.ZipFile(rootString, mode=mode, compression=zipfile.ZIP_DEFLATED, allowZip64=True)
 
-    def listdir(self,directory):
-        return [[],[]]
+    def listdir(self, directory):
+        return [[], []]
 
-    def mkdir(self,directory):
-        #self.zip.write(directory[len(self.root_path):])
+    def mkdir(self, directory):
+        # self.zip.write(directory[len(self.root_path):])
         return False
 
-    def put(self,source,dest):
+    def put(self, source, dest):
 
-        aFile = xbmcvfs.File(xbmc.translatePath(source),'r')
+        aFile = xbmcvfs.File(xbmc.translatePath(source), 'r')
 
-        self.zip.writestr(utils.encode(dest),aFile.read())
+        self.zip.writestr(dest, aFile.readBytes())
 
         return True
 
-    def rmdir(self,directory):
+    def rmdir(self, directory):
         return False
 
-    def exists(self,aFile):
+    def exists(self, aFile):
         return False
 
     def cleanup(self):
         self.zip.close()
 
-    def extract(self,aFile,path):
-        #extract zip file to path
-        self.zip.extract(aFile,path)
+    def extract(self, aFile, path):
+        # extract zip file to path
+        self.zip.extract(aFile, path)
 
     def listFiles(self):
         return self.zip.infolist()
 
 
 class DropboxFileSystem(Vfs):
-    MAX_CHUNK = 50 * 1000 * 1000 #dropbox uses 150, reduced to 50 for small mem systems
+    MAX_CHUNK = 50 * 1000 * 1000  # dropbox uses 150, reduced to 50 for small mem systems
     client = None
     APP_KEY = ''
     APP_SECRET = ''
 
-    def __init__(self,rootString):
+    def __init__(self, rootString):
         self.set_root(rootString)
 
         authorizer = DropboxAuthorizer()
@@ -128,325 +133,136 @@ class DropboxFileSystem(Vfs):
         if(authorizer.isAuthorized()):
             self.client = authorizer.getClient()
         else:
-            #tell the user to go back and run the authorizer
-            xbmcgui.Dialog().ok(utils.getString(30010),utils.getString(30105))
+            # tell the user to go back and run the authorizer
+            xbmcgui.Dialog().ok(utils.getString(30010), utils.getString(30105))
             sys.exit()
 
-    def listdir(self,directory):
+    def listdir(self, directory):
         directory = self._fix_slashes(directory)
 
-        if(self.client != None and self.exists(directory)):
+        if(self.client is not None and self.exists(directory)):
             files = []
             dirs = []
             metadata = self.client.files_list_folder(directory)
 
             for aFile in metadata.entries:
-                if(isinstance(aFile,dropbox.files.FolderMetadata)):
-                    dirs.append(utils.encode(aFile.name))
+                if(isinstance(aFile, dropbox.files.FolderMetadata)):
+                    dirs.append(aFile.name)
                 else:
-                    files.append(utils.encode(aFile.name))
+                    files.append(aFile.name)
 
-            return [dirs,files]
+            return [dirs, files]
         else:
-            return [[],[]]
+            return [[], []]
 
-    def mkdir(self,directory):
+    def mkdir(self, directory):
         directory = self._fix_slashes(directory)
-        if(self.client != None):
-            #sort of odd but always return true, folder create is implicit with file upload
+        if(self.client is not None):
+            # sort of odd but always return true, folder create is implicit with file upload
             return True
         else:
             return False
 
-    def rmdir(self,directory):
+    def rmdir(self, directory):
         directory = self._fix_slashes(directory)
-        if(self.client != None and self.exists(directory)):
-            #dropbox is stupid and will refuse to do this sometimes, need to delete recursively
-            dirs,files = self.listdir(directory)
+        if(self.client is not None and self.exists(directory)):
+            # dropbox is stupid and will refuse to do this sometimes, need to delete recursively
+            dirs, files = self.listdir(directory)
 
             for aDir in dirs:
                 self.rmdir(aDir)
 
-            #finally remove the root directory
+            # finally remove the root directory
             self.client.files_delete(directory)
 
             return True
         else:
             return False
 
-    def rmfile(self,aFile):
+    def rmfile(self, aFile):
         aFile = self._fix_slashes(aFile)
 
-        if(self.client != None and self.exists(aFile)):
+        if(self.client is not None and self.exists(aFile)):
             self.client.files_delete(aFile)
             return True
         else:
             return False
 
-    def exists(self,aFile):
+    def exists(self, aFile):
         aFile = self._fix_slashes(aFile)
 
-        if(self.client != None):
-            #can't list root metadata
+        if(self.client is not None):
+            # can't list root metadata
             if(aFile == ''):
                 return True
 
             try:
-                meta_data = self.client.files_get_metadata(aFile)
-                #if we make it here the file does exist
+                self.client.files_get_metadata(aFile)
+                # if we make it here the file does exist
                 return True
             except:
                 return False
         else:
             return False
 
-    def put(self,source,dest,retry=True):
+    def put(self, source, dest, retry=True):
         dest = self._fix_slashes(dest)
 
-        if(self.client != None):
-            #open the file and get its size
-            f = open(source,'rb')
+        if(self.client is not None):
+            # open the file and get its size
+            f = open(source, 'rb')
             f_size = os.path.getsize(source)
 
             try:
                 if(f_size < self.MAX_CHUNK):
-                    #use the regular upload
-                    response = self.client.files_upload(f.read(),dest,mode=WriteMode('overwrite'))
+                    # use the regular upload
+                    self.client.files_upload(f.read(), dest, mode=WriteMode('overwrite'))
                 else:
-                    #start the upload session
+                    # start the upload session
                     upload_session = self.client.files_upload_session_start(f.read(self.MAX_CHUNK))
-                    upload_cursor = UploadSessionCursor(upload_session.session_id,f.tell())
+                    upload_cursor = UploadSessionCursor(upload_session.session_id, f.tell())
 
                     while(f.tell() < f_size):
-                        #check if we should finish the upload
+                        # check if we should finish the upload
                         if((f_size - f.tell()) <= self.MAX_CHUNK):
-                            #upload and close
-                            self.client.files_upload_session_finish(f.read(self.MAX_CHUNK),upload_cursor,CommitInfo(dest,mode=WriteMode('overwrite')))
+                            # upload and close
+                            self.client.files_upload_session_finish(f.read(self.MAX_CHUNK), upload_cursor, CommitInfo(dest, mode=WriteMode('overwrite')))
                         else:
-                            #upload a part and store the offset
-                            self.client.files_upload_session_append_v2(f.read(self.MAX_CHUNK),upload_cursor)
+                            # upload a part and store the offset
+                            self.client.files_upload_session_append_v2(f.read(self.MAX_CHUNK), upload_cursor)
                             upload_cursor.offset = f.tell()
 
-                #if no errors we're good!
+                # if no errors we're good!
                 return True
             except Exception as anError:
                 utils.log(str(anError))
 
-                #if we have an exception retry
+                # if we have an exception retry
                 if(retry):
-                    return self.put(source,dest,False)
+                    return self.put(source, dest, False)
                 else:
-                    #tried once already, just quit
+                    # tried once already, just quit
                     return False
         else:
             return False
 
-    def get_file(self,source,dest):
-        if(self.client != None):
-            #write the file locally
-            f = self.client.files_download_to_file(dest,source)
+    def get_file(self, source, dest):
+        if(self.client is not None):
+            # write the file locally
+            self.client.files_download_to_file(dest, source)
             return True
         else:
             return False
 
-    def _fix_slashes(self,filename):
-        result = filename.replace('\\','/')
+    def _fix_slashes(self, filename):
+        result = filename.replace('\\', '/')
 
-        #root needs to be a blank string
+        # root needs to be a blank string
         if(result == '/'):
             result = ""
 
-        #if dir ends in slash, remove it
+        # if dir ends in slash, remove it
         if(result[-1:] == "/"):
             result = result[:-1]
 
         return result
-
-
-class GoogleDriveFilesystem(Vfs):
-    drive = None
-    history = {}
-    FOLDER_TYPE = 'application/vnd.google-apps.folder'
-
-    def __init__(self,rootString):
-        self.set_root(rootString)
-
-        authorizer = GoogleDriveAuthorizer()
-
-        if(authorizer.isAuthorized()):
-            self.drive = authorizer.getClient()
-        else:
-            #tell the user to go back and run the authorizer
-            xbmcgui.Dialog().ok(utils.getString(30010),utils.getString(30105))
-            sys.exit()
-
-        #make sure we have the folder we need
-        xbmc_folder = self._getGoogleFile(self.root_path)
-        if(xbmc_folder == None):
-            self.mkdir(self.root_path)
-
-    def listdir(self,directory):
-        files = []
-        dirs = []
-
-        if(not directory.startswith('/')):
-            directory = '/' + directory
-
-        #get the id of this folder
-        parentFolder = self._getGoogleFile(directory)
-
-        #need to do this after
-        if(not directory.endswith('/')):
-            directory = directory + '/'
-
-        if(parentFolder != None):
-
-            fileList = self.drive.ListFile({'q':"'" + parentFolder['id'] + "' in parents and trashed = false"}).GetList()
-
-            for aFile in fileList:
-                if(aFile['mimeType'] == self.FOLDER_TYPE):
-                    dirs.append(utils.encode(aFile['title']))
-                else:
-                    files.append(utils.encode(aFile['title']))
-
-        return [dirs,files]
-
-    def mkdir(self,directory):
-        result = True
-
-        if(not directory.startswith('/')):
-            directory = '/' + directory
-
-        if(directory.endswith('/')):
-            directory = directory[:-1]
-
-        #split the string by the directory separator
-        pathList = os.path.split(directory)
-
-        if(pathList[0] == '/'):
-
-            #we're at the root, just make the folder
-            newFolder = self.drive.CreateFile({'title': pathList[1], 'parent':'root','mimeType':self.FOLDER_TYPE})
-            newFolder.Upload()
-        else:
-            #get the id of the parent folder
-            parentFolder = self._getGoogleFile(pathList[0])
-
-            if(parentFolder != None):
-                newFolder = self.drive.CreateFile({'title': pathList[1],"parents":[{'kind':'drive#fileLink','id':parentFolder['id']}],'mimeType':self.FOLDER_TYPE})
-                newFolder.Upload()
-            else:
-                result = False
-
-        return result
-
-    def put(self,source,dest):
-        result = True
-
-        #make the name separate from the path
-        if(not dest.startswith('/')):
-            dest = '/' + dest
-
-        pathList = os.path.split(dest)
-
-        #get the parent location
-        parentFolder = self._getGoogleFile(pathList[0])
-
-        if(parentFolder != None):
-            #create a new file in this folder
-            newFile = self.drive.CreateFile({"title":pathList[1],"parents":[{'kind':'drive#fileLink','id':parentFolder['id']}]})
-            newFile.SetContentFile(source)
-            newFile.Upload()
-        else:
-            result = False
-
-        return result
-
-    def get_file(self,source, dest):
-        result = True
-
-        #get the id of this file
-        file = self._getGoogleFile(source)
-
-        if(file != None):
-            file.GetContentFile(dest)
-        else:
-            result = False
-
-        return result
-
-    def rmdir(self,directory):
-        result = True
-
-        #check that the folder exists
-        folder = self._getGoogleFile(directory)
-
-        if(folder != None):
-            #delete the folder
-            folder.Delete()
-        else:
-            result = False
-
-        return result
-
-    def rmfile(self,aFile):
-        #really just the same as the remove directory function
-        return self.rmdir(aFile)
-
-    def exists(self,aFile):
-        #attempt to get this file
-        foundFile = self._getGoogleFile(aFile)
-
-        if(foundFile != None):
-            return True
-        else:
-            return False
-
-    def rename(self,aFile,newName):
-        return True
-
-    def _getGoogleFile(self,file):
-        result = None
-
-        #file must start with / and not end with one (even directory)
-        if(not file.startswith('/')):
-            file = '/' + file
-
-        if(file.endswith('/')):
-            file = file[:-1]
-
-        if(file in self.history):
-            result = self.history[file]
-        else:
-            pathList = os.path.split(file)
-
-            #end of recursion, we got the root
-            if(pathList[0] == '/'):
-                #get the id of this file (if it exists)
-                file_list = self.drive.ListFile({'q':"title='" + pathList[1] + "' and 'root' in parents and trashed=false"}).GetList()
-
-                if(len(file_list) > 0):
-                    result = file_list[0]
-                    self.history[pathList[1]] = result
-            else:
-                #recurse down the tree
-                current_file = pathList[1]
-
-                parentId = self._getGoogleFile(pathList[0])
-
-                if(parentId != None):
-                    self.history[pathList[0]] = parentId
-
-                    #attempt to get the id of this file, with this parent
-                    file_list = file_list = self.drive.ListFile({'q':"title='" + current_file + "' and '" + parentId['id'] + "' in parents and trashed=false"}).GetList()
-
-                    if(len(file_list) > 0):
-                        result = file_list[0]
-                        self.history[file] = result
-
-        return result
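With GoogleDriveFilesystem gone, the remaining Vfs implementations share one interface; a sketch of how a caller might drive the zip variant (the paths are illustrative):

    zip_vfs = ZipFileSystem('/tmp/backup.zip', 'w')  # illustrative target path
    zip_vfs.put('special://home/guisettings.xml', 'config/guisettings.xml')
    zip_vfs.cleanup()  # closes the underlying ZipFile so the archive is valid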
@@ -7,7 +7,7 @@
 <setting id="upgrade_notes" type="number" label="upgrade_notes" visible="false" default="1" />
 </category>
 <category id="backup_path" label="30048">
-    <setting id="remote_selection" type="enum" lvalues="30018|30019|30027|30098" default="0" label="30025"/>
+    <setting id="remote_selection" type="enum" lvalues="30018|30019|30027" default="0" label="30025"/>
     <setting id="remote_path_2" type="text" label="30024" default="" visible="eq(-1,1)" />
     <setting id="remote_path" type="folder" label="30020" visible="eq(-2,0)" />
     <setting id="dropbox_key" type="text" label="30028" visible="eq(-3,2)" default="" />
scheduler.py
@@ -1,13 +1,14 @@
+import time
+from datetime import datetime
 import xbmc
 import xbmcvfs
 import xbmcgui
-from datetime import datetime
-import time
 import resources.lib.utils as utils
 from resources.lib.croniter import croniter
 from resources.lib.backup import XbmcBackup
 
-UPGRADE_INT = 2 #to keep track of any upgrade notifications
+UPGRADE_INT = 2  # to keep track of any upgrade notifications
 
 
 class BackupScheduler:
     monitor = None
@@ -15,15 +16,15 @@ class BackupScheduler:
     next_run = 0
     next_run_path = None
     restore_point = None
 
     def __init__(self):
-        self.monitor = UpdateMonitor(update_method = self.settingsChanged)
+        self.monitor = UpdateMonitor(update_method=self.settingsChanged)
         self.enabled = utils.getSetting("enable_scheduler")
         self.next_run_path = xbmc.translatePath(utils.data_dir()) + 'next_run.txt'
 
         if(self.enabled == "true"):
 
-            #sleep for 2 minutes so Kodi can start and time can update correctly
+            # sleep for 2 minutes so Kodi can start and time can update correctly
             xbmc.Monitor().waitForAbort(120)
 
             nr = 0
@@ -31,127 +32,127 @@ class BackupScheduler:
         fh = xbmcvfs.File(self.next_run_path)
         try:
-            #check if we saved a run time from the last run
+            # check if we saved a run time from the last run
             nr = float(fh.read())
         except ValueError:
             nr = 0
 
         fh.close()
 
-        #if we missed and the user wants to play catch-up
+        # if we missed and the user wants to play catch-up
         if(0 < nr <= time.time() and utils.getSetting('schedule_miss') == 'true'):
             utils.log("scheduled backup was missed, doing it now...")
             progress_mode = int(utils.getSetting('progress_mode'))
 
             if(progress_mode == 0):
                 progress_mode = 1  # Kodi just started, don't block it with a foreground progress bar
 
             self.doScheduledBackup(progress_mode)
 
         self.setup()
 
     def setup(self):
-        #scheduler was turned on, find next run time
+        # scheduler was turned on, find next run time
         utils.log("scheduler enabled, finding next run time")
         self.findNextRun(time.time())
 
     def start(self):
 
-        #display upgrade messages if they exist
+        # display upgrade messages if they exist
         if(int(utils.getSetting('upgrade_notes')) < UPGRADE_INT):
-            xbmcgui.Dialog().ok(utils.getString(30010),utils.getString(30132))
-            utils.setSetting('upgrade_notes',str(UPGRADE_INT))
+            xbmcgui.Dialog().ok(utils.getString(30010), utils.getString(30132))
+            utils.setSetting('upgrade_notes', str(UPGRADE_INT))
 
-        #check if a backup should be resumed
+        # check if a backup should be resumed
         resumeRestore = self._resumeCheck()
 
         if(resumeRestore):
             restore = XbmcBackup()
             restore.selectRestore(self.restore_point)
-            #skip the advanced settings check
+            # skip the advanced settings check
             restore.skipAdvanced()
             restore.restore()
 
         while(not self.monitor.abortRequested()):
 
             if(self.enabled == "true"):
-                #scheduler is still on
+                # scheduler is still on
                 now = time.time()
 
                 if(self.next_run <= now):
                     progress_mode = int(utils.getSetting('progress_mode'))
                     self.doScheduledBackup(progress_mode)
 
-                    #check if we should shut the computer down
+                    # check if we should shut the computer down
                     if(utils.getSetting("cron_shutdown") == 'true'):
-                        #wait 10 seconds to make sure all backup processes and files are completed
+                        # wait 10 seconds to make sure all backup processes and files are completed
                         time.sleep(10)
                         xbmc.executebuiltin('ShutDown()')
                     else:
-                        #find the next run time like normal
+                        # find the next run time like normal
                         self.findNextRun(now)
 
             xbmc.sleep(500)
 
-        #delete monitor to free up memory
+        # delete monitor to free up memory
         del self.monitor
 
-    def doScheduledBackup(self,progress_mode):
+    def doScheduledBackup(self, progress_mode):
         if(progress_mode != 2):
             utils.showNotification(utils.getString(30053))
 
         backup = XbmcBackup()
 
         if(backup.remoteConfigured()):
 
-            if(int(utils.getSetting('progress_mode')) in [0,1]):
+            if(int(utils.getSetting('progress_mode')) in [0, 1]):
                 backup.backup(True)
             else:
                 backup.backup(False)
 
-            #check if this is a "one-off"
+            # check if this is a "one-off"
             if(int(utils.getSetting("schedule_interval")) == 0):
-                #disable the scheduler after this run
+                # disable the scheduler after this run
                 self.enabled = "false"
-                utils.setSetting('enable_scheduler','false')
+                utils.setSetting('enable_scheduler', 'false')
         else:
             utils.showNotification(utils.getString(30045))
 
-    def findNextRun(self,now):
+    def findNextRun(self, now):
         progress_mode = int(utils.getSetting('progress_mode'))
 
-        #find the cron expression and get the next run time
+        # find the cron expression and get the next run time
         cron_exp = self.parseSchedule()
 
-        cron_ob = croniter(cron_exp,datetime.fromtimestamp(now))
+        cron_ob = croniter(cron_exp, datetime.fromtimestamp(now))
         new_run_time = cron_ob.get_next(float)
 
         if(new_run_time != self.next_run):
             self.next_run = new_run_time
-            utils.log("scheduler will run again on " + utils.getRegionalTimestamp(datetime.fromtimestamp(self.next_run),['dateshort','time']))
+            utils.log("scheduler will run again on " + utils.getRegionalTimestamp(datetime.fromtimestamp(self.next_run), ['dateshort', 'time']))
 
-            #write the next time to a file
+            # write the next time to a file
             fh = xbmcvfs.File(self.next_run_path, 'w')
             fh.write(str(self.next_run))
             fh.close()
 
-            #only show when not in silent mode
+            # only show when not in silent mode
             if(progress_mode != 2):
-                utils.showNotification(utils.getString(30081) + " " + utils.getRegionalTimestamp(datetime.fromtimestamp(self.next_run),['dateshort','time']))
+                utils.showNotification(utils.getString(30081) + " " + utils.getRegionalTimestamp(datetime.fromtimestamp(self.next_run), ['dateshort', 'time']))
 
     def settingsChanged(self):
         current_enabled = utils.getSetting("enable_scheduler")
 
         if(current_enabled == "true" and self.enabled == "false"):
-            #scheduler was just turned on
+            # scheduler was just turned on
             self.enabled = current_enabled
             self.setup()
         elif (current_enabled == "false" and self.enabled == "true"):
-            #schedule was turn off
+            # schedule was turned off
             self.enabled = current_enabled
 
         if(self.enabled == "true"):
-            #always recheck the next run time after an update
+            # always recheck the next run time after an update
             self.findNextRun(time.time())
 
     def parseSchedule(self):
@@ -161,14 +162,14 @@ class BackupScheduler:
         hour_of_day = utils.getSetting("schedule_time")
         hour_of_day = int(hour_of_day[0:2])
         if(schedule_type == 0 or schedule_type == 1):
-            #every day
+            # every day
             cron_exp = "0 " + str(hour_of_day) + " * * *"
         elif(schedule_type == 2):
-            #once a week
+            # once a week
             day_of_week = utils.getSetting("day_of_week")
             cron_exp = "0 " + str(hour_of_day) + " * * " + day_of_week
         elif(schedule_type == 3):
-            #first day of month
+            # first day of month
             cron_exp = "0 " + str(hour_of_day) + " 1 * *"
 
         return cron_exp
@@ -176,23 +177,24 @@ class BackupScheduler:
     def _resumeCheck(self):
         shouldContinue = False
         if(xbmcvfs.exists(xbmc.translatePath(utils.data_dir() + "resume.txt"))):
-            rFile = xbmcvfs.File(xbmc.translatePath(utils.data_dir() + "resume.txt"),'r')
+            rFile = xbmcvfs.File(xbmc.translatePath(utils.data_dir() + "resume.txt"), 'r')
             self.restore_point = rFile.read()
             rFile.close()
             xbmcvfs.delete(xbmc.translatePath(utils.data_dir() + "resume.txt"))
-            shouldContinue = xbmcgui.Dialog().yesno(utils.getString(30042),utils.getString(30043),utils.getString(30044))
+            shouldContinue = xbmcgui.Dialog().yesno(utils.getString(30042), utils.getString(30043), utils.getString(30044))
 
         return shouldContinue
 
 
 class UpdateMonitor(xbmc.Monitor):
     update_method = None
 
-    def __init__(self,*args, **kwargs):
+    def __init__(self, *args, **kwargs):
         xbmc.Monitor.__init__(self)
         self.update_method = kwargs['update_method']
 
     def onSettingsChanged(self):
         self.update_method()
 
 
 BackupScheduler().start()
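parseSchedule() reduces the add-on settings to a five-field cron expression ("0 9 * * 5" for a weekly 09:00 Friday backup, for example), and findNextRun() hands it to croniter; a minimal sketch of that round trip (times are illustrative):

    from datetime import datetime
    from resources.lib.croniter import croniter

    cron_ob = croniter("0 9 * * 5", datetime(2020, 1, 15, 8, 0))  # a Wednesday morning
    next_run = cron_ob.get_next(float)
    # epoch timestamp for Friday 2020-01-17 09:00, the next matching slot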