Compare commits: jarvis...krypton-1.
120 Commits
Commits (SHA1):
56a1d95930, 49b3144baa, adc6ee0c52, 5030cbce15, 236ae90bd1, fd4bba7c81, ca05f9e9f8, 9088127b13,
02ce0baf5e, 03c20a2def, ef7bef7908, 13d5dc1b79, 373ec95a8a, fa6bdf43a4, 1eca075d75, c2afb9b3ec,
d9e1fe5170, 3bceb19e01, 5745a20d33, abb27a7251, 9ae5545f6c, 332afffc5b, 42d0f1b451, ff2ca53a22,
1a27b279b0, 6dfa4a5520, a7b9aeb9c1, 7226178bfb, f5bd7130e2, ae76d24e86, 4d56331d8f, 45cf9a367d,
d8ceecb168, a2d7e8613a, b21c11de26, a4bb3f3feb, 1f6324b2d5, 12b25f7cea, 5d9d8a1820, 2fdf8d37fe,
b34e538d6b, b5a7aada4c, 1a9c43b998, b7f4b14fe2, 787b054bba, a7be48a341, 2fe76b7b52, 3aed105fd7,
c9b4554eac, e736b964a5, 4c5f6774df, 1f2e315208, 138f910d07, 1d3b2f58ab, 865416977d, 68093b2130,
701a1831bf, 493e0d3a2e, d87e209226, 9960e2fc6b, 6aae9d9247, 004b8dae58, 6b934ed30c, e950400222,
cb2bb8a237, eb765c974b, d18ed2960e, 9f1755686c, 534b3b108f, 4a8b891129, 49af21a67e, 3ee2cb0414,
061fd3efed, 76c2fdc0c2, 2c999b46b9, 4d891ab551, 6c33e7c9ba, f0d8e297a9, 04ec3bd8a8, 65ea3c98c4,
4108f333e2, 913090637c, 7139b920ad, 48d07c24a0, 90e4b0c1f4, c9415cbf59, 181654b414, 71b048418e,
1896a684b0, 545bd93e8c, 83a01a48bf, dcc8482d73, 85306f9469, c0d3b01ade, 5c6a8ce91a, c8f148cd1a,
b6e57b04b1, adbf225ea2, d9d6c1ed42, f5f7bcfcb5, 4608f04d96, e1c8b5a61c, ada1efb165, 5a43b5e340,
33bc84c288, 8c61616d3c, 622939901e, 756f50bba1, 5fefbd286d, 5779784e0a, 24f570e888, 489dcd317f,
216e2f4561, c3fe86293d, 20ee7a92ad, 64daaa13e8, dda08d04a3, 469b5ff340, ff2f764b2f, 7d51ee05bc

.github/stale-dontuse.yml (18 lines, vendored, new file)
@@ -0,0 +1,18 @@
# Configuration for probot-stale - https://github.com/probot/stale

# Number of days of inactivity before an Issue or Pull Request becomes stale
daysUntilStale: 31

# Number of days of inactivity before a stale Issue or Pull Request is closed
daysUntilClose: 14

# Only issues or pull requests with all of these labels are check if stale. Defaults to `[]` (disabled)
onlyLabels:
  - waiting for info
  - wontfix

# Label to use when marking as stale
staleLabel: inactive

# Comment to post when marking as stale. Set to `false` to disable
markComment: >
  This issue has been automatically marked as inactive because it has not had
  recent activity. It will be closed if no further activity occurs.

.travis.yml (13 lines, new file)
@@ -0,0 +1,13 @@
dist: xenial
language: python
python: 3.7

install:
  - pip install kodi-addon-checker

before_script:
  - git config core.quotepath false

# command to run our tests
script:
  - kodi-addon-checker --branch=krypton --allow-folder-id-mismatch

@@ -1,4 +1,5 @@
 # Backup Addon
+[](https://travis-ci.com/robweber/xbmcbackup) [](https://github.com/robweber/xbmcbackup/blob/master/LICENSE.txt)
 
 ## About
@@ -17,6 +18,11 @@ For more specific information please check out the [wiki on Github](https://gith
 * [FAQ](https://github.com/robweber/xbmcbackup/wiki/FAQ)
 
+
+## Attributions
+
+Icon files from Open Iconic - www.useiconic.com/open
+

addon.xml (21 changed lines)
@@ -1,15 +1,15 @@
 <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
 <addon id="script.xbmcbackup"
-       name="Backup" version="1.1.3" provider-name="robweber">
+       name="Backup" version="1.5.4" provider-name="robweber">
   <requires>
     <!-- jarvis -->
-    <import addon="xbmc.python" version="2.24.0"/>
+    <import addon="xbmc.python" version="2.25.0"/>
     <import addon="script.module.httplib2" version="0.8.0" />
     <import addon="script.module.oauth2client" version="4.1.2" />
     <import addon="script.module.uritemplate" version="0.6" />
     <import addon="script.module.yaml" version="3.11"/>
     <import addon="script.module.googleapi" version="1.6.4" />
-    <import addon="script.module.requests" version="2.9.1" />
+    <import addon="script.module.dropbox" version="8.4.2"/>
   </requires>
   <extension point="xbmc.python.script" library="default.py">
     <provides>executable</provides>
@@ -89,12 +89,17 @@
     <source>https://github.com/robweber/xbmcbackup</source>
     <email></email>
     <assets>
-      <icon>resources/media/icon.png</icon>
+      <icon>resources/images/icon.png</icon>
+      <screenshot>resources/images/screenshot1.jpg</screenshot>
+      <screenshot>resources/images/screenshot2.jpg</screenshot>
+      <screenshot>resources/images/screenshot3.jpg</screenshot>
+      <screenshot>resources/images/screenshot4.jpg</screenshot>
     </assets>
-    <news>Version 1.1.4
-    - added file chunk support for dropbox uploads
-    - fixed settings duplicate ids, thanks aster-anto
-    - added scheduler delay to assist with time sync (rpi mostly)
+    <news>Version 1.5.4
+    - converted PNG to JPG
+    - updated Travis-CI links
+    - added utf decode to JSON calls
+    - added better system settings/restore functionality (enabled by default)
     </news>
   </extension>
 </addon>

@@ -1,37 +0,0 @@
import sys
import urlparse
import xbmcgui
import resources.lib.utils as utils
from resources.lib.authorizers import DropboxAuthorizer,GoogleDriveAuthorizer

def get_params():
    param = {}
    try:
        for i in sys.argv:
            args = i
            if(args.startswith('?')):
                args = args[1:]
            param.update(dict(urlparse.parse_qsl(args)))
    except:
        pass
    return param

params = get_params()

#drobpox
if(params['type'] == 'dropbox'):
    authorizer = DropboxAuthorizer()

    if(authorizer.authorize()):
        xbmcgui.Dialog().ok(utils.getString(30010),utils.getString(30027) + ' ' + utils.getString(30106))
    else:
        xbmcgui.Dialog().ok(utils.getString(30010),utils.getString(30107) + ' ' + utils.getString(30027))

#google drive
elif(params['type'] == 'google_drive'):
    authorizer = GoogleDriveAuthorizer()

    if(authorizer.authorize()):
        xbmcgui.Dialog().ok("Backup",utils.getString(30098) + ' ' + utils.getString(30106))
    else:
        xbmcgui.Dialog().ok("Backup",utils.getString(30107) + ' ' + utils.getString(30098))

changelog.md (396 lines, new file)
@@ -0,0 +1,396 @@
# Changelog

All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)

## [Version 1.5.4](https://github.com/robweber/xbmcbackup/compare/krypton-1.5.3...robweber:krypton-1.5.4) - 2020-12-03
### Added
- added method to backup/restore Kodi settings via the GetSettings/SetSettingValue JSON methods in the validation file
- added setting to always restore settings or prompt at the time of backup
### Changed
- changed PNG screenshots to JPG (per #165)
- updated Travis CI links. travis-ci.org is deprecated in favor of travis-ci.com
- decode string after GetSettings JSON call (per #162)

## [Version 1.5.3](https://github.com/robweber/xbmcbackup/compare/krypton-1.5.2...robweber:krypton-1.5.3) - 2019-12-30
### Added
- added new badges for Kodi Version, TravisCI and license information from shields.io
### Changed
- backported a fix from Matrix that fixes backup rotation paths when slashes are missing from the base path (thanks @AnonTester)

## [Version 1.5.2](https://github.com/robweber/xbmcbackup/compare/krypton-1.5.1...robweber:krypton-1.5.2) - 2019-09-30
### Added
- Updated Changelog format to the one suggested by [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
- Added script.module.dropbox import as a dependency for Dropbox filesystem
### Changed
- Fixed issue getting xbmcbackup.val file from non-zipped remote directories. Was being copied as though it was a local file so it was failing.
- Use linux path separator (/) all the time, Kodi will interpret this correctly on windows. Was causing issues with remote file systems since os.path.sep varies by platform.
- Fixed minor python code style changes based on kodi-addon-checker output
### Removed
- files related to dropbox library, using script.module.dropbox import now

## Version 1.5.1 - 2019-09-10
### Changed
- Fixed guisettings restores not working - thanks Bluerayx

## Version 1.5.0 - 2019-08-26
### Added
- Added new Advanced file editor and file selection based on a .json
### Removed
- File backups and restores will not work with old version - breaking change with previous versions PR117

## Version 1.1.3 - 2017-12-29
### Added
- added file chunk support for Dropbox uploads
- added scheduler delay to assist with time sync (rpi mostly), will delay startup by 2 min
### Changed
- fixed settings duplicate ids, thanks aster-anto

## Version 1.1.2
### Added
- Fixes to the Dropbox lib for python 2.6

## Version 1.1.1
### Added
- added ability to "catchup" on missed scheduled backup
### Changed
- fixed error on authorizers (missing secret/key)
- updated google oauth and client versions
- merged in dropbox v2 library code

## Version 1.1.0
### Added
- added tinyurl generation for oauth urls
### Changed
- moved authorize to settings area for cloud storage

## Version 1.0.9
### Changed
- fixed dropbox rest.py for Python 2.6 - thanks koying!

## Version 1.0.8
### Changed
- updated dropbox api

## Version 1.0.7
### Changed
- updated google client api version

## Version 1.0.6
### Added
- added progress for zip extraction - hopefully helps with extract errors
### Changed
- fix for custom directories not working recursively

## Version 1.0.5
### Added
- added google drive support
- added settings dialog option - thanks ed_davidson
### Changed
- make compression setting compatible with python 2.6 and above
- fix for growing backups - thanks brokeh

## Version 1.0.4
### Added
- exit if we can't delete the old archive, non recoverable

## Version 1.0.3
### Added
- added "delete auth" dialog to delete oauth files in settings

## Version 1.0.2
### Changed
- updated xbmc.python version to 2.19.0 - should be helix only

## Version 1.0.0
### Changed
- rebranded as "Backup"
- removed XBMC references and replaced with Kodi
- tweaked file walking for Helix

## Version 0.5.9
### Added
- create restored version of guisettings for easy local restoration
### Changed
- fixed dropbox unicode error

## Version 0.5.8.7
### Added
- allow limited updating of guisettings file through json

## Version 0.5.8.6
### Added
- show notification if some files failed
- check if destination is writeable - thanks war59312

## Version 0.5.8.5
### Added
- added custom library nodes to config backup options - thanks Ned Scott

## Version 0.5.8.4
### Changed
- backup compression should use zip64 as sizes may be over 2GB
- need to expand out path - bugfix

## Version 0.5.8
- fixes path substitution errors

## Version 0.5.7
- added option to compress backups, uses local source for staging the zip before sending to remote

## Version 0.5.6
- fix dropbox delete recursion error - thanks durd; updated language files

## Version 0.5.5
- fix for dropbox errors during repeated file upload attempts

## Version 0.5.4
- check xbmc version when doing a restore

## Version 0.5.3
- updated python version

## Version 0.5.2
- added additional script and window parameters, thanks Samu-rai
- critical error in backup rotation
- updated progress bar display

## Version 0.5.1
- updated for new Gotham xbmc python updates

## Version 0.5.0
- New Version for Gotham

## Version 0.4.6
- modified backup folder names to include time, also modified display listing

## Version 0.4.5
- added version info to logs
- added try/catch for unicode errors

## Version 0.4.4
- modified the check for invalid file types

## Version 0.4.3
- added error message if remote directory is blank
- added license tag

## Version 0.4.2
- Added support for userdata/profiles folder - thanks TUSSFC

## Version 0.4.1
- added encode() around notifications

## Version 0.4.0
- fixed settings display error - thanks zer04c

## Version 0.3.9
- added "just once" scheduler for one-off type backups
- show notification on scheduler
- updated language files from Transifex

## Version 0.3.8
- added advancedsettings check on restore. prompts user to restore only this file and restart xbmc to continue. This fixes issues where path substitution was not working during restores - thanks ctrlbru

## Version 0.3.7
- added optional addon.xml tags
- updated language files from Transifex

## Version 0.3.6
- added up to 2 custom directories, can be toggled on/off
- added a check for backup verification before rotation - no more deleting non backup related files
- use monitor class for onSettingsChanged method

## Version 0.3.5
- test of custom directories - only 1 at the moment

## Version 0.3.4
- added ability to take parameters via RunScript() or JSONRPC.Addons.ExecuteAddon()

## Version 0.3.3
- updated xbmc python version (2.1.0)

## Version 0.3.2
- added settings for user provided Dropbox key and secret

## Version 0.3.1
- added try/except for multiple character encodings
- remove token.txt file if Dropbox Authorization is revoked
- can shutdown xbmc after scheduled backup

## Version 0.3.0
- major vfs rewrite
- Added Dropbox as storage target
- updated gui/removed settings - thanks SFX Group for idea!

## Version 0.2.3
- first official frodo build

## Version 0.2.2
- fix for backup rotation sort

## Version 0.2.1
- added ability to rotate backups, keeping a set number of days

## Version 0.2.0
- removed the vfs.py helper library
- default.py file now uses xbmcvfs python library exclusively for listing directories and copy operations

## Version 0.1.7
- minor bug fixes and translation updates

## Version 0.1.6
- merged scheduler branch with master, can now schedule backups on an interval

## Version 0.1.5
- pulled xbmcbackup class into separate library

## Version 0.1.4
- added more verbose error message for incorrect paths

## Version 0.1.3
- backup folder format - thanks zeroram
- added German translations - thanks dersphere
- removed need for separate verbose logging setting
- updated utf-8 encoding for all logging
- backup now uses date as folder name, restore allows user to type date of last backup

## Version 0.1.2
- added French language translation - thanks mikebzh44
- added some utf-8 encoding tags to filenames

## Version 0.1.1
- added check for key in vfs.py - Thanks Martijn!

## Version 0.1.0
- removed transparency from icon.png

## Version 0.0.9
- modified vfs.py again to filter out xsp files (smart playlists). Created running list for these types of compressed files
- added enable/disable logging toggle in settings

## Version 0.0.8
- modified vfs.py script to exclude handling zip files as directories, added keymap and peripheral data folders in the "config" section

## Version 0.0.7
- removed "restore.txt" file and now write file listing to memory list instead

## Version 0.0.6
- Added the vfs module created by paddycarey
- File Selection is now followed for both backup and restore options

## Version 0.0.5
- Added option to manually type a path rather than browse for one (only one used)
- Show progress bar right away so you know this is doing something

## Version 0.0.4
- Finished code for restore mode.

## Version 0.0.3
- Added progress bar and "silent" option for running on startup or as a script

## Version 0.0.2
- First version, should backup directories as needed
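
The Version 1.5.4 entry above backs up and restores Kodi system settings over JSON-RPC. A minimal sketch of that round trip, assuming only Kodi's standard `xbmc.executeJSONRPC` interface and the `Settings.GetSettings`/`Settings.SetSettingValue` methods the entry names (illustrative only, not the addon's actual implementation):

```python
import json
import xbmc

def read_all_settings():
    # Ask Kodi for every system setting it exposes over JSON-RPC.
    response = xbmc.executeJSONRPC(json.dumps({
        "jsonrpc": "2.0", "id": 1,
        "method": "Settings.GetSettings"
    }))
    result = json.loads(response).get("result", {})
    # Keep only id/value pairs so they can be written to a backup file.
    return {s["id"]: s["value"] for s in result.get("settings", []) if "value" in s}

def restore_settings(saved):
    # Push each saved value back, one setting at a time.
    for setting_id, value in saved.items():
        xbmc.executeJSONRPC(json.dumps({
            "jsonrpc": "2.0", "id": 1,
            "method": "Settings.SetSettingValue",
            "params": {"setting": setting_id, "value": value}
        }))
```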

changelog.txt (314 lines, deleted)
@@ -1,314 +0,0 @@
(plain-text changelog, Version 1.1.3 back through Version 0.0.2; the same entries appear in changelog.md above)

default.py (168 changed lines)
@@ -1,77 +1,91 @@
-import urlparse
-import xbmcgui
+import sys, urlparse
+import xbmc, xbmcgui
 import resources.lib.utils as utils
 from resources.lib.backup import XbmcBackup
 
 def get_params():
     param = {}
 
     if(len(sys.argv) > 1):
         for i in sys.argv:
             args = i
             if(args.startswith('?')):
                 args = args[1:]
             param.update(dict(urlparse.parse_qsl(args)))
 
     return param
 
 #the program mode
 mode = -1
 params = get_params()
 
 
 if("mode" in params):
     if(params['mode'] == 'backup'):
         mode = 0
     elif(params['mode'] == 'restore'):
         mode = 1
 
 #if mode wasn't passed in as arg, get from user
 if(mode == -1):
-    #figure out if this is a backup or a restore from the user
-    mode = xbmcgui.Dialog().select(utils.getString(30010) + " - " + utils.getString(30023),[utils.getString(30016),utils.getString(30017),utils.getString(30099)])
-
-#check if program should be run
-if(mode != -1):
-    #run the profile backup
-    backup = XbmcBackup()
-
-    if(mode == 2):
-        #open the settings dialog
-        utils.openSettings()
-
-    elif(backup.remoteConfigured()):
-
-        if(mode == backup.Restore):
-            #get list of valid restore points
-            restorePoints = backup.listBackups()
-            pointNames = []
-            folderNames = []
-
-            for aDir in restorePoints:
-                pointNames.append(aDir[1])
-                folderNames.append(aDir[0])
-
-            selectedRestore = -1
-
-            if("archive" in params):
-                #check that the user give archive exists
-                if(params['archive'] in folderNames):
-                    #set the index
-                    selectedRestore = folderNames.index(params['archive'])
-                    utils.log(str(selectedRestore) + " : " + params['archive'])
-                else:
-                    utils.showNotification(utils.getString(30045))
-                    utils.log(params['archive'] + ' is not a valid restore point')
-            else:
-                #allow user to select the backup to restore from
-                selectedRestore = xbmcgui.Dialog().select(utils.getString(30010) + " - " + utils.getString(30021),pointNames)
-
-            if(selectedRestore != -1):
-                backup.selectRestore(restorePoints[selectedRestore][0])
-
-        backup.run(mode)
-    else:
-        #can't go any further
-        xbmcgui.Dialog().ok(utils.getString(30010),utils.getString(30045))
-        utils.openSettings()
+    #by default, Backup,Restore,Open Settings
+    options = [utils.getString(30016),utils.getString(30017),utils.getString(30099)]
+
+    #find out if we're using the advanced editor
+    if(int(utils.getSetting('backup_selection_type')) == 1):
+        options.append(utils.getString(30125))
+
+    #figure out if this is a backup or a restore from the user
+    mode = xbmcgui.Dialog().select(utils.getString(30010) + " - " + utils.getString(30023),options)
+
+#check if program should be run
+if(mode != -1):
+    #run the profile backup
+    backup = XbmcBackup()
+
+    if(mode == 2):
+        #open the settings dialog
+        utils.openSettings()
+    elif(mode == 3 and int(utils.getSetting('backup_selection_type')) == 1):
+        #open the advanced editor
+        xbmc.executebuiltin('RunScript(special://home/addons/script.xbmcbackup/launcher.py,action=advanced_editor)')
+    elif(backup.remoteConfigured()):
+
+        if(mode == backup.Restore):
+            #get list of valid restore points
+            restorePoints = backup.listBackups()
+            pointNames = []
+            folderNames = []
+
+            for aDir in restorePoints:
+                pointNames.append(aDir[1])
+                folderNames.append(aDir[0])
+
+            selectedRestore = -1
+
+            if("archive" in params):
+                #check that the user give archive exists
+                if(params['archive'] in folderNames):
+                    #set the index
+                    selectedRestore = folderNames.index(params['archive'])
+                    utils.log(str(selectedRestore) + " : " + params['archive'])
+                else:
+                    utils.showNotification(utils.getString(30045))
+                    utils.log(params['archive'] + ' is not a valid restore point')
+            else:
+                #allow user to select the backup to restore from
+                selectedRestore = xbmcgui.Dialog().select(utils.getString(30010) + " - " + utils.getString(30021),pointNames)
+
+            if(selectedRestore != -1):
+                backup.selectRestore(restorePoints[selectedRestore][0])
+
+                if('sets' in params):
+                    backup.restore(selectedSets=params['sets'].split('|'))
+                else:
+                    backup.restore()
+        else:
+            backup.backup()
+    else:
+        #can't go any further
+        xbmcgui.Dialog().ok(utils.getString(30010),utils.getString(30045))
+        utils.openSettings()
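
The new default.py reads `mode`, `archive`, and `sets` out of its launch arguments with `urlparse.parse_qsl`, so a backup or restore can also be triggered without the selection dialog. A hedged sketch of the two invocation styles the changelog mentions (RunScript and JSONRPC `Addons.ExecuteAddon`); exact parameter quoting can differ between Kodi versions:

```python
import json
import xbmc

# Kick off an unattended backup through the built-in RunScript command.
xbmc.executebuiltin('RunScript(script.xbmcbackup, mode=backup)')

# The same request over JSON-RPC, e.g. from a remote client or another addon.
request = {
    "jsonrpc": "2.0", "id": 1,
    "method": "Addons.ExecuteAddon",
    "params": {"addonid": "script.xbmcbackup", "params": {"mode": "backup"}},
}
xbmc.executeJSONRPC(json.dumps(request))
```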

launcher.py (64 lines, new file)
@@ -0,0 +1,64 @@
import sys
import urlparse
import xbmc
import xbmcgui
import xbmcvfs
import resources.lib.utils as utils
from resources.lib.authorizers import DropboxAuthorizer,GoogleDriveAuthorizer
from resources.lib.advanced_editor import AdvancedBackupEditor


#launcher for various helpful functions found in the settings.xml area

def authorize_cloud(cloudProvider):
    #drobpox
    if(cloudProvider == 'dropbox'):
        authorizer = DropboxAuthorizer()

        if(authorizer.authorize()):
            xbmcgui.Dialog().ok(utils.getString(30010),utils.getString(30027) + ' ' + utils.getString(30106))
        else:
            xbmcgui.Dialog().ok(utils.getString(30010),utils.getString(30107) + ' ' + utils.getString(30027))

    #google drive
    elif(cloudProvider == 'google_drive'):
        authorizer = GoogleDriveAuthorizer()

        if(authorizer.authorize()):
            xbmcgui.Dialog().ok("Backup",utils.getString(30098) + ' ' + utils.getString(30106))
        else:
            xbmcgui.Dialog().ok("Backup",utils.getString(30107) + ' ' + utils.getString(30098))

def remove_auth():
    #triggered from settings.xml - asks if user wants to delete OAuth token information
    shouldDelete = xbmcgui.Dialog().yesno(utils.getString(30093),utils.getString(30094),utils.getString(30095),autoclose=7000)

    if(shouldDelete):
        #delete any of the known token file types
        xbmcvfs.delete(xbmc.translatePath(utils.data_dir() + "tokens.txt")) #dropbox
        xbmcvfs.delete(xbmc.translatePath(utils.data_dir() + "google_drive.dat")) #google drive

def get_params():
    param = {}
    try:
        for i in sys.argv:
            args = i
            if(args.startswith('?')):
                args = args[1:]
            param.update(dict(urlparse.parse_qsl(args)))
    except:
        pass
    return param

params = get_params()

if(params['action'] == 'authorize_cloud'):
    authorize_cloud(params['provider'])
elif(params['action'] == 'remove_auth'):
    remove_auth()
elif(params['action'] == 'advanced_editor'):
    editor = AdvancedBackupEditor()
    editor.showMainScreen()
elif(params['action'] == 'advanced_copy_config'):
    editor = AdvancedBackupEditor()
    editor.copySimpleConfig()
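
launcher.py dispatches on the `action` argument (plus `provider` for the cloud authorizers), and default.py above already calls it this way to open the advanced editor. The remaining actions are wired up from settings.xml, which is not part of this diff, so the calls below only illustrate the same pattern:

```python
import xbmc

LAUNCHER = 'special://home/addons/script.xbmcbackup/launcher.py'

# Start the Dropbox OAuth flow.
xbmc.executebuiltin('RunScript(%s,action=authorize_cloud,provider=dropbox)' % LAUNCHER)

# Delete any stored OAuth tokens (tokens.txt / google_drive.dat).
xbmc.executebuiltin('RunScript(%s,action=remove_auth)' % LAUNCHER)
```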

@@ -1,13 +0,0 @@
import xbmc
import xbmcgui
import xbmcvfs
import resources.lib.utils as utils

#triggered from settings.xml - asks if user wants to delete OAuth token information
shouldDelete = xbmcgui.Dialog().yesno(utils.getString(30093),utils.getString(30094),utils.getString(30095),autoclose=7000)

if(shouldDelete):
    #delete any of the known token file types
    xbmcvfs.delete(xbmc.translatePath(utils.data_dir() + "tokens.txt")) #dropbox
    xbmcvfs.delete(xbmc.translatePath(utils.data_dir() + "google_drive.dat")) #google drive

resources/data/default_files.json (105 lines, new file)
@@ -0,0 +1,105 @@
{
  "addons": {
    "root": "special://home/addons/",
    "dirs": [
      {"type": "include", "path": "special://home/addons/", "recurse": true},
      {"type": "exclude", "path": "special://home/addons/packages/"},
      {"type": "exclude", "path": "special://home/addons/temp/"}
    ]
  },
  "addon_data": {
    "root": "special://home/userdata/addon_data/",
    "dirs": [
      {"type": "include", "path": "special://home/userdata/addon_data/", "recurse": true}
    ]
  },
  "database": {
    "root": "special://home/userdata/Database/",
    "dirs": [
      {"type": "include", "path": "special://home/userdata/Database/", "recurse": true}
    ]
  },
  "game_saves": {
    "root": "special://home/userdata/Savestates/",
    "dirs": [
      {"type": "include", "path": "special://home/userdata/Savestates/", "recurse": true}
    ]
  },
  "playlists": {
    "root": "special://home/userdata/playlists/",
    "dirs": [
      {"type": "include", "path": "special://home/userdata/playlists/", "recurse": true}
    ]
  },
  "profiles": {
    "root": "special://home/userdata/profiles/",
    "dirs": [
      {"type": "include", "path": "special://home/userdata/profiles/", "recurse": true}
    ]
  },
  "thumbnails": {
    "root": "special://home/userdata/Thumbnails/",
    "dirs": [
      {"type": "include", "path": "special://home/userdata/Thumbnails/", "recurse": true}
    ]
  },
  "config": {
    "root": "special://home/userdata/",
    "dirs": [
      {"type": "include", "path": "special://home/userdata/", "recurse": false},
      {"type": "include", "path": "special://home/userdata/keymaps/", "recurse": true},
      {"type": "include", "path": "special://home/userdata/peripheral_data/", "recurse": true},
      {"type": "include", "path": "special://home/userdata/library/", "recurse": true}
    ]
  }
}
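
Each set above pairs a `root` with ordered `include`/`exclude` rules. The sketch below shows how such a rule list could be expanded into a concrete file list; it mirrors the structure of the file rather than the addon's actual traversal code, and assumes `xbmcvfs.listdir` for directory listings:

```python
import xbmcvfs

def walk(path, recurse):
    # List one folder; optionally descend into its sub-folders.
    dirs, files = xbmcvfs.listdir(path)
    found = [path + f for f in files]
    if recurse:
        for d in dirs:
            found.extend(walk(path + d + '/', True))
    return found

def expand_set(backup_set):
    # Gather every include rule, then drop anything that falls under an exclude rule.
    excludes = [d['path'] for d in backup_set['dirs'] if d['type'] == 'exclude']
    files = []
    for rule in backup_set['dirs']:
        if rule['type'] == 'include':
            files.extend(walk(rule['path'], rule.get('recurse', False)))
    return [f for f in files if not any(f.startswith(e) for e in excludes)]
```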

Binary files:
- resources/images/folder-icon.png (new file, 226 B)
- (binary file changed, 8.9 KiB before and after)
- resources/images/plus-icon.png (new file, 196 B)
- resources/images/screenshot1.jpg (new file, 100 KiB)
- resources/images/screenshot2.jpg (new file, 96 KiB)
- resources/images/screenshot3.jpg (new file, 135 KiB)
- resources/images/screenshot4.jpg (new file, 95 KiB)
- resources/images/screenshot5.jpg (new file, 96 KiB)
- resources/images/screenshot6.jpg (new file, 103 KiB)

@@ -48,6 +48,14 @@ msgctxt "#30013"
 msgid "Scheduling"
 msgstr "Scheduling"
 
+msgctxt "#30014"
+msgid "Simple"
+msgstr "Simple"
+
+msgctxt "#30015"
+msgid "Advanced"
+msgstr "Advanced"
+
 msgctxt "#30016"
 msgid "Backup"
 msgstr "Backup"
@@ -129,12 +137,12 @@ msgid "Config Files"
 msgstr "Config Files"
 
 msgctxt "#30036"
-msgid "Custom Directory 1"
-msgstr "Custom Directory 1"
+msgid "Disclaimer"
+msgstr "Disclaimer"
 
 msgctxt "#30037"
-msgid "Custom Directory 2"
-msgstr "Custom Directory 2"
+msgid "Canceling this menu will close and save changes"
+msgstr "Canceling this menu will close and save changes"
 
 msgctxt "#30038"
 msgid "Advanced Settings Detected"
@@ -420,3 +428,143 @@ msgstr "Visit https://console.developers.google.com/"
 msgctxt "#30109"
 msgid "Run on startup if missed"
 msgstr "Run on startup if missed"
+
+msgctxt "#30110"
+msgid "Set Name"
+msgstr ""
+
+msgctxt "#30111"
+msgid "Root folder selection"
+msgstr ""
+
+msgctxt "#30112"
+msgid "Browse Folder"
+msgstr ""
+
+msgctxt "#30113"
+msgid "Enter Own"
+msgstr ""
+
+msgctxt "#30114"
+msgid "starts in Kodi home"
+msgstr ""
+
+msgctxt "#30115"
+msgid "enter path to start there"
+msgstr ""
+
+msgctxt "#30116"
+msgid "Enter root path"
+msgstr ""
+
+msgctxt "#30117"
+msgid "Path Error"
+msgstr ""
+
+msgctxt "#30118"
+msgid "Path does not exist"
+msgstr ""
+
+msgctxt "#30119"
+msgid "Select root"
+msgstr ""
+
+msgctxt "#30120"
+msgid "Add Exclude Folder"
+msgstr ""
+
+msgctxt "#30121"
+msgid "Root Folder"
+msgstr ""
+
+msgctxt "#30122"
+msgid "Edit"
+msgstr ""
+
+msgctxt "#30123"
+msgid "Delete"
+msgstr ""
+
+msgctxt "#30124"
+msgid "Choose Action"
+msgstr ""
+
+msgctxt "#30125"
+msgid "Advanced Editor"
+msgstr ""
+
+msgctxt "#30126"
+msgid "Add Set"
+msgstr ""
+
+msgctxt "#30127"
+msgid "Delete Set"
+msgstr ""
+
+msgctxt "#30128"
+msgid "Are you sure you want to delete?"
+msgstr ""
+
+msgctxt "#30129"
+msgid "Exclude"
+msgstr ""
+
+msgctxt "#30130"
+msgid "The root folder cannot be changed"
+msgstr ""
+
+msgctxt "#30131"
+msgid "Choose Sets to Restore"
+msgstr ""
+
+msgctxt "#30132"
+msgid "Version 1.5.0 requires you to setup your file selections again - this is a breaking change"
+msgstr ""
+
+msgctxt "#30133"
+msgid "Game Saves"
+msgstr ""
+
+msgctxt "#30134"
+msgid "Include"
+msgstr ""
+
+msgctxt "#30135"
+msgid "Add Include Folder"
+msgstr ""
+
+msgctxt "#30136"
+msgid "Path must be within root folder"
+msgstr ""
+
+msgctxt "#30137"
+msgid "This path is part of a rule already"
+msgstr ""
+
+msgctxt "#30138"
+msgid "Set Name exists already"
+msgstr ""
+
+msgctxt "#30139"
+msgid "Copy Simple Config"
+msgstr ""
+
+msgctxt "#30140"
+msgid "This will copy the default Simple file selection to the Advanced Editor"
+msgstr ""
+
+msgctxt "#30141"
+msgid "This will erase any current Advanced Editor settings"
+msgstr ""
+
+msgctxt "#30148"
+msgid "Ask before restoring Kodi UI settings"
+msgstr ""
+
+msgctxt "#30149"
+msgid "Restore Kodi UI Settings"
+msgstr ""
+
+msgctxt "#30150"
+msgid "Restore saved Kodi system settings from backup?"
+msgstr ""

@@ -129,14 +129,6 @@ msgctxt "#30035"
 msgid "Config Files"
 msgstr "Config Files"
 
-msgctxt "#30036"
-msgid "Custom Directory 1"
-msgstr "Custom Directory 1"
-
-msgctxt "#30037"
-msgid "Custom Directory 2"
-msgstr "Custom Directory 2"
-
 msgctxt "#30038"
 msgid "Advanced Settings Detected"
 msgstr "Advanced Settings Detected"

resources/lib/.gitignore (2 lines, vendored, deleted)
@@ -1,2 +0,0 @@
*.pyo
229
resources/lib/advanced_editor.py
Normal file
@@ -0,0 +1,229 @@
|
|||||||
|
import json
|
||||||
|
import xbmcvfs
|
||||||
|
import xbmc
|
||||||
|
import xbmcgui
|
||||||
|
from . import utils as utils
|
||||||
|
|
||||||
|
class BackupSetManager:
|
||||||
|
jsonFile = xbmc.translatePath(utils.data_dir() + "custom_paths.json")
|
||||||
|
paths = None
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self.paths = {}
|
||||||
|
|
||||||
|
#try and read in the custom file
|
||||||
|
self._readFile()
|
||||||
|
|
||||||
|
def addSet(self,aSet):
|
||||||
|
self.paths[aSet['name']] = {'root':aSet['root'],'dirs':[{"type":"include","path":aSet['root'],'recurse':True}]}
|
||||||
|
|
||||||
|
#save the file
|
||||||
|
self._writeFile()
|
||||||
|
|
||||||
|
def updateSet(self,name,aSet):
|
||||||
|
self.paths[name] = aSet
|
||||||
|
|
||||||
|
#save the file
|
||||||
|
self._writeFile()
|
||||||
|
|
||||||
|
def deleteSet(self,index):
|
||||||
|
#match the index to a key
|
||||||
|
keys = self.getSets()
|
||||||
|
|
||||||
|
#delete this set
|
||||||
|
del self.paths[keys[index]]
|
||||||
|
|
||||||
|
#save the file
|
||||||
|
self._writeFile()
|
||||||
|
|
||||||
|
def getSets(self):
|
||||||
|
#list all current sets by name
|
||||||
|
keys = list(self.paths.keys())
|
||||||
|
keys.sort()
|
||||||
|
|
||||||
|
return keys
|
||||||
|
|
||||||
|
def getSet(self,index):
|
||||||
|
keys = self.getSets();
|
||||||
|
|
||||||
|
#return the set at this index
|
||||||
|
return {'name':keys[index],'set':self.paths[keys[index]]}
|
||||||
|
|
||||||
|
def validateSetName(self,name):
|
||||||
|
return (name not in self.getSets())
|
||||||
|
|
||||||
|
def _writeFile(self):
|
||||||
|
#create the custom file
|
||||||
|
aFile = xbmcvfs.File(self.jsonFile,'w')
|
||||||
|
aFile.write(json.dumps(self.paths))
|
||||||
|
aFile.close()
|
||||||
|
|
||||||
|
def _readFile(self):
|
||||||
|
|
||||||
|
if(xbmcvfs.exists(self.jsonFile)):
|
||||||
|
|
||||||
|
#read in the custom file
|
||||||
|
aFile = xbmcvfs.File(self.jsonFile)
|
||||||
|
|
||||||
|
#load custom dirs
|
||||||
|
self.paths = json.loads(aFile.read())
|
||||||
|
aFile.close()
|
||||||
|
else:
|
||||||
|
#write a blank file
|
||||||
|
self._writeFile()
|
||||||
|
|
||||||
|
class AdvancedBackupEditor:
|
||||||
|
dialog = None
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self.dialog = xbmcgui.Dialog()
|
||||||
|
|
||||||
|
def _cleanPath(self,root,path):
|
||||||
|
return path[len(root)-1:]
|
||||||
|
|
||||||
|
def _validatePath(self,root,path):
|
||||||
|
return path.startswith(root)
|
||||||
|
|
||||||
|
def createSet(self):
|
||||||
|
backupSet = None
|
||||||
|
|
||||||
|
name = self.dialog.input(utils.getString(30110),defaultt='Backup Set')
|
||||||
|
|
||||||
|
if(name != None):
|
||||||
|
|
||||||
|
#give a choice to start in home or enter a root path
|
||||||
|
enterHome = self.dialog.yesno(utils.getString(30111),line1=utils.getString(30112) + " - " + utils.getString(30114),line2=utils.getString(30113) + " - " + utils.getString(30115),nolabel=utils.getString(30112),yeslabel=utils.getString(30113))
|
||||||
|
|
||||||
|
rootFolder = 'special://home'
|
||||||
|
if(enterHome):
|
||||||
|
rootFolder = self.dialog.input(utils.getString(30116),defaultt=rootFolder)
|
||||||
|
|
||||||
|
#direcotry has to end in slash
|
||||||
|
if(rootFolder[:-1] != '/'):
|
||||||
|
rootFolder = rootFolder + '/'
|
||||||
|
|
||||||
|
#check that this path even exists
|
||||||
|
if(not xbmcvfs.exists(xbmc.translatePath(rootFolder))):
|
||||||
|
self.dialog.ok(utils.getString(30117),utils.getString(30118),rootFolder)
|
||||||
            return None
        else:
            #select path to start set
            rootFolder = self.dialog.browse(type=0,heading=utils.getString(30119),shares='files',defaultt=rootFolder)

        backupSet = {'name':name,'root':rootFolder}

        return backupSet

    def editSet(self,name,backupSet):
        optionSelected = ''
        rootPath = backupSet['root']
        utils.log(rootPath)

        while(optionSelected != -1):
            options = [xbmcgui.ListItem(utils.getString(30120),"Exclude a specific folder from this backup set"),xbmcgui.ListItem(utils.getString(30135),"Include a specific folder to this backup set"),xbmcgui.ListItem(rootPath,utils.getString(30121))]

            for aDir in backupSet['dirs']:
                if(aDir['type'] == 'exclude'):
                    options.append(xbmcgui.ListItem(self._cleanPath(rootPath,aDir['path']),"%s: %s" % ("Type",utils.getString(30129))))
                elif(aDir['type'] == 'include'):
                    options.append(xbmcgui.ListItem(self._cleanPath(rootPath,aDir['path']),"%s: %s | %s: %s" % ("Type",utils.getString(30134),"Include Sub Folders",str(aDir['recurse']))))

            optionSelected = self.dialog.select(utils.getString(30122) + ' ' + name,options,useDetails=True)

            if(optionSelected == 0 or optionSelected == 1):
                #add a folder, will equal root if cancel is hit
                addFolder = self.dialog.browse(type=0,heading=utils.getString(30120),shares='files',defaultt=backupSet['root'])

                if(addFolder.startswith(rootPath)):
                    if(not any(addFolder == aDir['path'] for aDir in backupSet['dirs'])):
                        #cannot add root as an exclusion
                        if(optionSelected == 0 and addFolder != backupSet['root']):
                            backupSet['dirs'].append({"path":addFolder,"type":"exclude"})
                        elif(optionSelected == 1):
                            #can add root as inclusion
                            backupSet['dirs'].append({"path":addFolder,"type":"include","recurse":True})
                    else:
                        #this path is already part of another include/exclude rule
                        self.dialog.ok(utils.getString(30117),utils.getString(30137),addFolder)
                else:
                    #folder must be under root folder
                    self.dialog.ok(utils.getString(30117), utils.getString(30136),rootPath)
            elif(optionSelected == 2):
                self.dialog.ok(utils.getString(30121),utils.getString(30130),backupSet['root'])
            elif(optionSelected > 2):
                cOptions = ['Delete']

                if(backupSet['dirs'][optionSelected - 3]['type'] == 'include'):
                    cOptions.append('Toggle Sub Folders')

                contextOption = self.dialog.contextmenu(cOptions)

                if(contextOption == 0):
                    if(self.dialog.yesno(heading=utils.getString(30123),line1=utils.getString(30128))):
                        #remove folder
                        del backupSet['dirs'][optionSelected - 3]
                elif(contextOption == 1 and backupSet['dirs'][optionSelected - 3]['type'] == 'include'):
                    #toggle if this folder should be recursive
                    backupSet['dirs'][optionSelected - 3]['recurse'] = not backupSet['dirs'][optionSelected - 3]['recurse']

        return backupSet

    def showMainScreen(self):
        exitCondition = ""
        customPaths = BackupSetManager()

        #show this every time
        self.dialog.ok(utils.getString(30036),utils.getString(30037))

        while(exitCondition != -1):
            #load the custom paths
            options = [xbmcgui.ListItem(utils.getString(30126),'',utils.addon_dir() + '/resources/images/plus-icon.png')]

            for index in range(0,len(customPaths.getSets())):
                aSet = customPaths.getSet(index)
                options.append(xbmcgui.ListItem(aSet['name'],utils.getString(30121) + ': ' + aSet['set']['root'],utils.addon_dir() + '/resources/images/folder-icon.png'))

            #show the gui
            exitCondition = self.dialog.select(utils.getString(30125),options,useDetails=True)

            if(exitCondition >= 0):
                if(exitCondition == 0):
                    newSet = self.createSet()

                    #check that the name is unique
                    if(customPaths.validateSetName(newSet['name'])):
                        customPaths.addSet(newSet)
                    else:
                        self.dialog.ok(utils.getString(30117), utils.getString(30138),newSet['name'])
                else:
                    #bring up a context menu
                    menuOption = self.dialog.contextmenu([utils.getString(30122),utils.getString(30123)])

                    if(menuOption == 0):
                        #get the set
                        aSet = customPaths.getSet(exitCondition -1)

                        #edit the set
                        updatedSet = self.editSet(aSet['name'],aSet['set'])

                        #save it
                        customPaths.updateSet(aSet['name'],updatedSet)

                    elif(menuOption == 1):
                        if(self.dialog.yesno(heading=utils.getString(30127),line1=utils.getString(30128))):
                            #delete this path - subtract one because of "add" item
                            customPaths.deleteSet(exitCondition -1)

    def copySimpleConfig(self):
        #disclaimer in case the user hit this on accident
        shouldContinue = self.dialog.yesno(utils.getString(30139),utils.getString(30140),utils.getString(30141))

        if(shouldContinue):
            source = xbmc.translatePath(utils.addon_dir() + "/resources/data/default_files.json")
            dest = xbmc.translatePath(utils.data_dir() + "/custom_paths.json")

            xbmcvfs.copy(source,dest)

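The dict returned by editSet above ({'name': ..., 'root': ..., 'dirs': [{'path': ..., 'type': 'include'|'exclude', 'recurse': ...}, ...]}) is what the rest of the addon walks when it builds its file list. As a hedged illustration only (the helper name and example paths below are invented, not part of this diff), this is roughly how such a rule list can be evaluated for a single candidate path:

#Hypothetical sketch: evaluate one candidate path against a backup set's rules.
#Exclusions win over inclusions; a recursive include covers the whole subtree.
def path_is_selected(backupSet, candidate):
    selected = False
    for rule in backupSet['dirs']:
        if(rule['type'] == 'include'):
            if(rule.get('recurse') and candidate.startswith(rule['path'])):
                selected = True
    for rule in backupSet['dirs']:
        if(rule['type'] == 'exclude' and candidate.startswith(rule['path'])):
            selected = False
    return selected

#example with one recursive include and one exclude nested under it
exampleSet = {'name':'skin','root':'special://home/addons/',
              'dirs':[{'path':'special://home/addons/skin.estuary/','type':'include','recurse':True},
                      {'path':'special://home/addons/skin.estuary/media/','type':'exclude'}]}
assert path_is_selected(exampleSet,'special://home/addons/skin.estuary/xml/Home.xml')
assert not path_is_selected(exampleSet,'special://home/addons/skin.estuary/media/bg.jpg')
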
@@ -3,9 +3,18 @@ import xbmcgui
 import xbmcvfs
 import resources.lib.tinyurl as tinyurl
 import resources.lib.utils as utils
-import dropbox
-from resources.lib.pydrive.auth import GoogleAuth
-from resources.lib.pydrive.drive import GoogleDrive
+
+#don't die on import error yet, these might not even get used
+try:
+    import dropbox
+except ImportError:
+    pass
+
+try:
+    from resources.lib.pydrive.auth import GoogleAuth
+    from resources.lib.pydrive.drive import GoogleDrive
+except ImportError:
+    pass
 
 class DropboxAuthorizer:
     APP_KEY = ""
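The hunk above only makes the imports tolerant of a missing library; any later use of dropbox or GoogleAuth still needs its own guard. A minimal, hypothetical sketch of that pattern (the flag and helper names are illustrative, not taken from this repository):

#Sketch only: remember whether the optional import above succeeded
try:
    import dropbox
    HAS_DROPBOX = True
except ImportError:
    HAS_DROPBOX = False

def getDropboxClient(token):
    #illustrative helper - fail loudly instead of hitting NameError later
    if(not HAS_DROPBOX):
        raise RuntimeError("dropbox module could not be imported")
    return dropbox.Dropbox(token)
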
@@ -1,4 +0,0 @@
from __future__ import absolute_import

from .dropbox import __version__, Dropbox, DropboxTeam, create_session # noqa: F401
from .oauth import DropboxOAuth2Flow, DropboxOAuth2FlowNoRedirect # noqa: F401
@@ -1,310 +0,0 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
# Auto-generated by Stone, do not modify.
|
|
||||||
# flake8: noqa
|
|
||||||
# pylint: skip-file
|
|
||||||
try:
|
|
||||||
from . import stone_validators as bv
|
|
||||||
from . import stone_base as bb
|
|
||||||
except (SystemError, ValueError):
|
|
||||||
# Catch errors raised when importing a relative module when not in a package.
|
|
||||||
# This makes testing this file directly (outside of a package) easier.
|
|
||||||
import stone_validators as bv
|
|
||||||
import stone_base as bb
|
|
||||||
|
|
||||||
class LaunchResultBase(bb.Union):
|
|
||||||
"""
|
|
||||||
Result returned by methods that launch an asynchronous job. A method who may
|
|
||||||
either launch an asynchronous job, or complete the request synchronously,
|
|
||||||
can use this union by extending it, and adding a 'complete' field with the
|
|
||||||
type of the synchronous response. See :class:`LaunchEmptyResult` for an
|
|
||||||
example.
|
|
||||||
|
|
||||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
|
||||||
return true. To get the associated value of a tag (if one exists), use the
|
|
||||||
corresponding ``get_*`` method.
|
|
||||||
|
|
||||||
:ivar str async_job_id: This response indicates that the processing is
|
|
||||||
asynchronous. The string is an id that can be used to obtain the status
|
|
||||||
of the asynchronous job.
|
|
||||||
"""
|
|
||||||
|
|
||||||
_catch_all = None
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def async_job_id(cls, val):
|
|
||||||
"""
|
|
||||||
Create an instance of this class set to the ``async_job_id`` tag with
|
|
||||||
value ``val``.
|
|
||||||
|
|
||||||
:param str val:
|
|
||||||
:rtype: LaunchResultBase
|
|
||||||
"""
|
|
||||||
return cls('async_job_id', val)
|
|
||||||
|
|
||||||
def is_async_job_id(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``async_job_id``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'async_job_id'
|
|
||||||
|
|
||||||
def get_async_job_id(self):
|
|
||||||
"""
|
|
||||||
This response indicates that the processing is asynchronous. The string
|
|
||||||
is an id that can be used to obtain the status of the asynchronous job.
|
|
||||||
|
|
||||||
Only call this if :meth:`is_async_job_id` is true.
|
|
||||||
|
|
||||||
:rtype: str
|
|
||||||
"""
|
|
||||||
if not self.is_async_job_id():
|
|
||||||
raise AttributeError("tag 'async_job_id' not set")
|
|
||||||
return self._value
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return 'LaunchResultBase(%r, %r)' % (self._tag, self._value)
|
|
||||||
|
|
||||||
LaunchResultBase_validator = bv.Union(LaunchResultBase)
|
|
||||||
|
|
||||||
class LaunchEmptyResult(LaunchResultBase):
|
|
||||||
"""
|
|
||||||
Result returned by methods that may either launch an asynchronous job or
|
|
||||||
complete synchronously. Upon synchronous completion of the job, no
|
|
||||||
additional information is returned.
|
|
||||||
|
|
||||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
|
||||||
return true. To get the associated value of a tag (if one exists), use the
|
|
||||||
corresponding ``get_*`` method.
|
|
||||||
|
|
||||||
:ivar complete: The job finished synchronously and successfully.
|
|
||||||
"""
|
|
||||||
|
|
||||||
# Attribute is overwritten below the class definition
|
|
||||||
complete = None
|
|
||||||
|
|
||||||
def is_complete(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``complete``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'complete'
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return 'LaunchEmptyResult(%r, %r)' % (self._tag, self._value)
|
|
||||||
|
|
||||||
LaunchEmptyResult_validator = bv.Union(LaunchEmptyResult)
|
|
||||||
|
|
||||||
class PollArg(object):
|
|
||||||
"""
|
|
||||||
Arguments for methods that poll the status of an asynchronous job.
|
|
||||||
|
|
||||||
:ivar async_job_id: Id of the asynchronous job. This is the value of a
|
|
||||||
response returned from the method that launched the job.
|
|
||||||
"""
|
|
||||||
|
|
||||||
__slots__ = [
|
|
||||||
'_async_job_id_value',
|
|
||||||
'_async_job_id_present',
|
|
||||||
]
|
|
||||||
|
|
||||||
_has_required_fields = True
|
|
||||||
|
|
||||||
def __init__(self,
|
|
||||||
async_job_id=None):
|
|
||||||
self._async_job_id_value = None
|
|
||||||
self._async_job_id_present = False
|
|
||||||
if async_job_id is not None:
|
|
||||||
self.async_job_id = async_job_id
|
|
||||||
|
|
||||||
@property
|
|
||||||
def async_job_id(self):
|
|
||||||
"""
|
|
||||||
Id of the asynchronous job. This is the value of a response returned
|
|
||||||
from the method that launched the job.
|
|
||||||
|
|
||||||
:rtype: str
|
|
||||||
"""
|
|
||||||
if self._async_job_id_present:
|
|
||||||
return self._async_job_id_value
|
|
||||||
else:
|
|
||||||
raise AttributeError("missing required field 'async_job_id'")
|
|
||||||
|
|
||||||
@async_job_id.setter
|
|
||||||
def async_job_id(self, val):
|
|
||||||
val = self._async_job_id_validator.validate(val)
|
|
||||||
self._async_job_id_value = val
|
|
||||||
self._async_job_id_present = True
|
|
||||||
|
|
||||||
@async_job_id.deleter
|
|
||||||
def async_job_id(self):
|
|
||||||
self._async_job_id_value = None
|
|
||||||
self._async_job_id_present = False
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return 'PollArg(async_job_id={!r})'.format(
|
|
||||||
self._async_job_id_value,
|
|
||||||
)
|
|
||||||
|
|
||||||
PollArg_validator = bv.Struct(PollArg)
|
|
||||||
|
|
||||||
class PollResultBase(bb.Union):
|
|
||||||
"""
|
|
||||||
Result returned by methods that poll for the status of an asynchronous job.
|
|
||||||
Unions that extend this union should add a 'complete' field with a type of
|
|
||||||
the information returned upon job completion. See :class:`PollEmptyResult`
|
|
||||||
for an example.
|
|
||||||
|
|
||||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
|
||||||
return true. To get the associated value of a tag (if one exists), use the
|
|
||||||
corresponding ``get_*`` method.
|
|
||||||
|
|
||||||
:ivar in_progress: The asynchronous job is still in progress.
|
|
||||||
"""
|
|
||||||
|
|
||||||
_catch_all = None
|
|
||||||
# Attribute is overwritten below the class definition
|
|
||||||
in_progress = None
|
|
||||||
|
|
||||||
def is_in_progress(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``in_progress``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'in_progress'
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return 'PollResultBase(%r, %r)' % (self._tag, self._value)
|
|
||||||
|
|
||||||
PollResultBase_validator = bv.Union(PollResultBase)
|
|
||||||
|
|
||||||
class PollEmptyResult(PollResultBase):
|
|
||||||
"""
|
|
||||||
Result returned by methods that poll for the status of an asynchronous job.
|
|
||||||
Upon completion of the job, no additional information is returned.
|
|
||||||
|
|
||||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
|
||||||
return true. To get the associated value of a tag (if one exists), use the
|
|
||||||
corresponding ``get_*`` method.
|
|
||||||
|
|
||||||
:ivar complete: The asynchronous job has completed successfully.
|
|
||||||
"""
|
|
||||||
|
|
||||||
# Attribute is overwritten below the class definition
|
|
||||||
complete = None
|
|
||||||
|
|
||||||
def is_complete(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``complete``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'complete'
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return 'PollEmptyResult(%r, %r)' % (self._tag, self._value)
|
|
||||||
|
|
||||||
PollEmptyResult_validator = bv.Union(PollEmptyResult)
|
|
||||||
|
|
||||||
class PollError(bb.Union):
|
|
||||||
"""
|
|
||||||
Error returned by methods for polling the status of asynchronous job.
|
|
||||||
|
|
||||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
|
||||||
return true. To get the associated value of a tag (if one exists), use the
|
|
||||||
corresponding ``get_*`` method.
|
|
||||||
|
|
||||||
:ivar invalid_async_job_id: The job ID is invalid.
|
|
||||||
:ivar internal_error: Something went wrong with the job on Dropbox's end.
|
|
||||||
You'll need to verify that the action you were taking succeeded, and if
|
|
||||||
not, try again. This should happen very rarely.
|
|
||||||
"""
|
|
||||||
|
|
||||||
_catch_all = 'other'
|
|
||||||
# Attribute is overwritten below the class definition
|
|
||||||
invalid_async_job_id = None
|
|
||||||
# Attribute is overwritten below the class definition
|
|
||||||
internal_error = None
|
|
||||||
# Attribute is overwritten below the class definition
|
|
||||||
other = None
|
|
||||||
|
|
||||||
def is_invalid_async_job_id(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``invalid_async_job_id``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'invalid_async_job_id'
|
|
||||||
|
|
||||||
def is_internal_error(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``internal_error``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'internal_error'
|
|
||||||
|
|
||||||
def is_other(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``other``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'other'
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return 'PollError(%r, %r)' % (self._tag, self._value)
|
|
||||||
|
|
||||||
PollError_validator = bv.Union(PollError)
|
|
||||||
|
|
||||||
AsyncJobId_validator = bv.String(min_length=1)
|
|
||||||
LaunchResultBase._async_job_id_validator = AsyncJobId_validator
|
|
||||||
LaunchResultBase._tagmap = {
|
|
||||||
'async_job_id': LaunchResultBase._async_job_id_validator,
|
|
||||||
}
|
|
||||||
|
|
||||||
LaunchEmptyResult._complete_validator = bv.Void()
|
|
||||||
LaunchEmptyResult._tagmap = {
|
|
||||||
'complete': LaunchEmptyResult._complete_validator,
|
|
||||||
}
|
|
||||||
LaunchEmptyResult._tagmap.update(LaunchResultBase._tagmap)
|
|
||||||
|
|
||||||
LaunchEmptyResult.complete = LaunchEmptyResult('complete')
|
|
||||||
|
|
||||||
PollArg._async_job_id_validator = AsyncJobId_validator
|
|
||||||
PollArg._all_field_names_ = set(['async_job_id'])
|
|
||||||
PollArg._all_fields_ = [('async_job_id', PollArg._async_job_id_validator)]
|
|
||||||
|
|
||||||
PollResultBase._in_progress_validator = bv.Void()
|
|
||||||
PollResultBase._tagmap = {
|
|
||||||
'in_progress': PollResultBase._in_progress_validator,
|
|
||||||
}
|
|
||||||
|
|
||||||
PollResultBase.in_progress = PollResultBase('in_progress')
|
|
||||||
|
|
||||||
PollEmptyResult._complete_validator = bv.Void()
|
|
||||||
PollEmptyResult._tagmap = {
|
|
||||||
'complete': PollEmptyResult._complete_validator,
|
|
||||||
}
|
|
||||||
PollEmptyResult._tagmap.update(PollResultBase._tagmap)
|
|
||||||
|
|
||||||
PollEmptyResult.complete = PollEmptyResult('complete')
|
|
||||||
|
|
||||||
PollError._invalid_async_job_id_validator = bv.Void()
|
|
||||||
PollError._internal_error_validator = bv.Void()
|
|
||||||
PollError._other_validator = bv.Void()
|
|
||||||
PollError._tagmap = {
|
|
||||||
'invalid_async_job_id': PollError._invalid_async_job_id_validator,
|
|
||||||
'internal_error': PollError._internal_error_validator,
|
|
||||||
'other': PollError._other_validator,
|
|
||||||
}
|
|
||||||
|
|
||||||
PollError.invalid_async_job_id = PollError('invalid_async_job_id')
|
|
||||||
PollError.internal_error = PollError('internal_error')
|
|
||||||
PollError.other = PollError('other')
|
|
||||||
|
|
||||||
ROUTES = {
|
|
||||||
}
|
|
||||||
|
|
||||||
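The file removed above (the SDK's async_ module) models Dropbox's generic launch/poll pattern: a launch route may answer with an async_job_id, which the caller feeds back through a check route until it stops reporting in_progress. A hedged sketch of that loop against the public SDK, using the standard batch-delete routes purely as an example (the token and path are placeholders):

import time
import dropbox

dbx = dropbox.Dropbox("ACCESS_TOKEN")  #placeholder token

#launch may complete synchronously or hand back an async job id
launch = dbx.files_delete_batch([dropbox.files.DeleteArg("/old-backup")])
if launch.is_async_job_id():
    job_id = launch.get_async_job_id()
    status = dbx.files_delete_batch_check(job_id)
    while status.is_in_progress():
        time.sleep(1)  #poll until the job leaves the in_progress state
        status = dbx.files_delete_batch_check(job_id)
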
@@ -1,723 +0,0 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
# Auto-generated by Stone, do not modify.
|
|
||||||
# flake8: noqa
|
|
||||||
# pylint: skip-file
|
|
||||||
try:
|
|
||||||
from . import stone_validators as bv
|
|
||||||
from . import stone_base as bb
|
|
||||||
except (SystemError, ValueError):
|
|
||||||
# Catch errors raised when importing a relative module when not in a package.
|
|
||||||
# This makes testing this file directly (outside of a package) easier.
|
|
||||||
import stone_validators as bv
|
|
||||||
import stone_base as bb
|
|
||||||
|
|
||||||
class AccessError(bb.Union):
|
|
||||||
"""
|
|
||||||
Error occurred because the account doesn't have permission to access the
|
|
||||||
resource.
|
|
||||||
|
|
||||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
|
||||||
return true. To get the associated value of a tag (if one exists), use the
|
|
||||||
corresponding ``get_*`` method.
|
|
||||||
|
|
||||||
:ivar InvalidAccountTypeError invalid_account_type: Current account type
|
|
||||||
cannot access the resource.
|
|
||||||
:ivar PaperAccessError paper_access_denied: Current account cannot access
|
|
||||||
Paper.
|
|
||||||
"""
|
|
||||||
|
|
||||||
_catch_all = 'other'
|
|
||||||
# Attribute is overwritten below the class definition
|
|
||||||
other = None
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def invalid_account_type(cls, val):
|
|
||||||
"""
|
|
||||||
Create an instance of this class set to the ``invalid_account_type`` tag
|
|
||||||
with value ``val``.
|
|
||||||
|
|
||||||
:param InvalidAccountTypeError val:
|
|
||||||
:rtype: AccessError
|
|
||||||
"""
|
|
||||||
return cls('invalid_account_type', val)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def paper_access_denied(cls, val):
|
|
||||||
"""
|
|
||||||
Create an instance of this class set to the ``paper_access_denied`` tag
|
|
||||||
with value ``val``.
|
|
||||||
|
|
||||||
:param PaperAccessError val:
|
|
||||||
:rtype: AccessError
|
|
||||||
"""
|
|
||||||
return cls('paper_access_denied', val)
|
|
||||||
|
|
||||||
def is_invalid_account_type(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``invalid_account_type``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'invalid_account_type'
|
|
||||||
|
|
||||||
def is_paper_access_denied(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``paper_access_denied``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'paper_access_denied'
|
|
||||||
|
|
||||||
def is_other(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``other``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'other'
|
|
||||||
|
|
||||||
def get_invalid_account_type(self):
|
|
||||||
"""
|
|
||||||
Current account type cannot access the resource.
|
|
||||||
|
|
||||||
Only call this if :meth:`is_invalid_account_type` is true.
|
|
||||||
|
|
||||||
:rtype: InvalidAccountTypeError
|
|
||||||
"""
|
|
||||||
if not self.is_invalid_account_type():
|
|
||||||
raise AttributeError("tag 'invalid_account_type' not set")
|
|
||||||
return self._value
|
|
||||||
|
|
||||||
def get_paper_access_denied(self):
|
|
||||||
"""
|
|
||||||
Current account cannot access Paper.
|
|
||||||
|
|
||||||
Only call this if :meth:`is_paper_access_denied` is true.
|
|
||||||
|
|
||||||
:rtype: PaperAccessError
|
|
||||||
"""
|
|
||||||
if not self.is_paper_access_denied():
|
|
||||||
raise AttributeError("tag 'paper_access_denied' not set")
|
|
||||||
return self._value
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return 'AccessError(%r, %r)' % (self._tag, self._value)
|
|
||||||
|
|
||||||
AccessError_validator = bv.Union(AccessError)
|
|
||||||
|
|
||||||
class AuthError(bb.Union):
|
|
||||||
"""
|
|
||||||
Errors occurred during authentication.
|
|
||||||
|
|
||||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
|
||||||
return true. To get the associated value of a tag (if one exists), use the
|
|
||||||
corresponding ``get_*`` method.
|
|
||||||
|
|
||||||
:ivar invalid_access_token: The access token is invalid.
|
|
||||||
:ivar invalid_select_user: The user specified in 'Dropbox-API-Select-User'
|
|
||||||
is no longer on the team.
|
|
||||||
:ivar invalid_select_admin: The user specified in 'Dropbox-API-Select-Admin'
|
|
||||||
is not a Dropbox Business team admin.
|
|
||||||
:ivar user_suspended: The user has been suspended.
|
|
||||||
"""
|
|
||||||
|
|
||||||
_catch_all = 'other'
|
|
||||||
# Attribute is overwritten below the class definition
|
|
||||||
invalid_access_token = None
|
|
||||||
# Attribute is overwritten below the class definition
|
|
||||||
invalid_select_user = None
|
|
||||||
# Attribute is overwritten below the class definition
|
|
||||||
invalid_select_admin = None
|
|
||||||
# Attribute is overwritten below the class definition
|
|
||||||
user_suspended = None
|
|
||||||
# Attribute is overwritten below the class definition
|
|
||||||
other = None
|
|
||||||
|
|
||||||
def is_invalid_access_token(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``invalid_access_token``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'invalid_access_token'
|
|
||||||
|
|
||||||
def is_invalid_select_user(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``invalid_select_user``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'invalid_select_user'
|
|
||||||
|
|
||||||
def is_invalid_select_admin(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``invalid_select_admin``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'invalid_select_admin'
|
|
||||||
|
|
||||||
def is_user_suspended(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``user_suspended``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'user_suspended'
|
|
||||||
|
|
||||||
def is_other(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``other``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'other'
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return 'AuthError(%r, %r)' % (self._tag, self._value)
|
|
||||||
|
|
||||||
AuthError_validator = bv.Union(AuthError)
|
|
||||||
|
|
||||||
class InvalidAccountTypeError(bb.Union):
|
|
||||||
"""
|
|
||||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
|
||||||
return true. To get the associated value of a tag (if one exists), use the
|
|
||||||
corresponding ``get_*`` method.
|
|
||||||
|
|
||||||
:ivar endpoint: Current account type doesn't have permission to access this
|
|
||||||
route endpoint.
|
|
||||||
:ivar feature: Current account type doesn't have permission to access this
|
|
||||||
feature.
|
|
||||||
"""
|
|
||||||
|
|
||||||
_catch_all = 'other'
|
|
||||||
# Attribute is overwritten below the class definition
|
|
||||||
endpoint = None
|
|
||||||
# Attribute is overwritten below the class definition
|
|
||||||
feature = None
|
|
||||||
# Attribute is overwritten below the class definition
|
|
||||||
other = None
|
|
||||||
|
|
||||||
def is_endpoint(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``endpoint``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'endpoint'
|
|
||||||
|
|
||||||
def is_feature(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``feature``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'feature'
|
|
||||||
|
|
||||||
def is_other(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``other``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'other'
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return 'InvalidAccountTypeError(%r, %r)' % (self._tag, self._value)
|
|
||||||
|
|
||||||
InvalidAccountTypeError_validator = bv.Union(InvalidAccountTypeError)
|
|
||||||
|
|
||||||
class PaperAccessError(bb.Union):
|
|
||||||
"""
|
|
||||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
|
||||||
return true. To get the associated value of a tag (if one exists), use the
|
|
||||||
corresponding ``get_*`` method.
|
|
||||||
|
|
||||||
:ivar paper_disabled: Paper is disabled.
|
|
||||||
:ivar not_paper_user: The provided user has not used Paper yet.
|
|
||||||
"""
|
|
||||||
|
|
||||||
_catch_all = 'other'
|
|
||||||
# Attribute is overwritten below the class definition
|
|
||||||
paper_disabled = None
|
|
||||||
# Attribute is overwritten below the class definition
|
|
||||||
not_paper_user = None
|
|
||||||
# Attribute is overwritten below the class definition
|
|
||||||
other = None
|
|
||||||
|
|
||||||
def is_paper_disabled(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``paper_disabled``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'paper_disabled'
|
|
||||||
|
|
||||||
def is_not_paper_user(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``not_paper_user``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'not_paper_user'
|
|
||||||
|
|
||||||
def is_other(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``other``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'other'
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return 'PaperAccessError(%r, %r)' % (self._tag, self._value)
|
|
||||||
|
|
||||||
PaperAccessError_validator = bv.Union(PaperAccessError)
|
|
||||||
|
|
||||||
class RateLimitError(object):
|
|
||||||
"""
|
|
||||||
Error occurred because the app is being rate limited.
|
|
||||||
|
|
||||||
:ivar reason: The reason why the app is being rate limited.
|
|
||||||
:ivar retry_after: The number of seconds that the app should wait before
|
|
||||||
making another request.
|
|
||||||
"""
|
|
||||||
|
|
||||||
__slots__ = [
|
|
||||||
'_reason_value',
|
|
||||||
'_reason_present',
|
|
||||||
'_retry_after_value',
|
|
||||||
'_retry_after_present',
|
|
||||||
]
|
|
||||||
|
|
||||||
_has_required_fields = True
|
|
||||||
|
|
||||||
def __init__(self,
|
|
||||||
reason=None,
|
|
||||||
retry_after=None):
|
|
||||||
self._reason_value = None
|
|
||||||
self._reason_present = False
|
|
||||||
self._retry_after_value = None
|
|
||||||
self._retry_after_present = False
|
|
||||||
if reason is not None:
|
|
||||||
self.reason = reason
|
|
||||||
if retry_after is not None:
|
|
||||||
self.retry_after = retry_after
|
|
||||||
|
|
||||||
@property
|
|
||||||
def reason(self):
|
|
||||||
"""
|
|
||||||
The reason why the app is being rate limited.
|
|
||||||
|
|
||||||
:rtype: RateLimitReason
|
|
||||||
"""
|
|
||||||
if self._reason_present:
|
|
||||||
return self._reason_value
|
|
||||||
else:
|
|
||||||
raise AttributeError("missing required field 'reason'")
|
|
||||||
|
|
||||||
@reason.setter
|
|
||||||
def reason(self, val):
|
|
||||||
self._reason_validator.validate_type_only(val)
|
|
||||||
self._reason_value = val
|
|
||||||
self._reason_present = True
|
|
||||||
|
|
||||||
@reason.deleter
|
|
||||||
def reason(self):
|
|
||||||
self._reason_value = None
|
|
||||||
self._reason_present = False
|
|
||||||
|
|
||||||
@property
|
|
||||||
def retry_after(self):
|
|
||||||
"""
|
|
||||||
The number of seconds that the app should wait before making another
|
|
||||||
request.
|
|
||||||
|
|
||||||
:rtype: long
|
|
||||||
"""
|
|
||||||
if self._retry_after_present:
|
|
||||||
return self._retry_after_value
|
|
||||||
else:
|
|
||||||
return 1
|
|
||||||
|
|
||||||
@retry_after.setter
|
|
||||||
def retry_after(self, val):
|
|
||||||
val = self._retry_after_validator.validate(val)
|
|
||||||
self._retry_after_value = val
|
|
||||||
self._retry_after_present = True
|
|
||||||
|
|
||||||
@retry_after.deleter
|
|
||||||
def retry_after(self):
|
|
||||||
self._retry_after_value = None
|
|
||||||
self._retry_after_present = False
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return 'RateLimitError(reason={!r}, retry_after={!r})'.format(
|
|
||||||
self._reason_value,
|
|
||||||
self._retry_after_value,
|
|
||||||
)
|
|
||||||
|
|
||||||
RateLimitError_validator = bv.Struct(RateLimitError)
|
|
||||||
|
|
||||||
class RateLimitReason(bb.Union):
|
|
||||||
"""
|
|
||||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
|
||||||
return true. To get the associated value of a tag (if one exists), use the
|
|
||||||
corresponding ``get_*`` method.
|
|
||||||
|
|
||||||
:ivar too_many_requests: You are making too many requests in the past few
|
|
||||||
minutes.
|
|
||||||
:ivar too_many_write_operations: There are currently too many write
|
|
||||||
operations happening in the user's Dropbox.
|
|
||||||
"""
|
|
||||||
|
|
||||||
_catch_all = 'other'
|
|
||||||
# Attribute is overwritten below the class definition
|
|
||||||
too_many_requests = None
|
|
||||||
# Attribute is overwritten below the class definition
|
|
||||||
too_many_write_operations = None
|
|
||||||
# Attribute is overwritten below the class definition
|
|
||||||
other = None
|
|
||||||
|
|
||||||
def is_too_many_requests(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``too_many_requests``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'too_many_requests'
|
|
||||||
|
|
||||||
def is_too_many_write_operations(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``too_many_write_operations``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'too_many_write_operations'
|
|
||||||
|
|
||||||
def is_other(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``other``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'other'
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return 'RateLimitReason(%r, %r)' % (self._tag, self._value)
|
|
||||||
|
|
||||||
RateLimitReason_validator = bv.Union(RateLimitReason)
|
|
||||||
|
|
||||||
class TokenFromOAuth1Arg(object):
|
|
||||||
"""
|
|
||||||
:ivar oauth1_token: The supplied OAuth 1.0 access token.
|
|
||||||
:ivar oauth1_token_secret: The token secret associated with the supplied
|
|
||||||
access token.
|
|
||||||
"""
|
|
||||||
|
|
||||||
__slots__ = [
|
|
||||||
'_oauth1_token_value',
|
|
||||||
'_oauth1_token_present',
|
|
||||||
'_oauth1_token_secret_value',
|
|
||||||
'_oauth1_token_secret_present',
|
|
||||||
]
|
|
||||||
|
|
||||||
_has_required_fields = True
|
|
||||||
|
|
||||||
def __init__(self,
|
|
||||||
oauth1_token=None,
|
|
||||||
oauth1_token_secret=None):
|
|
||||||
self._oauth1_token_value = None
|
|
||||||
self._oauth1_token_present = False
|
|
||||||
self._oauth1_token_secret_value = None
|
|
||||||
self._oauth1_token_secret_present = False
|
|
||||||
if oauth1_token is not None:
|
|
||||||
self.oauth1_token = oauth1_token
|
|
||||||
if oauth1_token_secret is not None:
|
|
||||||
self.oauth1_token_secret = oauth1_token_secret
|
|
||||||
|
|
||||||
@property
|
|
||||||
def oauth1_token(self):
|
|
||||||
"""
|
|
||||||
The supplied OAuth 1.0 access token.
|
|
||||||
|
|
||||||
:rtype: str
|
|
||||||
"""
|
|
||||||
if self._oauth1_token_present:
|
|
||||||
return self._oauth1_token_value
|
|
||||||
else:
|
|
||||||
raise AttributeError("missing required field 'oauth1_token'")
|
|
||||||
|
|
||||||
@oauth1_token.setter
|
|
||||||
def oauth1_token(self, val):
|
|
||||||
val = self._oauth1_token_validator.validate(val)
|
|
||||||
self._oauth1_token_value = val
|
|
||||||
self._oauth1_token_present = True
|
|
||||||
|
|
||||||
@oauth1_token.deleter
|
|
||||||
def oauth1_token(self):
|
|
||||||
self._oauth1_token_value = None
|
|
||||||
self._oauth1_token_present = False
|
|
||||||
|
|
||||||
@property
|
|
||||||
def oauth1_token_secret(self):
|
|
||||||
"""
|
|
||||||
The token secret associated with the supplied access token.
|
|
||||||
|
|
||||||
:rtype: str
|
|
||||||
"""
|
|
||||||
if self._oauth1_token_secret_present:
|
|
||||||
return self._oauth1_token_secret_value
|
|
||||||
else:
|
|
||||||
raise AttributeError("missing required field 'oauth1_token_secret'")
|
|
||||||
|
|
||||||
@oauth1_token_secret.setter
|
|
||||||
def oauth1_token_secret(self, val):
|
|
||||||
val = self._oauth1_token_secret_validator.validate(val)
|
|
||||||
self._oauth1_token_secret_value = val
|
|
||||||
self._oauth1_token_secret_present = True
|
|
||||||
|
|
||||||
@oauth1_token_secret.deleter
|
|
||||||
def oauth1_token_secret(self):
|
|
||||||
self._oauth1_token_secret_value = None
|
|
||||||
self._oauth1_token_secret_present = False
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return 'TokenFromOAuth1Arg(oauth1_token={!r}, oauth1_token_secret={!r})'.format(
|
|
||||||
self._oauth1_token_value,
|
|
||||||
self._oauth1_token_secret_value,
|
|
||||||
)
|
|
||||||
|
|
||||||
TokenFromOAuth1Arg_validator = bv.Struct(TokenFromOAuth1Arg)
|
|
||||||
|
|
||||||
class TokenFromOAuth1Error(bb.Union):
|
|
||||||
"""
|
|
||||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
|
||||||
return true. To get the associated value of a tag (if one exists), use the
|
|
||||||
corresponding ``get_*`` method.
|
|
||||||
|
|
||||||
:ivar invalid_oauth1_token_info: Part or all of the OAuth 1.0 access token
|
|
||||||
info is invalid.
|
|
||||||
:ivar app_id_mismatch: The authorized app does not match the app associated
|
|
||||||
with the supplied access token.
|
|
||||||
"""
|
|
||||||
|
|
||||||
_catch_all = 'other'
|
|
||||||
# Attribute is overwritten below the class definition
|
|
||||||
invalid_oauth1_token_info = None
|
|
||||||
# Attribute is overwritten below the class definition
|
|
||||||
app_id_mismatch = None
|
|
||||||
# Attribute is overwritten below the class definition
|
|
||||||
other = None
|
|
||||||
|
|
||||||
def is_invalid_oauth1_token_info(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``invalid_oauth1_token_info``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'invalid_oauth1_token_info'
|
|
||||||
|
|
||||||
def is_app_id_mismatch(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``app_id_mismatch``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'app_id_mismatch'
|
|
||||||
|
|
||||||
def is_other(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``other``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'other'
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return 'TokenFromOAuth1Error(%r, %r)' % (self._tag, self._value)
|
|
||||||
|
|
||||||
TokenFromOAuth1Error_validator = bv.Union(TokenFromOAuth1Error)
|
|
||||||
|
|
||||||
class TokenFromOAuth1Result(object):
|
|
||||||
"""
|
|
||||||
:ivar oauth2_token: The OAuth 2.0 token generated from the supplied OAuth
|
|
||||||
1.0 token.
|
|
||||||
"""
|
|
||||||
|
|
||||||
__slots__ = [
|
|
||||||
'_oauth2_token_value',
|
|
||||||
'_oauth2_token_present',
|
|
||||||
]
|
|
||||||
|
|
||||||
_has_required_fields = True
|
|
||||||
|
|
||||||
def __init__(self,
|
|
||||||
oauth2_token=None):
|
|
||||||
self._oauth2_token_value = None
|
|
||||||
self._oauth2_token_present = False
|
|
||||||
if oauth2_token is not None:
|
|
||||||
self.oauth2_token = oauth2_token
|
|
||||||
|
|
||||||
@property
|
|
||||||
def oauth2_token(self):
|
|
||||||
"""
|
|
||||||
The OAuth 2.0 token generated from the supplied OAuth 1.0 token.
|
|
||||||
|
|
||||||
:rtype: str
|
|
||||||
"""
|
|
||||||
if self._oauth2_token_present:
|
|
||||||
return self._oauth2_token_value
|
|
||||||
else:
|
|
||||||
raise AttributeError("missing required field 'oauth2_token'")
|
|
||||||
|
|
||||||
@oauth2_token.setter
|
|
||||||
def oauth2_token(self, val):
|
|
||||||
val = self._oauth2_token_validator.validate(val)
|
|
||||||
self._oauth2_token_value = val
|
|
||||||
self._oauth2_token_present = True
|
|
||||||
|
|
||||||
@oauth2_token.deleter
|
|
||||||
def oauth2_token(self):
|
|
||||||
self._oauth2_token_value = None
|
|
||||||
self._oauth2_token_present = False
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return 'TokenFromOAuth1Result(oauth2_token={!r})'.format(
|
|
||||||
self._oauth2_token_value,
|
|
||||||
)
|
|
||||||
|
|
||||||
TokenFromOAuth1Result_validator = bv.Struct(TokenFromOAuth1Result)
|
|
||||||
|
|
||||||
AccessError._invalid_account_type_validator = InvalidAccountTypeError_validator
|
|
||||||
AccessError._paper_access_denied_validator = PaperAccessError_validator
|
|
||||||
AccessError._other_validator = bv.Void()
|
|
||||||
AccessError._tagmap = {
|
|
||||||
'invalid_account_type': AccessError._invalid_account_type_validator,
|
|
||||||
'paper_access_denied': AccessError._paper_access_denied_validator,
|
|
||||||
'other': AccessError._other_validator,
|
|
||||||
}
|
|
||||||
|
|
||||||
AccessError.other = AccessError('other')
|
|
||||||
|
|
||||||
AuthError._invalid_access_token_validator = bv.Void()
|
|
||||||
AuthError._invalid_select_user_validator = bv.Void()
|
|
||||||
AuthError._invalid_select_admin_validator = bv.Void()
|
|
||||||
AuthError._user_suspended_validator = bv.Void()
|
|
||||||
AuthError._other_validator = bv.Void()
|
|
||||||
AuthError._tagmap = {
|
|
||||||
'invalid_access_token': AuthError._invalid_access_token_validator,
|
|
||||||
'invalid_select_user': AuthError._invalid_select_user_validator,
|
|
||||||
'invalid_select_admin': AuthError._invalid_select_admin_validator,
|
|
||||||
'user_suspended': AuthError._user_suspended_validator,
|
|
||||||
'other': AuthError._other_validator,
|
|
||||||
}
|
|
||||||
|
|
||||||
AuthError.invalid_access_token = AuthError('invalid_access_token')
|
|
||||||
AuthError.invalid_select_user = AuthError('invalid_select_user')
|
|
||||||
AuthError.invalid_select_admin = AuthError('invalid_select_admin')
|
|
||||||
AuthError.user_suspended = AuthError('user_suspended')
|
|
||||||
AuthError.other = AuthError('other')
|
|
||||||
|
|
||||||
InvalidAccountTypeError._endpoint_validator = bv.Void()
|
|
||||||
InvalidAccountTypeError._feature_validator = bv.Void()
|
|
||||||
InvalidAccountTypeError._other_validator = bv.Void()
|
|
||||||
InvalidAccountTypeError._tagmap = {
|
|
||||||
'endpoint': InvalidAccountTypeError._endpoint_validator,
|
|
||||||
'feature': InvalidAccountTypeError._feature_validator,
|
|
||||||
'other': InvalidAccountTypeError._other_validator,
|
|
||||||
}
|
|
||||||
|
|
||||||
InvalidAccountTypeError.endpoint = InvalidAccountTypeError('endpoint')
|
|
||||||
InvalidAccountTypeError.feature = InvalidAccountTypeError('feature')
|
|
||||||
InvalidAccountTypeError.other = InvalidAccountTypeError('other')
|
|
||||||
|
|
||||||
PaperAccessError._paper_disabled_validator = bv.Void()
|
|
||||||
PaperAccessError._not_paper_user_validator = bv.Void()
|
|
||||||
PaperAccessError._other_validator = bv.Void()
|
|
||||||
PaperAccessError._tagmap = {
|
|
||||||
'paper_disabled': PaperAccessError._paper_disabled_validator,
|
|
||||||
'not_paper_user': PaperAccessError._not_paper_user_validator,
|
|
||||||
'other': PaperAccessError._other_validator,
|
|
||||||
}
|
|
||||||
|
|
||||||
PaperAccessError.paper_disabled = PaperAccessError('paper_disabled')
|
|
||||||
PaperAccessError.not_paper_user = PaperAccessError('not_paper_user')
|
|
||||||
PaperAccessError.other = PaperAccessError('other')
|
|
||||||
|
|
||||||
RateLimitError._reason_validator = RateLimitReason_validator
|
|
||||||
RateLimitError._retry_after_validator = bv.UInt64()
|
|
||||||
RateLimitError._all_field_names_ = set([
|
|
||||||
'reason',
|
|
||||||
'retry_after',
|
|
||||||
])
|
|
||||||
RateLimitError._all_fields_ = [
|
|
||||||
('reason', RateLimitError._reason_validator),
|
|
||||||
('retry_after', RateLimitError._retry_after_validator),
|
|
||||||
]
|
|
||||||
|
|
||||||
RateLimitReason._too_many_requests_validator = bv.Void()
|
|
||||||
RateLimitReason._too_many_write_operations_validator = bv.Void()
|
|
||||||
RateLimitReason._other_validator = bv.Void()
|
|
||||||
RateLimitReason._tagmap = {
|
|
||||||
'too_many_requests': RateLimitReason._too_many_requests_validator,
|
|
||||||
'too_many_write_operations': RateLimitReason._too_many_write_operations_validator,
|
|
||||||
'other': RateLimitReason._other_validator,
|
|
||||||
}
|
|
||||||
|
|
||||||
RateLimitReason.too_many_requests = RateLimitReason('too_many_requests')
|
|
||||||
RateLimitReason.too_many_write_operations = RateLimitReason('too_many_write_operations')
|
|
||||||
RateLimitReason.other = RateLimitReason('other')
|
|
||||||
|
|
||||||
TokenFromOAuth1Arg._oauth1_token_validator = bv.String(min_length=1)
|
|
||||||
TokenFromOAuth1Arg._oauth1_token_secret_validator = bv.String(min_length=1)
|
|
||||||
TokenFromOAuth1Arg._all_field_names_ = set([
|
|
||||||
'oauth1_token',
|
|
||||||
'oauth1_token_secret',
|
|
||||||
])
|
|
||||||
TokenFromOAuth1Arg._all_fields_ = [
|
|
||||||
('oauth1_token', TokenFromOAuth1Arg._oauth1_token_validator),
|
|
||||||
('oauth1_token_secret', TokenFromOAuth1Arg._oauth1_token_secret_validator),
|
|
||||||
]
|
|
||||||
|
|
||||||
TokenFromOAuth1Error._invalid_oauth1_token_info_validator = bv.Void()
|
|
||||||
TokenFromOAuth1Error._app_id_mismatch_validator = bv.Void()
|
|
||||||
TokenFromOAuth1Error._other_validator = bv.Void()
|
|
||||||
TokenFromOAuth1Error._tagmap = {
|
|
||||||
'invalid_oauth1_token_info': TokenFromOAuth1Error._invalid_oauth1_token_info_validator,
|
|
||||||
'app_id_mismatch': TokenFromOAuth1Error._app_id_mismatch_validator,
|
|
||||||
'other': TokenFromOAuth1Error._other_validator,
|
|
||||||
}
|
|
||||||
|
|
||||||
TokenFromOAuth1Error.invalid_oauth1_token_info = TokenFromOAuth1Error('invalid_oauth1_token_info')
|
|
||||||
TokenFromOAuth1Error.app_id_mismatch = TokenFromOAuth1Error('app_id_mismatch')
|
|
||||||
TokenFromOAuth1Error.other = TokenFromOAuth1Error('other')
|
|
||||||
|
|
||||||
TokenFromOAuth1Result._oauth2_token_validator = bv.String(min_length=1)
|
|
||||||
TokenFromOAuth1Result._all_field_names_ = set(['oauth2_token'])
|
|
||||||
TokenFromOAuth1Result._all_fields_ = [('oauth2_token', TokenFromOAuth1Result._oauth2_token_validator)]
|
|
||||||
|
|
||||||
token_from_oauth1 = bb.Route(
|
|
||||||
'token/from_oauth1',
|
|
||||||
False,
|
|
||||||
TokenFromOAuth1Arg_validator,
|
|
||||||
TokenFromOAuth1Result_validator,
|
|
||||||
TokenFromOAuth1Error_validator,
|
|
||||||
{'host': u'api',
|
|
||||||
'style': u'rpc'},
|
|
||||||
)
|
|
||||||
token_revoke = bb.Route(
|
|
||||||
'token/revoke',
|
|
||||||
False,
|
|
||||||
bv.Void(),
|
|
||||||
bv.Void(),
|
|
||||||
bv.Void(),
|
|
||||||
{'host': u'api',
|
|
||||||
'style': u'rpc'},
|
|
||||||
)
|
|
||||||
|
|
||||||
ROUTES = {
|
|
||||||
'token/from_oauth1': token_from_oauth1,
|
|
||||||
'token/revoke': token_revoke,
|
|
||||||
}
|
|
||||||
|
|
||||||
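Among the definitions deleted above, RateLimitError carries a retry_after hint (defaulting to 1 second). For reference, a hedged sketch of how a caller usually honours that hint through the SDK's exception wrapper (the token and the listing call are placeholders):

import time
import dropbox
from dropbox.exceptions import RateLimitError

dbx = dropbox.Dropbox("ACCESS_TOKEN")  #placeholder token
for attempt in range(5):
    try:
        result = dbx.files_list_folder("")
        break
    except RateLimitError as e:
        #e.backoff mirrors retry_after; wait at least one second before retrying
        time.sleep(e.backoff or 1)
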
@@ -1,323 +0,0 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
# Auto-generated by Stone, do not modify.
|
|
||||||
# flake8: noqa
|
|
||||||
# pylint: skip-file
|
|
||||||
try:
|
|
||||||
from . import stone_validators as bv
|
|
||||||
from . import stone_base as bb
|
|
||||||
except (SystemError, ValueError):
|
|
||||||
# Catch errors raised when importing a relative module when not in a package.
|
|
||||||
# This makes testing this file directly (outside of a package) easier.
|
|
||||||
import stone_validators as bv
|
|
||||||
import stone_base as bb
|
|
||||||
|
|
||||||
class InvalidPathRootError(object):
|
|
||||||
"""
|
|
||||||
:ivar path_root: The latest path root id for user's team if the user is
|
|
||||||
still in a team.
|
|
||||||
"""
|
|
||||||
|
|
||||||
__slots__ = [
|
|
||||||
'_path_root_value',
|
|
||||||
'_path_root_present',
|
|
||||||
]
|
|
||||||
|
|
||||||
_has_required_fields = False
|
|
||||||
|
|
||||||
def __init__(self,
|
|
||||||
path_root=None):
|
|
||||||
self._path_root_value = None
|
|
||||||
self._path_root_present = False
|
|
||||||
if path_root is not None:
|
|
||||||
self.path_root = path_root
|
|
||||||
|
|
||||||
@property
|
|
||||||
def path_root(self):
|
|
||||||
"""
|
|
||||||
The latest path root id for user's team if the user is still in a team.
|
|
||||||
|
|
||||||
:rtype: str
|
|
||||||
"""
|
|
||||||
if self._path_root_present:
|
|
||||||
return self._path_root_value
|
|
||||||
else:
|
|
||||||
return None
|
|
||||||
|
|
||||||
@path_root.setter
|
|
||||||
def path_root(self, val):
|
|
||||||
if val is None:
|
|
||||||
del self.path_root
|
|
||||||
return
|
|
||||||
val = self._path_root_validator.validate(val)
|
|
||||||
self._path_root_value = val
|
|
||||||
self._path_root_present = True
|
|
||||||
|
|
||||||
@path_root.deleter
|
|
||||||
def path_root(self):
|
|
||||||
self._path_root_value = None
|
|
||||||
self._path_root_present = False
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return 'InvalidPathRootError(path_root={!r})'.format(
|
|
||||||
self._path_root_value,
|
|
||||||
)
|
|
||||||
|
|
||||||
InvalidPathRootError_validator = bv.Struct(InvalidPathRootError)
|
|
||||||
|
|
||||||
class PathRoot(bb.Union):
|
|
||||||
"""
|
|
||||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
|
||||||
return true. To get the associated value of a tag (if one exists), use the
|
|
||||||
corresponding ``get_*`` method.
|
|
||||||
|
|
||||||
:ivar home: Paths are relative to the authenticating user's home directory,
|
|
||||||
whether or not that user belongs to a team.
|
|
||||||
:ivar member_home: Paths are relative to the authenticating team member's
|
|
||||||
home directory. (This results in ``PathRootError.invalid`` if the user
|
|
||||||
does not belong to a team.).
|
|
||||||
:ivar str team: Paths are relative to the given team directory. (This
|
|
||||||
results in :field:`PathRootError.invalid` if the user is not a member of
|
|
||||||
the team associated with that path root id.).
|
|
||||||
:ivar user_home: Paths are relative to the user's home directory. (This
|
|
||||||
results in ``PathRootError.invalid`` if the belongs to a team.).
|
|
||||||
:ivar str namespace_id: Paths are relative to given namespace id (This
|
|
||||||
results in :field:`PathRootError.no_permission` if you don't have access
|
|
||||||
to this namespace.).
|
|
||||||
"""
|
|
||||||
|
|
||||||
_catch_all = 'other'
|
|
||||||
# Attribute is overwritten below the class definition
|
|
||||||
home = None
|
|
||||||
# Attribute is overwritten below the class definition
|
|
||||||
member_home = None
|
|
||||||
# Attribute is overwritten below the class definition
|
|
||||||
user_home = None
|
|
||||||
# Attribute is overwritten below the class definition
|
|
||||||
other = None
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def team(cls, val):
|
|
||||||
"""
|
|
||||||
Create an instance of this class set to the ``team`` tag with value
|
|
||||||
``val``.
|
|
||||||
|
|
||||||
:param str val:
|
|
||||||
:rtype: PathRoot
|
|
||||||
"""
|
|
||||||
return cls('team', val)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def namespace_id(cls, val):
|
|
||||||
"""
|
|
||||||
Create an instance of this class set to the ``namespace_id`` tag with
|
|
||||||
value ``val``.
|
|
||||||
|
|
||||||
:param str val:
|
|
||||||
:rtype: PathRoot
|
|
||||||
"""
|
|
||||||
return cls('namespace_id', val)
|
|
||||||
|
|
||||||
def is_home(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``home``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'home'
|
|
||||||
|
|
||||||
def is_member_home(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``member_home``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'member_home'
|
|
||||||
|
|
||||||
def is_team(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``team``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'team'
|
|
||||||
|
|
||||||
def is_user_home(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``user_home``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'user_home'
|
|
||||||
|
|
||||||
def is_namespace_id(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``namespace_id``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'namespace_id'
|
|
||||||
|
|
||||||
def is_other(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``other``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'other'
|
|
||||||
|
|
||||||
def get_team(self):
|
|
||||||
"""
|
|
||||||
Paths are relative to the given team directory. (This results in
|
|
||||||
``PathRootError.invalid`` if the user is not a member of the team
|
|
||||||
associated with that path root id.).
|
|
||||||
|
|
||||||
Only call this if :meth:`is_team` is true.
|
|
||||||
|
|
||||||
:rtype: str
|
|
||||||
"""
|
|
||||||
if not self.is_team():
|
|
||||||
raise AttributeError("tag 'team' not set")
|
|
||||||
return self._value
|
|
||||||
|
|
||||||
def get_namespace_id(self):
|
|
||||||
"""
|
|
||||||
Paths are relative to given namespace id (This results in
|
|
||||||
``PathRootError.no_permission`` if you don't have access to this
|
|
||||||
namespace.).
|
|
||||||
|
|
||||||
Only call this if :meth:`is_namespace_id` is true.
|
|
||||||
|
|
||||||
:rtype: str
|
|
||||||
"""
|
|
||||||
if not self.is_namespace_id():
|
|
||||||
raise AttributeError("tag 'namespace_id' not set")
|
|
||||||
return self._value
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return 'PathRoot(%r, %r)' % (self._tag, self._value)
|
|
||||||
|
|
||||||
PathRoot_validator = bv.Union(PathRoot)
|
|
||||||
|
|
||||||
class PathRootError(bb.Union):
|
|
||||||
"""
|
|
||||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
|
||||||
return true. To get the associated value of a tag (if one exists), use the
|
|
||||||
corresponding ``get_*`` method.
|
|
||||||
|
|
||||||
:ivar InvalidPathRootError invalid: The path root id value in
|
|
||||||
Dropbox-API-Path-Root header is no longer valid.
|
|
||||||
:ivar no_permission: You don't have permission to access the path root id in
|
|
||||||
Dropbox-API-Path-Root header.
|
|
||||||
"""
|
|
||||||
|
|
||||||
_catch_all = 'other'
|
|
||||||
# Attribute is overwritten below the class definition
|
|
||||||
no_permission = None
|
|
||||||
# Attribute is overwritten below the class definition
|
|
||||||
other = None
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def invalid(cls, val):
|
|
||||||
"""
|
|
||||||
Create an instance of this class set to the ``invalid`` tag with value
|
|
||||||
``val``.
|
|
||||||
|
|
||||||
:param InvalidPathRootError val:
|
|
||||||
:rtype: PathRootError
|
|
||||||
"""
|
|
||||||
return cls('invalid', val)
|
|
||||||
|
|
||||||
def is_invalid(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``invalid``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'invalid'
|
|
||||||
|
|
||||||
def is_no_permission(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``no_permission``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'no_permission'
|
|
||||||
|
|
||||||
def is_other(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``other``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'other'
|
|
||||||
|
|
||||||
def get_invalid(self):
|
|
||||||
"""
|
|
||||||
The path root id value in Dropbox-API-Path-Root header is no longer
|
|
||||||
valid.
|
|
||||||
|
|
||||||
Only call this if :meth:`is_invalid` is true.
|
|
||||||
|
|
||||||
:rtype: InvalidPathRootError
|
|
||||||
"""
|
|
||||||
if not self.is_invalid():
|
|
||||||
raise AttributeError("tag 'invalid' not set")
|
|
||||||
return self._value
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return 'PathRootError(%r, %r)' % (self._tag, self._value)
|
|
||||||
|
|
||||||
PathRootError_validator = bv.Union(PathRootError)
|
|
||||||
|
|
||||||
Date_validator = bv.Timestamp(u'%Y-%m-%d')
|
|
||||||
DisplayName_validator = bv.String(min_length=1, pattern=u'[^/:?*<>"|]*')
|
|
||||||
DisplayNameLegacy_validator = bv.String(min_length=1)
|
|
||||||
DropboxTimestamp_validator = bv.Timestamp(u'%Y-%m-%dT%H:%M:%SZ')
|
|
||||||
EmailAddress_validator = bv.String(max_length=255, pattern=u"^['&A-Za-z0-9._%+-]+@[A-Za-z0-9-][A-Za-z0-9.-]*.[A-Za-z]{2,15}$")
|
|
||||||
# A ISO639-1 code.
|
|
||||||
LanguageCode_validator = bv.String(min_length=2)
|
|
||||||
NamePart_validator = bv.String(min_length=1, max_length=100, pattern=u'[^/:?*<>"|]*')
|
|
||||||
NamespaceId_validator = bv.String(pattern=u'[-_0-9a-zA-Z:]+')
|
|
||||||
OptionalNamePart_validator = bv.String(max_length=100, pattern=u'[^/:?*<>"|]*')
|
|
||||||
PathRootId_validator = NamespaceId_validator
|
|
||||||
SessionId_validator = bv.String()
|
|
||||||
SharedFolderId_validator = NamespaceId_validator
|
|
||||||
InvalidPathRootError._path_root_validator = bv.Nullable(PathRootId_validator)
|
|
||||||
InvalidPathRootError._all_field_names_ = set(['path_root'])
|
|
||||||
InvalidPathRootError._all_fields_ = [('path_root', InvalidPathRootError._path_root_validator)]
|
|
||||||
|
|
||||||
PathRoot._home_validator = bv.Void()
|
|
||||||
PathRoot._member_home_validator = bv.Void()
|
|
||||||
PathRoot._team_validator = PathRootId_validator
|
|
||||||
PathRoot._user_home_validator = bv.Void()
|
|
||||||
PathRoot._namespace_id_validator = PathRootId_validator
|
|
||||||
PathRoot._other_validator = bv.Void()
|
|
||||||
PathRoot._tagmap = {
|
|
||||||
'home': PathRoot._home_validator,
|
|
||||||
'member_home': PathRoot._member_home_validator,
|
|
||||||
'team': PathRoot._team_validator,
|
|
||||||
'user_home': PathRoot._user_home_validator,
|
|
||||||
'namespace_id': PathRoot._namespace_id_validator,
|
|
||||||
'other': PathRoot._other_validator,
|
|
||||||
}
|
|
||||||
|
|
||||||
PathRoot.home = PathRoot('home')
|
|
||||||
PathRoot.member_home = PathRoot('member_home')
|
|
||||||
PathRoot.user_home = PathRoot('user_home')
|
|
||||||
PathRoot.other = PathRoot('other')
|
|
||||||
|
|
||||||
PathRootError._invalid_validator = InvalidPathRootError_validator
|
|
||||||
PathRootError._no_permission_validator = bv.Void()
|
|
||||||
PathRootError._other_validator = bv.Void()
|
|
||||||
PathRootError._tagmap = {
|
|
||||||
'invalid': PathRootError._invalid_validator,
|
|
||||||
'no_permission': PathRootError._no_permission_validator,
|
|
||||||
'other': PathRootError._other_validator,
|
|
||||||
}
|
|
||||||
|
|
||||||
PathRootError.no_permission = PathRootError('no_permission')
|
|
||||||
PathRootError.other = PathRootError('other')
|
|
||||||
|
|
||||||
ROUTES = {
|
|
||||||
}
|
|
||||||
|
|
||||||
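The PathRoot union deleted above is what populates the Dropbox-API-Path-Root header. Newer releases of the SDK expose it through Dropbox.with_path_root; a hedged sketch follows (the namespace id and token are placeholders, and the helper may not exist in the exact SDK snapshot vendored here):

import dropbox
from dropbox.common import PathRoot

dbx = dropbox.Dropbox("ACCESS_TOKEN")  #placeholder token
#scope subsequent calls to a team namespace instead of the member's home folder
dbx_ns = dbx.with_path_root(PathRoot.namespace_id("1234567"))
for entry in dbx_ns.files_list_folder("").entries:
    print(entry.name)
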
@@ -1,515 +0,0 @@
|
|||||||
__all__ = [
|
|
||||||
'Dropbox',
|
|
||||||
'DropboxTeam',
|
|
||||||
'create_session',
|
|
||||||
]
|
|
||||||
|
|
||||||
# This should always be 0.0.0 in master. Only update this after tagging
|
|
||||||
# before release.
|
|
||||||
__version__ = '0.0.0'
|
|
||||||
|
|
||||||
import contextlib
|
|
||||||
import json
|
|
||||||
import logging
|
|
||||||
import random
|
|
||||||
import time
|
|
||||||
|
|
||||||
import requests
|
|
||||||
import six
|
|
||||||
|
|
||||||
from . import files, stone_serializers
|
|
||||||
from .auth import (
|
|
||||||
AuthError_validator,
|
|
||||||
RateLimitError_validator,
|
|
||||||
)
|
|
||||||
from .base import DropboxBase
|
|
||||||
from .base_team import DropboxTeamBase
|
|
||||||
from .exceptions import (
|
|
||||||
ApiError,
|
|
||||||
AuthError,
|
|
||||||
BadInputError,
|
|
||||||
HttpError,
|
|
||||||
InternalServerError,
|
|
||||||
RateLimitError,
|
|
||||||
)
|
|
||||||
from .session import (
|
|
||||||
API_HOST,
|
|
||||||
API_CONTENT_HOST,
|
|
||||||
API_NOTIFICATION_HOST,
|
|
||||||
HOST_API,
|
|
||||||
HOST_CONTENT,
|
|
||||||
HOST_NOTIFY,
|
|
||||||
pinned_session,
|
|
||||||
)
|
|
||||||
|
|
||||||
class RouteResult(object):
|
|
||||||
"""The successful result of a call to a route."""
|
|
||||||
|
|
||||||
def __init__(self, obj_result, http_resp=None):
|
|
||||||
"""
|
|
||||||
:param str obj_result: The result of a route not including the binary
|
|
||||||
payload portion, if one exists. Must be serialized JSON.
|
|
||||||
:param requests.models.Response http_resp: A raw HTTP response. It will
|
|
||||||
be used to stream the binary-body payload of the response.
|
|
||||||
"""
|
|
||||||
assert isinstance(obj_result, six.string_types), \
|
|
||||||
'obj_result: expected string, got %r' % type(obj_result)
|
|
||||||
if http_resp is not None:
|
|
||||||
assert isinstance(http_resp, requests.models.Response), \
|
|
||||||
'http_resp: expected requests.models.Response, got %r' % \
|
|
||||||
type(http_resp)
|
|
||||||
self.obj_result = obj_result
|
|
||||||
self.http_resp = http_resp
|
|
||||||
|
|
||||||
class RouteErrorResult(object):
|
|
||||||
"""The error result of a call to a route."""
|
|
||||||
|
|
||||||
def __init__(self, request_id, obj_result):
|
|
||||||
"""
|
|
||||||
:param str request_id: A request_id can be shared with Dropbox Support
|
|
||||||
to pinpoint the exact request that returns an error.
|
|
||||||
:param str obj_result: The result of a route not including the binary
|
|
||||||
payload portion, if one exists.
|
|
||||||
"""
|
|
||||||
self.request_id = request_id
|
|
||||||
self.obj_result = obj_result
|
|
||||||
|
|
||||||
def create_session(max_connections=8, proxies=None):
|
|
||||||
"""
|
|
||||||
Creates a session object that can be used by multiple :class:`Dropbox` and
|
|
||||||
:class:`DropboxTeam` instances. This lets you share a connection pool
|
|
||||||
amongst them, as well as proxy parameters.
|
|
||||||
|
|
||||||
:param int max_connections: Maximum connection pool size.
|
|
||||||
:param dict proxies: See the `requests module
|
|
||||||
<http://docs.python-requests.org/en/latest/user/advanced/#proxies>`_
|
|
||||||
for more details.
|
|
||||||
:rtype: :class:`requests.sessions.Session`. `See the requests module
|
|
||||||
<http://docs.python-requests.org/en/latest/user/advanced/#session-objects>`_
|
|
||||||
for more details.
|
|
||||||
"""
|
|
||||||
# We only need as many pool_connections as we have unique hostnames.
|
|
||||||
session = pinned_session(pool_maxsize=max_connections)
|
|
||||||
if proxies:
|
|
||||||
session.proxies = proxies
|
|
||||||
return session
|
|
||||||
|
|
||||||
class _DropboxTransport(object):
|
|
||||||
"""
|
|
||||||
Responsible for implementing the wire protocol for making requests to the
|
|
||||||
Dropbox API.
|
|
||||||
"""
|
|
||||||
|
|
||||||
_API_VERSION = '2'
|
|
||||||
|
|
||||||
# Download style means that the route argument goes in a Dropbox-API-Arg
|
|
||||||
# header, and the result comes back in a Dropbox-API-Result header. The
|
|
||||||
# HTTP response body contains a binary payload.
|
|
||||||
_ROUTE_STYLE_DOWNLOAD = 'download'
|
|
||||||
|
|
||||||
# Upload style means that the route argument goes in a Dropbox-API-Arg
|
|
||||||
# header. The HTTP request body contains a binary payload. The result
|
|
||||||
# comes back in a Dropbox-API-Result header.
|
|
||||||
_ROUTE_STYLE_UPLOAD = 'upload'
|
|
||||||
|
|
||||||
# RPC style means that the argument and result of a route are contained in
|
|
||||||
# the HTTP body.
|
|
||||||
_ROUTE_STYLE_RPC = 'rpc'
|
|
||||||
|
|
||||||
# This is the default longest time we'll block on receiving data from the server
|
|
||||||
_DEFAULT_TIMEOUT = 30
|
|
||||||
|
|
||||||
def __init__(self,
|
|
||||||
oauth2_access_token,
|
|
||||||
max_retries_on_error=4,
|
|
||||||
max_retries_on_rate_limit=None,
|
|
||||||
user_agent=None,
|
|
||||||
session=None,
|
|
||||||
headers=None,
|
|
||||||
timeout=_DEFAULT_TIMEOUT):
|
|
||||||
"""
|
|
||||||
:param str oauth2_access_token: OAuth2 access token for making client
|
|
||||||
requests.
|
|
||||||
|
|
||||||
:param int max_retries_on_error: On 5xx errors, the number of times to
|
|
||||||
retry.
|
|
||||||
:param Optional[int] max_retries_on_rate_limit: On 429 errors, the
|
|
||||||
number of times to retry. If `None`, always retries.
|
|
||||||
:param str user_agent: The user agent to use when making requests. This
|
|
||||||
helps us identify requests coming from your application. We
|
|
||||||
recommend you use the format "AppName/Version". If set, we append
|
|
||||||
"/OfficialDropboxPythonSDKv2/__version__" to the user_agent,
|
|
||||||
:param session: If not provided, a new session (connection pool) is
|
|
||||||
created. To share a session across multiple clients, use
|
|
||||||
:func:`create_session`.
|
|
||||||
:type session: :class:`requests.sessions.Session`
|
|
||||||
:param dict headers: Additional headers to add to requests.
|
|
||||||
:param Optional[float] timeout: Maximum duration in seconds that
|
|
||||||
client will wait for any single packet from the
|
|
||||||
server. After the timeout the client will give up on
|
|
||||||
connection. If `None`, client will wait forever. Defaults
|
|
||||||
to 30 seconds.
|
|
||||||
"""
|
|
||||||
assert len(oauth2_access_token) > 0, \
|
|
||||||
'OAuth2 access token cannot be empty.'
|
|
||||||
assert headers is None or isinstance(headers, dict), \
|
|
||||||
'Expected dict, got %r' % headers
|
|
||||||
self._oauth2_access_token = oauth2_access_token
|
|
||||||
|
|
||||||
self._max_retries_on_error = max_retries_on_error
|
|
||||||
self._max_retries_on_rate_limit = max_retries_on_rate_limit
|
|
||||||
if session:
|
|
||||||
assert isinstance(session, requests.sessions.Session), \
|
|
||||||
'Expected requests.sessions.Session, got %r' % session
|
|
||||||
self._session = session
|
|
||||||
else:
|
|
||||||
self._session = create_session()
|
|
||||||
self._headers = headers
|
|
||||||
|
|
||||||
base_user_agent = 'OfficialDropboxPythonSDKv2/' + __version__
|
|
||||||
if user_agent:
|
|
||||||
self._raw_user_agent = user_agent
|
|
||||||
self._user_agent = '{}/{}'.format(user_agent, base_user_agent)
|
|
||||||
else:
|
|
||||||
self._raw_user_agent = None
|
|
||||||
self._user_agent = base_user_agent
|
|
||||||
|
|
||||||
self._logger = logging.getLogger('dropbox')
|
|
||||||
|
|
||||||
self._host_map = {HOST_API: API_HOST,
|
|
||||||
HOST_CONTENT: API_CONTENT_HOST,
|
|
||||||
HOST_NOTIFY: API_NOTIFICATION_HOST}
|
|
||||||
|
|
||||||
self._timeout = timeout
|
|
||||||
|
|
||||||
def request(self,
|
|
||||||
route,
|
|
||||||
namespace,
|
|
||||||
request_arg,
|
|
||||||
request_binary,
|
|
||||||
timeout=None):
|
|
||||||
"""
|
|
||||||
Makes a request to the Dropbox API and in the process validates that
|
|
||||||
the route argument and result are the expected data types. The
|
|
||||||
request_arg is converted to JSON based on the arg_data_type. Likewise,
|
|
||||||
the response is deserialized from JSON and converted to an object based
|
|
||||||
on the {result,error}_data_type.
|
|
||||||
|
|
||||||
:param host: The Dropbox API host to connect to.
|
|
||||||
:param route: The route to make the request to.
|
|
||||||
:type route: :class:`.datatypes.stone_base.Route`
|
|
||||||
:param request_arg: Argument for the route that conforms to the
|
|
||||||
validator specified by route.arg_type.
|
|
||||||
:param request_binary: String or file pointer representing the binary
|
|
||||||
payload. Use None if there is no binary payload.
|
|
||||||
:param Optional[float] timeout: Maximum duration in seconds
|
|
||||||
that client will wait for any single packet from the
|
|
||||||
server. After the timeout the client will give up on
|
|
||||||
connection. If `None`, will use default timeout set on
|
|
||||||
Dropbox object. Defaults to `None`.
|
|
||||||
:return: The route's result.
|
|
||||||
"""
|
|
||||||
host = route.attrs['host'] or 'api'
|
|
||||||
route_name = namespace + '/' + route.name
|
|
||||||
route_style = route.attrs['style'] or 'rpc'
|
|
||||||
serialized_arg = stone_serializers.json_encode(route.arg_type,
|
|
||||||
request_arg)
|
|
||||||
|
|
||||||
if (timeout is None and
|
|
||||||
route == files.list_folder_longpoll):
|
|
||||||
# The client normally sends a timeout value to the
|
|
||||||
# longpoll route. The server will respond after
|
|
||||||
# <timeout> + random(0, 90) seconds. We increase the
|
|
||||||
# socket timeout to the longpoll timeout value plus 90
|
|
||||||
# seconds so that we don't cut the server response short
|
|
||||||
# due to a shorter socket timeout.
|
|
||||||
# NB: This is done here because base.py is auto-generated
|
|
||||||
timeout = request_arg.timeout + 90
|
|
||||||
|
|
||||||
res = self.request_json_string_with_retry(host,
|
|
||||||
route_name,
|
|
||||||
route_style,
|
|
||||||
serialized_arg,
|
|
||||||
request_binary,
|
|
||||||
timeout=timeout)
|
|
||||||
decoded_obj_result = json.loads(res.obj_result)
|
|
||||||
if isinstance(res, RouteResult):
|
|
||||||
returned_data_type = route.result_type
|
|
||||||
obj = decoded_obj_result
|
|
||||||
elif isinstance(res, RouteErrorResult):
|
|
||||||
returned_data_type = route.error_type
|
|
||||||
obj = decoded_obj_result['error']
|
|
||||||
user_message = decoded_obj_result.get('user_message')
|
|
||||||
user_message_text = user_message and user_message.get('text')
|
|
||||||
user_message_locale = user_message and user_message.get('locale')
|
|
||||||
else:
|
|
||||||
raise AssertionError('Expected RouteResult or RouteErrorResult, '
|
|
||||||
'but res is %s' % type(res))
|
|
||||||
|
|
||||||
deserialized_result = stone_serializers.json_compat_obj_decode(
|
|
||||||
returned_data_type, obj, strict=False)
|
|
||||||
|
|
||||||
if isinstance(res, RouteErrorResult):
|
|
||||||
raise ApiError(res.request_id,
|
|
||||||
deserialized_result,
|
|
||||||
user_message_text,
|
|
||||||
user_message_locale)
|
|
||||||
elif route_style == self._ROUTE_STYLE_DOWNLOAD:
|
|
||||||
return (deserialized_result, res.http_resp)
|
|
||||||
else:
|
|
||||||
return deserialized_result
|
|
||||||
|
|
||||||
def request_json_object(self,
|
|
||||||
host,
|
|
||||||
route_name,
|
|
||||||
route_style,
|
|
||||||
request_arg,
|
|
||||||
request_binary,
|
|
||||||
timeout=None):
|
|
||||||
"""
|
|
||||||
Makes a request to the Dropbox API, taking a JSON-serializable Python
|
|
||||||
object as an argument, and returning one as a response.
|
|
||||||
|
|
||||||
:param host: The Dropbox API host to connect to.
|
|
||||||
:param route_name: The name of the route to invoke.
|
|
||||||
:param route_style: The style of the route.
|
|
||||||
:param str request_arg: A JSON-serializable Python object representing
|
|
||||||
the argument for the route.
|
|
||||||
:param Optional[bytes] request_binary: Bytes representing the binary
|
|
||||||
payload. Use None if there is no binary payload.
|
|
||||||
:param Optional[float] timeout: Maximum duration in seconds
|
|
||||||
that client will wait for any single packet from the
|
|
||||||
server. After the timeout the client will give up on
|
|
||||||
connection. If `None`, will use default timeout set on
|
|
||||||
Dropbox object. Defaults to `None`.
|
|
||||||
:return: The route's result as a JSON-serializable Python object.
|
|
||||||
"""
|
|
||||||
serialized_arg = json.dumps(request_arg)
|
|
||||||
res = self.request_json_string_with_retry(host,
|
|
||||||
route_name,
|
|
||||||
route_style,
|
|
||||||
serialized_arg,
|
|
||||||
request_binary,
|
|
||||||
timeout=timeout)
|
|
||||||
# This can throw a ValueError if the result is not deserializable,
|
|
||||||
# but that would be completely unexpected.
|
|
||||||
deserialized_result = json.loads(res.obj_result)
|
|
||||||
if isinstance(res, RouteResult) and res.http_resp is not None:
|
|
||||||
return (deserialized_result, res.http_resp)
|
|
||||||
else:
|
|
||||||
return deserialized_result
|
|
||||||
|
|
||||||
def request_json_string_with_retry(self,
|
|
||||||
host,
|
|
||||||
route_name,
|
|
||||||
route_style,
|
|
||||||
request_json_arg,
|
|
||||||
request_binary,
|
|
||||||
timeout=None):
|
|
||||||
"""
|
|
||||||
See :meth:`request_json_object` for description of parameters.
|
|
||||||
|
|
||||||
:param request_json_arg: A string representing the serialized JSON
|
|
||||||
argument to the route.
|
|
||||||
"""
|
|
||||||
attempt = 0
|
|
||||||
rate_limit_errors = 0
|
|
||||||
while True:
|
|
||||||
self._logger.info('Request to %s', route_name)
|
|
||||||
try:
|
|
||||||
return self.request_json_string(host,
|
|
||||||
route_name,
|
|
||||||
route_style,
|
|
||||||
request_json_arg,
|
|
||||||
request_binary,
|
|
||||||
timeout=timeout)
|
|
||||||
except InternalServerError as e:
|
|
||||||
attempt += 1
|
|
||||||
if attempt <= self._max_retries_on_error:
|
|
||||||
# Use exponential backoff
|
|
||||||
backoff = 2**attempt * random.random()
|
|
||||||
self._logger.info(
|
|
||||||
'HttpError status_code=%s: Retrying in %.1f seconds',
|
|
||||||
e.status_code, backoff)
|
|
||||||
time.sleep(backoff)
|
|
||||||
else:
|
|
||||||
raise
|
|
||||||
except RateLimitError as e:
|
|
||||||
rate_limit_errors += 1
|
|
||||||
if (self._max_retries_on_rate_limit is None or
|
|
||||||
self._max_retries_on_rate_limit >= rate_limit_errors):
|
|
||||||
# Set default backoff to 5 seconds.
|
|
||||||
backoff = e.backoff if e.backoff is not None else 5.0
|
|
||||||
self._logger.info(
|
|
||||||
'Ratelimit: Retrying in %.1f seconds.', backoff)
|
|
||||||
time.sleep(backoff)
|
|
||||||
else:
|
|
||||||
raise
|
|
||||||
|
|
||||||
def request_json_string(self,
|
|
||||||
host,
|
|
||||||
func_name,
|
|
||||||
route_style,
|
|
||||||
request_json_arg,
|
|
||||||
request_binary,
|
|
||||||
timeout=None):
|
|
||||||
"""
|
|
||||||
See :meth:`request_json_string_with_retry` for description of
|
|
||||||
parameters.
|
|
||||||
"""
|
|
||||||
if host not in self._host_map:
|
|
||||||
raise ValueError('Unknown value for host: %r' % host)
|
|
||||||
|
|
||||||
if not isinstance(request_binary, (six.binary_type, type(None))):
|
|
||||||
# Disallow streams and file-like objects even though the underlying
|
|
||||||
# requests library supports them. This is to prevent incorrect
|
|
||||||
# behavior when a non-rewindable stream is read from, but the
|
|
||||||
# request fails and needs to be re-tried at a later time.
|
|
||||||
raise TypeError('expected request_binary as binary type, got %s' %
|
|
||||||
type(request_binary))
|
|
||||||
|
|
||||||
# Fully qualified hostname
|
|
||||||
fq_hostname = self._host_map[host]
|
|
||||||
url = self._get_route_url(fq_hostname, func_name)
|
|
||||||
|
|
||||||
headers = {'User-Agent': self._user_agent}
|
|
||||||
if host != HOST_NOTIFY:
|
|
||||||
headers['Authorization'] = 'Bearer %s' % self._oauth2_access_token
|
|
||||||
if self._headers:
|
|
||||||
headers.update(self._headers)
|
|
||||||
|
|
||||||
# The contents of the body of the HTTP request
|
|
||||||
body = None
|
|
||||||
# Whether the response should be streamed incrementally, or buffered
|
|
||||||
# entirely. If stream is True, the caller is responsible for closing
|
|
||||||
# the HTTP response.
|
|
||||||
stream = False
|
|
||||||
|
|
||||||
if route_style == self._ROUTE_STYLE_RPC:
|
|
||||||
headers['Content-Type'] = 'application/json'
|
|
||||||
body = request_json_arg
|
|
||||||
elif route_style == self._ROUTE_STYLE_DOWNLOAD:
|
|
||||||
headers['Dropbox-API-Arg'] = request_json_arg
|
|
||||||
stream = True
|
|
||||||
elif route_style == self._ROUTE_STYLE_UPLOAD:
|
|
||||||
headers['Content-Type'] = 'application/octet-stream'
|
|
||||||
headers['Dropbox-API-Arg'] = request_json_arg
|
|
||||||
body = request_binary
|
|
||||||
else:
|
|
||||||
raise ValueError('Unknown operation style: %r' % route_style)
|
|
||||||
|
|
||||||
if timeout is None:
|
|
||||||
timeout = self._timeout
|
|
||||||
|
|
||||||
r = self._session.post(url,
|
|
||||||
headers=headers,
|
|
||||||
data=body,
|
|
||||||
stream=stream,
|
|
||||||
verify=True,
|
|
||||||
timeout=timeout,
|
|
||||||
)
|
|
||||||
|
|
||||||
request_id = r.headers.get('x-dropbox-request-id')
|
|
||||||
if r.status_code >= 500:
|
|
||||||
raise InternalServerError(request_id, r.status_code, r.text)
|
|
||||||
elif r.status_code == 400:
|
|
||||||
raise BadInputError(request_id, r.text)
|
|
||||||
elif r.status_code == 401:
|
|
||||||
assert r.headers.get('content-type') == 'application/json', (
|
|
||||||
'Expected content-type to be application/json, got %r' %
|
|
||||||
r.headers.get('content-type'))
|
|
||||||
err = stone_serializers.json_compat_obj_decode(
|
|
||||||
AuthError_validator, r.json()['error'])
|
|
||||||
raise AuthError(request_id, err)
|
|
||||||
elif r.status_code == 429:
|
|
||||||
err = None
|
|
||||||
if r.headers.get('content-type') == 'application/json':
|
|
||||||
err = stone_serializers.json_compat_obj_decode(
|
|
||||||
RateLimitError_validator, r.json()['error'])
|
|
||||||
retry_after = err.retry_after
|
|
||||||
else:
|
|
||||||
retry_after_str = r.headers.get('retry-after')
|
|
||||||
if retry_after_str is not None:
|
|
||||||
retry_after = int(retry_after_str)
|
|
||||||
else:
|
|
||||||
retry_after = None
|
|
||||||
raise RateLimitError(request_id, err, retry_after)
|
|
||||||
elif 200 <= r.status_code <= 299:
|
|
||||||
if route_style == self._ROUTE_STYLE_DOWNLOAD:
|
|
||||||
raw_resp = r.headers['dropbox-api-result']
|
|
||||||
else:
|
|
||||||
assert r.headers.get('content-type') == 'application/json', (
|
|
||||||
'Expected content-type to be application/json, got %r' %
|
|
||||||
r.headers.get('content-type'))
|
|
||||||
raw_resp = r.content.decode('utf-8')
|
|
||||||
if route_style == self._ROUTE_STYLE_DOWNLOAD:
|
|
||||||
return RouteResult(raw_resp, r)
|
|
||||||
else:
|
|
||||||
return RouteResult(raw_resp)
|
|
||||||
elif r.status_code in (403, 404, 409):
|
|
||||||
raw_resp = r.content.decode('utf-8')
|
|
||||||
return RouteErrorResult(request_id, raw_resp)
|
|
||||||
else:
|
|
||||||
raise HttpError(request_id, r.status_code, r.text)
|
|
||||||
|
|
||||||
def _get_route_url(self, hostname, route_name):
|
|
||||||
"""Returns the URL of the route.
|
|
||||||
|
|
||||||
:param str hostname: Hostname to make the request to.
|
|
||||||
:param str route_name: Name of the route.
|
|
||||||
:rtype: str
|
|
||||||
"""
|
|
||||||
return 'https://{hostname}/{version}/{route_name}'.format(
|
|
||||||
hostname=hostname,
|
|
||||||
version=Dropbox._API_VERSION,
|
|
||||||
route_name=route_name,
|
|
||||||
)
|
|
||||||
|
|
||||||
def _save_body_to_file(self, download_path, http_resp, chunksize=2**16):
|
|
||||||
"""
|
|
||||||
Saves the body of an HTTP response to a file.
|
|
||||||
|
|
||||||
:param str download_path: Local path to save data to.
|
|
||||||
:param http_resp: The HTTP response whose body will be saved.
|
|
||||||
:type http_resp: :class:`requests.models.Response`
|
|
||||||
:rtype: None
|
|
||||||
"""
|
|
||||||
with open(download_path, 'wb') as f:
|
|
||||||
with contextlib.closing(http_resp):
|
|
||||||
for c in http_resp.iter_content(chunksize):
|
|
||||||
f.write(c)
|
|
||||||
|
|
||||||
class Dropbox(_DropboxTransport, DropboxBase):
|
|
||||||
"""
|
|
||||||
Use this class to make requests to the Dropbox API using a user's access
|
|
||||||
token. Methods of this class are meant to act on the corresponding user's
|
|
||||||
Dropbox.
|
|
||||||
"""
|
|
||||||
pass
|
|
||||||
|
|
||||||
class DropboxTeam(_DropboxTransport, DropboxTeamBase):
|
|
||||||
"""
|
|
||||||
Use this class to make requests to the Dropbox API using a team's access
|
|
||||||
token. Methods of this class are meant to act on the team, but there is
|
|
||||||
also an :meth:`as_user` method for assuming a team member's identity.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def as_user(self, team_member_id):
|
|
||||||
"""
|
|
||||||
Allows a team credential to assume the identity of a member of the
|
|
||||||
team.
|
|
||||||
|
|
||||||
:return: A :class:`Dropbox` object that can be used to query on behalf
|
|
||||||
of this member of the team.
|
|
||||||
:rtype: Dropbox
|
|
||||||
"""
|
|
||||||
new_headers = self._headers.copy() if self._headers else {}
|
|
||||||
new_headers['Dropbox-API-Select-User'] = team_member_id
|
|
||||||
return Dropbox(
|
|
||||||
self._oauth2_access_token,
|
|
||||||
max_retries_on_error=self._max_retries_on_error,
|
|
||||||
max_retries_on_rate_limit=self._max_retries_on_rate_limit,
|
|
||||||
user_agent=self._raw_user_agent,
|
|
||||||
session=self._session,
|
|
||||||
headers=new_headers,
|
|
||||||
)
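# ---------------------------------------------------------------------------
# Editor's sketch, not part of the deleted dropbox.py above: typical use of
# the transport it defined. The token strings and member id are placeholders,
# and the import assumes the SDK's normal `dropbox` package layout (in this
# add-on the module is bundled under resources/lib/dropbox).
# ---------------------------------------------------------------------------
import dropbox

def make_clients(user_token, team_token, member_id):
    """Build user- and team-scoped clients that share one connection pool."""
    session = dropbox.create_session(max_connections=4)   # shared requests.Session
    dbx = dropbox.Dropbox(user_token,
                          session=session,
                          user_agent='MyApp/1.0',          # combined with the SDK UA
                          max_retries_on_error=4)          # 5xx retried with backoff
    team = dropbox.DropboxTeam(team_token, session=session)
    member = team.as_user(member_id)                       # acts as that team member
    return dbx, team, member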
@@ -1,89 +0,0 @@
class DropboxException(Exception):
    """All errors related to making an API request extend this."""

    def __init__(self, request_id, *args, **kwargs):
        # A request_id can be shared with Dropbox Support to pinpoint the exact
        # request that returns an error.
        super(DropboxException, self).__init__(request_id, *args, **kwargs)
        self.request_id = request_id

    def __str__(self):
        return repr(self)


class ApiError(DropboxException):
    """Errors produced by the Dropbox API."""

    def __init__(self, request_id, error, user_message_text, user_message_locale):
        """
        :param (str) request_id: A request_id can be shared with Dropbox
            Support to pinpoint the exact request that returns an error.
        :param error: An instance of the error data type for the route.
        :param (str) user_message_text: A human-readable message that can be
            displayed to the end user. Is None, if unavailable.
        :param (str) user_message_locale: The locale of ``user_message_text``,
            if present.
        """
        super(ApiError, self).__init__(request_id, error)
        self.error = error
        self.user_message_text = user_message_text
        self.user_message_locale = user_message_locale

    def __repr__(self):
        return 'ApiError({!r}, {})'.format(self.request_id, self.error)


class HttpError(DropboxException):
    """Errors produced at the HTTP layer."""

    def __init__(self, request_id, status_code, body):
        super(HttpError, self).__init__(request_id, status_code, body)
        self.status_code = status_code
        self.body = body

    def __repr__(self):
        return 'HttpError({!r}, {}, {!r})'.format(self.request_id,
                                                  self.status_code, self.body)


class BadInputError(HttpError):
    """Errors due to bad input parameters to an API Operation."""

    def __init__(self, request_id, message):
        super(BadInputError, self).__init__(request_id, 400, message)
        self.message = message

    def __repr__(self):
        return 'BadInputError({!r}, {!r})'.format(self.request_id, self.message)


class AuthError(HttpError):
    """Errors due to invalid authentication credentials."""

    def __init__(self, request_id, error):
        super(AuthError, self).__init__(request_id, 401, None)
        self.error = error

    def __repr__(self):
        return 'AuthError({!r}, {!r})'.format(self.request_id, self.error)


class RateLimitError(HttpError):
    """Error caused by rate limiting."""

    def __init__(self, request_id, error=None, backoff=None):
        super(RateLimitError, self).__init__(request_id, 429, None)
        self.error = error
        self.backoff = backoff

    def __repr__(self):
        return 'RateLimitError({!r}, {!r}, {!r})'.format(
            self.request_id, self.error, self.backoff)


class InternalServerError(HttpError):
    """Errors due to a problem on Dropbox."""

    def __repr__(self):
        return 'InternalServerError({!r}, {}, {!r})'.format(
            self.request_id, self.status_code, self.body)
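# ---------------------------------------------------------------------------
# Editor's sketch, not part of the deleted exceptions module above: one way a
# caller can map this hierarchy onto log messages. The import path assumes the
# standard `dropbox` package layout.
# ---------------------------------------------------------------------------
from dropbox.exceptions import (
    ApiError,
    AuthError,
    HttpError,
    RateLimitError,
)

def describe_failure(exc):
    """Return a short, human-readable description of an SDK exception."""
    if isinstance(exc, RateLimitError):
        return 'rate limited (request %s), retry after %ss' % (exc.request_id, exc.backoff)
    if isinstance(exc, AuthError):
        return 'access token rejected (request %s): %r' % (exc.request_id, exc.error)
    if isinstance(exc, ApiError):
        # exc.error is the route-specific error union; user_message_text may be None
        return 'API error (request %s): %r %s' % (exc.request_id, exc.error,
                                                  exc.user_message_text or '')
    if isinstance(exc, HttpError):
        return 'HTTP %s (request %s): %r' % (exc.status_code, exc.request_id, exc.body)
    return repr(exc)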
@@ -1,519 +0,0 @@
__all__ = [
    'BadRequestException',
    'BadStateException',
    'CsrfException',
    'DropboxOAuth2Flow',
    'DropboxOAuth2FlowNoRedirect',
    'NotApprovedException',
    'OAuth2FlowNoRedirectResult',
    'OAuth2FlowResult',
    'ProviderException',
]

import base64
import os
import six
import urllib

from .session import (
    API_HOST,
    WEB_HOST,
    pinned_session,
)

if six.PY3:
    url_path_quote = urllib.parse.quote  # pylint: disable=no-member,useless-suppression
    url_encode = urllib.parse.urlencode  # pylint: disable=no-member,useless-suppression
else:
    url_path_quote = urllib.quote  # pylint: disable=no-member,useless-suppression
    url_encode = urllib.urlencode  # pylint: disable=no-member,useless-suppression

class OAuth2FlowNoRedirectResult(object):
|
|
||||||
"""
|
|
||||||
Authorization information for an OAuth2Flow performed with no redirect.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, access_token, account_id, user_id):
|
|
||||||
"""
|
|
||||||
Args:
|
|
||||||
access_token (str): Token to be used to authenticate later
|
|
||||||
requests.
|
|
||||||
account_id (str): The Dropbox user's account ID.
|
|
||||||
user_id (str): Deprecated (use account_id instead).
|
|
||||||
"""
|
|
||||||
self.access_token = access_token
|
|
||||||
self.account_id = account_id
|
|
||||||
self.user_id = user_id
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return 'OAuth2FlowNoRedirectResult(%r, %r, %r)' % (
|
|
||||||
self.access_token,
|
|
||||||
self.account_id,
|
|
||||||
self.user_id,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class OAuth2FlowResult(OAuth2FlowNoRedirectResult):
|
|
||||||
"""
|
|
||||||
Authorization information for an OAuth2Flow with redirect.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, access_token, account_id, user_id, url_state):
|
|
||||||
"""
|
|
||||||
Same as OAuth2FlowNoRedirectResult but with url_state.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
url_state (str): The url state that was set by
|
|
||||||
:meth:`DropboxOAuth2Flow.start`.
|
|
||||||
"""
|
|
||||||
super(OAuth2FlowResult, self).__init__(
|
|
||||||
access_token, account_id, user_id)
|
|
||||||
self.url_state = url_state
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def from_no_redirect_result(cls, result, url_state):
|
|
||||||
assert isinstance(result, OAuth2FlowNoRedirectResult)
|
|
||||||
return cls(
|
|
||||||
result.access_token, result.account_id, result.user_id, url_state)
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return 'OAuth2FlowResult(%r, %r, %r, %r)' % (
|
|
||||||
self.access_token,
|
|
||||||
self.account_id,
|
|
||||||
self.user_id,
|
|
||||||
self.url_state,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class DropboxOAuth2FlowBase(object):
|
|
||||||
|
|
||||||
def __init__(self, consumer_key, consumer_secret, locale=None):
|
|
||||||
self.consumer_key = consumer_key
|
|
||||||
self.consumer_secret = consumer_secret
|
|
||||||
self.locale = locale
|
|
||||||
self.requests_session = pinned_session()
|
|
||||||
|
|
||||||
def _get_authorize_url(self, redirect_uri, state):
|
|
||||||
params = dict(response_type='code',
|
|
||||||
client_id=self.consumer_key)
|
|
||||||
if redirect_uri is not None:
|
|
||||||
params['redirect_uri'] = redirect_uri
|
|
||||||
if state is not None:
|
|
||||||
params['state'] = state
|
|
||||||
|
|
||||||
return self.build_url('/oauth2/authorize', params, WEB_HOST)
|
|
||||||
|
|
||||||
def _finish(self, code, redirect_uri):
|
|
||||||
url = self.build_url('/oauth2/token')
|
|
||||||
params = {'grant_type': 'authorization_code',
|
|
||||||
'code': code,
|
|
||||||
'client_id': self.consumer_key,
|
|
||||||
'client_secret': self.consumer_secret,
|
|
||||||
}
|
|
||||||
if self.locale is not None:
|
|
||||||
params['locale'] = self.locale
|
|
||||||
if redirect_uri is not None:
|
|
||||||
params['redirect_uri'] = redirect_uri
|
|
||||||
|
|
||||||
resp = self.requests_session.post(url, data=params)
|
|
||||||
resp.raise_for_status()
|
|
||||||
|
|
||||||
d = resp.json()
|
|
||||||
|
|
||||||
if 'team_id' in d:
|
|
||||||
account_id = d['team_id']
|
|
||||||
else:
|
|
||||||
account_id = d['account_id']
|
|
||||||
|
|
||||||
access_token = d['access_token']
|
|
||||||
uid = d['uid']
|
|
||||||
|
|
||||||
return OAuth2FlowNoRedirectResult(
|
|
||||||
access_token,
|
|
||||||
account_id,
|
|
||||||
uid)
|
|
||||||
|
|
||||||
def build_path(self, target, params=None):
|
|
||||||
"""Build the path component for an API URL.
|
|
||||||
|
|
||||||
This method urlencodes the parameters, adds them
|
|
||||||
to the end of the target url, and puts a marker for the API
|
|
||||||
version in front.
|
|
||||||
|
|
||||||
:param str target: A target url (e.g. '/files') to build upon.
|
|
||||||
:param dict params: Optional dictionary of parameters (name to value).
|
|
||||||
:return: The path and parameters components of an API URL.
|
|
||||||
:rtype: str
|
|
||||||
"""
|
|
||||||
if six.PY2 and isinstance(target, six.text_type):
|
|
||||||
target = target.encode('utf8')
|
|
||||||
|
|
||||||
target_path = url_path_quote(target)
|
|
||||||
|
|
||||||
params = params or {}
|
|
||||||
params = params.copy()
|
|
||||||
|
|
||||||
if self.locale:
|
|
||||||
params['locale'] = self.locale
|
|
||||||
|
|
||||||
if params:
|
|
||||||
query_string = _params_to_urlencoded(params)
|
|
||||||
return "%s?%s" % (target_path, query_string)
|
|
||||||
else:
|
|
||||||
return target_path
|
|
||||||
|
|
||||||
def build_url(self, target, params=None, host=API_HOST):
|
|
||||||
"""Build an API URL.
|
|
||||||
|
|
||||||
This method adds scheme and hostname to the path
|
|
||||||
returned from build_path.
|
|
||||||
|
|
||||||
:param str target: A target url (e.g. '/files') to build upon.
|
|
||||||
:param dict params: Optional dictionary of parameters (name to value).
|
|
||||||
:return: The full API URL.
|
|
||||||
:rtype: str
|
|
||||||
"""
|
|
||||||
return "https://%s%s" % (host, self.build_path(target, params))
|
|
||||||
|
|
||||||
|
|
||||||
class DropboxOAuth2FlowNoRedirect(DropboxOAuth2FlowBase):
|
|
||||||
"""
|
|
||||||
OAuth 2 authorization helper for apps that can't provide a redirect URI
|
|
||||||
(such as the command-line example apps).
|
|
||||||
|
|
||||||
Example::
|
|
||||||
|
|
||||||
from dropbox import DropboxOAuth2FlowNoRedirect
|
|
||||||
|
|
||||||
auth_flow = DropboxOAuth2FlowNoRedirect(APP_KEY, APP_SECRET)
|
|
||||||
|
|
||||||
authorize_url = auth_flow.start()
|
|
||||||
print "1. Go to: " + authorize_url
|
|
||||||
print "2. Click \\"Allow\\" (you might have to log in first)."
|
|
||||||
print "3. Copy the authorization code."
|
|
||||||
auth_code = raw_input("Enter the authorization code here: ").strip()
|
|
||||||
|
|
||||||
try:
|
|
||||||
oauth_result = auth_flow.finish(auth_code)
|
|
||||||
except Exception, e:
|
|
||||||
print('Error: %s' % (e,))
|
|
||||||
return
|
|
||||||
|
|
||||||
dbx = Dropbox(oauth_result.access_token)
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, consumer_key, consumer_secret, locale=None): # noqa: E501; pylint: disable=useless-super-delegation
|
|
||||||
"""
|
|
||||||
Construct an instance.
|
|
||||||
|
|
||||||
Parameters
|
|
||||||
:param str consumer_key: Your API app's "app key".
|
|
||||||
:param str consumer_secret: Your API app's "app secret".
|
|
||||||
:param str locale: The locale of the user of your application. For
|
|
||||||
example "en" or "en_US". Some API calls return localized data and
|
|
||||||
error messages; this setting tells the server which locale to use.
|
|
||||||
By default, the server uses "en_US".
|
|
||||||
"""
|
|
||||||
# pylint: disable=useless-super-delegation
|
|
||||||
super(DropboxOAuth2FlowNoRedirect, self).__init__(
|
|
||||||
consumer_key,
|
|
||||||
consumer_secret,
|
|
||||||
locale,
|
|
||||||
)
|
|
||||||
|
|
||||||
def start(self):
|
|
||||||
"""
|
|
||||||
Starts the OAuth 2 authorization process.
|
|
||||||
|
|
||||||
:return: The URL for a page on Dropbox's website. This page will let
|
|
||||||
the user "approve" your app, which gives your app permission to
|
|
||||||
access the user's Dropbox account. Tell the user to visit this URL
|
|
||||||
and approve your app.
|
|
||||||
"""
|
|
||||||
return self._get_authorize_url(None, None)
|
|
||||||
|
|
||||||
def finish(self, code):
|
|
||||||
"""
|
|
||||||
If the user approves your app, they will be presented with an
|
|
||||||
"authorization code". Have the user copy/paste that authorization code
|
|
||||||
into your app and then call this method to get an access token.
|
|
||||||
|
|
||||||
:param str code: The authorization code shown to the user when they
|
|
||||||
approved your app.
|
|
||||||
:rtype: OAuth2FlowNoRedirectResult
|
|
||||||
:raises: The same exceptions as :meth:`DropboxOAuth2Flow.finish()`.
|
|
||||||
"""
|
|
||||||
return self._finish(code, None)
|
|
||||||
|
|
||||||
|
|
||||||
class DropboxOAuth2Flow(DropboxOAuth2FlowBase):
|
|
||||||
"""
|
|
||||||
OAuth 2 authorization helper. Use this for web apps.
|
|
||||||
|
|
||||||
OAuth 2 has a two-step authorization process. The first step is having the
|
|
||||||
user authorize your app. The second involves getting an OAuth 2 access
|
|
||||||
token from Dropbox.
|
|
||||||
|
|
||||||
Example::
|
|
||||||
|
|
||||||
from dropbox import DropboxOAuth2Flow
|
|
||||||
|
|
||||||
def get_dropbox_auth_flow(web_app_session):
|
|
||||||
redirect_uri = "https://my-web-server.org/dropbox-auth-finish"
|
|
||||||
return DropboxOAuth2Flow(
|
|
||||||
APP_KEY, APP_SECRET, redirect_uri, web_app_session,
|
|
||||||
"dropbox-auth-csrf-token")
|
|
||||||
|
|
||||||
# URL handler for /dropbox-auth-start
|
|
||||||
def dropbox_auth_start(web_app_session, request):
|
|
||||||
authorize_url = get_dropbox_auth_flow(web_app_session).start()
|
|
||||||
redirect_to(authorize_url)
|
|
||||||
|
|
||||||
# URL handler for /dropbox-auth-finish
|
|
||||||
def dropbox_auth_finish(web_app_session, request):
|
|
||||||
try:
|
|
||||||
oauth_result = \\
|
|
||||||
get_dropbox_auth_flow(web_app_session).finish(
|
|
||||||
request.query_params)
|
|
||||||
except BadRequestException, e:
|
|
||||||
http_status(400)
|
|
||||||
except BadStateException, e:
|
|
||||||
# Start the auth flow again.
|
|
||||||
redirect_to("/dropbox-auth-start")
|
|
||||||
except CsrfException, e:
|
|
||||||
http_status(403)
|
|
||||||
except NotApprovedException, e:
|
|
||||||
flash('Not approved? Why not?')
|
|
||||||
return redirect_to("/home")
|
|
||||||
except ProviderException, e:
|
|
||||||
logger.log("Auth error: %s" % (e,))
|
|
||||||
http_status(403)
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, consumer_key, consumer_secret, redirect_uri, session,
|
|
||||||
csrf_token_session_key, locale=None):
|
|
||||||
"""
|
|
||||||
Construct an instance.
|
|
||||||
|
|
||||||
:param str consumer_key: Your API app's "app key".
|
|
||||||
:param str consumer_secret: Your API app's "app secret".
|
|
||||||
:param str redirect_uri: The URI that the Dropbox server will redirect
|
|
||||||
the user to after the user finishes authorizing your app. This URI
|
|
||||||
must be HTTPS-based and pre-registered with the Dropbox servers,
|
|
||||||
though localhost URIs are allowed without pre-registration and can
|
|
||||||
be either HTTP or HTTPS.
|
|
||||||
:param dict session: A dict-like object that represents the current
|
|
||||||
user's web session (will be used to save the CSRF token).
|
|
||||||
:param str csrf_token_session_key: The key to use when storing the CSRF
|
|
||||||
token in the session (for example: "dropbox-auth-csrf-token").
|
|
||||||
:param str locale: The locale of the user of your application. For
|
|
||||||
example "en" or "en_US". Some API calls return localized data and
|
|
||||||
error messages; this setting tells the server which locale to use.
|
|
||||||
By default, the server uses "en_US".
|
|
||||||
"""
|
|
||||||
super(DropboxOAuth2Flow, self).__init__(consumer_key, consumer_secret, locale)
|
|
||||||
self.redirect_uri = redirect_uri
|
|
||||||
self.session = session
|
|
||||||
self.csrf_token_session_key = csrf_token_session_key
|
|
||||||
|
|
||||||
def start(self, url_state=None):
|
|
||||||
"""
|
|
||||||
Starts the OAuth 2 authorization process.
|
|
||||||
|
|
||||||
This function builds an "authorization URL". You should redirect your
|
|
||||||
user's browser to this URL, which will give them an opportunity to
|
|
||||||
grant your app access to their Dropbox account. When the user
|
|
||||||
completes this process, they will be automatically redirected to the
|
|
||||||
``redirect_uri`` you passed in to the constructor.
|
|
||||||
|
|
||||||
This function will also save a CSRF token to
|
|
||||||
``session[csrf_token_session_key]`` (as provided to the constructor).
|
|
||||||
This CSRF token will be checked on :meth:`finish()` to prevent request
|
|
||||||
forgery.
|
|
||||||
|
|
||||||
:param str url_state: Any data that you would like to keep in the URL
|
|
||||||
through the authorization process. This exact value will be
|
|
||||||
returned to you by :meth:`finish()`.
|
|
||||||
:return: The URL for a page on Dropbox's website. This page will let
|
|
||||||
the user "approve" your app, which gives your app permission to
|
|
||||||
access the user's Dropbox account. Tell the user to visit this URL
|
|
||||||
and approve your app.
|
|
||||||
"""
|
|
||||||
csrf_token = base64.urlsafe_b64encode(os.urandom(16)).decode('ascii')
|
|
||||||
state = csrf_token
|
|
||||||
if url_state is not None:
|
|
||||||
state += "|" + url_state
|
|
||||||
self.session[self.csrf_token_session_key] = csrf_token
|
|
||||||
|
|
||||||
return self._get_authorize_url(self.redirect_uri, state)
|
|
||||||
|
|
||||||
def finish(self, query_params):
|
|
||||||
"""
|
|
||||||
Call this after the user has visited the authorize URL (see
|
|
||||||
:meth:`start()`), approved your app and was redirected to your redirect
|
|
||||||
URI.
|
|
||||||
|
|
||||||
:param dict query_params: The query parameters on the GET request to
|
|
||||||
your redirect URI.
|
|
||||||
:rtype: OAuth2FlowResult
|
|
||||||
:raises: :class:`BadRequestException` If the redirect URL was missing
|
|
||||||
parameters or if the given parameters were not valid.
|
|
||||||
:raises: :class:`BadStateException` If there's no CSRF token in the
|
|
||||||
session.
|
|
||||||
:raises: :class:`CsrfException` If the ``state`` query parameter
|
|
||||||
doesn't contain the CSRF token from the user's session.
|
|
||||||
:raises: :class:`NotApprovedException` If the user chose not to
|
|
||||||
approve your app.
|
|
||||||
:raises: :class:`ProviderException` If Dropbox redirected to your
|
|
||||||
redirect URI with some unexpected error identifier and error message.
|
|
||||||
"""
|
|
||||||
# Check well-formedness of request.
|
|
||||||
|
|
||||||
state = query_params.get('state')
|
|
||||||
if state is None:
|
|
||||||
raise BadRequestException("Missing query parameter 'state'.")
|
|
||||||
|
|
||||||
error = query_params.get('error')
|
|
||||||
error_description = query_params.get('error_description')
|
|
||||||
code = query_params.get('code')
|
|
||||||
|
|
||||||
if error is not None and code is not None:
|
|
||||||
raise BadRequestException(
|
|
||||||
"Query parameters 'code' and 'error' are both set; "
|
|
||||||
"only one must be set.")
|
|
||||||
if error is None and code is None:
|
|
||||||
raise BadRequestException(
|
|
||||||
"Neither query parameter 'code' or 'error' is set.")
|
|
||||||
|
|
||||||
# Check CSRF token
|
|
||||||
|
|
||||||
if self.csrf_token_session_key not in self.session:
|
|
||||||
raise BadStateException('Missing CSRF token in session.')
|
|
||||||
csrf_token_from_session = self.session[self.csrf_token_session_key]
|
|
||||||
if len(csrf_token_from_session) <= 20:
|
|
||||||
raise AssertionError('CSRF token unexpectedly short: %r' %
|
|
||||||
csrf_token_from_session)
|
|
||||||
|
|
||||||
split_pos = state.find('|')
|
|
||||||
if split_pos < 0:
|
|
||||||
given_csrf_token = state
|
|
||||||
url_state = None
|
|
||||||
else:
|
|
||||||
given_csrf_token = state[0:split_pos]
|
|
||||||
url_state = state[split_pos + 1:]
|
|
||||||
|
|
||||||
if not _safe_equals(csrf_token_from_session, given_csrf_token):
|
|
||||||
raise CsrfException('expected %r, got %r' %
|
|
||||||
(csrf_token_from_session, given_csrf_token))
|
|
||||||
|
|
||||||
del self.session[self.csrf_token_session_key]
|
|
||||||
|
|
||||||
# Check for error identifier
|
|
||||||
|
|
||||||
if error is not None:
|
|
||||||
if error == 'access_denied':
|
|
||||||
# The user clicked "Deny"
|
|
||||||
if error_description is None:
|
|
||||||
raise NotApprovedException(
|
|
||||||
'No additional description from Dropbox')
|
|
||||||
else:
|
|
||||||
raise NotApprovedException(
|
|
||||||
'Additional description from Dropbox: %s' %
|
|
||||||
error_description)
|
|
||||||
else:
|
|
||||||
# All other errors
|
|
||||||
full_message = error
|
|
||||||
if error_description is not None:
|
|
||||||
full_message += ": " + error_description
|
|
||||||
raise ProviderException(full_message)
|
|
||||||
|
|
||||||
# If everything went ok, make the network call to get an access token.
|
|
||||||
|
|
||||||
no_redirect_result = self._finish(code, self.redirect_uri)
|
|
||||||
return OAuth2FlowResult.from_no_redirect_result(
|
|
||||||
no_redirect_result, url_state)
|
|
||||||
|
|
||||||
|
|
||||||
class BadRequestException(Exception):
|
|
||||||
"""
|
|
||||||
Thrown if the redirect URL was missing parameters or if the
|
|
||||||
given parameters were not valid.
|
|
||||||
|
|
||||||
The recommended action is to show an HTTP 400 error page.
|
|
||||||
"""
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class BadStateException(Exception):
|
|
||||||
"""
|
|
||||||
Thrown if all the parameters are correct, but there's no CSRF token in the
|
|
||||||
session. This probably means that the session expired.
|
|
||||||
|
|
||||||
The recommended action is to redirect the user's browser to try the
|
|
||||||
approval process again.
|
|
||||||
"""
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class CsrfException(Exception):
|
|
||||||
"""
|
|
||||||
Thrown if the given 'state' parameter doesn't contain the CSRF token from
|
|
||||||
the user's session. This is blocked to prevent CSRF attacks.
|
|
||||||
|
|
||||||
The recommended action is to respond with an HTTP 403 error page.
|
|
||||||
"""
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class NotApprovedException(Exception):
|
|
||||||
"""
|
|
||||||
The user chose not to approve your app.
|
|
||||||
"""
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class ProviderException(Exception):
|
|
||||||
"""
|
|
||||||
Dropbox redirected to your redirect URI with some unexpected error
|
|
||||||
identifier and error message.
|
|
||||||
|
|
||||||
The recommended action is to log the error, tell the user something went
|
|
||||||
wrong, and let them try again.
|
|
||||||
"""
|
|
||||||
pass


def _safe_equals(a, b):
    if len(a) != len(b):
        return False
    res = 0
    for ca, cb in zip(a, b):
        res |= ord(ca) ^ ord(cb)
    return res == 0


def _params_to_urlencoded(params):
    """
    Returns an application/x-www-form-urlencoded ``str`` representing the
    key/value pairs in ``params``.

    Keys and values are ``str()``'d before calling ``urllib.urlencode``, with
    the exception of unicode objects, which are utf8-encoded.
    """
    def encode(o):
        if isinstance(o, six.binary_type):
            return o
        else:
            if isinstance(o, six.text_type):
                return o.encode('utf-8')
            else:
                return str(o).encode('utf-8')

    # fix for python 2.6
    utf8_params = {}
    for k, v in six.iteritems(params):
        utf8_params[encode(k)] = encode(v)

    return url_encode(utf8_params)
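# ---------------------------------------------------------------------------
# Editor's sketch, not part of the deleted OAuth module above: the console
# flow from the DropboxOAuth2FlowNoRedirect docstring, rewritten for Python 3.
# app_key/app_secret are placeholders; the import assumes the standard
# `dropbox` package layout.
# ---------------------------------------------------------------------------
from dropbox import DropboxOAuth2FlowNoRedirect

def console_authorize(app_key, app_secret):
    """Walk a user through the no-redirect OAuth 2 flow on the command line."""
    flow = DropboxOAuth2FlowNoRedirect(app_key, app_secret)
    print('1. Go to: ' + flow.start())
    print('2. Click "Allow" (you might have to log in first).')
    print('3. Copy the authorization code.')
    code = input('Enter the authorization code here: ').strip()
    result = flow.finish(code)      # raises requests.HTTPError if the code is bad
    return result.access_token, result.account_id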
@@ -1,4 +0,0 @@
import resources.lib.utils as utils

def resource_filename(*args):
    return utils.addon_dir() + "/resources/lib/dropbox/trusted-certs.crt"
@@ -1,835 +0,0 @@
# -*- coding: utf-8 -*-
# Auto-generated by Stone, do not modify.
# flake8: noqa
# pylint: skip-file
"""
This namespace contains helper entities for property and property/template endpoints.
"""

try:
    from . import stone_validators as bv
    from . import stone_base as bb
except (SystemError, ValueError):
    # Catch errors raised when importing a relative module when not in a package.
    # This makes testing this file directly (outside of a package) easier.
    import stone_validators as bv
    import stone_base as bb

class GetPropertyTemplateArg(object):
|
|
||||||
"""
|
|
||||||
:ivar template_id: An identifier for property template added by route
|
|
||||||
properties/template/add.
|
|
||||||
"""
|
|
||||||
|
|
||||||
__slots__ = [
|
|
||||||
'_template_id_value',
|
|
||||||
'_template_id_present',
|
|
||||||
]
|
|
||||||
|
|
||||||
_has_required_fields = True
|
|
||||||
|
|
||||||
def __init__(self,
|
|
||||||
template_id=None):
|
|
||||||
self._template_id_value = None
|
|
||||||
self._template_id_present = False
|
|
||||||
if template_id is not None:
|
|
||||||
self.template_id = template_id
|
|
||||||
|
|
||||||
@property
|
|
||||||
def template_id(self):
|
|
||||||
"""
|
|
||||||
An identifier for property template added by route
|
|
||||||
properties/template/add.
|
|
||||||
|
|
||||||
:rtype: str
|
|
||||||
"""
|
|
||||||
if self._template_id_present:
|
|
||||||
return self._template_id_value
|
|
||||||
else:
|
|
||||||
raise AttributeError("missing required field 'template_id'")
|
|
||||||
|
|
||||||
@template_id.setter
|
|
||||||
def template_id(self, val):
|
|
||||||
val = self._template_id_validator.validate(val)
|
|
||||||
self._template_id_value = val
|
|
||||||
self._template_id_present = True
|
|
||||||
|
|
||||||
@template_id.deleter
|
|
||||||
def template_id(self):
|
|
||||||
self._template_id_value = None
|
|
||||||
self._template_id_present = False
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return 'GetPropertyTemplateArg(template_id={!r})'.format(
|
|
||||||
self._template_id_value,
|
|
||||||
)
|
|
||||||
|
|
||||||
GetPropertyTemplateArg_validator = bv.Struct(GetPropertyTemplateArg)
|
|
||||||
|
|
||||||
class PropertyGroupTemplate(object):
|
|
||||||
"""
|
|
||||||
Describes property templates that can be filled and associated with a file.
|
|
||||||
|
|
||||||
:ivar name: A display name for the property template. Property template
|
|
||||||
names can be up to 256 bytes.
|
|
||||||
:ivar description: Description for new property template. Property template
|
|
||||||
descriptions can be up to 1024 bytes.
|
|
||||||
:ivar fields: This is a list of custom properties associated with a property
|
|
||||||
template. There can be up to 64 properties in a single property
|
|
||||||
template.
|
|
||||||
"""
|
|
||||||
|
|
||||||
__slots__ = [
|
|
||||||
'_name_value',
|
|
||||||
'_name_present',
|
|
||||||
'_description_value',
|
|
||||||
'_description_present',
|
|
||||||
'_fields_value',
|
|
||||||
'_fields_present',
|
|
||||||
]
|
|
||||||
|
|
||||||
_has_required_fields = True
|
|
||||||
|
|
||||||
def __init__(self,
|
|
||||||
name=None,
|
|
||||||
description=None,
|
|
||||||
fields=None):
|
|
||||||
self._name_value = None
|
|
||||||
self._name_present = False
|
|
||||||
self._description_value = None
|
|
||||||
self._description_present = False
|
|
||||||
self._fields_value = None
|
|
||||||
self._fields_present = False
|
|
||||||
if name is not None:
|
|
||||||
self.name = name
|
|
||||||
if description is not None:
|
|
||||||
self.description = description
|
|
||||||
if fields is not None:
|
|
||||||
self.fields = fields
|
|
||||||
|
|
||||||
@property
|
|
||||||
def name(self):
|
|
||||||
"""
|
|
||||||
A display name for the property template. Property template names can be
|
|
||||||
up to 256 bytes.
|
|
||||||
|
|
||||||
:rtype: str
|
|
||||||
"""
|
|
||||||
if self._name_present:
|
|
||||||
return self._name_value
|
|
||||||
else:
|
|
||||||
raise AttributeError("missing required field 'name'")
|
|
||||||
|
|
||||||
@name.setter
|
|
||||||
def name(self, val):
|
|
||||||
val = self._name_validator.validate(val)
|
|
||||||
self._name_value = val
|
|
||||||
self._name_present = True
|
|
||||||
|
|
||||||
@name.deleter
|
|
||||||
def name(self):
|
|
||||||
self._name_value = None
|
|
||||||
self._name_present = False
|
|
||||||
|
|
||||||
@property
|
|
||||||
def description(self):
|
|
||||||
"""
|
|
||||||
Description for new property template. Property template descriptions
|
|
||||||
can be up to 1024 bytes.
|
|
||||||
|
|
||||||
:rtype: str
|
|
||||||
"""
|
|
||||||
if self._description_present:
|
|
||||||
return self._description_value
|
|
||||||
else:
|
|
||||||
raise AttributeError("missing required field 'description'")
|
|
||||||
|
|
||||||
@description.setter
|
|
||||||
def description(self, val):
|
|
||||||
val = self._description_validator.validate(val)
|
|
||||||
self._description_value = val
|
|
||||||
self._description_present = True
|
|
||||||
|
|
||||||
@description.deleter
|
|
||||||
def description(self):
|
|
||||||
self._description_value = None
|
|
||||||
self._description_present = False
|
|
||||||
|
|
||||||
@property
|
|
||||||
def fields(self):
|
|
||||||
"""
|
|
||||||
This is a list of custom properties associated with a property template.
|
|
||||||
There can be up to 64 properties in a single property template.
|
|
||||||
|
|
||||||
:rtype: list of [PropertyFieldTemplate]
|
|
||||||
"""
|
|
||||||
if self._fields_present:
|
|
||||||
return self._fields_value
|
|
||||||
else:
|
|
||||||
raise AttributeError("missing required field 'fields'")
|
|
||||||
|
|
||||||
@fields.setter
|
|
||||||
def fields(self, val):
|
|
||||||
val = self._fields_validator.validate(val)
|
|
||||||
self._fields_value = val
|
|
||||||
self._fields_present = True
|
|
||||||
|
|
||||||
@fields.deleter
|
|
||||||
def fields(self):
|
|
||||||
self._fields_value = None
|
|
||||||
self._fields_present = False
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return 'PropertyGroupTemplate(name={!r}, description={!r}, fields={!r})'.format(
|
|
||||||
self._name_value,
|
|
||||||
self._description_value,
|
|
||||||
self._fields_value,
|
|
||||||
)
|
|
||||||
|
|
||||||
PropertyGroupTemplate_validator = bv.Struct(PropertyGroupTemplate)
|
|
||||||
|
|
||||||
class GetPropertyTemplateResult(PropertyGroupTemplate):
|
|
||||||
"""
|
|
||||||
The Property template for the specified template.
|
|
||||||
"""
|
|
||||||
|
|
||||||
__slots__ = [
|
|
||||||
]
|
|
||||||
|
|
||||||
_has_required_fields = True
|
|
||||||
|
|
||||||
def __init__(self,
|
|
||||||
name=None,
|
|
||||||
description=None,
|
|
||||||
fields=None):
|
|
||||||
super(GetPropertyTemplateResult, self).__init__(name,
|
|
||||||
description,
|
|
||||||
fields)
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return 'GetPropertyTemplateResult(name={!r}, description={!r}, fields={!r})'.format(
|
|
||||||
self._name_value,
|
|
||||||
self._description_value,
|
|
||||||
self._fields_value,
|
|
||||||
)
|
|
||||||
|
|
||||||
GetPropertyTemplateResult_validator = bv.Struct(GetPropertyTemplateResult)
|
|
||||||
|
|
||||||
class ListPropertyTemplateIds(object):
|
|
||||||
"""
|
|
||||||
:ivar template_ids: List of identifiers for templates added by route
|
|
||||||
properties/template/add.
|
|
||||||
"""
|
|
||||||
|
|
||||||
__slots__ = [
|
|
||||||
'_template_ids_value',
|
|
||||||
'_template_ids_present',
|
|
||||||
]
|
|
||||||
|
|
||||||
_has_required_fields = True
|
|
||||||
|
|
||||||
def __init__(self,
|
|
||||||
template_ids=None):
|
|
||||||
self._template_ids_value = None
|
|
||||||
self._template_ids_present = False
|
|
||||||
if template_ids is not None:
|
|
||||||
self.template_ids = template_ids
|
|
||||||
|
|
||||||
@property
|
|
||||||
def template_ids(self):
|
|
||||||
"""
|
|
||||||
List of identifiers for templates added by route
|
|
||||||
properties/template/add.
|
|
||||||
|
|
||||||
:rtype: list of [str]
|
|
||||||
"""
|
|
||||||
if self._template_ids_present:
|
|
||||||
return self._template_ids_value
|
|
||||||
else:
|
|
||||||
raise AttributeError("missing required field 'template_ids'")
|
|
||||||
|
|
||||||
@template_ids.setter
|
|
||||||
def template_ids(self, val):
|
|
||||||
val = self._template_ids_validator.validate(val)
|
|
||||||
self._template_ids_value = val
|
|
||||||
self._template_ids_present = True
|
|
||||||
|
|
||||||
@template_ids.deleter
|
|
||||||
def template_ids(self):
|
|
||||||
self._template_ids_value = None
|
|
||||||
self._template_ids_present = False
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return 'ListPropertyTemplateIds(template_ids={!r})'.format(
|
|
||||||
self._template_ids_value,
|
|
||||||
)
|
|
||||||
|
|
||||||
ListPropertyTemplateIds_validator = bv.Struct(ListPropertyTemplateIds)
|
|
||||||
|
|
||||||
class PropertyTemplateError(bb.Union):
|
|
||||||
"""
|
|
||||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
|
||||||
return true. To get the associated value of a tag (if one exists), use the
|
|
||||||
corresponding ``get_*`` method.
|
|
||||||
|
|
||||||
:ivar str template_not_found: Property template does not exist for given
|
|
||||||
identifier.
|
|
||||||
:ivar restricted_content: You do not have the permissions to modify this
|
|
||||||
property template.
|
|
||||||
"""
|
|
||||||
|
|
||||||
_catch_all = 'other'
|
|
||||||
# Attribute is overwritten below the class definition
|
|
||||||
restricted_content = None
|
|
||||||
# Attribute is overwritten below the class definition
|
|
||||||
other = None
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def template_not_found(cls, val):
|
|
||||||
"""
|
|
||||||
Create an instance of this class set to the ``template_not_found`` tag
|
|
||||||
with value ``val``.
|
|
||||||
|
|
||||||
:param str val:
|
|
||||||
:rtype: PropertyTemplateError
|
|
||||||
"""
|
|
||||||
return cls('template_not_found', val)
|
|
||||||
|
|
||||||
def is_template_not_found(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``template_not_found``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'template_not_found'
|
|
||||||
|
|
||||||
def is_restricted_content(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``restricted_content``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'restricted_content'
|
|
||||||
|
|
||||||
def is_other(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``other``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'other'
|
|
||||||
|
|
||||||
def get_template_not_found(self):
|
|
||||||
"""
|
|
||||||
Property template does not exist for given identifier.
|
|
||||||
|
|
||||||
Only call this if :meth:`is_template_not_found` is true.
|
|
||||||
|
|
||||||
:rtype: str
|
|
||||||
"""
|
|
||||||
if not self.is_template_not_found():
|
|
||||||
raise AttributeError("tag 'template_not_found' not set")
|
|
||||||
return self._value
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return 'PropertyTemplateError(%r, %r)' % (self._tag, self._value)
|
|
||||||
|
|
||||||
PropertyTemplateError_validator = bv.Union(PropertyTemplateError)
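# ---------------------------------------------------------------------------
# Editor's sketch, not part of the generated file above: branching on a Stone
# tagged union such as PropertyTemplateError. Exactly one is_* predicate is
# true, and get_* may only be called for tags that carry a value.
# ---------------------------------------------------------------------------
def describe_template_error(err):
    """Turn a PropertyTemplateError union into a log-friendly string."""
    if err.is_template_not_found():
        return 'no property template with id %s' % err.get_template_not_found()
    if err.is_restricted_content():
        return 'not allowed to modify this property template'
    return 'unexpected property template error: %r' % (err,)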
|
|
||||||
|
|
||||||
class ModifyPropertyTemplateError(PropertyTemplateError):
|
|
||||||
"""
|
|
||||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
|
||||||
return true. To get the associated value of a tag (if one exists), use the
|
|
||||||
corresponding ``get_*`` method.
|
|
||||||
|
|
||||||
:ivar conflicting_property_names: A property field name already exists in
|
|
||||||
the template.
|
|
||||||
:ivar too_many_properties: There are too many properties in the changed
|
|
||||||
template. The maximum number of properties per template is 32.
|
|
||||||
:ivar too_many_templates: There are too many templates for the team.
|
|
||||||
:ivar template_attribute_too_large: The template name, description or field
    names are too large.
|
|
||||||
"""
|
|
||||||
|
|
||||||
# Attribute is overwritten below the class definition
|
|
||||||
conflicting_property_names = None
|
|
||||||
# Attribute is overwritten below the class definition
|
|
||||||
too_many_properties = None
|
|
||||||
# Attribute is overwritten below the class definition
|
|
||||||
too_many_templates = None
|
|
||||||
# Attribute is overwritten below the class definition
|
|
||||||
template_attribute_too_large = None
|
|
||||||
|
|
||||||
def is_conflicting_property_names(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``conflicting_property_names``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'conflicting_property_names'
|
|
||||||
|
|
||||||
def is_too_many_properties(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``too_many_properties``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'too_many_properties'
|
|
||||||
|
|
||||||
def is_too_many_templates(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``too_many_templates``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'too_many_templates'
|
|
||||||
|
|
||||||
def is_template_attribute_too_large(self):
|
|
||||||
"""
|
|
||||||
Check if the union tag is ``template_attribute_too_large``.
|
|
||||||
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return self._tag == 'template_attribute_too_large'
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return 'ModifyPropertyTemplateError(%r, %r)' % (self._tag, self._value)
|
|
||||||
|
|
||||||
ModifyPropertyTemplateError_validator = bv.Union(ModifyPropertyTemplateError)
|
|
||||||
|
|
||||||
class PropertyField(object):
    """
    :ivar name: This is the name or key of a custom property in a property
        template. File property names can be up to 256 bytes.
    :ivar value: Value of a custom property attached to a file. Values can be
        up to 1024 bytes.
    """

    __slots__ = [
        '_name_value',
        '_name_present',
        '_value_value',
        '_value_present',
    ]

    _has_required_fields = True

    def __init__(self,
                 name=None,
                 value=None):
        self._name_value = None
        self._name_present = False
        self._value_value = None
        self._value_present = False
        if name is not None:
            self.name = name
        if value is not None:
            self.value = value

    @property
    def name(self):
        """
        This is the name or key of a custom property in a property template.
        File property names can be up to 256 bytes.

        :rtype: str
        """
        if self._name_present:
            return self._name_value
        else:
            raise AttributeError("missing required field 'name'")

    @name.setter
    def name(self, val):
        val = self._name_validator.validate(val)
        self._name_value = val
        self._name_present = True

    @name.deleter
    def name(self):
        self._name_value = None
        self._name_present = False

    @property
    def value(self):
        """
        Value of a custom property attached to a file. Values can be up to 1024
        bytes.

        :rtype: str
        """
        if self._value_present:
            return self._value_value
        else:
            raise AttributeError("missing required field 'value'")

    @value.setter
    def value(self, val):
        val = self._value_validator.validate(val)
        self._value_value = val
        self._value_present = True

    @value.deleter
    def value(self):
        self._value_value = None
        self._value_present = False

    def __repr__(self):
        return 'PropertyField(name={!r}, value={!r})'.format(
            self._name_value,
            self._value_value,
        )

PropertyField_validator = bv.Struct(PropertyField)

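# --- Editor's illustrative sketch (not part of the generated module). ---
# PropertyField is a plain struct wrapper: values assigned to ``name`` and
# ``value`` pass through the string validators wired up near the bottom of
# this module, so invalid values raise bv.ValidationError on assignment.
def _example_build_property_field():
    field = PropertyField(name='Security Policy', value='Confidential')
    return field.name, field.value
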
class PropertyFieldTemplate(object):
    """
    Describes a single property field type that can be part of a property
    template.

    :ivar name: This is the name or key of a custom property in a property
        template. File property names can be up to 256 bytes.
    :ivar description: This is the description for a custom property in a
        property template. File property descriptions can be up to 1024 bytes.
    :ivar type: This is the data type of the value of this property. This type
        will be enforced upon property creation and modifications.
    """

    __slots__ = [
        '_name_value',
        '_name_present',
        '_description_value',
        '_description_present',
        '_type_value',
        '_type_present',
    ]

    _has_required_fields = True

    def __init__(self,
                 name=None,
                 description=None,
                 type=None):
        self._name_value = None
        self._name_present = False
        self._description_value = None
        self._description_present = False
        self._type_value = None
        self._type_present = False
        if name is not None:
            self.name = name
        if description is not None:
            self.description = description
        if type is not None:
            self.type = type

    @property
    def name(self):
        """
        This is the name or key of a custom property in a property template.
        File property names can be up to 256 bytes.

        :rtype: str
        """
        if self._name_present:
            return self._name_value
        else:
            raise AttributeError("missing required field 'name'")

    @name.setter
    def name(self, val):
        val = self._name_validator.validate(val)
        self._name_value = val
        self._name_present = True

    @name.deleter
    def name(self):
        self._name_value = None
        self._name_present = False

    @property
    def description(self):
        """
        This is the description for a custom property in a property template.
        File property descriptions can be up to 1024 bytes.

        :rtype: str
        """
        if self._description_present:
            return self._description_value
        else:
            raise AttributeError("missing required field 'description'")

    @description.setter
    def description(self, val):
        val = self._description_validator.validate(val)
        self._description_value = val
        self._description_present = True

    @description.deleter
    def description(self):
        self._description_value = None
        self._description_present = False

    @property
    def type(self):
        """
        This is the data type of the value of this property. This type will be
        enforced upon property creation and modifications.

        :rtype: PropertyType
        """
        if self._type_present:
            return self._type_value
        else:
            raise AttributeError("missing required field 'type'")

    @type.setter
    def type(self, val):
        self._type_validator.validate_type_only(val)
        self._type_value = val
        self._type_present = True

    @type.deleter
    def type(self):
        self._type_value = None
        self._type_present = False

    def __repr__(self):
        return 'PropertyFieldTemplate(name={!r}, description={!r}, type={!r})'.format(
            self._name_value,
            self._description_value,
            self._type_value,
        )

PropertyFieldTemplate_validator = bv.Struct(PropertyFieldTemplate)

class PropertyGroup(object):
    """
    Collection of custom properties in filled property templates.

    :ivar template_id: A unique identifier for a property template type.
    :ivar fields: This is a list of custom properties associated with a file.
        There can be up to 32 properties for a template.
    """

    __slots__ = [
        '_template_id_value',
        '_template_id_present',
        '_fields_value',
        '_fields_present',
    ]

    _has_required_fields = True

    def __init__(self,
                 template_id=None,
                 fields=None):
        self._template_id_value = None
        self._template_id_present = False
        self._fields_value = None
        self._fields_present = False
        if template_id is not None:
            self.template_id = template_id
        if fields is not None:
            self.fields = fields

    @property
    def template_id(self):
        """
        A unique identifier for a property template type.

        :rtype: str
        """
        if self._template_id_present:
            return self._template_id_value
        else:
            raise AttributeError("missing required field 'template_id'")

    @template_id.setter
    def template_id(self, val):
        val = self._template_id_validator.validate(val)
        self._template_id_value = val
        self._template_id_present = True

    @template_id.deleter
    def template_id(self):
        self._template_id_value = None
        self._template_id_present = False

    @property
    def fields(self):
        """
        This is a list of custom properties associated with a file. There can
        be up to 32 properties for a template.

        :rtype: list of [PropertyField]
        """
        if self._fields_present:
            return self._fields_value
        else:
            raise AttributeError("missing required field 'fields'")

    @fields.setter
    def fields(self, val):
        val = self._fields_validator.validate(val)
        self._fields_value = val
        self._fields_present = True

    @fields.deleter
    def fields(self):
        self._fields_value = None
        self._fields_present = False

    def __repr__(self):
        return 'PropertyGroup(template_id={!r}, fields={!r})'.format(
            self._template_id_value,
            self._fields_value,
        )

PropertyGroup_validator = bv.Struct(PropertyGroup)

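# --- Editor's illustrative sketch (not part of the generated module). ---
# A PropertyGroup ties a list of PropertyField values to the template they
# were created from. The template id below is a made-up placeholder; real ids
# must match the TemplateId pattern ``(/|ptid:).*`` defined later in this file.
def _example_build_property_group():
    fields = [PropertyField(name='Security Policy', value='Confidential')]
    return PropertyGroup(template_id='ptid:exampleTemplateId', fields=fields)
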
class PropertyType(bb.Union):
    """
    Data type of the given property added. This endpoint is in beta and only
    properties of type string are supported.

    This class acts as a tagged union. Only one of the ``is_*`` methods will
    return true. To get the associated value of a tag (if one exists), use the
    corresponding ``get_*`` method.

    :ivar string: The associated property will be of type string. Unicode is
        supported.
    """

    _catch_all = 'other'
    # Attribute is overwritten below the class definition
    string = None
    # Attribute is overwritten below the class definition
    other = None

    def is_string(self):
        """
        Check if the union tag is ``string``.

        :rtype: bool
        """
        return self._tag == 'string'

    def is_other(self):
        """
        Check if the union tag is ``other``.

        :rtype: bool
        """
        return self._tag == 'other'

    def __repr__(self):
        return 'PropertyType(%r, %r)' % (self._tag, self._value)

PropertyType_validator = bv.Union(PropertyType)

TemplateId_validator = bv.String(min_length=1, pattern=u'(/|ptid:).*')
GetPropertyTemplateArg._template_id_validator = TemplateId_validator
GetPropertyTemplateArg._all_field_names_ = set(['template_id'])
GetPropertyTemplateArg._all_fields_ = [('template_id', GetPropertyTemplateArg._template_id_validator)]

PropertyGroupTemplate._name_validator = bv.String()
PropertyGroupTemplate._description_validator = bv.String()
PropertyGroupTemplate._fields_validator = bv.List(PropertyFieldTemplate_validator)
PropertyGroupTemplate._all_field_names_ = set([
    'name',
    'description',
    'fields',
])
PropertyGroupTemplate._all_fields_ = [
    ('name', PropertyGroupTemplate._name_validator),
    ('description', PropertyGroupTemplate._description_validator),
    ('fields', PropertyGroupTemplate._fields_validator),
]

GetPropertyTemplateResult._all_field_names_ = PropertyGroupTemplate._all_field_names_.union(set([]))
GetPropertyTemplateResult._all_fields_ = PropertyGroupTemplate._all_fields_ + []

ListPropertyTemplateIds._template_ids_validator = bv.List(TemplateId_validator)
ListPropertyTemplateIds._all_field_names_ = set(['template_ids'])
ListPropertyTemplateIds._all_fields_ = [('template_ids', ListPropertyTemplateIds._template_ids_validator)]

PropertyTemplateError._template_not_found_validator = TemplateId_validator
PropertyTemplateError._restricted_content_validator = bv.Void()
PropertyTemplateError._other_validator = bv.Void()
PropertyTemplateError._tagmap = {
    'template_not_found': PropertyTemplateError._template_not_found_validator,
    'restricted_content': PropertyTemplateError._restricted_content_validator,
    'other': PropertyTemplateError._other_validator,
}

PropertyTemplateError.restricted_content = PropertyTemplateError('restricted_content')
PropertyTemplateError.other = PropertyTemplateError('other')

ModifyPropertyTemplateError._conflicting_property_names_validator = bv.Void()
ModifyPropertyTemplateError._too_many_properties_validator = bv.Void()
ModifyPropertyTemplateError._too_many_templates_validator = bv.Void()
ModifyPropertyTemplateError._template_attribute_too_large_validator = bv.Void()
ModifyPropertyTemplateError._tagmap = {
    'conflicting_property_names': ModifyPropertyTemplateError._conflicting_property_names_validator,
    'too_many_properties': ModifyPropertyTemplateError._too_many_properties_validator,
    'too_many_templates': ModifyPropertyTemplateError._too_many_templates_validator,
    'template_attribute_too_large': ModifyPropertyTemplateError._template_attribute_too_large_validator,
}
ModifyPropertyTemplateError._tagmap.update(PropertyTemplateError._tagmap)

ModifyPropertyTemplateError.conflicting_property_names = ModifyPropertyTemplateError('conflicting_property_names')
ModifyPropertyTemplateError.too_many_properties = ModifyPropertyTemplateError('too_many_properties')
ModifyPropertyTemplateError.too_many_templates = ModifyPropertyTemplateError('too_many_templates')
ModifyPropertyTemplateError.template_attribute_too_large = ModifyPropertyTemplateError('template_attribute_too_large')

PropertyField._name_validator = bv.String()
PropertyField._value_validator = bv.String()
PropertyField._all_field_names_ = set([
    'name',
    'value',
])
PropertyField._all_fields_ = [
    ('name', PropertyField._name_validator),
    ('value', PropertyField._value_validator),
]

PropertyFieldTemplate._name_validator = bv.String()
PropertyFieldTemplate._description_validator = bv.String()
PropertyFieldTemplate._type_validator = PropertyType_validator
PropertyFieldTemplate._all_field_names_ = set([
    'name',
    'description',
    'type',
])
PropertyFieldTemplate._all_fields_ = [
    ('name', PropertyFieldTemplate._name_validator),
    ('description', PropertyFieldTemplate._description_validator),
    ('type', PropertyFieldTemplate._type_validator),
]

PropertyGroup._template_id_validator = TemplateId_validator
PropertyGroup._fields_validator = bv.List(PropertyField_validator)
PropertyGroup._all_field_names_ = set([
    'template_id',
    'fields',
])
PropertyGroup._all_fields_ = [
    ('template_id', PropertyGroup._template_id_validator),
    ('fields', PropertyGroup._fields_validator),
]

PropertyType._string_validator = bv.Void()
PropertyType._other_validator = bv.Void()
PropertyType._tagmap = {
    'string': PropertyType._string_validator,
    'other': PropertyType._other_validator,
}

PropertyType.string = PropertyType('string')
PropertyType.other = PropertyType('other')

ROUTES = {
}

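# --- Editor's illustrative sketch (not part of the generated module). ---
# Once the validators above are wired up, a whole PropertyGroup instance can
# be checked in one call: bv.Struct.validate() verifies the type and that all
# required fields were assigned, raising bv.ValidationError otherwise.
def _example_validate_property_group(group):
    PropertyGroup_validator.validate(group)
    assert PropertyType.string.is_string()  # pre-built void-tag union member
    return group
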
@@ -1,51 +0,0 @@
import pkg_resources
import os
import ssl

import requests
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.poolmanager import PoolManager

API_DOMAIN = os.environ.get('DROPBOX_API_DOMAIN',
                            os.environ.get('DROPBOX_DOMAIN', '.dropboxapi.com'))

WEB_DOMAIN = os.environ.get('DROPBOX_WEB_DOMAIN',
                            os.environ.get('DROPBOX_DOMAIN', '.dropbox.com'))

# Default short hostname for RPC-style routes.
HOST_API = 'api'

# Default short hostname for upload and download-style routes.
HOST_CONTENT = 'content'

# Default short hostname for longpoll routes.
HOST_NOTIFY = 'notify'

# Default short hostname for the Dropbox website.
HOST_WWW = 'www'

API_HOST = os.environ.get('DROPBOX_API_HOST', HOST_API + API_DOMAIN)
API_CONTENT_HOST = os.environ.get('DROPBOX_API_CONTENT_HOST', HOST_CONTENT + API_DOMAIN)
API_NOTIFICATION_HOST = os.environ.get('DROPBOX_API_NOTIFY_HOST', HOST_NOTIFY + API_DOMAIN)
WEB_HOST = os.environ.get('DROPBOX_WEB_HOST', HOST_WWW + WEB_DOMAIN)

_TRUSTED_CERT_FILE = pkg_resources.resource_filename(__name__, 'trusted-certs.crt')

# TODO(kelkabany): We probably only want to instantiate this once so that even
# if multiple Dropbox objects are instantiated, they all share the same pool.
class _SSLAdapter(HTTPAdapter):
    def init_poolmanager(self, connections, maxsize, block=False, **_):
        self.poolmanager = PoolManager(
            num_pools=connections,
            maxsize=maxsize,
            block=block,
            cert_reqs=ssl.CERT_REQUIRED,
            ca_certs=_TRUSTED_CERT_FILE,
        )

def pinned_session(pool_maxsize=8):
    http_adapter = _SSLAdapter(pool_connections=4, pool_maxsize=pool_maxsize)
    _session = requests.session()
    _session.mount('https://', http_adapter)

    return _session

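# --- Editor's illustrative sketch (not part of this module). ---
# pinned_session() returns a requests.Session whose HTTPS connections are
# verified against the bundled trusted-certs.crt file. A hypothetical caller:
def _example_create_pinned_session():
    session = pinned_session(pool_maxsize=4)
    # The SDK sends its HTTPS requests to hosts such as API_HOST and
    # API_CONTENT_HOST through a session configured like this one.
    return session
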
@@ -1,75 +0,0 @@
"""
Helpers for representing Stone data types in Python.

This module should be dropped into a project that requires the use of Stone. In
the future, this could be imported from a pre-installed Python package, rather
than being added to a project.
"""

from __future__ import absolute_import, unicode_literals

try:
    from . import stone_validators as bv
except (SystemError, ValueError):
    # Catch errors raised when importing a relative module when not in a package.
    # This makes testing this file directly (outside of a package) easier.
    import stone_validators as bv  # type: ignore

_MYPY = False
if _MYPY:
    import typing  # noqa: F401 # pylint: disable=import-error,unused-import,useless-suppression


class Union(object):
    # TODO(kelkabany): Possible optimization is to remove _value if a
    # union is composed of only symbols.
    __slots__ = ['_tag', '_value']
    _tagmap = {}  # type: typing.Dict[typing.Text, bv.Validator]

    def __init__(self, tag, value=None):
        # type: (typing.Text, typing.Optional[typing.Any]) -> None
        assert tag in self._tagmap, 'Invalid tag %r.' % tag
        validator = self._tagmap[tag]
        if isinstance(validator, bv.Void):
            assert value is None, 'Void type union member must have None value.'
        elif isinstance(validator, (bv.Struct, bv.Union)):
            validator.validate_type_only(value)
        else:
            validator.validate(value)
        self._tag = tag
        self._value = value

    def __eq__(self, other):
        # Also need to check if one class is a subclass of another. If one union extends another,
        # the common fields should be able to be compared to each other.
        return (
            isinstance(other, Union) and
            (isinstance(self, other.__class__) or isinstance(other, self.__class__)) and
            self._tag == other._tag and self._value == other._value
        )

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        return hash((self._tag, self._value))

class Route(object):

    def __init__(self, name, deprecated, arg_type, result_type, error_type, attrs):
        self.name = name
        self.deprecated = deprecated
        self.arg_type = arg_type
        self.result_type = result_type
        self.error_type = error_type
        assert isinstance(attrs, dict), 'Expected dict, got %r' % attrs
        self.attrs = attrs

    def __repr__(self):
        return 'Route({!r}, {!r}, {!r}, {!r}, {!r}, {!r})'.format(
            self.name,
            self.deprecated,
            self.arg_type,
            self.result_type,
            self.error_type,
            self.attrs)

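# --- Editor's illustrative sketch (not part of this module). ---
# A minimal hand-written union in the style of the generated classes: a
# subclass declares a _tagmap and is then instantiated with a tag name.
# ``_ExampleColor`` is hypothetical and not part of any Dropbox spec.
def _example_define_union():
    class _ExampleColor(Union):
        _tagmap = {
            'red': bv.Void(),
            'other': bv.Void(),
        }

    red = _ExampleColor('red')
    return red._tag == 'red' and red != _ExampleColor('other')
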
@@ -1,595 +0,0 @@
"""
Defines classes to represent each Stone type in Python. These classes should
be used to validate Python objects and normalize them for a given type.

The data types defined here should not be specific to an RPC or serialization
format.

This module should be dropped into a project that requires the use of Stone. In
the future, this could be imported from a pre-installed Python package, rather
than being added to a project.
"""

from __future__ import absolute_import, unicode_literals

from abc import ABCMeta, abstractmethod
import datetime
import math
import numbers
import re
import six

_MYPY = False
if _MYPY:
    import typing  # noqa: F401 # pylint: disable=import-error,unused-import,useless-suppression

# See <http://python3porting.com/differences.html#buffer>
if six.PY3:
    _binary_types = (bytes, memoryview)  # noqa: E501,F821 # pylint: disable=undefined-variable,useless-suppression
else:
    _binary_types = (bytes, buffer)  # noqa: E501,F821 # pylint: disable=undefined-variable,useless-suppression


class ValidationError(Exception):
    """Raised when a value doesn't pass validation by its validator."""

    def __init__(self, message, parent=None):
        """
        Args:
            message (str): Error message detailing validation failure.
            parent (str): Adds the parent as the closest reference point for
                the error. Use :meth:`add_parent` to add more.
        """
        super(ValidationError, self).__init__(message)
        self.message = message
        self._parents = []
        if parent:
            self._parents.append(parent)

    def add_parent(self, parent):
        """
        Args:
            parent (str): Adds the parent to the top of the tree of references
                that lead to the validator that failed.
        """
        self._parents.append(parent)

    def __str__(self):
        """
        Returns:
            str: A descriptive message of the validation error that may also
            include the path to the validator that failed.
        """
        if self._parents:
            return '{}: {}'.format('.'.join(self._parents[::-1]), self.message)
        else:
            return self.message

    def __repr__(self):
        # Not a perfect repr, but includes the error location information.
        return 'ValidationError(%r)' % six.text_type(self)


def generic_type_name(v):
    """Return a descriptive type name that isn't Python specific. For example,
    an int value will return 'integer' rather than 'int'."""
    if isinstance(v, numbers.Integral):
        # Must come before real numbers check since integrals are reals too
        return 'integer'
    elif isinstance(v, numbers.Real):
        return 'float'
    elif isinstance(v, (tuple, list)):
        return 'list'
    elif isinstance(v, six.string_types):
        return 'string'
    elif v is None:
        return 'null'
    else:
        return type(v).__name__


class Validator(object):
    """All primitive and composite data types should be a subclass of this."""
    __metaclass__ = ABCMeta

    @abstractmethod
    def validate(self, val):
        """Validates that val is of this data type.

        Returns: A normalized value if validation succeeds.
        Raises: ValidationError
        """
        pass

    def has_default(self):
        return False

    def get_default(self):
        raise AssertionError('No default available.')


class Primitive(Validator):
    """A basic type that is defined by Stone."""
    # pylint: disable=abstract-method
    pass


class Boolean(Primitive):

    def validate(self, val):
        if not isinstance(val, bool):
            raise ValidationError('%r is not a valid boolean' % val)
        return val


class Integer(Primitive):
    """
    Do not use this class directly. Extend it and specify a 'minimum' and
    'maximum' value as class variables for a more restrictive integer range.
    """
    minimum = None  # type: typing.Optional[int]
    maximum = None  # type: typing.Optional[int]

    def __init__(self, min_value=None, max_value=None):
        """
        A more restrictive minimum or maximum value can be specified than the
        range inherent to the defined type.
        """
        if min_value is not None:
            assert isinstance(min_value, numbers.Integral), \
                'min_value must be an integral number'
            assert min_value >= self.minimum, \
                'min_value cannot be less than the minimum value for this ' \
                'type (%d < %d)' % (min_value, self.minimum)
            self.minimum = min_value
        if max_value is not None:
            assert isinstance(max_value, numbers.Integral), \
                'max_value must be an integral number'
            assert max_value <= self.maximum, \
                'max_value cannot be greater than the maximum value for ' \
                'this type (%d < %d)' % (max_value, self.maximum)
            self.maximum = max_value

    def validate(self, val):
        if not isinstance(val, numbers.Integral):
            raise ValidationError('expected integer, got %s'
                                  % generic_type_name(val))
        elif not (self.minimum <= val <= self.maximum):
            raise ValidationError('%d is not within range [%d, %d]'
                                  % (val, self.minimum, self.maximum))
        return val

    def __repr__(self):
        return '%s()' % self.__class__.__name__


class Int32(Integer):
    minimum = -2**31
    maximum = 2**31 - 1


class UInt32(Integer):
    minimum = 0
    maximum = 2**32 - 1


class Int64(Integer):
    minimum = -2**63
    maximum = 2**63 - 1


class UInt64(Integer):
    minimum = 0
    maximum = 2**64 - 1


class Real(Primitive):
    """
    Do not use this class directly. Extend it and optionally set a 'minimum'
    and 'maximum' value to enforce a range that's a subset of the Python float
    implementation. Python floats are doubles.
    """
    minimum = None  # type: typing.Optional[float]
    maximum = None  # type: typing.Optional[float]

    def __init__(self, min_value=None, max_value=None):
        """
        A more restrictive minimum or maximum value can be specified than the
        range inherent to the defined type.
        """
        if min_value is not None:
            assert isinstance(min_value, numbers.Real), \
                'min_value must be a real number'
            if not isinstance(min_value, float):
                try:
                    min_value = float(min_value)
                except OverflowError:
                    raise AssertionError('min_value is too small for a float')
            if self.minimum is not None and min_value < self.minimum:
                raise AssertionError('min_value cannot be less than the '
                                     'minimum value for this type (%f < %f)' %
                                     (min_value, self.minimum))
            self.minimum = min_value
        if max_value is not None:
            assert isinstance(max_value, numbers.Real), \
                'max_value must be a real number'
            if not isinstance(max_value, float):
                try:
                    max_value = float(max_value)
                except OverflowError:
                    raise AssertionError('max_value is too large for a float')
            if self.maximum is not None and max_value > self.maximum:
                raise AssertionError('max_value cannot be greater than the '
                                     'maximum value for this type (%f < %f)' %
                                     (max_value, self.maximum))
            self.maximum = max_value

    def validate(self, val):
        if not isinstance(val, numbers.Real):
            raise ValidationError('expected real number, got %s' %
                                  generic_type_name(val))
        if not isinstance(val, float):
            # This checks for the case where a number is passed in with a
            # magnitude larger than supported by float64.
            try:
                val = float(val)
            except OverflowError:
                raise ValidationError('too large for float')
        if math.isnan(val) or math.isinf(val):
            raise ValidationError('%f values are not supported' % val)
        if self.minimum is not None and val < self.minimum:
            raise ValidationError('%f is not greater than %f' %
                                  (val, self.minimum))
        if self.maximum is not None and val > self.maximum:
            raise ValidationError('%f is not less than %f' %
                                  (val, self.maximum))
        return val

    def __repr__(self):
        return '%s()' % self.__class__.__name__


class Float32(Real):
    # Maximum and minimums from the IEEE 754-1985 standard
    minimum = -3.40282 * 10**38
    maximum = 3.40282 * 10**38


class Float64(Real):
    pass


class String(Primitive):
    """Represents a unicode string."""

    def __init__(self, min_length=None, max_length=None, pattern=None):
        if min_length is not None:
            assert isinstance(min_length, numbers.Integral), \
                'min_length must be an integral number'
            assert min_length >= 0, 'min_length must be >= 0'
        if max_length is not None:
            assert isinstance(max_length, numbers.Integral), \
                'max_length must be an integral number'
            assert max_length > 0, 'max_length must be > 0'
        if min_length and max_length:
            assert max_length >= min_length, 'max_length must be >= min_length'
        if pattern is not None:
            assert isinstance(pattern, six.string_types), \
                'pattern must be a string'

        self.min_length = min_length
        self.max_length = max_length
        self.pattern = pattern
        self.pattern_re = None

        if pattern:
            try:
                self.pattern_re = re.compile(r"\A(?:" + pattern + r")\Z")
            except re.error as e:
                raise AssertionError('Regex {!r} failed: {}'.format(
                    pattern, e.args[0]))

    def validate(self, val):
        """
        A unicode string of the correct length and pattern will pass validation.
        In PY2, we enforce that a str type must be valid utf-8, and a unicode
        string will be returned.
        """
        if not isinstance(val, six.string_types):
            raise ValidationError("'%s' expected to be a string, got %s"
                                  % (val, generic_type_name(val)))
        if not six.PY3 and isinstance(val, str):
            try:
                val = val.decode('utf-8')
            except UnicodeDecodeError:
                raise ValidationError("'%s' was not valid utf-8")

        if self.max_length is not None and len(val) > self.max_length:
            raise ValidationError("'%s' must be at most %d characters, got %d"
                                  % (val, self.max_length, len(val)))
        if self.min_length is not None and len(val) < self.min_length:
            raise ValidationError("'%s' must be at least %d characters, got %d"
                                  % (val, self.min_length, len(val)))

        if self.pattern and not self.pattern_re.match(val):
            raise ValidationError("'%s' did not match pattern '%s'"
                                  % (val, self.pattern))
        return val


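# --- Editor's illustrative sketch (not part of this module). ---
# String validation normalizes to unicode and enforces length and pattern
# constraints; failures raise ValidationError. The pattern below is made up.
def _example_validate_string():
    v = String(min_length=1, max_length=10, pattern=r'[A-Za-z]+')
    ok = v.validate('hello')          # returns the (unicode) value
    try:
        v.validate('hello world!!')   # too long and fails the pattern
    except ValidationError:
        pass
    return ok

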
class Bytes(Primitive):

    def __init__(self, min_length=None, max_length=None):
        if min_length is not None:
            assert isinstance(min_length, numbers.Integral), \
                'min_length must be an integral number'
            assert min_length >= 0, 'min_length must be >= 0'
        if max_length is not None:
            assert isinstance(max_length, numbers.Integral), \
                'max_length must be an integral number'
            assert max_length > 0, 'max_length must be > 0'
        if min_length is not None and max_length is not None:
            assert max_length >= min_length, 'max_length must be >= min_length'

        self.min_length = min_length
        self.max_length = max_length

    def validate(self, val):
        if not isinstance(val, _binary_types):
            raise ValidationError("expected bytes type, got %s"
                                  % generic_type_name(val))
        elif self.max_length is not None and len(val) > self.max_length:
            raise ValidationError("'%s' must have at most %d bytes, got %d"
                                  % (val, self.max_length, len(val)))
        elif self.min_length is not None and len(val) < self.min_length:
            raise ValidationError("'%s' has fewer than %d bytes, got %d"
                                  % (val, self.min_length, len(val)))
        return val


class Timestamp(Primitive):
    """Note that while a format is specified, it isn't used in validation
    since a native Python datetime object is preferred. The format, however,
    can and should be used by serializers."""

    def __init__(self, fmt):
        """fmt must be composed of format codes that the C standard (1989)
        supports, most notably in its strftime() function."""
        assert isinstance(fmt, six.text_type), 'format must be a string'
        self.format = fmt

    def validate(self, val):
        if not isinstance(val, datetime.datetime):
            raise ValidationError('expected timestamp, got %s'
                                  % generic_type_name(val))
        elif val.tzinfo is not None and \
                val.tzinfo.utcoffset(val).total_seconds() != 0:
            raise ValidationError('timestamp should have either a UTC '
                                  'timezone or none set at all')
        return val


class Composite(Validator):
    """Validator for a type that builds on other primitive and composite
    types."""
    # pylint: disable=abstract-method
    pass


class List(Composite):
    """Assumes list contents are homogeneous with respect to types."""

    def __init__(self, item_validator, min_items=None, max_items=None):
        """Every list item will be validated with item_validator."""
        self.item_validator = item_validator
        if min_items is not None:
            assert isinstance(min_items, numbers.Integral), \
                'min_items must be an integral number'
            assert min_items >= 0, 'min_items must be >= 0'
        if max_items is not None:
            assert isinstance(max_items, numbers.Integral), \
                'max_items must be an integral number'
            assert max_items > 0, 'max_items must be > 0'
        if min_items is not None and max_items is not None:
            assert max_items >= min_items, 'max_items must be >= min_items'

        self.min_items = min_items
        self.max_items = max_items

    def validate(self, val):
        if not isinstance(val, (tuple, list)):
            raise ValidationError('%r is not a valid list' % val)
        elif self.max_items is not None and len(val) > self.max_items:
            raise ValidationError('%r has more than %s items'
                                  % (val, self.max_items))
        elif self.min_items is not None and len(val) < self.min_items:
            raise ValidationError('%r has fewer than %s items'
                                  % (val, self.min_items))
        return [self.item_validator.validate(item) for item in val]


class Map(Composite):
    """Assumes map keys and values are homogeneous with respect to types."""

    def __init__(self, key_validator, value_validator):
        """
        Every Map key/value pair will be validated with the given validators.
        Key validators must be a subclass of a String validator.
        """
        self.key_validator = key_validator
        self.value_validator = value_validator

    def validate(self, val):
        if not isinstance(val, dict):
            raise ValidationError('%r is not a valid dict' % val)

        # Fix for Python 2.6: build the dict explicitly instead of using a
        # dict comprehension.
        result = {}
        for key, value in val.items():
            result[self.key_validator.validate(key)] = self.value_validator.validate(value)

        return result


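# --- Editor's illustrative sketch (not part of this module). ---
# Composite validators delegate to the validators they wrap: List validates
# every item, Map validates every key/value pair.
def _example_validate_composites():
    names = List(String(min_length=1), max_items=3).validate(['a', 'b'])
    ages = Map(String(), UInt32()).validate({'alice': 30})
    return names, ages

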
class Struct(Composite):

    def __init__(self, definition):
        """
        Args:
            definition (class): A generated class representing a Stone struct
                from a spec. Must have a _fields_ attribute with the following
                structure:

                _fields_ = [(field_name, validator), ...]

                where
                    field_name: Name of the field (str).
                    validator: Validator object.
        """
        super(Struct, self).__init__()
        self.definition = definition

    def validate(self, val):
        """
        For a val to pass validation, val must be of the correct type and have
        all required fields present.
        """
        self.validate_type_only(val)
        self.validate_fields_only(val)
        return val

    def validate_fields_only(self, val):
        """
        To pass field validation, no required field should be missing.

        This method assumes that the contents of each field have already been
        validated on assignment, so it's merely a presence check.

        FIXME(kelkabany): Since the definition object does not maintain a list
        of which fields are required, all fields are scanned.
        """
        for field_name, _ in self.definition._all_fields_:
            if not hasattr(val, field_name):
                raise ValidationError("missing required field '%s'" %
                                      field_name)

    def validate_type_only(self, val):
        """
        Use this when you only want to validate that the type of an object
        is correct, but not yet validate each field.
        """
        # Since the definition maintains the list of fields for serialization,
        # we're okay with a subclass that might have extra information. This
        # makes it easier to return one subclass for two routes, one of which
        # relies on the parent class.
        if not isinstance(val, self.definition):
            raise ValidationError('expected type %s, got %s' %
                                  (self.definition.__name__, generic_type_name(val)))

    def has_default(self):
        return not self.definition._has_required_fields

    def get_default(self):
        assert not self.definition._has_required_fields, 'No default available.'
        return self.definition()


class StructTree(Struct):
    """Validator for structs with enumerated subtypes.

    NOTE: validate_fields_only() validates the fields known to this base
    struct, but does not do any validation specific to the subtype.
    """

    # See PyCQA/pylint#1043 for why this is disabled; this should show up
    # as a useless-suppression (and can be removed) once a fix is released
    def __init__(self, definition):  # pylint: disable=useless-super-delegation
        super(StructTree, self).__init__(definition)


class Union(Composite):

    def __init__(self, definition):
        """
        Args:
            definition (class): A generated class representing a Stone union
                from a spec. Must have a _tagmap attribute with the following
                structure:

                _tagmap = {field_name: validator, ...}

                where
                    field_name (str): Tag name.
                    validator (Validator): Tag value validator.
        """
        self.definition = definition

    def validate(self, val):
        """
        For a val to pass validation, it must have a _tag set. This assumes
        that the object validated that _tag is a valid tag, and that any
        associated value has also been validated.
        """
        self.validate_type_only(val)
        if not hasattr(val, '_tag') or val._tag is None:
            raise ValidationError('no tag set')
        return val

    def validate_type_only(self, val):
        """
        Use this when you only want to validate that the type of an object
        is correct, but not yet validate each field.

        We check whether val is a Python parent class of the definition. This
        is because Union subtyping works in the opposite direction of Python
        inheritance. For example, if a union U2 extends U1 in Python, this
        validator will accept U1 in places where U2 is expected.
        """
        if not issubclass(self.definition, type(val)):
            raise ValidationError('expected type %s or subtype, got %s' %
                                  (self.definition.__name__, generic_type_name(val)))


class Void(Primitive):

    def validate(self, val):
        if val is not None:
            raise ValidationError('expected NoneType, got %s' %
                                  generic_type_name(val))

    def has_default(self):
        return True

    def get_default(self):
        return None


class Nullable(Validator):

    def __init__(self, validator):
        assert isinstance(validator, (Primitive, Composite)), \
            'validator must be for a primitive or composite type'
        assert not isinstance(validator, Nullable), \
            'nullables cannot be stacked'
        assert not isinstance(validator, Void), \
            'void cannot be made nullable'
        self.validator = validator

    def validate(self, val):
        if val is None:
            return
        else:
            return self.validator.validate(val)

    def validate_type_only(self, val):
        """Use this only if Nullable is wrapping a Composite."""
        if val is None:
            return
        else:
            return self.validator.validate_type_only(val)

    def has_default(self):
        return True

    def get_default(self):
        return None

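# --- Editor's illustrative sketch (not part of this module). ---
# Nullable wraps another validator and simply passes None through, which is
# how optional struct fields are expressed by the generated modules.
def _example_validate_nullable():
    maybe_name = Nullable(String(min_length=1))
    assert maybe_name.validate(None) is None
    return maybe_name.validate('Alice')
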
@@ -1,468 +0,0 @@
# -*- coding: utf-8 -*-
# Auto-generated by Stone, do not modify.
# flake8: noqa
# pylint: skip-file
try:
    from . import stone_validators as bv
    from . import stone_base as bb
except (SystemError, ValueError):
    # Catch errors raised when importing a relative module when not in a package.
    # This makes testing this file directly (outside of a package) easier.
    import stone_validators as bv
    import stone_base as bb

try:
    from . import (
        common,
    )
except (SystemError, ValueError):
    import common

class GroupManagementType(bb.Union):
    """
    The group type determines how a group is managed.

    This class acts as a tagged union. Only one of the ``is_*`` methods will
    return true. To get the associated value of a tag (if one exists), use the
    corresponding ``get_*`` method.

    :ivar user_managed: A group which is managed by selected users.
    :ivar company_managed: A group which is managed by team admins only.
    :ivar system_managed: A group which is managed automatically by Dropbox.
    """

    _catch_all = 'other'
    # Attribute is overwritten below the class definition
    user_managed = None
    # Attribute is overwritten below the class definition
    company_managed = None
    # Attribute is overwritten below the class definition
    system_managed = None
    # Attribute is overwritten below the class definition
    other = None

    def is_user_managed(self):
        """
        Check if the union tag is ``user_managed``.

        :rtype: bool
        """
        return self._tag == 'user_managed'

    def is_company_managed(self):
        """
        Check if the union tag is ``company_managed``.

        :rtype: bool
        """
        return self._tag == 'company_managed'

    def is_system_managed(self):
        """
        Check if the union tag is ``system_managed``.

        :rtype: bool
        """
        return self._tag == 'system_managed'

    def is_other(self):
        """
        Check if the union tag is ``other``.

        :rtype: bool
        """
        return self._tag == 'other'

    def __repr__(self):
        return 'GroupManagementType(%r, %r)' % (self._tag, self._value)

GroupManagementType_validator = bv.Union(GroupManagementType)

class GroupSummary(object):
    """
    Information about a group.

    :ivar group_external_id: External ID of group. This is an arbitrary ID that
        an admin can attach to a group.
    :ivar member_count: The number of members in the group.
    :ivar group_management_type: Who is allowed to manage the group.
    """

    __slots__ = [
        '_group_name_value',
        '_group_name_present',
        '_group_id_value',
        '_group_id_present',
        '_group_external_id_value',
        '_group_external_id_present',
        '_member_count_value',
        '_member_count_present',
        '_group_management_type_value',
        '_group_management_type_present',
    ]

    _has_required_fields = True

    def __init__(self,
                 group_name=None,
                 group_id=None,
                 group_management_type=None,
                 group_external_id=None,
                 member_count=None):
        self._group_name_value = None
        self._group_name_present = False
        self._group_id_value = None
        self._group_id_present = False
        self._group_external_id_value = None
        self._group_external_id_present = False
        self._member_count_value = None
        self._member_count_present = False
        self._group_management_type_value = None
        self._group_management_type_present = False
        if group_name is not None:
            self.group_name = group_name
        if group_id is not None:
            self.group_id = group_id
        if group_external_id is not None:
            self.group_external_id = group_external_id
        if member_count is not None:
            self.member_count = member_count
        if group_management_type is not None:
            self.group_management_type = group_management_type

    @property
    def group_name(self):
        """
        :rtype: str
        """
        if self._group_name_present:
            return self._group_name_value
        else:
            raise AttributeError("missing required field 'group_name'")

    @group_name.setter
    def group_name(self, val):
        val = self._group_name_validator.validate(val)
        self._group_name_value = val
        self._group_name_present = True

    @group_name.deleter
    def group_name(self):
        self._group_name_value = None
        self._group_name_present = False

    @property
    def group_id(self):
        """
        :rtype: str
        """
        if self._group_id_present:
            return self._group_id_value
        else:
            raise AttributeError("missing required field 'group_id'")

    @group_id.setter
    def group_id(self, val):
        val = self._group_id_validator.validate(val)
        self._group_id_value = val
        self._group_id_present = True

    @group_id.deleter
    def group_id(self):
        self._group_id_value = None
        self._group_id_present = False

    @property
    def group_external_id(self):
        """
        External ID of group. This is an arbitrary ID that an admin can attach
        to a group.

        :rtype: str
        """
        if self._group_external_id_present:
            return self._group_external_id_value
        else:
            return None

    @group_external_id.setter
    def group_external_id(self, val):
        if val is None:
            del self.group_external_id
            return
        val = self._group_external_id_validator.validate(val)
        self._group_external_id_value = val
        self._group_external_id_present = True

    @group_external_id.deleter
    def group_external_id(self):
        self._group_external_id_value = None
        self._group_external_id_present = False

    @property
    def member_count(self):
        """
        The number of members in the group.

        :rtype: long
        """
        if self._member_count_present:
            return self._member_count_value
        else:
            return None

    @member_count.setter
    def member_count(self, val):
        if val is None:
            del self.member_count
            return
        val = self._member_count_validator.validate(val)
        self._member_count_value = val
        self._member_count_present = True

    @member_count.deleter
    def member_count(self):
        self._member_count_value = None
        self._member_count_present = False

    @property
    def group_management_type(self):
        """
        Who is allowed to manage the group.

        :rtype: GroupManagementType
        """
        if self._group_management_type_present:
            return self._group_management_type_value
        else:
            raise AttributeError("missing required field 'group_management_type'")

    @group_management_type.setter
    def group_management_type(self, val):
        self._group_management_type_validator.validate_type_only(val)
        self._group_management_type_value = val
        self._group_management_type_present = True

    @group_management_type.deleter
    def group_management_type(self):
        self._group_management_type_value = None
        self._group_management_type_present = False

    def __repr__(self):
        return 'GroupSummary(group_name={!r}, group_id={!r}, group_management_type={!r}, group_external_id={!r}, member_count={!r})'.format(
            self._group_name_value,
            self._group_id_value,
            self._group_management_type_value,
            self._group_external_id_value,
            self._member_count_value,
        )

GroupSummary_validator = bv.Struct(GroupSummary)

class GroupType(bb.Union):
    """
    The group type determines how a group is created and managed.

    This class acts as a tagged union. Only one of the ``is_*`` methods will
    return true. To get the associated value of a tag (if one exists), use the
    corresponding ``get_*`` method.

    :ivar team: A group to which team members are automatically added.
        Applicable to `team folders <https://www.dropbox.com/help/986>`_ only.
    :ivar user_managed: A group is created and managed by a user.
    """

    _catch_all = 'other'
    # Attribute is overwritten below the class definition
    team = None
    # Attribute is overwritten below the class definition
    user_managed = None
    # Attribute is overwritten below the class definition
    other = None

    def is_team(self):
        """
        Check if the union tag is ``team``.

        :rtype: bool
        """
        return self._tag == 'team'

    def is_user_managed(self):
        """
        Check if the union tag is ``user_managed``.

        :rtype: bool
        """
        return self._tag == 'user_managed'

    def is_other(self):
        """
        Check if the union tag is ``other``.

        :rtype: bool
        """
        return self._tag == 'other'

    def __repr__(self):
        return 'GroupType(%r, %r)' % (self._tag, self._value)

GroupType_validator = bv.Union(GroupType)

class TimeRange(object):
    """
    Time range.

    :ivar start_time: Optional starting time (inclusive).
    :ivar end_time: Optional ending time (exclusive).
    """

    __slots__ = [
        '_start_time_value',
        '_start_time_present',
        '_end_time_value',
        '_end_time_present',
    ]

    _has_required_fields = False

    def __init__(self,
                 start_time=None,
                 end_time=None):
        self._start_time_value = None
        self._start_time_present = False
        self._end_time_value = None
        self._end_time_present = False
        if start_time is not None:
            self.start_time = start_time
        if end_time is not None:
            self.end_time = end_time

    @property
    def start_time(self):
        """
        Optional starting time (inclusive).

        :rtype: datetime.datetime
        """
        if self._start_time_present:
            return self._start_time_value
        else:
            return None

    @start_time.setter
    def start_time(self, val):
        if val is None:
            del self.start_time
            return
        val = self._start_time_validator.validate(val)
        self._start_time_value = val
        self._start_time_present = True

    @start_time.deleter
    def start_time(self):
        self._start_time_value = None
        self._start_time_present = False

    @property
    def end_time(self):
        """
        Optional ending time (exclusive).

        :rtype: datetime.datetime
        """
        if self._end_time_present:
            return self._end_time_value
        else:
            return None

    @end_time.setter
    def end_time(self, val):
        if val is None:
            del self.end_time
            return
        val = self._end_time_validator.validate(val)
        self._end_time_value = val
        self._end_time_present = True

    @end_time.deleter
    def end_time(self):
        self._end_time_value = None
        self._end_time_present = False

    def __repr__(self):
        return 'TimeRange(start_time={!r}, end_time={!r})'.format(
            self._start_time_value,
            self._end_time_value,
        )

TimeRange_validator = bv.Struct(TimeRange)

GroupExternalId_validator = bv.String()
|
|
||||||
GroupId_validator = bv.String()
|
|
||||||
MemberExternalId_validator = bv.String(max_length=64)
|
|
||||||
ResellerId_validator = bv.String()
|
|
||||||
TeamMemberId_validator = bv.String()
|
|
||||||
GroupManagementType._user_managed_validator = bv.Void()
|
|
||||||
GroupManagementType._company_managed_validator = bv.Void()
|
|
||||||
GroupManagementType._system_managed_validator = bv.Void()
|
|
||||||
GroupManagementType._other_validator = bv.Void()
|
|
||||||
GroupManagementType._tagmap = {
|
|
||||||
'user_managed': GroupManagementType._user_managed_validator,
|
|
||||||
'company_managed': GroupManagementType._company_managed_validator,
|
|
||||||
'system_managed': GroupManagementType._system_managed_validator,
|
|
||||||
'other': GroupManagementType._other_validator,
|
|
||||||
}
|
|
||||||
|
|
||||||
GroupManagementType.user_managed = GroupManagementType('user_managed')
|
|
||||||
GroupManagementType.company_managed = GroupManagementType('company_managed')
|
|
||||||
GroupManagementType.system_managed = GroupManagementType('system_managed')
|
|
||||||
GroupManagementType.other = GroupManagementType('other')
|
|
||||||
|
|
||||||
GroupSummary._group_name_validator = bv.String()
|
|
||||||
GroupSummary._group_id_validator = GroupId_validator
|
|
||||||
GroupSummary._group_external_id_validator = bv.Nullable(GroupExternalId_validator)
|
|
||||||
GroupSummary._member_count_validator = bv.Nullable(bv.UInt32())
|
|
||||||
GroupSummary._group_management_type_validator = GroupManagementType_validator
|
|
||||||
GroupSummary._all_field_names_ = set([
|
|
||||||
'group_name',
|
|
||||||
'group_id',
|
|
||||||
'group_external_id',
|
|
||||||
'member_count',
|
|
||||||
'group_management_type',
|
|
||||||
])
|
|
||||||
GroupSummary._all_fields_ = [
|
|
||||||
('group_name', GroupSummary._group_name_validator),
|
|
||||||
('group_id', GroupSummary._group_id_validator),
|
|
||||||
('group_external_id', GroupSummary._group_external_id_validator),
|
|
||||||
('member_count', GroupSummary._member_count_validator),
|
|
||||||
('group_management_type', GroupSummary._group_management_type_validator),
|
|
||||||
]
|
|
||||||
|
|
||||||
GroupType._team_validator = bv.Void()
|
|
||||||
GroupType._user_managed_validator = bv.Void()
|
|
||||||
GroupType._other_validator = bv.Void()
|
|
||||||
GroupType._tagmap = {
|
|
||||||
'team': GroupType._team_validator,
|
|
||||||
'user_managed': GroupType._user_managed_validator,
|
|
||||||
'other': GroupType._other_validator,
|
|
||||||
}
|
|
||||||
|
|
||||||
GroupType.team = GroupType('team')
|
|
||||||
GroupType.user_managed = GroupType('user_managed')
|
|
||||||
GroupType.other = GroupType('other')
|
|
||||||
|
|
||||||
TimeRange._start_time_validator = bv.Nullable(common.DropboxTimestamp_validator)
|
|
||||||
TimeRange._end_time_validator = bv.Nullable(common.DropboxTimestamp_validator)
|
|
||||||
TimeRange._all_field_names_ = set([
|
|
||||||
'start_time',
|
|
||||||
'end_time',
|
|
||||||
])
|
|
||||||
TimeRange._all_fields_ = [
|
|
||||||
('start_time', TimeRange._start_time_validator),
|
|
||||||
('end_time', TimeRange._end_time_validator),
|
|
||||||
]
|
|
||||||
|
|
||||||
ROUTES = {
|
|
||||||
}
|
|
||||||
|
|
||||||
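The generated struct and union classes above are consumed in the usual Stone-SDK way: structs are built through keyword arguments and validated setters, while union values are pre-instantiated singletons checked with their ``is_*`` helpers. A minimal usage sketch, assuming this generated module is importable as ``dropbox.team_common`` alongside the rest of the bundled SDK:

import datetime
from dropbox import team_common

# TimeRange has no required fields, so either bound may be omitted.
window = team_common.TimeRange(
    start_time=datetime.datetime(2017, 1, 1),
    end_time=datetime.datetime(2017, 2, 1),
)
print(window)  # TimeRange(start_time=..., end_time=...)

# Union values are module-level singletons; inspect them with the is_* helpers.
mgmt = team_common.GroupManagementType.company_managed
if mgmt.is_company_managed():
    print('group is managed by the company')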
@@ -1,84 +0,0 @@
# -*- coding: utf-8 -*-
# Auto-generated by Stone, do not modify.
# flake8: noqa
# pylint: skip-file
"""
This namespace contains common data types used within the users namespace.
"""

try:
    from . import stone_validators as bv
    from . import stone_base as bb
except (SystemError, ValueError):
    # Catch errors raised when importing a relative module when not in a package.
    # This makes testing this file directly (outside of a package) easier.
    import stone_validators as bv
    import stone_base as bb

class AccountType(bb.Union):
    """
    What type of account this user has.

    This class acts as a tagged union. Only one of the ``is_*`` methods will
    return true. To get the associated value of a tag (if one exists), use the
    corresponding ``get_*`` method.

    :ivar basic: The basic account type.
    :ivar pro: The Dropbox Pro account type.
    :ivar business: The Dropbox Business account type.
    """

    _catch_all = None
    # Attribute is overwritten below the class definition
    basic = None
    # Attribute is overwritten below the class definition
    pro = None
    # Attribute is overwritten below the class definition
    business = None

    def is_basic(self):
        """
        Check if the union tag is ``basic``.

        :rtype: bool
        """
        return self._tag == 'basic'

    def is_pro(self):
        """
        Check if the union tag is ``pro``.

        :rtype: bool
        """
        return self._tag == 'pro'

    def is_business(self):
        """
        Check if the union tag is ``business``.

        :rtype: bool
        """
        return self._tag == 'business'

    def __repr__(self):
        return 'AccountType(%r, %r)' % (self._tag, self._value)

AccountType_validator = bv.Union(AccountType)

AccountId_validator = bv.String(min_length=40, max_length=40)
AccountType._basic_validator = bv.Void()
AccountType._pro_validator = bv.Void()
AccountType._business_validator = bv.Void()
AccountType._tagmap = {
    'basic': AccountType._basic_validator,
    'pro': AccountType._pro_validator,
    'business': AccountType._business_validator,
}

AccountType.basic = AccountType('basic')
AccountType.pro = AccountType('pro')
AccountType.business = AccountType('business')

ROUTES = {
}
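The ``bv`` validators in the block above are what give these generated types their runtime checks; ``AccountId_validator``, for example, only accepts strings of exactly 40 characters. A small sketch of that behaviour, assuming the SDK's ``stone_validators`` module is importable as shown:

from dropbox import stone_validators as bv

# Same constraint as AccountId_validator above: exactly 40 characters.
account_id_validator = bv.String(min_length=40, max_length=40)

account_id_validator.validate('x' * 40)       # passes and returns the value
try:
    account_id_validator.validate('too-short')
except bv.ValidationError as err:
    print('rejected:', err)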
@@ -1,4 +1,4 @@
-import utils as utils
+from . import utils as utils

class ZipExtractor:

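This same one-line change recurs across the add-on's modules in this comparison: Python 3 drops implicit relative imports, so modules inside the `resources/lib` package must spell the import explicitly. An illustrative fragment (it only works from a module that actually lives inside the package):

# Old Python 2 spelling, no longer valid inside a package on Python 3:
#   import utils as utils
# Explicit relative import, valid on both Python 2 and Python 3:
from . import utils as utils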
@@ -1,99 +1,45 @@
-import utils as utils
-from xml.dom import minidom
-from xml.parsers.expat import ExpatError
import json
import xbmc,xbmcvfs
+from . import utils as utils

class GuiSettingsManager:
-    settingsFile = None
-    doc = None
-    settings_allowed = list()
-    found_settings = list()
-
-    def __init__(self,settingsFile):
-        self._readFile(xbmc.translatePath(settingsFile))
-
-    def run(self):
-        #get a list of all the settings we can manipulate via json
-        json_response = json.loads(xbmc.executeJSONRPC('{"jsonrpc":"2.0", "id":1, "method":"Settings.GetSettings","params":{"level":"advanced"}}'))
-
-        settings = json_response['result']['settings']
-
-        for aSetting in settings:
-            self.settings_allowed.append(aSetting['id'])
-
-        #parse the existing xml file and get all the settings
-        root_nodes = self.__parseNodes(self.doc.documentElement)
-
-        for aNode in root_nodes:
-            secondary_list = self.__parseNodes(self.doc.getElementsByTagName(aNode.name)[0])
-
-            for secondNode in secondary_list:
-                #if the node does not have children and is not default
-                if(not secondNode.hasChildren and not secondNode.isDefault):
-
-                    if(secondNode.json_name() in self.settings_allowed):
-                        self.found_settings.append(secondNode)
-
-        #go through all the found settings and update them
-        for aSetting in self.found_settings:
-            utils.log("updating: " + aSetting.json_name() + ", value: " + aSetting.value)
-
-            #check for boolean and numeric values
-            if(aSetting.value.isdigit() or (aSetting.value == 'true' or aSetting.value == 'false')):
-                xbmc.executeJSONRPC('{"jsonrpc":"2.0", "id":1, "method":"Settings.SetSettingValue","params":{"setting":"' + aSetting.json_name() + '","value":' + aSetting.value + '}}')
-            else:
-                xbmc.executeJSONRPC('{"jsonrpc":"2.0", "id":1, "method":"Settings.SetSettingValue","params":{"setting":"' + aSetting.json_name() + '","value":"' + utils.encode(aSetting.value) + '"}}')
-
-        #make a copy of the guisettings file to make user based restores easier
-        xbmcvfs.copy(self.settingsFile, xbmc.translatePath("special://home/userdata/guisettings.xml.restored"))
-
-    def __parseNodes(self,nodeList):
-        result = []
-
-        for node in nodeList.childNodes:
-            if(node.nodeType == self.doc.ELEMENT_NODE):
-                aSetting = SettingNode(node.nodeName)
-
-                #detect if there are any element nodes
-                if(len(node.childNodes) > 0):
-                    for child_node in node.childNodes:
-                        if(child_node.nodeType == self.doc.ELEMENT_NODE):
-                            aSetting.hasChildren = True
-
-                if(not aSetting.hasChildren and len(node.childNodes) > 0):
-                    aSetting.value = node.firstChild.nodeValue
-
-                if('default' not in node.attributes.keys()):
-                    aSetting.isDefault = False
-
-                aSetting.parent = node.parentNode.nodeName
-
-                result.append(aSetting)
-        return result
-
-    def _readFile(self,fileLoc):
-
-        if(xbmcvfs.exists(fileLoc)):
-            try:
-                self.doc = minidom.parse(fileLoc)
-                self.settingsFile = fileLoc
-            except ExpatError:
-                utils.log("Can't read " + fileLoc)
-
-class SettingNode:
-    name = ''
-    value = ''
-    hasChildren = False
-    isDefault = True
-    parent = ''
-
-    def __init__(self,name):
-        self.name = name
-
-    def json_name(self):
-        return self.parent + "." + self.name
+    systemSettings = None
+
+    def __init__(self):
+        # get all of the current Kodi settings
+        json_response = json.loads(xbmc.executeJSONRPC('{"jsonrpc":"2.0", "id":1, "method":"Settings.GetSettings","params":{"level":"expert"}}').decode('utf-8', errors="ignore"))
+
+        self.systemSettings = json_response['result']['settings']
+
+    def backup(self):
+        utils.log('Backing up Kodi settings')
+
+        # return all current settings
+        return self.systemSettings
+
+    def restore(self, restoreSettings):
+        utils.log('Restoring Kodi settings')
+
+        updateJson = {"jsonrpc": "2.0", "id": 1, "method": "Settings.SetSettingValue", "params": {"setting": "", "value": ""}}
+
+        # create a setting=value dict of the current settings
+        settingsDict = {}
+        for aSetting in self.systemSettings:
+            # ignore action types, no value
+            if(aSetting['type'] != 'action'):
+                settingsDict[aSetting['id']] = aSetting['value']
+
+        restoreCount = 0
+        for aSetting in restoreSettings:
+            # only update a setting if its different than the current (action types have no value)
+            if(aSetting['type'] != 'action' and settingsDict[aSetting['id']] != aSetting['value']):
+                utils.log('%s different than current: %s' % (aSetting['id'], str(aSetting['value'])), xbmc.LOGDEBUG)
+
+                updateJson['params']['setting'] = aSetting['id']
+                updateJson['params']['value'] = aSetting['value']
+
+                xbmc.executeJSONRPC(json.dumps(updateJson))
+                restoreCount = restoreCount + 1
+
+        utils.log('Update %d settings' % restoreCount)
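The rewritten GuiSettingsManager no longer parses guisettings.xml; it snapshots every setting the JSON-RPC API exposes at the "expert" level and replays only the values that differ on restore. A minimal sketch of the same round trip outside the class (it only runs inside Kodi, where the ``xbmc`` module exists; ``screensaver.mode`` is just an example setting id):

import json
import xbmc

# Snapshot: every non-action setting and its current value.
response = json.loads(xbmc.executeJSONRPC(
    '{"jsonrpc":"2.0","id":1,"method":"Settings.GetSettings","params":{"level":"expert"}}'))
snapshot = {s['id']: s['value']
            for s in response['result']['settings'] if s['type'] != 'action'}

# Replay: write one value back through Settings.SetSettingValue.
update = {"jsonrpc": "2.0", "id": 1, "method": "Settings.SetSettingValue",
          "params": {"setting": "screensaver.mode",
                     "value": snapshot.get("screensaver.mode")}}
xbmc.executeJSONRPC(json.dumps(update))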
@@ -1,5 +1,5 @@
-import utils as utils
import xbmcgui
+from . import utils as utils

class BackupProgressBar:
    NONE = 2
@@ -14,11 +14,11 @@ def addon_dir():
def openSettings():
    __Addon.openSettings()

-def log(message,loglevel=xbmc.LOGNOTICE):
+def log(message,loglevel=xbmc.LOGDEBUG):
    xbmc.log(encode(__addon_id__ + "-" + __Addon.getAddonInfo('version') + ": " + message),level=loglevel)

def showNotification(message):
-    xbmcgui.Dialog().notification(encode(getString(30010)),encode(message),time=4000,icon=xbmc.translatePath(__Addon.getAddonInfo('path') + "/resources/media/icon.png"))
+    xbmcgui.Dialog().notification(encode(getString(30010)),encode(message),time=4000,icon=xbmc.translatePath(__Addon.getAddonInfo('path') + "/resources/images/icon.png"))

def getSetting(name):
    return __Addon.getSetting(name)
@@ -29,6 +29,14 @@ def setSetting(name,value):
def getString(string_id):
    return __Addon.getLocalizedString(string_id)

+def getRegionalTimestamp(date_time,dateformat=['dateshort']):
+    result = ''
+
+    for aFormat in dateformat:
+        result = result + ("%s " % date_time.strftime(xbmc.getRegion(aFormat)))
+
+    return result.strip()
+
def encode(string):
    result = ''
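The new ``getRegionalTimestamp`` helper formats a datetime with whatever date/time patterns the user's Kodi region settings define, instead of the hard-coded '%m-%d-%Y %H:%M' used elsewhere before this change. A sketch of a call, assuming it runs inside Kodi with the add-on installed (``'dateshort'`` and ``'time'`` are region keys Kodi exposes through ``xbmc.getRegion()``):

from datetime import datetime
import resources.lib.utils as utils

stamp = utils.getRegionalTimestamp(datetime.now(), ['dateshort', 'time'])
print(stamp)   # e.g. "01/31/2019 10:30 PM", depending on the regional settings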
@@ -1,34 +1,33 @@
-import utils as utils
-import tinyurl as tinyurl
import xbmc
import xbmcvfs
import xbmcgui
import zipfile
-import zlib
-import os
import os.path
import sys
import dropbox
+from . import utils as utils
from dropbox.files import WriteMode,CommitInfo,UploadSessionCursor
-from pydrive.drive import GoogleDrive
-from authorizers import DropboxAuthorizer,GoogleDriveAuthorizer
+from .authorizers import DropboxAuthorizer,GoogleDriveAuthorizer

class Vfs:
    root_path = None

    def __init__(self,rootString):
        self.set_root(rootString)

+    def clean_path(self, path):
+        # fix slashes
+        path = path.replace("\\", "/")
+
+        # check if trailing slash is included
+        if(path[-1:] != '/'):
+            path = path + '/'
+
+        return path
+
    def set_root(self,rootString):
        old_root = self.root_path
-        self.root_path = rootString
-
-        #fix slashes
-        self.root_path = self.root_path.replace("\\","/")
-
-        #check if trailing slash is included
-        if(self.root_path[-1:] != "/"):
-            self.root_path = self.root_path + "/"
+        self.root_path = self.clean_path(rootString)

        #return the old root
        return old_root
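The slash handling that used to live inline in ``set_root`` is now the reusable ``clean_path`` helper: backslashes become forward slashes and a trailing slash is guaranteed. A standalone re-implementation of those same two steps, just to show the contract:

def clean_path(path):
    # normalize Windows-style separators
    path = path.replace("\\", "/")
    # always end directories with a slash
    if path[-1:] != '/':
        path = path + '/'
    return path

print(clean_path("C:\\Users\\kodi\\backup"))   # C:/Users/kodi/backup/
print(clean_path("special://home/addons/"))    # already normalized, unchanged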
@@ -232,7 +231,7 @@ class DropboxFileSystem(Vfs):
                    self.client.files_upload_session_append_v2(f.read(self.MAX_CHUNK),upload_cursor)
                    upload_cursor.offset = f.tell()

                #if no errors we're good!
                return True
            except Exception as anError:
                utils.log(str(anError))
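This hunk is the tail of DropboxFileSystem's chunked upload loop: each chunk is appended to an upload session and the cursor offset is advanced to the file position. A hedged sketch of the surrounding session flow as the Dropbox v2 SDK documents it (``client`` is a ``dropbox.Dropbox`` instance; the helper name and ``MAX_CHUNK`` value are illustrative, not the add-on's exact method):

import os
from dropbox.files import CommitInfo, UploadSessionCursor, WriteMode

def chunked_upload(client, local_file, dest_path, MAX_CHUNK=4 * 1024 * 1024):
    with open(local_file, 'rb') as f:
        # start a session with the first chunk and remember where we are
        session = client.files_upload_session_start(f.read(MAX_CHUNK))
        cursor = UploadSessionCursor(session.session_id, offset=f.tell())

        # keep appending chunks and advancing the cursor, as in the hunk above
        while f.tell() < os.path.getsize(local_file):
            client.files_upload_session_append_v2(f.read(MAX_CHUNK), cursor)
            cursor.offset = f.tell()

        # commit the session to its final path
        commit = CommitInfo(path=dest_path, mode=WriteMode('overwrite'))
        client.files_upload_session_finish(b'', cursor, commit)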
@@ -421,7 +420,7 @@ class GoogleDriveFilesystem(Vfs):
        if(file.endswith('/')):
            file = file[:-1]

-        if(self.history.has_key(file)):
+        if(file in self.history):

            result = self.history[file]
        else:
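``dict.has_key`` was removed in Python 3, so the membership test switches to the ``in`` operator, which behaves the same on both interpreter versions:

history = {'backup.zip': 'file-id-123'}

print('backup.zip' in history)        # True on Python 2 and 3
# history.has_key('backup.zip')       # Python 2 only; AttributeError on Python 3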
@@ -3,7 +3,9 @@
    <category id="general" label="30011">
        <setting id="compress_backups" type="bool" label="30087" default="false" />
        <setting id="backup_rotation" type="number" label="30026" default="0" />
+        <setting id="always_prompt_restore_settings" type="bool" label="30148" default="false" />
        <setting id="progress_mode" type="enum" label="30022" lvalues="30082|30083|30084" default="0" />
+        <setting id="upgrade_notes" type="number" label="upgrade_notes" visible="false" default="1" />
    </category>
    <category id="backup_path" label="30048">
        <setting id="remote_selection" type="enum" lvalues="30018|30019|30027|30098" default="0" label="30025"/>
@@ -13,22 +15,22 @@
        <setting id="dropbox_secret" type="text" label="30029" visible="eq(-4,2)" default="" />
        <setting id="google_drive_id" type="text" label="Client ID" visible="eq(-5,3)" default="" />
        <setting id="google_drive_secret" type="text" label="Client Secret" visible="eq(-6,3)" default="" />
-        <setting id="auth_dropbox_button" type="action" label="30104" action="RunScript(special://home/addons/script.xbmcbackup/authorize_cloud.py,type=dropbox)" visible="eq(-7,2)"/>
-        <setting id="auth_google_button" type="action" label="30104" action="RunScript(special://home/addons/script.xbmcbackup/authorize_cloud.py,type=google_drive)" visible="eq(-8,3)"/>
-        <setting id="remove_auth_button" type="action" label="30093" action="RunScript(special://home/addons/script.xbmcbackup/remove_auth.py)" visible="gt(-9,1)"/>
+        <setting id="auth_dropbox_button" type="action" label="30104" action="RunScript(special://home/addons/script.xbmcbackup/launcher.py,action=authorize_cloud,provider=dropbox)" visible="eq(-7,2)"/>
+        <setting id="auth_google_button" type="action" label="30104" action="RunScript(special://home/addons/script.xbmcbackup/launcher.py,action=authorize_cloud,provider=google_drive)" visible="eq(-8,3)"/>
+        <setting id="remove_auth_button" type="action" label="30093" action="RunScript(special://home/addons/script.xbmcbackup/launcher.py,action=remove_auth)" visible="gt(-9,1)"/>
    </category>
    <category id="selection" label="30012">
-        <setting id="backup_addons" type="bool" label="30030" default="true" />
-        <setting id="backup_addon_data" type="bool" label="30031" default="false" />
-        <setting id="backup_database" type="bool" label="30032" default="true" />
-        <setting id="backup_playlists" type="bool" label="30033" default="true" />
-        <setting id="backup_profiles" type="bool" label="30080" default="false" />
-        <setting id="backup_thumbnails" type="bool" label="30034" default="true" />
-        <setting id="backup_config" type="bool" label="30035" default="true" />
-        <setting id="custom_dir_1_enable" type="bool" label="30036" default="false" />
-        <setting id="backup_custom_dir_1" type="folder" label="30018" default="" visible="eq(-1,true)"/>
-        <setting id="custom_dir_2_enable" type="bool" label="30037" default="false" />
-        <setting id="backup_custom_dir_2" type="folder" label="30018" default="" visible="eq(-1,true)"/>
+        <setting id="backup_selection_type" type="enum" lvalues="30014|30015" default="0" label="30023" />
+        <setting id="backup_addon_data" type="bool" label="30031" default="false" visible="eq(-1,0)"/>
+        <setting id="backup_config" type="bool" label="30035" default="true" visible="eq(-2,0)"/>
+        <setting id="backup_database" type="bool" label="30032" default="true" visible="eq(-3,0)"/>
+        <setting id="backup_game_saves" type="bool" label="30133" default="false" visible="eq(-4,0)" />
+        <setting id="backup_playlists" type="bool" label="30033" default="true" visible="eq(-5,0)"/>
+        <setting id="backup_profiles" type="bool" label="30080" default="false" visible="eq(-6,0)"/>
+        <setting id="backup_thumbnails" type="bool" label="30034" default="true" visible="eq(-7,0)"/>
+        <setting id="backup_addons" type="bool" label="30030" default="true" visible="eq(-8,0)" />
+        <setting id="advanced_button" type="action" label="30125" visible="eq(-9,1)" action="RunScript(special://home/addons/script.xbmcbackup/launcher.py,action=advanced_editor)" />
+        <setting id="advanced_defaults" type="action" label="30139" visible="eq(-10,1)" action="RunScript(special://home/addons/script.xbmcbackup/launcher.py,action=advanced_copy_config)" />
    </category>
    <category id="scheduling" label="30013">
        <setting id="enable_scheduler" type="bool" label="30060" default="false" />
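All of the action buttons now route through a single launcher.py with ``action=...`` (and optionally ``provider=...``) arguments instead of one script per task. launcher.py itself is not part of this hunk, but Kodi's RunScript passes each comma-separated argument as its own ``sys.argv`` entry, so a hypothetical sketch of how such arguments are typically folded into a dispatch dict looks like this:

import sys

# e.g. RunScript(...launcher.py,action=authorize_cloud,provider=dropbox)
# arrives as sys.argv = [script_path, 'action=authorize_cloud', 'provider=dropbox']
params = dict(arg.split('=', 1) for arg in sys.argv[1:] if '=' in arg)

if params.get('action') == 'authorize_cloud':
    print('would authorize provider:', params.get('provider'))
elif params.get('action') == 'remove_auth':
    print('would remove stored credentials')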
389
scheduler.py
@@ -1,191 +1,198 @@
import xbmc
import xbmcvfs
import xbmcgui
-import datetime
+from datetime import datetime
import time
import resources.lib.utils as utils
from resources.lib.croniter import croniter
from resources.lib.backup import XbmcBackup

+UPGRADE_INT = 2 #to keep track of any upgrade notifications
+
class BackupScheduler:
    monitor = None
    enabled = "false"
    next_run = 0
    next_run_path = None
    restore_point = None

    def __init__(self):
        self.monitor = UpdateMonitor(update_method = self.settingsChanged)
        self.enabled = utils.getSetting("enable_scheduler")
        self.next_run_path = xbmc.translatePath(utils.data_dir()) + 'next_run.txt'

        if(self.enabled == "true"):

            #sleep for 2 minutes so Kodi can start and time can update correctly
            xbmc.Monitor().waitForAbort(120)

            nr = 0
            if(xbmcvfs.exists(self.next_run_path)):

                fh = xbmcvfs.File(self.next_run_path)
                try:
                    #check if we saved a run time from the last run
                    nr = float(fh.read())
                except ValueError:
                    nr = 0

                fh.close()

            #if we missed and the user wants to play catch-up
            if(0 < nr <= time.time() and utils.getSetting('schedule_miss') == 'true'):
                utils.log("scheduled backup was missed, doing it now...")
                progress_mode = int(utils.getSetting('progress_mode'))

                if(progress_mode == 0):
                    progress_mode = 1 # Kodi just started, don't block it with a foreground progress bar

                self.doScheduledBackup(progress_mode)

        self.setup()

    def setup(self):
        #scheduler was turned on, find next run time
        utils.log("scheduler enabled, finding next run time")
        self.findNextRun(time.time())

    def start(self):

+        #display upgrade messages if they exist
+        if(int(utils.getSetting('upgrade_notes')) < UPGRADE_INT):
+            xbmcgui.Dialog().ok(utils.getString(30010),utils.getString(30132))
+            utils.setSetting('upgrade_notes',str(UPGRADE_INT))
+
        #check if a backup should be resumed
        resumeRestore = self._resumeCheck()

        if(resumeRestore):
            restore = XbmcBackup()
            restore.selectRestore(self.restore_point)
            #skip the advanced settings check
            restore.skipAdvanced()
-            restore.run(XbmcBackup.Restore)
+            restore.restore()

        while(not self.monitor.abortRequested()):

            if(self.enabled == "true"):
                #scheduler is still on
                now = time.time()

                if(self.next_run <= now):
                    progress_mode = int(utils.getSetting('progress_mode'))
                    self.doScheduledBackup(progress_mode)

                    #check if we should shut the computer down
                    if(utils.getSetting("cron_shutdown") == 'true'):
                        #wait 10 seconds to make sure all backup processes and files are completed
                        time.sleep(10)
                        xbmc.executebuiltin('ShutDown()')
                    else:
                        #find the next run time like normal
                        self.findNextRun(now)

            xbmc.sleep(500)

        #delete monitor to free up memory
        del self.monitor

    def doScheduledBackup(self,progress_mode):
        if(progress_mode != 2):
            utils.showNotification(utils.getString(30053))

        backup = XbmcBackup()

        if(backup.remoteConfigured()):

            if(int(utils.getSetting('progress_mode')) in [0,1]):
-                backup.run(XbmcBackup.Backup,True)
+                backup.backup(True)
            else:
-                backup.run(XbmcBackup.Backup,False)
+                backup.backup(False)

            #check if this is a "one-off"
            if(int(utils.getSetting("schedule_interval")) == 0):
                #disable the scheduler after this run
                self.enabled = "false"
                utils.setSetting('enable_scheduler','false')
        else:
            utils.showNotification(utils.getString(30045))

    def findNextRun(self,now):
        progress_mode = int(utils.getSetting('progress_mode'))

        #find the cron expression and get the next run time
        cron_exp = self.parseSchedule()

-        cron_ob = croniter(cron_exp,datetime.datetime.fromtimestamp(now))
+        cron_ob = croniter(cron_exp,datetime.fromtimestamp(now))
        new_run_time = cron_ob.get_next(float)

        if(new_run_time != self.next_run):
            self.next_run = new_run_time
-            utils.log("scheduler will run again on " + datetime.datetime.fromtimestamp(self.next_run).strftime('%m-%d-%Y %H:%M'))
+            utils.log("scheduler will run again on " + utils.getRegionalTimestamp(datetime.fromtimestamp(self.next_run),['dateshort','time']))

            #write the next time to a file
            fh = xbmcvfs.File(self.next_run_path, 'w')
            fh.write(str(self.next_run))
            fh.close()

            #only show when not in silent mode
            if(progress_mode != 2):
-                utils.showNotification(utils.getString(30081) + " " + datetime.datetime.fromtimestamp(self.next_run).strftime('%m-%d-%Y %H:%M'))
+                utils.showNotification(utils.getString(30081) + " " + utils.getRegionalTimestamp(datetime.fromtimestamp(self.next_run),['dateshort','time']))

    def settingsChanged(self):
        current_enabled = utils.getSetting("enable_scheduler")

        if(current_enabled == "true" and self.enabled == "false"):
            #scheduler was just turned on
            self.enabled = current_enabled
            self.setup()
        elif (current_enabled == "false" and self.enabled == "true"):
            #schedule was turn off
            self.enabled = current_enabled

        if(self.enabled == "true"):
            #always recheck the next run time after an update
            self.findNextRun(time.time())

    def parseSchedule(self):
        schedule_type = int(utils.getSetting("schedule_interval"))
        cron_exp = utils.getSetting("cron_schedule")

        hour_of_day = utils.getSetting("schedule_time")
        hour_of_day = int(hour_of_day[0:2])
        if(schedule_type == 0 or schedule_type == 1):
            #every day
            cron_exp = "0 " + str(hour_of_day) + " * * *"
        elif(schedule_type == 2):
            #once a week
            day_of_week = utils.getSetting("day_of_week")
            cron_exp = "0 " + str(hour_of_day) + " * * " + day_of_week
        elif(schedule_type == 3):
            #first day of month
            cron_exp = "0 " + str(hour_of_day) + " 1 * *"

        return cron_exp

    def _resumeCheck(self):
        shouldContinue = False
        if(xbmcvfs.exists(xbmc.translatePath(utils.data_dir() + "resume.txt"))):
            rFile = xbmcvfs.File(xbmc.translatePath(utils.data_dir() + "resume.txt"),'r')
            self.restore_point = rFile.read()
            rFile.close()
            xbmcvfs.delete(xbmc.translatePath(utils.data_dir() + "resume.txt"))
            shouldContinue = xbmcgui.Dialog().yesno(utils.getString(30042),utils.getString(30043),utils.getString(30044))

        return shouldContinue

class UpdateMonitor(xbmc.Monitor):
    update_method = None

    def __init__(self,*args, **kwargs):
        xbmc.Monitor.__init__(self)
        self.update_method = kwargs['update_method']

    def onSettingsChanged(self):
        self.update_method()

BackupScheduler().start()
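The scheduling logic itself is unchanged by this commit range: ``parseSchedule`` builds a five-field cron expression from the add-on settings and ``findNextRun`` feeds it to croniter to get the next run timestamp. A standalone sketch of that same calculation, using the pip ``croniter`` package in place of the copy the add-on bundles (the hour and weekday values are illustrative):

import time
from datetime import datetime
from croniter import croniter

hour_of_day = 3
day_of_week = '5'                       # Friday, for the weekly option

daily   = "0 " + str(hour_of_day) + " * * *"     # every day at 03:00
weekly  = "0 " + str(hour_of_day) + " * * " + day_of_week
monthly = "0 " + str(hour_of_day) + " 1 * *"     # first day of the month

now = time.time()
next_run = croniter(daily, datetime.fromtimestamp(now)).get_next(float)
print(datetime.fromtimestamp(next_run))   # the next 03:00 after "now"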