Compare commits
148 Commits
jarvis-1.1
...
matrix-1.6
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
16e13c7d80 | ||
|
|
dd5b99c978 | ||
|
|
6c99667afa | ||
|
|
6514b3db02 | ||
|
|
88341d9e1f | ||
|
|
95649c2b3f | ||
|
|
3e9de429dd | ||
|
|
db18c6a7b4 | ||
|
|
35e05acaf2 | ||
|
|
92ec8bf25c | ||
|
|
0c79aef4e7 | ||
|
|
fea7dca500 | ||
|
|
f7665c8ddd | ||
|
|
bbbfc3dd84 | ||
|
|
0b03914175 | ||
|
|
51553f7720 | ||
|
|
294683fb43 | ||
|
|
b74c1af704 | ||
|
|
edd4002d3f | ||
|
|
3aa912ca4c | ||
|
|
5c3e1712f6 | ||
|
|
82bdc955b5 | ||
|
|
4f1e5060e9 | ||
|
|
7d895a6028 | ||
|
|
7ede17fbbd | ||
|
|
d32620ea18 | ||
|
|
def99767e8 | ||
|
|
c7a9a8512d | ||
|
|
332afffc5b | ||
|
|
42d0f1b451 | ||
|
|
8d07310980 | ||
|
|
048d016e0e | ||
|
|
c50c5245fc | ||
|
|
e91037208b | ||
|
|
ff2ca53a22 | ||
|
|
1a27b279b0 | ||
|
|
6dfa4a5520 | ||
|
|
a7b9aeb9c1 | ||
|
|
7226178bfb | ||
|
|
f5bd7130e2 | ||
|
|
ae76d24e86 | ||
|
|
4d56331d8f | ||
|
|
45cf9a367d | ||
|
|
d8ceecb168 | ||
|
|
a2d7e8613a | ||
|
|
c0b0fa82cb | ||
|
|
6ac1d3559b | ||
|
|
d93589ecad | ||
|
|
b21c11de26 | ||
|
|
a4bb3f3feb | ||
|
|
1f6324b2d5 | ||
|
|
12b25f7cea | ||
|
|
5d9d8a1820 | ||
|
|
2fdf8d37fe | ||
|
|
aa94060cfe | ||
|
|
b9e0424ea5 | ||
|
|
495ecb1048 | ||
|
|
a1c0c0bbfe | ||
|
|
9f570233d9 | ||
|
|
b38aff2a8e | ||
|
|
456ebe9374 | ||
|
|
30f8b93629 | ||
|
|
94f872fb81 | ||
|
|
8f8402ae8a | ||
|
|
db93e40f59 | ||
|
|
72c77fb33a | ||
|
|
1f0e262c5b | ||
|
|
b75487bb2a | ||
|
|
b34e538d6b | ||
|
|
b5a7aada4c | ||
|
|
1a9c43b998 | ||
|
|
b7f4b14fe2 | ||
|
|
787b054bba | ||
|
|
a7be48a341 | ||
|
|
2fe76b7b52 | ||
|
|
3aed105fd7 | ||
|
|
c9b4554eac | ||
|
|
e736b964a5 | ||
|
|
4c5f6774df | ||
|
|
1f2e315208 | ||
|
|
138f910d07 | ||
|
|
1d3b2f58ab | ||
|
|
865416977d | ||
|
|
68093b2130 | ||
|
|
701a1831bf | ||
|
|
493e0d3a2e | ||
|
|
d87e209226 | ||
|
|
9960e2fc6b | ||
|
|
6aae9d9247 | ||
|
|
004b8dae58 | ||
|
|
6b934ed30c | ||
|
|
e950400222 | ||
|
|
cb2bb8a237 | ||
|
|
eb765c974b | ||
|
|
d18ed2960e | ||
|
|
9f1755686c | ||
|
|
534b3b108f | ||
|
|
4a8b891129 | ||
|
|
49af21a67e | ||
|
|
3ee2cb0414 | ||
|
|
061fd3efed | ||
|
|
76c2fdc0c2 | ||
|
|
2c999b46b9 | ||
|
|
4d891ab551 | ||
|
|
6c33e7c9ba | ||
|
|
f0d8e297a9 | ||
|
|
04ec3bd8a8 | ||
|
|
65ea3c98c4 | ||
|
|
4108f333e2 | ||
|
|
913090637c | ||
|
|
7139b920ad | ||
|
|
48d07c24a0 | ||
|
|
90e4b0c1f4 | ||
|
|
c9415cbf59 | ||
|
|
181654b414 | ||
|
|
71b048418e | ||
|
|
1896a684b0 | ||
|
|
545bd93e8c | ||
|
|
83a01a48bf | ||
|
|
dcc8482d73 | ||
|
|
85306f9469 | ||
|
|
c0d3b01ade | ||
|
|
5c6a8ce91a | ||
|
|
c8f148cd1a | ||
|
|
b6e57b04b1 | ||
|
|
adbf225ea2 | ||
|
|
d9d6c1ed42 | ||
|
|
f5f7bcfcb5 | ||
|
|
4608f04d96 | ||
|
|
e1c8b5a61c | ||
|
|
ada1efb165 | ||
|
|
5a43b5e340 | ||
|
|
33bc84c288 | ||
|
|
8c61616d3c | ||
|
|
622939901e | ||
|
|
756f50bba1 | ||
|
|
5fefbd286d | ||
|
|
5779784e0a | ||
|
|
24f570e888 | ||
|
|
489dcd317f | ||
|
|
216e2f4561 | ||
|
|
c3fe86293d | ||
|
|
20ee7a92ad | ||
|
|
64daaa13e8 | ||
|
|
dda08d04a3 | ||
|
|
469b5ff340 | ||
|
|
ff2f764b2f | ||
|
|
7d51ee05bc |
18
.github/stale-dontuse.yml
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
# Configuration for probot-stale - https://github.com/probot/stale
|
||||
|
||||
# Number of days of inactivity before an Issue or Pull Request becomes stale
|
||||
daysUntilStale: 31
|
||||
# Number of days of inactivity before a stale Issue or Pull Request is closed
|
||||
daysUntilClose: 14
|
||||
# Only issues or pull requests with all of these labels are check if stale. Defaults to `[]` (disabled)
|
||||
onlyLabels:
|
||||
- waiting for info
|
||||
- wontfix
|
||||
|
||||
# Label to use when marking as stale
|
||||
staleLabel: inactive
|
||||
|
||||
# Comment to post when marking as stale. Set to `false` to disable
|
||||
markComment: >
|
||||
This issue has been automatically marked as inactive because it has not had
|
||||
recent activity. It will be closed if no further activity occurs.
|
||||
2
.settings/org.eclipse.core.resources.prefs
Normal file
@@ -0,0 +1,2 @@
|
||||
eclipse.preferences.version=1
|
||||
encoding//resources/lib/croniter.py=utf-8
|
||||
14
.travis.yml
Normal file
@@ -0,0 +1,14 @@
|
||||
dist: xenial
|
||||
language: python
|
||||
python: 3.7
|
||||
|
||||
install:
|
||||
- pip install flake8 kodi-addon-checker
|
||||
|
||||
before_script:
|
||||
- git config core.quotepath false
|
||||
|
||||
# command to run our tests
|
||||
script:
|
||||
- flake8 ./ --statistics --show-source --ignore=E501,E722 --exclude=croniter.py,relativedelta.py,*/dropbox/* # check python structure against flake8 tests, ignore long lines
|
||||
- kodi-addon-checker --branch=matrix --allow-folder-id-mismatch
|
||||
@@ -1,6 +1,5 @@
|
||||
# Backup Addon
|
||||
|
||||
__Kodi Version Compatibility:__ Kodi 16.x (Jarvis) and greater
|
||||
 [](https://travis-ci.org/robweber/xbmcbackup) [](https://github.com/robweber/xbmcbackup/blob/master/LICENSE.txt) [](https://www.python.org/dev/peps/pep-0008/)
|
||||
|
||||
## About
|
||||
|
||||
@@ -19,6 +18,11 @@ For more specific information please check out the [wiki on Github](https://gith
|
||||
* [FAQ](https://github.com/robweber/xbmcbackup/wiki/FAQ)
|
||||
|
||||
|
||||
## Attributions
|
||||
|
||||
Icon files from Open Iconic — www.useiconic.com/open
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
34
addon.xml
@@ -1,20 +1,18 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<addon id="script.xbmcbackup"
|
||||
name="Backup" version="1.1.3" provider-name="robweber">
|
||||
name="Backup" version="1.6.0" provider-name="robweber">
|
||||
<requires>
|
||||
<!-- jarvis -->
|
||||
<import addon="xbmc.python" version="2.24.0"/>
|
||||
<import addon="script.module.httplib2" version="0.8.0" />
|
||||
<import addon="script.module.oauth2client" version="4.1.2" />
|
||||
<import addon="script.module.uritemplate" version="0.6" />
|
||||
<import addon="script.module.yaml" version="3.11"/>
|
||||
<import addon="script.module.googleapi" version="1.6.4" />
|
||||
<import addon="script.module.requests" version="2.9.1" />
|
||||
<import addon="xbmc.python" version="3.0.0"/>
|
||||
<import addon="script.module.dateutil" version="2.8.0" />
|
||||
<import addon="script.module.future" version="0.16.0.4"/>
|
||||
<!-- for dropbox -->
|
||||
<import addon="script.module.requests" version="2.18.4"/>
|
||||
<import addon="script.module.six" version="1.11.0"/>
|
||||
</requires>
|
||||
<extension point="xbmc.python.script" library="default.py">
|
||||
<provides>executable</provides>
|
||||
</extension>
|
||||
<extension point="xbmc.service" library="scheduler.py" start="startup" />
|
||||
<extension point="xbmc.service" library="scheduler.py" />
|
||||
<extension point="xbmc.addon.metadata">
|
||||
<summary lang="ar_SA">إنسخ إحتياطياً قاعده بيانات إكس بى إم سى وملفات اﻹعدادات فى حاله وقوع إنهيار مع إمكانيه اﻹسترجاع</summary>
|
||||
<summary lang="be_BY">Backup and restore your Kodi database and configuration files in the event of a crash or file corruption.</summary>
|
||||
@@ -87,10 +85,18 @@
|
||||
<license>The MIT License</license>
|
||||
<forum>https://forum.kodi.tv/showthread.php?tid=129499</forum>
|
||||
<source>https://github.com/robweber/xbmcbackup</source>
|
||||
<news>Version 1.1.4
|
||||
- added file chunk support for dropbox uploads
|
||||
- fixed settings duplicate ids, thanks aster-anto
|
||||
- added scheduler delay to assist with time sync (rpi mostly)
|
||||
<email></email>
|
||||
<assets>
|
||||
<icon>resources/images/icon.png</icon>
|
||||
<screenshot>resources/images/screenshot1.png</screenshot>
|
||||
<screenshot>resources/images/screenshot2.png</screenshot>
|
||||
<screenshot>resources/images/screenshot3.png</screenshot>
|
||||
<screenshot>resources/images/screenshot4.png</screenshot>
|
||||
</assets>
|
||||
<news>Version 1.5.2
|
||||
- Added script.module.dropbox import as a dependency for Dropbox filesystem
|
||||
- Fixed issue getting xbmcbackup.val file from non-zipped remote directories. Was being copied as though it was a local file so it was failing.
|
||||
- Use linux path separator (/) all the time, Kodi will interpret this correctly on windows. Was causing issues with remote file systems since os.path.sep
|
||||
</news>
|
||||
</extension>
|
||||
</addon>
|
||||
|
||||
@@ -1,37 +0,0 @@
|
||||
import sys
|
||||
import urlparse
|
||||
import xbmcgui
|
||||
import resources.lib.utils as utils
|
||||
from resources.lib.authorizers import DropboxAuthorizer,GoogleDriveAuthorizer
|
||||
|
||||
def get_params():
|
||||
param = {}
|
||||
try:
|
||||
for i in sys.argv:
|
||||
args = i
|
||||
if(args.startswith('?')):
|
||||
args = args[1:]
|
||||
param.update(dict(urlparse.parse_qsl(args)))
|
||||
except:
|
||||
pass
|
||||
return param
|
||||
|
||||
params = get_params()
|
||||
|
||||
#drobpox
|
||||
if(params['type'] == 'dropbox'):
|
||||
authorizer = DropboxAuthorizer()
|
||||
|
||||
if(authorizer.authorize()):
|
||||
xbmcgui.Dialog().ok(utils.getString(30010),utils.getString(30027) + ' ' + utils.getString(30106))
|
||||
else:
|
||||
xbmcgui.Dialog().ok(utils.getString(30010),utils.getString(30107) + ' ' + utils.getString(30027))
|
||||
|
||||
#google drive
|
||||
elif(params['type'] == 'google_drive'):
|
||||
authorizer = GoogleDriveAuthorizer()
|
||||
|
||||
if(authorizer.authorize()):
|
||||
xbmcgui.Dialog().ok("Backup",utils.getString(30098) + ' ' + utils.getString(30106))
|
||||
else:
|
||||
xbmcgui.Dialog().ok("Backup",utils.getString(30107) + ' ' + utils.getString(30098))
|
||||
398
changelog.md
Normal file
@@ -0,0 +1,398 @@
|
||||
# Changelog
|
||||
|
||||
All notable changes to this project will be documented in this file.
|
||||
|
||||
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
|
||||
|
||||
## [Version 1.6.0](https://github.com/robweber/xbmcbackup/compare/krypton-1.5.2...robweber:matrix-1.6.0) - 2019-11-26
|
||||
|
||||
### Added
|
||||
|
||||
- added new badges for Kodi Version, TravisCI and license information from shields.io
|
||||
- dependency on script.module.dateutil for relativedelta.py class
|
||||
- add Dropbox library back in v 9.4.0, for Python 3 compatibility
|
||||
|
||||
### Changed
|
||||
|
||||
- addon.xml updated to use Leia specific syntax and library imports
|
||||
- removed specific encode() calls per Python2/3 compatibility
|
||||
- call isdigit() method on the string directly instead of str.isdigit() (results in unicode error)
|
||||
- added flake8 testing to travis-ci
|
||||
- updated code to make python3 compatible
|
||||
- updated code for pep9 styling
|
||||
- use setArt() to set ListItem icons as the icon= constructor is deprecated
|
||||
|
||||
### Removed
|
||||
|
||||
- removed need for urlparse library
|
||||
- Removed GoogleDrive support - issues with python 3 compatibility
|
||||
- removed script.module.dropbox dependency, need version 9+ and it isn't in the Kodi repo yet
|
||||
- removed relativedelta.py, use the dateutil module for this
|
||||
|
||||
## [Version 1.5.2](https://github.com/robweber/xbmcbackup/compare/krypton-1.5.1...robweber:krypton-1.5.2) - 2019-09-30
|
||||
|
||||
### Added
|
||||
|
||||
- Updated Changelog format to the one suggested by [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
|
||||
- Added script.module.dropbox import as a dependency for Dropbox filesystem
|
||||
|
||||
### Changed
|
||||
|
||||
- Fixed issue getting xbmcbackup.val file from non-zipped remote directories. Was being copied as though it was a local file so it was failing.
|
||||
- Use linux path separator (/) all the time, Kodi will interpret this correctly on windows. Was causing issues with remote file systems since os.path.sep
|
||||
- Fixed minor python code style changes based on kodi-addon-checker output
|
||||
|
||||
### Removed
|
||||
|
||||
- files releated to dropbox library, using script.module.dropbox import now
|
||||
|
||||
## Version 1.5.1 - 2019-09-10
|
||||
|
||||
### Changed
|
||||
- Fixed guisettings restores not working - thanks Bluerayx
|
||||
|
||||
## Version 1.5.0 - 2019-08-26
|
||||
|
||||
### Added
|
||||
- Added new Advanced file editor and file selection based on a .json
|
||||
|
||||
### Removed
|
||||
- File backups and restores will not work with old version - breaking change with previous versions PR117
|
||||
|
||||
## Version 1.1.3 - 2017-12-29
|
||||
|
||||
### Added
|
||||
- added file chunk support for Dropbox uploads
|
||||
- added scheduler delay to assist with time sync (rpi mostly), will delay startup by 2 min
|
||||
|
||||
### Changed
|
||||
- fixed settings duplicate ids, thanks aster-anto
|
||||
|
||||
## Version 1.1.2
|
||||
|
||||
### Added
|
||||
- Fixes to the Dropbox lib for python 2.6
|
||||
|
||||
## Version 1.1.1
|
||||
|
||||
### Added
|
||||
- added ability to "catchup" on missed scheduled backup
|
||||
|
||||
### Changed
|
||||
- fixed error on authorizers (missing secret/key)
|
||||
- updated google oauth and client versions
|
||||
- merged in dropbox v2 library code
|
||||
|
||||
## Version 1.1.0
|
||||
|
||||
### Added
|
||||
- added tinyurl generation for oauth urls
|
||||
|
||||
### Changed
|
||||
- moved authorize to settings area for cloud storage
|
||||
|
||||
## Version 1.0.9
|
||||
|
||||
### Changed
|
||||
- fixed dropbox rest.py for Python 2.6 - thanks koying!
|
||||
|
||||
## Version 1.0.8
|
||||
|
||||
### Changed
|
||||
- updated dropbox api
|
||||
|
||||
## Version 1.0.7
|
||||
|
||||
### Changed
|
||||
- updated google client api version
|
||||
|
||||
## Version 1.0.6
|
||||
|
||||
### Added
|
||||
|
||||
- added progress for zip extraction - hopefully helps with extract errors
|
||||
|
||||
### Changed
|
||||
- fix for custom directories not working recursively
|
||||
|
||||
## Version 1.0.5
|
||||
|
||||
### Added
|
||||
- added google drive support
|
||||
- added settings dialog option - thanks ed_davidson
|
||||
|
||||
### Changed
|
||||
- make compression setting compatible with python 2.6 and above
|
||||
- fix for growing backups - thanks brokeh
|
||||
|
||||
## Version 1.0.4
|
||||
|
||||
### Added
|
||||
- exit if we can't delete the old archive, non recoverable
|
||||
|
||||
## Version 1.0.3
|
||||
|
||||
### Added
|
||||
- added "delete auth" dialog to delete oauth files in settings
|
||||
|
||||
## Version 1.0.2
|
||||
|
||||
### Changed
|
||||
- updated xbmc.python version to 2.19.0 - should be helix only
|
||||
|
||||
## Version 1.0.0
|
||||
|
||||
### Changed
|
||||
- rebranded as "Backup"
|
||||
- removed XBMC references and replaced with Kodi
|
||||
- tweaked file walking for Helix
|
||||
|
||||
## Version 0.5.9
|
||||
|
||||
### Added
|
||||
|
||||
- create restored version of guisettings for easy local restoration
|
||||
|
||||
### Changed
|
||||
- fixed dropbox unicode error
|
||||
|
||||
## Version 0.5.8.7
|
||||
|
||||
### Added
|
||||
- allow limited updating of guisettings file through json
|
||||
|
||||
## Version 0.5.8.6
|
||||
|
||||
### Added
|
||||
- show notification if some files failed
|
||||
- check if destination is writeable - thanks war59312
|
||||
|
||||
## Version 0.5.8.5
|
||||
|
||||
### Added
|
||||
- added custom library nodes to config backup options - thanks Ned Scott
|
||||
|
||||
## Version 0.5.8.4
|
||||
|
||||
### Changed
|
||||
- backup compression should use zip64 as sizes may be over 2GB
|
||||
- need to expand out path -bugfix
|
||||
|
||||
## Version 0.5.8
|
||||
|
||||
- fixes path substitution errors
|
||||
|
||||
## Version 0.5.7
|
||||
|
||||
- added option to compress backups, uses local source for staging the
|
||||
zip before sending to remote
|
||||
|
||||
## Version 0.5.6
|
||||
|
||||
- fix dropbox delete recursion error - thanks durd updated language
|
||||
files
|
||||
|
||||
## Version 0.5.5
|
||||
|
||||
- fix for dropbox errors during repeated file upload attempts
|
||||
|
||||
## Version 0.5.4
|
||||
|
||||
- check xbmc version when doing a restore
|
||||
|
||||
## Version 0.5.3
|
||||
|
||||
- updated python version
|
||||
|
||||
## Version 0.5.2
|
||||
|
||||
- added additional script and window parameters, thanks Samu-rai
|
||||
- critical error in backup rotation
|
||||
- updated progress bar display
|
||||
|
||||
## Version 0.5.1
|
||||
|
||||
- updated for new Gotham xbmc python updates
|
||||
|
||||
## Version 0.5.0
|
||||
|
||||
- New Version for Gotham
|
||||
|
||||
## Version 0.4.6
|
||||
|
||||
- modified backup folder names to include time, also modified display
|
||||
listing
|
||||
|
||||
## Version 0.4.5
|
||||
|
||||
- added version info to logs
|
||||
- added try/catch for unicode errors
|
||||
|
||||
## Version 0.4.4
|
||||
|
||||
- modified the check for invalid file types
|
||||
|
||||
## Version 0.4.3
|
||||
|
||||
- added error message if remote directory is blank
|
||||
- added license tag
|
||||
|
||||
## Version 0.4.2
|
||||
|
||||
- Added support for userdata/profiles folder - thanks TUSSFC
|
||||
|
||||
## Version 0.4.1
|
||||
|
||||
- added encode() around notifications
|
||||
|
||||
## Version 0.4.0
|
||||
|
||||
- fixed settings display error - thanks zer04c
|
||||
|
||||
## Version 0.3.9
|
||||
|
||||
- added "just once" scheduler for one-off type backups
|
||||
- show notification on scheduler
|
||||
- update updated language files from Transifex
|
||||
|
||||
## Version 0.3.8
|
||||
|
||||
- added advancedsettings check on restore. prompts user to restore only this file and restart xbmc to continue. This fixes issues where path substitution was not working during restores - thanks ctrlbru
|
||||
|
||||
## [Version 0.3.7]
|
||||
|
||||
- added optional addon.xml tags
|
||||
- update language files from Transifex
|
||||
|
||||
## Version 0.3.6
|
||||
|
||||
- added up to 2 custom directories, can be toggled on/off
|
||||
- added a check for backup verification before rotation - no more
|
||||
deleting non backup related files
|
||||
- use monitor class for onSettingsChanged method
|
||||
|
||||
## Version 0.3.5
|
||||
|
||||
- test of custom directories - only 1 at the moment
|
||||
|
||||
## Version 0.3.4
|
||||
|
||||
- added ability to take parameters via RunScript() or
|
||||
JSONRPC.Addons.ExecuteAddon()
|
||||
|
||||
## Version 0.3.3
|
||||
|
||||
- updated xbmc python version (2.1.0)
|
||||
|
||||
## Version 0.3.2
|
||||
|
||||
- added settings for user provided Dropbox key and secret
|
||||
|
||||
## Version 0.3.1
|
||||
|
||||
- added try/except for multiple character encodings
|
||||
- remove token.txt file if Dropbox Authorization is revoked
|
||||
- can shutdown xbmc after scheduled backup
|
||||
|
||||
## Version 0.3.0
|
||||
|
||||
- major vfs rewrite
|
||||
- Added Dropbox as storage target
|
||||
- updated gui/removed settings - thanks SFX Group for idea!
|
||||
|
||||
## Version 0.2.3
|
||||
|
||||
- first official frodo build
|
||||
|
||||
## Version 0.2.2
|
||||
|
||||
- fix for backup rotation sort
|
||||
|
||||
## Version 0.2.1
|
||||
|
||||
- added ability to rotate backups, keeping a set number of days
|
||||
|
||||
## Version 0.2.0
|
||||
|
||||
- removed the vfs.py helper library
|
||||
- default.py file now uses xbmcvfs python library exclusively for
|
||||
listing directories and copy operations
|
||||
|
||||
## Version 0.1.7
|
||||
|
||||
- minor bug fixes and translations updates
|
||||
|
||||
## Version 0.1.6
|
||||
|
||||
- merged scheduler branch with master, can now schedule backups on an
|
||||
interval
|
||||
|
||||
## Version 0.1.5
|
||||
|
||||
- pulled xbmcbackup class into separate library
|
||||
|
||||
## Version 0.1.4
|
||||
|
||||
- added more verbose error message for incorrect paths
|
||||
|
||||
## Version 0.1.3
|
||||
|
||||
- backup folder format - thanks zeroram
|
||||
- added German translations - thanks dersphere
|
||||
- removed need for separate verbose logging setting
|
||||
- updated utf-8 encoding for all logging
|
||||
- backup now uses date as folder name, restore allows user to type date
|
||||
of last backup
|
||||
|
||||
## Version 0.1.2
|
||||
|
||||
- added French language translation - thanks mikebzh44
|
||||
- added some utf-8 encoding tags to filenames
|
||||
|
||||
## Version 0.1.1
|
||||
|
||||
- added check for key in vfs.py - Thanks Martijn!
|
||||
|
||||
## Version 0.1.0
|
||||
|
||||
- removed transparency from icon.png
|
||||
|
||||
## Version 0.0.9
|
||||
|
||||
- modified vfs.py again to filter out xsp files (smart playlists).
|
||||
Created running list for these types of compressed files
|
||||
- added enable/disable logging toggle in settings
|
||||
|
||||
## Version 0.0.8
|
||||
|
||||
- modified vfs.py script to exclude handling zip files as directories,
|
||||
added keymap and peripheral data folders in the "config" section
|
||||
|
||||
## Version 0.0.7
|
||||
|
||||
- removed "restore.txt" file and now write file listing to memory list
|
||||
instead
|
||||
|
||||
## Version 0.0.6
|
||||
|
||||
- Added the vfs module created by paddycarey
|
||||
- File Selection is now followed for both backup and restore options
|
||||
|
||||
## Version 0.0.5
|
||||
|
||||
- Added option to manually type a path rather than browse for one (only
|
||||
one used)
|
||||
- Show progress bar right away so you know this is doing something
|
||||
|
||||
## Version 0.0.4
|
||||
|
||||
- Finished code for restore mode.
|
||||
|
||||
## Version 0.0.3
|
||||
|
||||
- Added progress bar and "silent" option for running on startup or as a
|
||||
script
|
||||
|
||||
## Version 0.0.2
|
||||
|
||||
- First version, should backup directories as needed
|
||||
314
changelog.txt
@@ -1,314 +0,0 @@
|
||||
Version 1.1.3
|
||||
|
||||
added file chunk support for dropbox uploads
|
||||
fixed settings duplicate ids, thanks aster-anto
|
||||
added scheduler delay to assist with time sync (rpi mostly)
|
||||
|
||||
Version 1.1.2
|
||||
|
||||
added fixes to the Dropbox lib for python 2.6
|
||||
|
||||
Version 1.1.1
|
||||
|
||||
fixed error on authorizers (missing secret/key)
|
||||
added ability to "catchup" on missed scheduled backup
|
||||
updated google oauth and client versions
|
||||
merged in dropbox v2 code
|
||||
|
||||
Version 1.1.0
|
||||
|
||||
added tinyurl for oauth urls
|
||||
moved authorize to settings area for cloud storage
|
||||
bug fixes
|
||||
|
||||
Version 1.0.9
|
||||
|
||||
fixed dropbox rest.py for Python 2.6 - thanks koying!
|
||||
|
||||
Version 1.0.8
|
||||
|
||||
bug fixes
|
||||
updated dropbox api
|
||||
|
||||
Version 1.0.7
|
||||
|
||||
updated google client api version
|
||||
|
||||
Version 1.0.6
|
||||
|
||||
fix for custom directories not working recursively
|
||||
added progress for zip extraction - hopefully helps with extract errors
|
||||
|
||||
Version 1.0.5
|
||||
|
||||
added google drive support
|
||||
make compression setting compatible with python 2.6 and above
|
||||
added settings dialog option - thanks ed_davidson
|
||||
fix for growing backups - thanks brokeh
|
||||
|
||||
Version 1.0.4
|
||||
|
||||
exit if we can't delete the old archive, non recoverable
|
||||
|
||||
Version 1.0.3
|
||||
|
||||
added "delete auth" dialog to delete oauth files in settings
|
||||
|
||||
Version 1.0.2
|
||||
|
||||
updated xbmc.python version to 2.19.0 - should be helix only
|
||||
|
||||
Version 1.0.0
|
||||
|
||||
rebranded as "Backup"
|
||||
removed XBMC references and replaced with Kodi
|
||||
tweaked file walking for Helix
|
||||
|
||||
Version 0.5.9
|
||||
|
||||
fixed dropbox unicode error
|
||||
create restored version of guisettings for easy local restoration
|
||||
|
||||
Version 0.5.8.7
|
||||
|
||||
allow limited updating of guisettings file through json
|
||||
|
||||
Version 0.5.8.6
|
||||
|
||||
show notification if some files failed
|
||||
check if destination is writeable - thanks war59312
|
||||
|
||||
Version 0.5.8.5
|
||||
|
||||
added custom library nodes to config backup options - thanks Ned Scott
|
||||
|
||||
Version 0.5.8.4
|
||||
|
||||
backup compression should use zip64 as sizes may be over 2GB
|
||||
need to expand out path -bugfix
|
||||
|
||||
Version 0.5.8
|
||||
|
||||
fixes path substitution errors
|
||||
|
||||
Version 0.5.7
|
||||
|
||||
added option to compress backups, uses local source for staging the zip before sending to remote
|
||||
|
||||
Version 0.5.6
|
||||
|
||||
fix dropbox delete recursion error - thanks durd
|
||||
updated language files
|
||||
|
||||
Version 0.5.5
|
||||
|
||||
fix for dropbox errors during repeated file upload attempts
|
||||
|
||||
Version 0.5.4
|
||||
|
||||
check xbmc version when doing a restore
|
||||
|
||||
Version 0.5.3
|
||||
|
||||
updated python version
|
||||
|
||||
Version 0.5.2
|
||||
|
||||
added additional script and window parameters, thanks Samu-rai
|
||||
|
||||
critical error in backup rotation
|
||||
|
||||
updated progress bar display
|
||||
|
||||
Version 0.5.1
|
||||
|
||||
updated for new Gotham xbmc python updates
|
||||
|
||||
Version 0.5.0
|
||||
|
||||
New Version for Gotham
|
||||
|
||||
Version 0.4.6
|
||||
|
||||
modified backup folder names to include time, also modified display listing
|
||||
|
||||
Version 0.4.5
|
||||
|
||||
added version info to logs
|
||||
|
||||
added try/catch for unicode errors
|
||||
|
||||
Version 0.4.4
|
||||
|
||||
modified the check for invalid file types
|
||||
|
||||
Version 0.4.3
|
||||
|
||||
added error message if remote directory is blank
|
||||
|
||||
added license tag
|
||||
|
||||
Version 0.4.2
|
||||
|
||||
Added support for userdata/profiles folder - thanks TUSSFC
|
||||
|
||||
Version 0.4.1
|
||||
|
||||
added encode() around notifications
|
||||
|
||||
Version 0.4.0
|
||||
|
||||
fixed settings display error - thanks zer04c
|
||||
|
||||
Version 0.3.9
|
||||
|
||||
added "just once" scheduler for one-off type backups
|
||||
show notification on scheduler update
|
||||
updated language files from Transifex
|
||||
|
||||
Version 0.3.8
|
||||
|
||||
added advancedsettings check on restore. prompts user to restore only this file and restart xbmc to continue. This fixes issues where path substitution was not working during restores - thanks ctrlbru
|
||||
|
||||
Version 0.3.7
|
||||
|
||||
added optional addon.xml tags
|
||||
|
||||
update language files from Transifex
|
||||
|
||||
Version 0.3.6
|
||||
|
||||
added up to 2 custom directories, can be toggled on/off
|
||||
|
||||
added a check for backup verification before rotation - no more deleting non backup related files
|
||||
|
||||
use monitor class for onSettingsChanged method
|
||||
|
||||
Version 0.3.5
|
||||
|
||||
test of custom directories - only 1 at the moment
|
||||
|
||||
Version 0.3.4
|
||||
|
||||
added ability to take parameters via RunScript() or JSONRPC.Addons.ExecuteAddon()
|
||||
|
||||
Version 0.3.3
|
||||
|
||||
updated xbmc python version (2.1.0)
|
||||
|
||||
Version 0.3.2
|
||||
|
||||
added settings for user provided Dropbox key and secret
|
||||
|
||||
Version 0.3.1
|
||||
|
||||
added try/except for multiple character encodings
|
||||
|
||||
remove token.txt file if Dropbox Authorization is revoked
|
||||
|
||||
can shutdown xbmc after scheduled backup
|
||||
|
||||
Version 0.3.0
|
||||
|
||||
major vfs rewrite
|
||||
|
||||
Added Dropbox as storage target
|
||||
|
||||
updated gui/removed settings - thanks SFX Group for idea!
|
||||
|
||||
Version 0.2.3
|
||||
|
||||
first official frodo build
|
||||
|
||||
Version 0.2.2
|
||||
|
||||
fix for backup rotation sort
|
||||
|
||||
Version 0.2.1
|
||||
|
||||
added ability to rotate backups, keeping a set number of days
|
||||
|
||||
Version 0.2.0
|
||||
|
||||
removed the vfs.py helper library
|
||||
|
||||
default.py file now uses xbmcvfs python library exclusively for listing directories and copy operations
|
||||
|
||||
Version 0.1.7
|
||||
|
||||
minor bug fixes and translations updates
|
||||
|
||||
Version 0.1.6
|
||||
|
||||
merged scheduler branch with master, can now schedule backups on an interval
|
||||
|
||||
Version 0.1.5
|
||||
|
||||
pulled xbmcbackup class into separate library
|
||||
|
||||
Version 0.1.4
|
||||
|
||||
added more verbose error message for incorrect paths
|
||||
|
||||
Version 0.1.3
|
||||
|
||||
backup folder format - thanks zeroram
|
||||
|
||||
added German translations - thanks dersphere
|
||||
|
||||
removed need for separate verbose logging setting
|
||||
|
||||
updated utf-8 encoding for all logging
|
||||
|
||||
backup now uses date as folder name, restore allows user to type date of last backup
|
||||
|
||||
Version 0.1.2
|
||||
|
||||
added French language translation - thanks mikebzh44
|
||||
|
||||
added some utf-8 encoding tags to filenames
|
||||
|
||||
Version 0.1.1
|
||||
|
||||
added check for key in vfs.py - Thanks Martijn!
|
||||
|
||||
Version 0.1.0
|
||||
|
||||
removed transparency from icon.png
|
||||
|
||||
Version 0.0.9
|
||||
|
||||
modified vfs.py again to filter out xsp files (smart playlists). Created running list for these types of compressed files
|
||||
|
||||
added enable/disable logging toggle in settings
|
||||
|
||||
Version 0.0.8
|
||||
|
||||
modified vfs.py script to exclude handling zip files as directories, added keymap and peripheral data folders in the "config" section
|
||||
|
||||
Version 0.0.7
|
||||
|
||||
removed "restore.txt" file and now write file listing to memory list instead
|
||||
|
||||
Version 0.0.6
|
||||
|
||||
Added the vfs module created by paddycarey
|
||||
File Selection is now followed for both backup and restore options
|
||||
|
||||
Version 0.0.5
|
||||
|
||||
Added option to manually type a path rather than browse for one (only one used)
|
||||
Show progress bar right away so you know this is doing something
|
||||
|
||||
Version 0.0.4
|
||||
|
||||
Finished code for restore mode.
|
||||
|
||||
Version 0.0.3
|
||||
|
||||
Added progress bar and "silent" option for running on startup or as a script
|
||||
|
||||
Version 0.0.2
|
||||
|
||||
First version, should backup directories as needed
|
||||
|
||||
175
default.py
@@ -1,77 +1,98 @@
|
||||
import urlparse
|
||||
import xbmcgui
|
||||
import resources.lib.utils as utils
|
||||
from resources.lib.backup import XbmcBackup
|
||||
|
||||
def get_params():
|
||||
param = {}
|
||||
|
||||
if(len(sys.argv) > 1):
|
||||
for i in sys.argv:
|
||||
args = i
|
||||
if(args.startswith('?')):
|
||||
args = args[1:]
|
||||
param.update(dict(urlparse.parse_qsl(args)))
|
||||
|
||||
return param
|
||||
|
||||
#the program mode
|
||||
mode = -1
|
||||
params = get_params()
|
||||
|
||||
|
||||
if("mode" in params):
|
||||
if(params['mode'] == 'backup'):
|
||||
mode = 0
|
||||
elif(params['mode'] == 'restore'):
|
||||
mode = 1
|
||||
|
||||
#if mode wasn't passed in as arg, get from user
|
||||
if(mode == -1):
|
||||
#figure out if this is a backup or a restore from the user
|
||||
mode = xbmcgui.Dialog().select(utils.getString(30010) + " - " + utils.getString(30023),[utils.getString(30016),utils.getString(30017),utils.getString(30099)])
|
||||
|
||||
#check if program should be run
|
||||
if(mode != -1):
|
||||
#run the profile backup
|
||||
backup = XbmcBackup()
|
||||
|
||||
if(mode == 2):
|
||||
#open the settings dialog
|
||||
utils.openSettings()
|
||||
|
||||
elif(backup.remoteConfigured()):
|
||||
|
||||
if(mode == backup.Restore):
|
||||
#get list of valid restore points
|
||||
restorePoints = backup.listBackups()
|
||||
pointNames = []
|
||||
folderNames = []
|
||||
|
||||
for aDir in restorePoints:
|
||||
pointNames.append(aDir[1])
|
||||
folderNames.append(aDir[0])
|
||||
|
||||
selectedRestore = -1
|
||||
|
||||
if("archive" in params):
|
||||
#check that the user give archive exists
|
||||
if(params['archive'] in folderNames):
|
||||
#set the index
|
||||
selectedRestore = folderNames.index(params['archive'])
|
||||
utils.log(str(selectedRestore) + " : " + params['archive'])
|
||||
else:
|
||||
utils.showNotification(utils.getString(30045))
|
||||
utils.log(params['archive'] + ' is not a valid restore point')
|
||||
else:
|
||||
#allow user to select the backup to restore from
|
||||
selectedRestore = xbmcgui.Dialog().select(utils.getString(30010) + " - " + utils.getString(30021),pointNames)
|
||||
|
||||
if(selectedRestore != -1):
|
||||
backup.selectRestore(restorePoints[selectedRestore][0])
|
||||
|
||||
backup.run(mode)
|
||||
else:
|
||||
#can't go any further
|
||||
xbmcgui.Dialog().ok(utils.getString(30010),utils.getString(30045))
|
||||
utils.openSettings()
|
||||
import sys
|
||||
import xbmc
|
||||
import xbmcgui
|
||||
import resources.lib.utils as utils
|
||||
from resources.lib.backup import XbmcBackup
|
||||
|
||||
|
||||
def get_params():
|
||||
param = {}
|
||||
try:
|
||||
for i in sys.argv:
|
||||
args = i
|
||||
if('=' in args):
|
||||
if(args.startswith('?')):
|
||||
args = args[1:] # legacy in case of url params
|
||||
splitString = args.split('=')
|
||||
param[splitString[0]] = splitString[1]
|
||||
except:
|
||||
pass
|
||||
|
||||
return param
|
||||
|
||||
|
||||
# the program mode
|
||||
mode = -1
|
||||
params = get_params()
|
||||
|
||||
|
||||
if("mode" in params):
|
||||
if(params['mode'] == 'backup'):
|
||||
mode = 0
|
||||
elif(params['mode'] == 'restore'):
|
||||
mode = 1
|
||||
|
||||
|
||||
# if mode wasn't passed in as arg, get from user
|
||||
if(mode == -1):
|
||||
# by default, Backup,Restore,Open Settings
|
||||
options = [utils.getString(30016), utils.getString(30017), utils.getString(30099)]
|
||||
|
||||
# find out if we're using the advanced editor
|
||||
if(int(utils.getSetting('backup_selection_type')) == 1):
|
||||
options.append(utils.getString(30125))
|
||||
|
||||
# figure out if this is a backup or a restore from the user
|
||||
mode = xbmcgui.Dialog().select(utils.getString(30010) + " - " + utils.getString(30023), options)
|
||||
|
||||
# check if program should be run
|
||||
if(mode != -1):
|
||||
# run the profile backup
|
||||
backup = XbmcBackup()
|
||||
|
||||
if(mode == 2):
|
||||
# open the settings dialog
|
||||
utils.openSettings()
|
||||
elif(mode == 3 and int(utils.getSetting('backup_selection_type')) == 1):
|
||||
# open the advanced editor
|
||||
xbmc.executebuiltin('RunScript(special://home/addons/script.xbmcbackup/launcher.py, action=advanced_editor)')
|
||||
elif(backup.remoteConfigured()):
|
||||
|
||||
if(mode == backup.Restore):
|
||||
# get list of valid restore points
|
||||
restorePoints = backup.listBackups()
|
||||
pointNames = []
|
||||
folderNames = []
|
||||
|
||||
for aDir in restorePoints:
|
||||
pointNames.append(aDir[1])
|
||||
folderNames.append(aDir[0])
|
||||
|
||||
selectedRestore = -1
|
||||
|
||||
if("archive" in params):
|
||||
# check that the user give archive exists
|
||||
if(params['archive'] in folderNames):
|
||||
# set the index
|
||||
selectedRestore = folderNames.index(params['archive'])
|
||||
utils.log(str(selectedRestore) + " : " + params['archive'])
|
||||
else:
|
||||
utils.showNotification(utils.getString(30045))
|
||||
utils.log(params['archive'] + ' is not a valid restore point')
|
||||
else:
|
||||
# allow user to select the backup to restore from
|
||||
selectedRestore = xbmcgui.Dialog().select(utils.getString(30010) + " - " + utils.getString(30021), pointNames)
|
||||
|
||||
if(selectedRestore != -1):
|
||||
backup.selectRestore(restorePoints[selectedRestore][0])
|
||||
|
||||
if('sets' in params):
|
||||
backup.restore(selectedSets=params['sets'].split('|'))
|
||||
else:
|
||||
backup.restore()
|
||||
else:
|
||||
backup.backup()
|
||||
else:
|
||||
# can't go any further
|
||||
xbmcgui.Dialog().ok(utils.getString(30010), utils.getString(30045))
|
||||
utils.openSettings()
|
||||
|
||||
59
launcher.py
Normal file
@@ -0,0 +1,59 @@
|
||||
# launcher for various helpful functions found in the settings.xml area
|
||||
import sys
|
||||
import xbmc
|
||||
import xbmcgui
|
||||
import xbmcvfs
|
||||
import resources.lib.utils as utils
|
||||
from resources.lib.authorizers import DropboxAuthorizer
|
||||
from resources.lib.advanced_editor import AdvancedBackupEditor
|
||||
|
||||
|
||||
def authorize_cloud(cloudProvider):
|
||||
# drobpox
|
||||
if(cloudProvider == 'dropbox'):
|
||||
authorizer = DropboxAuthorizer()
|
||||
|
||||
if(authorizer.authorize()):
|
||||
xbmcgui.Dialog().ok(utils.getString(30010), utils.getString(30027) + ' ' + utils.getString(30106))
|
||||
else:
|
||||
xbmcgui.Dialog().ok(utils.getString(30010), utils.getString(30107) + ' ' + utils.getString(30027))
|
||||
|
||||
|
||||
def remove_auth():
|
||||
# triggered from settings.xml - asks if user wants to delete OAuth token information
|
||||
shouldDelete = xbmcgui.Dialog().yesno(utils.getString(30093), utils.getString(30094), utils.getString(30095), autoclose=7000)
|
||||
|
||||
if(shouldDelete):
|
||||
# delete any of the known token file types
|
||||
xbmcvfs.delete(xbmc.translatePath(utils.data_dir() + "tokens.txt")) # dropbox
|
||||
xbmcvfs.delete(xbmc.translatePath(utils.data_dir() + "google_drive.dat")) # google drive
|
||||
|
||||
|
||||
def get_params():
|
||||
param = {}
|
||||
try:
|
||||
for i in sys.argv:
|
||||
args = i
|
||||
if('=' in args):
|
||||
if(args.startswith('?')):
|
||||
args = args[1:] # legacy in case of url params
|
||||
splitString = args.split('=')
|
||||
param[splitString[0]] = splitString[1]
|
||||
except:
|
||||
pass
|
||||
|
||||
return param
|
||||
|
||||
|
||||
params = get_params()
|
||||
|
||||
if(params['action'] == 'authorize_cloud'):
|
||||
authorize_cloud(params['provider'])
|
||||
elif(params['action'] == 'remove_auth'):
|
||||
remove_auth()
|
||||
elif(params['action'] == 'advanced_editor'):
|
||||
editor = AdvancedBackupEditor()
|
||||
editor.showMainScreen()
|
||||
elif(params['action'] == 'advanced_copy_config'):
|
||||
editor = AdvancedBackupEditor()
|
||||
editor.copySimpleConfig()
|
||||
@@ -1,13 +0,0 @@
|
||||
import xbmc
|
||||
import xbmcgui
|
||||
import xbmcvfs
|
||||
import resources.lib.utils as utils
|
||||
|
||||
#triggered from settings.xml - asks if user wants to delete OAuth token information
|
||||
shouldDelete = xbmcgui.Dialog().yesno(utils.getString(30093),utils.getString(30094),utils.getString(30095),autoclose=7000)
|
||||
|
||||
if(shouldDelete):
|
||||
#delete any of the known token file types
|
||||
xbmcvfs.delete(xbmc.translatePath(utils.data_dir() + "tokens.txt")) #dropbox
|
||||
xbmcvfs.delete(xbmc.translatePath(utils.data_dir() + "google_drive.dat")) #google drive
|
||||
|
||||
105
resources/data/default_files.json
Normal file
@@ -0,0 +1,105 @@
|
||||
{
|
||||
"addons":{
|
||||
"root":"special://home/addons/",
|
||||
"dirs":[
|
||||
{
|
||||
"type":"include",
|
||||
"path":"special://home/addons/",
|
||||
"recurse":true
|
||||
},
|
||||
{
|
||||
"type":"exclude",
|
||||
"path":"special://home/addons/packages/"
|
||||
},
|
||||
{
|
||||
"type":"exclude",
|
||||
"path":"special://home/addons/temp/"
|
||||
}
|
||||
]
|
||||
},
|
||||
"addon_data":{
|
||||
"root":"special://home/userdata/addon_data/",
|
||||
"dirs":[
|
||||
{
|
||||
"type":"include",
|
||||
"path":"special://home/userdata/addon_data/",
|
||||
"recurse":true
|
||||
}
|
||||
]
|
||||
},
|
||||
"database":{
|
||||
"root":"special://home/userdata/Database/",
|
||||
"dirs":[
|
||||
{
|
||||
"type":"include",
|
||||
"path":"special://home/userdata/Database/",
|
||||
"recurse":true
|
||||
}
|
||||
]
|
||||
},
|
||||
"game_saves":{
|
||||
"root":"special://home/userdata/Savestates/",
|
||||
"dirs":[
|
||||
{
|
||||
"type":"include",
|
||||
"path":"special://home/userdata/Savestates/",
|
||||
"recurse":true
|
||||
}
|
||||
]
|
||||
},
|
||||
"playlists":{
|
||||
"root":"special://home/userdata/playlists/",
|
||||
"dirs":[
|
||||
{
|
||||
"type":"include",
|
||||
"path":"special://home/userdata/playlists/",
|
||||
"recurse":true
|
||||
}
|
||||
]
|
||||
},
|
||||
"profiles":{
|
||||
"root":"special://home/userdata/profiles/",
|
||||
"dirs":[
|
||||
{
|
||||
"type":"include",
|
||||
"path":"special://home/userdata/profiles/",
|
||||
"recurse":true
|
||||
}
|
||||
]
|
||||
},
|
||||
"thumbnails":{
|
||||
"root":"special://home/userdata/Thumbnails/",
|
||||
"dirs":[
|
||||
{
|
||||
"type":"include",
|
||||
"path":"special://home/userdata/Thumbnails/",
|
||||
"recurse":true
|
||||
}
|
||||
]
|
||||
},
|
||||
"config":{
|
||||
"root":"special://home/userdata/",
|
||||
"dirs":[
|
||||
{
|
||||
"type":"include",
|
||||
"path":"special://home/userdata/",
|
||||
"recurse":false
|
||||
},
|
||||
{
|
||||
"type":"include",
|
||||
"path":"special://home/userdata/keymaps/",
|
||||
"recurse":true
|
||||
},
|
||||
{
|
||||
"type":"include",
|
||||
"path":"special://home/userdata/peripheral_data/",
|
||||
"recurse":true
|
||||
},
|
||||
{
|
||||
"type":"include",
|
||||
"path":"special://home/userdata/library/",
|
||||
"recurse":true
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
BIN
resources/images/folder-icon.png
Normal file
|
After Width: | Height: | Size: 226 B |
|
Before Width: | Height: | Size: 8.9 KiB After Width: | Height: | Size: 8.9 KiB |
BIN
resources/images/plus-icon.png
Normal file
|
After Width: | Height: | Size: 196 B |
BIN
resources/images/screenshot1.PNG
Normal file
|
After Width: | Height: | Size: 125 KiB |
BIN
resources/images/screenshot1.png
Normal file
|
After Width: | Height: | Size: 125 KiB |
BIN
resources/images/screenshot2.PNG
Normal file
|
After Width: | Height: | Size: 129 KiB |
BIN
resources/images/screenshot2.png
Normal file
|
After Width: | Height: | Size: 129 KiB |
BIN
resources/images/screenshot3.PNG
Normal file
|
After Width: | Height: | Size: 270 KiB |
BIN
resources/images/screenshot3.png
Normal file
|
After Width: | Height: | Size: 270 KiB |
BIN
resources/images/screenshot4.PNG
Normal file
|
After Width: | Height: | Size: 150 KiB |
BIN
resources/images/screenshot4.png
Normal file
|
After Width: | Height: | Size: 150 KiB |
BIN
resources/images/screenshot5.png
Normal file
|
After Width: | Height: | Size: 102 KiB |
BIN
resources/images/screenshot6.png
Normal file
|
After Width: | Height: | Size: 76 KiB |
@@ -48,6 +48,14 @@ msgctxt "#30013"
|
||||
msgid "Scheduling"
|
||||
msgstr "Scheduling"
|
||||
|
||||
msgctxt "#30014"
|
||||
msgid "Simple"
|
||||
msgstr "Simple"
|
||||
|
||||
msgctxt "#30015"
|
||||
msgid "Advanced"
|
||||
msgstr "Advanced"
|
||||
|
||||
msgctxt "#30016"
|
||||
msgid "Backup"
|
||||
msgstr "Backup"
|
||||
@@ -129,12 +137,12 @@ msgid "Config Files"
|
||||
msgstr "Config Files"
|
||||
|
||||
msgctxt "#30036"
|
||||
msgid "Custom Directory 1"
|
||||
msgstr "Custom Directory 1"
|
||||
msgid "Disclaimer"
|
||||
msgstr "Disclaimer"
|
||||
|
||||
msgctxt "#30037"
|
||||
msgid "Custom Directory 2"
|
||||
msgstr "Custom Directory 2"
|
||||
msgid "Canceling this menu will close and save changes"
|
||||
msgstr "Canceling this menu will close and save changes"
|
||||
|
||||
msgctxt "#30038"
|
||||
msgid "Advanced Settings Detected"
|
||||
@@ -420,3 +428,131 @@ msgstr "Visit https://console.developers.google.com/"
|
||||
msgctxt "#30109"
|
||||
msgid "Run on startup if missed"
|
||||
msgstr "Run on startup if missed"
|
||||
|
||||
msgctxt "#30110"
|
||||
msgid "Set Name"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#30111"
|
||||
msgid "Root folder selection"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#30112"
|
||||
msgid "Browse Folder"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#30113"
|
||||
msgid "Enter Own"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#30114"
|
||||
msgid "starts in Kodi home"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#30115"
|
||||
msgid "enter path to start there"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#30116"
|
||||
msgid "Enter root path"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#30117"
|
||||
msgid "Path Error"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#30118"
|
||||
msgid "Path does not exist"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#30119"
|
||||
msgid "Select root"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#30120"
|
||||
msgid "Add Exclude Folder"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#30121"
|
||||
msgid "Root Folder"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#30122"
|
||||
msgid "Edit"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#30123"
|
||||
msgid "Delete"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#30124"
|
||||
msgid "Choose Action"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#30125"
|
||||
msgid "Advanced Editor"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#30126"
|
||||
msgid "Add Set"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#30127"
|
||||
msgid "Delete Set"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#30128"
|
||||
msgid "Are you sure you want to delete?"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#30129"
|
||||
msgid "Exclude"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#30130"
|
||||
msgid "The root folder cannot be changed"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#30131"
|
||||
msgid "Choose Sets to Restore"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#30132"
|
||||
msgid "Version 1.5.0 requires you to setup your file selections again - this is a breaking change"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#30133"
|
||||
msgid "Game Saves"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#30134"
|
||||
msgid "Include"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#30135"
|
||||
msgid "Add Include Folder"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#30136"
|
||||
msgid "Path must be within root folder"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#30137"
|
||||
msgid "This path is part of a rule already"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#30138"
|
||||
msgid "Set Name exists already"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#30139"
|
||||
msgid "Copy Simple Config"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#30140"
|
||||
msgid "This will copy the default Simple file selection to the Advanced Editor"
|
||||
msgstr ""
|
||||
|
||||
msgctxt "#30141"
|
||||
msgid "This will erase any current Advanced Editor settings"
|
||||
msgstr ""
|
||||
@@ -129,14 +129,6 @@ msgctxt "#30035"
|
||||
msgid "Config Files"
|
||||
msgstr "Config Files"
|
||||
|
||||
msgctxt "#30036"
|
||||
msgid "Custom Directory 1"
|
||||
msgstr "Custom Directory 1"
|
||||
|
||||
msgctxt "#30037"
|
||||
msgid "Custom Directory 2"
|
||||
msgstr "Custom Directory 2"
|
||||
|
||||
msgctxt "#30038"
|
||||
msgid "Advanced Settings Detected"
|
||||
msgstr "Advanced Settings Detected"
|
||||
|
||||
2
resources/lib/.gitignore
vendored
@@ -1,2 +0,0 @@
|
||||
|
||||
*.pyo
|
||||
232
resources/lib/advanced_editor.py
Normal file
@@ -0,0 +1,232 @@
|
||||
import json
|
||||
import xbmc
|
||||
import xbmcgui
|
||||
import xbmcvfs
|
||||
from . import utils as utils
|
||||
|
||||
|
||||
class BackupSetManager:
|
||||
jsonFile = xbmc.translatePath(utils.data_dir() + "custom_paths.json")
|
||||
paths = None
|
||||
|
||||
def __init__(self):
|
||||
self.paths = {}
|
||||
|
||||
# try and read in the custom file
|
||||
self._readFile()
|
||||
|
||||
def addSet(self, aSet):
|
||||
self.paths[aSet['name']] = {'root': aSet['root'], 'dirs': [{"type": "include", "path": aSet['root'], 'recurse': True}]}
|
||||
|
||||
# save the file
|
||||
self._writeFile()
|
||||
|
||||
def updateSet(self, name, aSet):
|
||||
self.paths[name] = aSet
|
||||
|
||||
# save the file
|
||||
self._writeFile()
|
||||
|
||||
def deleteSet(self, index):
|
||||
# match the index to a key
|
||||
keys = self.getSets()
|
||||
|
||||
# delete this set
|
||||
del self.paths[keys[index]]
|
||||
|
||||
# save the file
|
||||
self._writeFile()
|
||||
|
||||
def getSets(self):
|
||||
# list all current sets by name
|
||||
keys = list(self.paths.keys())
|
||||
keys.sort()
|
||||
|
||||
return keys
|
||||
|
||||
def getSet(self, index):
|
||||
keys = self.getSets()
|
||||
|
||||
# return the set at this index
|
||||
return {'name': keys[index], 'set': self.paths[keys[index]]}
|
||||
|
||||
def validateSetName(self, name):
|
||||
return (name not in self.getSets())
|
||||
|
||||
def _writeFile(self):
|
||||
# create the custom file
|
||||
aFile = xbmcvfs.File(self.jsonFile, 'w')
|
||||
aFile.write(json.dumps(self.paths))
|
||||
aFile.close()
|
||||
|
||||
def _readFile(self):
|
||||
|
||||
if(xbmcvfs.exists(self.jsonFile)):
|
||||
|
||||
# read in the custom file
|
||||
aFile = xbmcvfs.File(self.jsonFile)
|
||||
|
||||
# load custom dirs
|
||||
self.paths = json.loads(aFile.read())
|
||||
aFile.close()
|
||||
else:
|
||||
# write a blank file
|
||||
self._writeFile()
|
||||
|
||||
|
||||
class AdvancedBackupEditor:
|
||||
dialog = None
|
||||
|
||||
def __init__(self):
|
||||
self.dialog = xbmcgui.Dialog()
|
||||
|
||||
def _cleanPath(self, root, path):
|
||||
return path[len(root) - 1:]
|
||||
|
||||
def _validatePath(self, root, path):
|
||||
return path.startswith(root)
|
||||
|
||||
def createSet(self):
|
||||
backupSet = None
|
||||
|
||||
name = self.dialog.input(utils.getString(30110), defaultt='Backup Set')
|
||||
|
||||
if(name is not None):
|
||||
|
||||
# give a choice to start in home or enter a root path
|
||||
enterHome = self.dialog.yesno(utils.getString(30111), line1=utils.getString(30112) + " - " + utils.getString(30114), line2=utils.getString(30113) + " - " + utils.getString(30115), nolabel=utils.getString(30112), yeslabel=utils.getString(30113))
|
||||
|
||||
rootFolder = 'special://home'
|
||||
if(enterHome):
|
||||
rootFolder = self.dialog.input(utils.getString(30116), defaultt=rootFolder)
|
||||
|
||||
# direcotry has to end in slash
|
||||
if(rootFolder[:-1] != '/'):
|
||||
rootFolder = rootFolder + '/'
|
||||
|
||||
# check that this path even exists
|
||||
if(not xbmcvfs.exists(xbmc.translatePath(rootFolder))):
|
||||
self.dialog.ok(utils.getString(30117), utils.getString(30118), rootFolder)
|
||||
return None
|
||||
else:
|
||||
# select path to start set
|
||||
rootFolder = self.dialog.browse(type=0, heading=utils.getString(30119), shares='files', defaultt=rootFolder)
|
||||
|
||||
backupSet = {'name': name, 'root': rootFolder}
|
||||
|
||||
return backupSet
|
||||
|
||||
def editSet(self, name, backupSet):
|
||||
optionSelected = ''
|
||||
rootPath = backupSet['root']
|
||||
|
||||
while(optionSelected != -1):
|
||||
options = [xbmcgui.ListItem(utils.getString(30120), "Exclude a specific folder from this backup set"), xbmcgui.ListItem(utils.getString(30135), "Include a specific folder to this backup set"), xbmcgui.ListItem(rootPath, utils.getString(30121))]
|
||||
|
||||
for aDir in backupSet['dirs']:
|
||||
if(aDir['type'] == 'exclude'):
|
||||
options.append(xbmcgui.ListItem(self._cleanPath(rootPath, aDir['path']), "%s: %s" % ("Type", utils.getString(30129))))
|
||||
elif(aDir['type'] == 'include'):
|
||||
options.append(xbmcgui.ListItem(self._cleanPath(rootPath, aDir['path']), "%s: %s | %s: %s" % ("Type", utils.getString(30134), "Include Sub Folders", str(aDir['recurse']))))
|
||||
|
||||
optionSelected = self.dialog.select(utils.getString(30122) + ' ' + name, options, useDetails=True)
|
||||
|
||||
if(optionSelected == 0 or optionSelected == 1):
|
||||
# add a folder, will equal root if cancel is hit
|
||||
addFolder = self.dialog.browse(type=0, heading=utils.getString(30120), shares='files', defaultt=backupSet['root'])
|
||||
|
||||
if(addFolder.startswith(rootPath)):
|
||||
|
||||
if(not any(addFolder == aDir['path'] for aDir in backupSet['dirs'])):
|
||||
# cannot add root as an exclusion
|
||||
if(optionSelected == 0 and addFolder != backupSet['root']):
|
||||
backupSet['dirs'].append({"path": addFolder, "type": "exclude"})
|
||||
elif(optionSelected == 1):
|
||||
# can add root as inclusion
|
||||
backupSet['dirs'].append({"path": addFolder, "type": "include", "recurse": True})
|
||||
else:
|
||||
# this path is already part of another include/exclude rule
|
||||
self.dialog.ok(utils.getString(30117), utils.getString(30137), addFolder)
|
||||
else:
|
||||
# folder must be under root folder
|
||||
self.dialog.ok(utils.getString(30117), utils.getString(30136), rootPath)
|
||||
elif(optionSelected == 2):
|
||||
self.dialog.ok(utils.getString(30121), utils.getString(30130), backupSet['root'])
|
||||
elif(optionSelected > 2):
|
||||
|
||||
cOptions = ['Delete']
|
||||
if(backupSet['dirs'][optionSelected - 3]['type'] == 'include'):
|
||||
cOptions.append('Toggle Sub Folders')
|
||||
|
||||
contextOption = self.dialog.contextmenu(cOptions)
|
||||
|
||||
if(contextOption == 0):
|
||||
if(self.dialog.yesno(heading=utils.getString(30123), line1=utils.getString(30128))):
|
||||
# remove folder
|
||||
del backupSet['dirs'][optionSelected - 3]
|
||||
elif(contextOption == 1 and backupSet['dirs'][optionSelected - 3]['type'] == 'include'):
|
||||
# toggle if this folder should be recursive
|
||||
backupSet['dirs'][optionSelected - 3]['recurse'] = not backupSet['dirs'][optionSelected - 3]['recurse']
|
||||
|
||||
return backupSet
|
||||
|
||||
def showMainScreen(self):
|
||||
exitCondition = ""
|
||||
customPaths = BackupSetManager()
|
||||
|
||||
# show this every time
|
||||
self.dialog.ok(utils.getString(30036), utils.getString(30037))
|
||||
|
||||
while(exitCondition != -1):
|
||||
# load the custom paths
|
||||
listItem = xbmcgui.ListItem(utils.getString(30126), '')
|
||||
listItem.setArt({'icon': utils.addon_dir() + 'resources/images/plus-icon.png'})
|
||||
options = [listItem]
|
||||
|
||||
for index in range(0, len(customPaths.getSets())):
|
||||
aSet = customPaths.getSet(index)
|
||||
|
||||
listItem = xbmcgui.ListItem(aSet['name'], utils.getString(30121) + ': ' + aSet['set']['root'])
|
||||
listItem.setArt({'icon': utils.addon_dir() + 'resources/images/folder-icon.png'})
|
||||
options.append(listItem)
|
||||
|
||||
# show the gui
|
||||
exitCondition = self.dialog.select(utils.getString(30125), options, useDetails=True)
|
||||
|
||||
if(exitCondition >= 0):
|
||||
if(exitCondition == 0):
|
||||
newSet = self.createSet()
|
||||
|
||||
# check that the name is unique
|
||||
if(customPaths.validateSetName(newSet['name'])):
|
||||
customPaths.addSet(newSet)
|
||||
else:
|
||||
self.dialog.ok(utils.getString(30117), utils.getString(30138), newSet['name'])
|
||||
else:
|
||||
# bring up a context menu
|
||||
menuOption = self.dialog.contextmenu([utils.getString(30122), utils.getString(30123)])
|
||||
|
||||
if(menuOption == 0):
|
||||
# get the set
|
||||
aSet = customPaths.getSet(exitCondition - 1)
|
||||
|
||||
# edit the set
|
||||
updatedSet = self.editSet(aSet['name'], aSet['set'])
|
||||
|
||||
# save it
|
||||
customPaths.updateSet(aSet['name'], updatedSet)
|
||||
|
||||
elif(menuOption == 1):
|
||||
if(self.dialog.yesno(heading=utils.getString(30127), line1=utils.getString(30128))):
|
||||
# delete this path - subtract one because of "add" item
|
||||
customPaths.deleteSet(exitCondition - 1)
|
||||
|
||||
def copySimpleConfig(self):
|
||||
# disclaimer in case the user hit this on accident
|
||||
shouldContinue = self.dialog.yesno(utils.getString(30139), utils.getString(30140), utils.getString(30141))
|
||||
|
||||
if(shouldContinue):
|
||||
source = xbmc.translatePath(utils.addon_dir() + "/resources/data/default_files.json")
|
||||
dest = xbmc.translatePath(utils.data_dir() + "/custom_paths.json")
|
||||
|
||||
xbmcvfs.copy(source, dest)
|
||||
@@ -3,57 +3,61 @@ import xbmcgui
|
||||
import xbmcvfs
|
||||
import resources.lib.tinyurl as tinyurl
|
||||
import resources.lib.utils as utils
|
||||
import dropbox
|
||||
from resources.lib.pydrive.auth import GoogleAuth
|
||||
from resources.lib.pydrive.drive import GoogleDrive
|
||||
|
||||
# don't die on import error yet, these might not even get used
|
||||
try:
|
||||
from . import dropbox
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
|
||||
class DropboxAuthorizer:
|
||||
APP_KEY = ""
|
||||
APP_SECRET = ""
|
||||
|
||||
|
||||
def __init__(self):
|
||||
self.APP_KEY = utils.getSetting('dropbox_key')
|
||||
self.APP_SECRET = utils.getSetting('dropbox_secret')
|
||||
|
||||
def setup(self):
|
||||
result = True
|
||||
|
||||
|
||||
if(self.APP_KEY == '' and self.APP_SECRET == ''):
|
||||
#we can't go any farther, need these for sure
|
||||
xbmcgui.Dialog().ok(utils.getString(30010),utils.getString(30027) + ' ' + utils.getString(30058),utils.getString(30059))
|
||||
# we can't go any farther, need these for sure
|
||||
xbmcgui.Dialog().ok(utils.getString(30010), utils.getString(30027) + ' ' + utils.getString(30058), utils.getString(30059))
|
||||
|
||||
result = False
|
||||
|
||||
return result
|
||||
|
||||
return result
|
||||
|
||||
def isAuthorized(self):
|
||||
user_token = self._getToken()
|
||||
|
||||
return user_token != ''
|
||||
return user_token != ''
|
||||
|
||||
def authorize(self):
|
||||
result = True
|
||||
|
||||
if(not self.setup()):
|
||||
return False
|
||||
|
||||
|
||||
if(self.isAuthorized()):
|
||||
#delete the token to start over
|
||||
# delete the token to start over
|
||||
self._deleteToken()
|
||||
|
||||
#copied flow from http://dropbox-sdk-python.readthedocs.io/en/latest/moduledoc.html#dropbox.oauth.DropboxOAuth2FlowNoRedirect
|
||||
flow = dropbox.oauth.DropboxOAuth2FlowNoRedirect(self.APP_KEY,self.APP_SECRET)
|
||||
# copied flow from http://dropbox-sdk-python.readthedocs.io/en/latest/moduledoc.html#dropbox.oauth.DropboxOAuth2FlowNoRedirect
|
||||
flow = dropbox.oauth.DropboxOAuth2FlowNoRedirect(self.APP_KEY, self.APP_SECRET)
|
||||
|
||||
url = flow.start()
|
||||
|
||||
#print url in log
|
||||
# print url in log
|
||||
utils.log("Authorize URL: " + url)
|
||||
xbmcgui.Dialog().ok(utils.getString(30010),utils.getString(30056),utils.getString(30057),tinyurl.shorten(url))
|
||||
xbmcgui.Dialog().ok(utils.getString(30010), utils.getString(30056), utils.getString(30057), tinyurl.shorten(url))
|
||||
|
||||
#get the auth code
|
||||
# get the auth code
|
||||
code = xbmcgui.Dialog().input(utils.getString(30027) + ' ' + utils.getString(30103))
|
||||
|
||||
#if user authorized this will work
|
||||
|
||||
# if user authorized this will work
|
||||
|
||||
try:
|
||||
user_token = flow.finish(code)
|
||||
@@ -61,36 +65,36 @@ class DropboxAuthorizer:
|
||||
except Exception as e:
|
||||
utils.log("Error: %s" % (e,))
|
||||
result = False
|
||||
|
||||
return result;
|
||||
|
||||
#return the DropboxClient, or None if can't be created
|
||||
return result
|
||||
|
||||
# return the DropboxClient, or None if can't be created
|
||||
def getClient(self):
|
||||
result = None
|
||||
|
||||
user_token = self._getToken()
|
||||
|
||||
if(user_token != ''):
|
||||
#create the client
|
||||
# create the client
|
||||
result = dropbox.Dropbox(user_token)
|
||||
|
||||
try:
|
||||
result.users_get_current_account()
|
||||
except:
|
||||
#this didn't work, delete the token file
|
||||
# this didn't work, delete the token file
|
||||
self._deleteToken()
|
||||
result = None
|
||||
|
||||
|
||||
return result
|
||||
|
||||
def _setToken(self,token):
|
||||
#write the token files
|
||||
token_file = open(xbmc.translatePath(utils.data_dir() + "tokens.txt"),'w')
|
||||
def _setToken(self, token):
|
||||
# write the token files
|
||||
token_file = open(xbmc.translatePath(utils.data_dir() + "tokens.txt"), 'w')
|
||||
token_file.write(token)
|
||||
token_file.close()
|
||||
|
||||
def _getToken(self):
|
||||
#get token, if it exists
|
||||
# get token, if it exists
|
||||
if(xbmcvfs.exists(xbmc.translatePath(utils.data_dir() + "tokens.txt"))):
|
||||
token_file = open(xbmc.translatePath(utils.data_dir() + "tokens.txt"))
|
||||
token = token_file.read()
|
||||
@@ -99,66 +103,7 @@ class DropboxAuthorizer:
|
||||
return token
|
||||
else:
|
||||
return ""
|
||||
|
||||
|
||||
def _deleteToken(self):
|
||||
if(xbmcvfs.exists(xbmc.translatePath(utils.data_dir() + "tokens.txt"))):
|
||||
xbmcvfs.delete(xbmc.translatePath(utils.data_dir() + "tokens.txt"))
|
||||
|
||||
class GoogleDriveAuthorizer:
|
||||
CLIENT_ID = ''
|
||||
CLIENT_SECRET = ''
|
||||
|
||||
def __init__(self):
|
||||
self.CLIENT_ID = utils.getSetting('google_drive_id')
|
||||
self.CLIENT_SECRET = utils.getSetting('google_drive_secret')
|
||||
|
||||
def setup(self):
|
||||
result = True
|
||||
|
||||
if(self.CLIENT_ID == '' and self.CLIENT_SECRET == ''):
|
||||
#we can't go any farther, need these for sure
|
||||
xbmcgui.Dialog().ok(utils.getString(30010),utils.getString(30098) + ' ' + utils.getString(30058),utils.getString(30108))
|
||||
result = False
|
||||
|
||||
return result
|
||||
|
||||
def isAuthorized(self):
|
||||
return xbmcvfs.exists(xbmc.translatePath(utils.data_dir() + "google_drive.dat"))
|
||||
|
||||
def authorize(self):
|
||||
result = True
|
||||
|
||||
if(not self.setup()):
|
||||
return False
|
||||
|
||||
#create authorization helper and load default settings
|
||||
gauth = GoogleAuth(xbmc.validatePath(xbmc.translatePath(utils.addon_dir() + '/resources/lib/pydrive/settings.yaml')))
|
||||
gauth.LoadClientConfigSettings()
|
||||
|
||||
settings = {"client_id":self.CLIENT_ID,'client_secret':self.CLIENT_SECRET}
|
||||
|
||||
drive_url = gauth.GetAuthUrl(settings)
|
||||
|
||||
utils.log("Google Drive Authorize URL: " + drive_url)
|
||||
|
||||
xbmcgui.Dialog().ok(utils.getString(30010),utils.getString(30056),utils.getString(30102),tinyurl.shorten(drive_url))
|
||||
code = xbmcgui.Dialog().input(utils.getString(30098) + ' ' + utils.getString(30103))
|
||||
|
||||
gauth.Auth(code)
|
||||
gauth.SaveCredentialsFile(xbmc.validatePath(xbmc.translatePath(utils.data_dir() + 'google_drive.dat')))
|
||||
|
||||
return result
|
||||
|
||||
def getClient(self):
|
||||
#create authorization helper and load default settings
|
||||
gauth = GoogleAuth(xbmc.validatePath(xbmc.translatePath(utils.addon_dir() + '/resources/lib/pydrive/settings.yaml')))
|
||||
gauth.LoadClientConfigSettings()
|
||||
|
||||
gauth.LoadCredentialsFile(xbmc.validatePath(xbmc.translatePath(utils.data_dir() + 'google_drive.dat')))
|
||||
|
||||
result = GoogleDrive(gauth)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -3,8 +3,8 @@
|
||||
|
||||
import re
|
||||
from time import time, mktime
|
||||
from datetime import datetime, date
|
||||
from .relativedelta import relativedelta
|
||||
from datetime import datetime
|
||||
from dateutil.relativedelta import relativedelta
|
||||
|
||||
search_re = re.compile(r'^([^-]+)-([^-/]+)(/(.*))?$')
|
||||
only_int_re = re.compile(r'^\d+$')
|
||||
|
||||
@@ -1,310 +1,7 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Auto-generated by Stone, do not modify.
|
||||
# @generated
|
||||
# flake8: noqa
|
||||
# pylint: skip-file
|
||||
try:
|
||||
from . import stone_validators as bv
|
||||
from . import stone_base as bb
|
||||
except (SystemError, ValueError):
|
||||
# Catch errors raised when importing a relative module when not in a package.
|
||||
# This makes testing this file directly (outside of a package) easier.
|
||||
import stone_validators as bv
|
||||
import stone_base as bb
|
||||
|
||||
class LaunchResultBase(bb.Union):
|
||||
"""
|
||||
Result returned by methods that launch an asynchronous job. A method who may
|
||||
either launch an asynchronous job, or complete the request synchronously,
|
||||
can use this union by extending it, and adding a 'complete' field with the
|
||||
type of the synchronous response. See :class:`LaunchEmptyResult` for an
|
||||
example.
|
||||
|
||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar str async_job_id: This response indicates that the processing is
|
||||
asynchronous. The string is an id that can be used to obtain the status
|
||||
of the asynchronous job.
|
||||
"""
|
||||
|
||||
_catch_all = None
|
||||
|
||||
@classmethod
|
||||
def async_job_id(cls, val):
|
||||
"""
|
||||
Create an instance of this class set to the ``async_job_id`` tag with
|
||||
value ``val``.
|
||||
|
||||
:param str val:
|
||||
:rtype: LaunchResultBase
|
||||
"""
|
||||
return cls('async_job_id', val)
|
||||
|
||||
def is_async_job_id(self):
|
||||
"""
|
||||
Check if the union tag is ``async_job_id``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'async_job_id'
|
||||
|
||||
def get_async_job_id(self):
|
||||
"""
|
||||
This response indicates that the processing is asynchronous. The string
|
||||
is an id that can be used to obtain the status of the asynchronous job.
|
||||
|
||||
Only call this if :meth:`is_async_job_id` is true.
|
||||
|
||||
:rtype: str
|
||||
"""
|
||||
if not self.is_async_job_id():
|
||||
raise AttributeError("tag 'async_job_id' not set")
|
||||
return self._value
|
||||
|
||||
def __repr__(self):
|
||||
return 'LaunchResultBase(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
LaunchResultBase_validator = bv.Union(LaunchResultBase)
|
||||
|
||||
class LaunchEmptyResult(LaunchResultBase):
|
||||
"""
|
||||
Result returned by methods that may either launch an asynchronous job or
|
||||
complete synchronously. Upon synchronous completion of the job, no
|
||||
additional information is returned.
|
||||
|
||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar complete: The job finished synchronously and successfully.
|
||||
"""
|
||||
|
||||
# Attribute is overwritten below the class definition
|
||||
complete = None
|
||||
|
||||
def is_complete(self):
|
||||
"""
|
||||
Check if the union tag is ``complete``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'complete'
|
||||
|
||||
def __repr__(self):
|
||||
return 'LaunchEmptyResult(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
LaunchEmptyResult_validator = bv.Union(LaunchEmptyResult)
|
||||
|
||||
class PollArg(object):
|
||||
"""
|
||||
Arguments for methods that poll the status of an asynchronous job.
|
||||
|
||||
:ivar async_job_id: Id of the asynchronous job. This is the value of a
|
||||
response returned from the method that launched the job.
|
||||
"""
|
||||
|
||||
__slots__ = [
|
||||
'_async_job_id_value',
|
||||
'_async_job_id_present',
|
||||
]
|
||||
|
||||
_has_required_fields = True
|
||||
|
||||
def __init__(self,
|
||||
async_job_id=None):
|
||||
self._async_job_id_value = None
|
||||
self._async_job_id_present = False
|
||||
if async_job_id is not None:
|
||||
self.async_job_id = async_job_id
|
||||
|
||||
@property
|
||||
def async_job_id(self):
|
||||
"""
|
||||
Id of the asynchronous job. This is the value of a response returned
|
||||
from the method that launched the job.
|
||||
|
||||
:rtype: str
|
||||
"""
|
||||
if self._async_job_id_present:
|
||||
return self._async_job_id_value
|
||||
else:
|
||||
raise AttributeError("missing required field 'async_job_id'")
|
||||
|
||||
@async_job_id.setter
|
||||
def async_job_id(self, val):
|
||||
val = self._async_job_id_validator.validate(val)
|
||||
self._async_job_id_value = val
|
||||
self._async_job_id_present = True
|
||||
|
||||
@async_job_id.deleter
|
||||
def async_job_id(self):
|
||||
self._async_job_id_value = None
|
||||
self._async_job_id_present = False
|
||||
|
||||
def __repr__(self):
|
||||
return 'PollArg(async_job_id={!r})'.format(
|
||||
self._async_job_id_value,
|
||||
)
|
||||
|
||||
PollArg_validator = bv.Struct(PollArg)
|
||||
|
||||
class PollResultBase(bb.Union):
|
||||
"""
|
||||
Result returned by methods that poll for the status of an asynchronous job.
|
||||
Unions that extend this union should add a 'complete' field with a type of
|
||||
the information returned upon job completion. See :class:`PollEmptyResult`
|
||||
for an example.
|
||||
|
||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar in_progress: The asynchronous job is still in progress.
|
||||
"""
|
||||
|
||||
_catch_all = None
|
||||
# Attribute is overwritten below the class definition
|
||||
in_progress = None
|
||||
|
||||
def is_in_progress(self):
|
||||
"""
|
||||
Check if the union tag is ``in_progress``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'in_progress'
|
||||
|
||||
def __repr__(self):
|
||||
return 'PollResultBase(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
PollResultBase_validator = bv.Union(PollResultBase)
|
||||
|
||||
class PollEmptyResult(PollResultBase):
|
||||
"""
|
||||
Result returned by methods that poll for the status of an asynchronous job.
|
||||
Upon completion of the job, no additional information is returned.
|
||||
|
||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar complete: The asynchronous job has completed successfully.
|
||||
"""
|
||||
|
||||
# Attribute is overwritten below the class definition
|
||||
complete = None
|
||||
|
||||
def is_complete(self):
|
||||
"""
|
||||
Check if the union tag is ``complete``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'complete'
|
||||
|
||||
def __repr__(self):
|
||||
return 'PollEmptyResult(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
PollEmptyResult_validator = bv.Union(PollEmptyResult)
|
||||
|
||||
class PollError(bb.Union):
|
||||
"""
|
||||
Error returned by methods for polling the status of asynchronous job.
|
||||
|
||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar invalid_async_job_id: The job ID is invalid.
|
||||
:ivar internal_error: Something went wrong with the job on Dropbox's end.
|
||||
You'll need to verify that the action you were taking succeeded, and if
|
||||
not, try again. This should happen very rarely.
|
||||
"""
|
||||
|
||||
_catch_all = 'other'
|
||||
# Attribute is overwritten below the class definition
|
||||
invalid_async_job_id = None
|
||||
# Attribute is overwritten below the class definition
|
||||
internal_error = None
|
||||
# Attribute is overwritten below the class definition
|
||||
other = None
|
||||
|
||||
def is_invalid_async_job_id(self):
|
||||
"""
|
||||
Check if the union tag is ``invalid_async_job_id``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'invalid_async_job_id'
|
||||
|
||||
def is_internal_error(self):
|
||||
"""
|
||||
Check if the union tag is ``internal_error``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'internal_error'
|
||||
|
||||
def is_other(self):
|
||||
"""
|
||||
Check if the union tag is ``other``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'other'
|
||||
|
||||
def __repr__(self):
|
||||
return 'PollError(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
PollError_validator = bv.Union(PollError)
|
||||
|
||||
AsyncJobId_validator = bv.String(min_length=1)
|
||||
LaunchResultBase._async_job_id_validator = AsyncJobId_validator
|
||||
LaunchResultBase._tagmap = {
|
||||
'async_job_id': LaunchResultBase._async_job_id_validator,
|
||||
}
|
||||
|
||||
LaunchEmptyResult._complete_validator = bv.Void()
|
||||
LaunchEmptyResult._tagmap = {
|
||||
'complete': LaunchEmptyResult._complete_validator,
|
||||
}
|
||||
LaunchEmptyResult._tagmap.update(LaunchResultBase._tagmap)
|
||||
|
||||
LaunchEmptyResult.complete = LaunchEmptyResult('complete')
|
||||
|
||||
PollArg._async_job_id_validator = AsyncJobId_validator
|
||||
PollArg._all_field_names_ = set(['async_job_id'])
|
||||
PollArg._all_fields_ = [('async_job_id', PollArg._async_job_id_validator)]
|
||||
|
||||
PollResultBase._in_progress_validator = bv.Void()
|
||||
PollResultBase._tagmap = {
|
||||
'in_progress': PollResultBase._in_progress_validator,
|
||||
}
|
||||
|
||||
PollResultBase.in_progress = PollResultBase('in_progress')
|
||||
|
||||
PollEmptyResult._complete_validator = bv.Void()
|
||||
PollEmptyResult._tagmap = {
|
||||
'complete': PollEmptyResult._complete_validator,
|
||||
}
|
||||
PollEmptyResult._tagmap.update(PollResultBase._tagmap)
|
||||
|
||||
PollEmptyResult.complete = PollEmptyResult('complete')
|
||||
|
||||
PollError._invalid_async_job_id_validator = bv.Void()
|
||||
PollError._internal_error_validator = bv.Void()
|
||||
PollError._other_validator = bv.Void()
|
||||
PollError._tagmap = {
|
||||
'invalid_async_job_id': PollError._invalid_async_job_id_validator,
|
||||
'internal_error': PollError._internal_error_validator,
|
||||
'other': PollError._other_validator,
|
||||
}
|
||||
|
||||
PollError.invalid_async_job_id = PollError('invalid_async_job_id')
|
||||
PollError.internal_error = PollError('internal_error')
|
||||
PollError.other = PollError('other')
|
||||
|
||||
ROUTES = {
|
||||
}
|
||||
|
||||
# If you have issues importing this module because Python recognizes it as a keyword, use async_ instead.
|
||||
from .async_ import *
|
||||
|
||||
332
resources/lib/dropbox/async_.py
Normal file
@@ -0,0 +1,332 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Auto-generated by Stone, do not modify.
|
||||
# @generated
|
||||
# flake8: noqa
|
||||
# pylint: skip-file
|
||||
try:
|
||||
from . import stone_validators as bv
|
||||
from . import stone_base as bb
|
||||
except (ImportError, SystemError, ValueError):
|
||||
# Catch errors raised when importing a relative module when not in a package.
|
||||
# This makes testing this file directly (outside of a package) easier.
|
||||
import stone_validators as bv
|
||||
import stone_base as bb
|
||||
|
||||
class LaunchResultBase(bb.Union):
|
||||
"""
|
||||
Result returned by methods that launch an asynchronous job. A method who may
|
||||
either launch an asynchronous job, or complete the request synchronously,
|
||||
can use this union by extending it, and adding a 'complete' field with the
|
||||
type of the synchronous response. See :class:`LaunchEmptyResult` for an
|
||||
example.
|
||||
|
||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar str async.LaunchResultBase.async_job_id: This response indicates that
|
||||
the processing is asynchronous. The string is an id that can be used to
|
||||
obtain the status of the asynchronous job.
|
||||
"""
|
||||
|
||||
_catch_all = None
|
||||
|
||||
@classmethod
|
||||
def async_job_id(cls, val):
|
||||
"""
|
||||
Create an instance of this class set to the ``async_job_id`` tag with
|
||||
value ``val``.
|
||||
|
||||
:param str val:
|
||||
:rtype: LaunchResultBase
|
||||
"""
|
||||
return cls('async_job_id', val)
|
||||
|
||||
def is_async_job_id(self):
|
||||
"""
|
||||
Check if the union tag is ``async_job_id``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'async_job_id'
|
||||
|
||||
def get_async_job_id(self):
|
||||
"""
|
||||
This response indicates that the processing is asynchronous. The string
|
||||
is an id that can be used to obtain the status of the asynchronous job.
|
||||
|
||||
Only call this if :meth:`is_async_job_id` is true.
|
||||
|
||||
:rtype: str
|
||||
"""
|
||||
if not self.is_async_job_id():
|
||||
raise AttributeError("tag 'async_job_id' not set")
|
||||
return self._value
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(LaunchResultBase, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'LaunchResultBase(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
LaunchResultBase_validator = bv.Union(LaunchResultBase)
|
||||
|
||||
class LaunchEmptyResult(LaunchResultBase):
|
||||
"""
|
||||
Result returned by methods that may either launch an asynchronous job or
|
||||
complete synchronously. Upon synchronous completion of the job, no
|
||||
additional information is returned.
|
||||
|
||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar async.LaunchEmptyResult.complete: The job finished synchronously and
|
||||
successfully.
|
||||
"""
|
||||
|
||||
# Attribute is overwritten below the class definition
|
||||
complete = None
|
||||
|
||||
def is_complete(self):
|
||||
"""
|
||||
Check if the union tag is ``complete``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'complete'
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(LaunchEmptyResult, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'LaunchEmptyResult(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
LaunchEmptyResult_validator = bv.Union(LaunchEmptyResult)
|
||||
|
||||
class PollArg(bb.Struct):
|
||||
"""
|
||||
Arguments for methods that poll the status of an asynchronous job.
|
||||
|
||||
:ivar async.PollArg.async_job_id: Id of the asynchronous job. This is the
|
||||
value of a response returned from the method that launched the job.
|
||||
"""
|
||||
|
||||
__slots__ = [
|
||||
'_async_job_id_value',
|
||||
'_async_job_id_present',
|
||||
]
|
||||
|
||||
_has_required_fields = True
|
||||
|
||||
def __init__(self,
|
||||
async_job_id=None):
|
||||
self._async_job_id_value = None
|
||||
self._async_job_id_present = False
|
||||
if async_job_id is not None:
|
||||
self.async_job_id = async_job_id
|
||||
|
||||
@property
|
||||
def async_job_id(self):
|
||||
"""
|
||||
Id of the asynchronous job. This is the value of a response returned
|
||||
from the method that launched the job.
|
||||
|
||||
:rtype: str
|
||||
"""
|
||||
if self._async_job_id_present:
|
||||
return self._async_job_id_value
|
||||
else:
|
||||
raise AttributeError("missing required field 'async_job_id'")
|
||||
|
||||
@async_job_id.setter
|
||||
def async_job_id(self, val):
|
||||
val = self._async_job_id_validator.validate(val)
|
||||
self._async_job_id_value = val
|
||||
self._async_job_id_present = True
|
||||
|
||||
@async_job_id.deleter
|
||||
def async_job_id(self):
|
||||
self._async_job_id_value = None
|
||||
self._async_job_id_present = False
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(PollArg, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'PollArg(async_job_id={!r})'.format(
|
||||
self._async_job_id_value,
|
||||
)
|
||||
|
||||
PollArg_validator = bv.Struct(PollArg)
|
||||
|
||||
class PollResultBase(bb.Union):
|
||||
"""
|
||||
Result returned by methods that poll for the status of an asynchronous job.
|
||||
Unions that extend this union should add a 'complete' field with a type of
|
||||
the information returned upon job completion. See :class:`PollEmptyResult`
|
||||
for an example.
|
||||
|
||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar async.PollResultBase.in_progress: The asynchronous job is still in
|
||||
progress.
|
||||
"""
|
||||
|
||||
_catch_all = None
|
||||
# Attribute is overwritten below the class definition
|
||||
in_progress = None
|
||||
|
||||
def is_in_progress(self):
|
||||
"""
|
||||
Check if the union tag is ``in_progress``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'in_progress'
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(PollResultBase, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'PollResultBase(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
PollResultBase_validator = bv.Union(PollResultBase)
|
||||
|
||||
class PollEmptyResult(PollResultBase):
|
||||
"""
|
||||
Result returned by methods that poll for the status of an asynchronous job.
|
||||
Upon completion of the job, no additional information is returned.
|
||||
|
||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar async.PollEmptyResult.complete: The asynchronous job has completed
|
||||
successfully.
|
||||
"""
|
||||
|
||||
# Attribute is overwritten below the class definition
|
||||
complete = None
|
||||
|
||||
def is_complete(self):
|
||||
"""
|
||||
Check if the union tag is ``complete``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'complete'
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(PollEmptyResult, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'PollEmptyResult(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
PollEmptyResult_validator = bv.Union(PollEmptyResult)
|
||||
|
||||
class PollError(bb.Union):
|
||||
"""
|
||||
Error returned by methods for polling the status of asynchronous job.
|
||||
|
||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar async.PollError.invalid_async_job_id: The job ID is invalid.
|
||||
:ivar async.PollError.internal_error: Something went wrong with the job on
|
||||
Dropbox's end. You'll need to verify that the action you were taking
|
||||
succeeded, and if not, try again. This should happen very rarely.
|
||||
"""
|
||||
|
||||
_catch_all = 'other'
|
||||
# Attribute is overwritten below the class definition
|
||||
invalid_async_job_id = None
|
||||
# Attribute is overwritten below the class definition
|
||||
internal_error = None
|
||||
# Attribute is overwritten below the class definition
|
||||
other = None
|
||||
|
||||
def is_invalid_async_job_id(self):
|
||||
"""
|
||||
Check if the union tag is ``invalid_async_job_id``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'invalid_async_job_id'
|
||||
|
||||
def is_internal_error(self):
|
||||
"""
|
||||
Check if the union tag is ``internal_error``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'internal_error'
|
||||
|
||||
def is_other(self):
|
||||
"""
|
||||
Check if the union tag is ``other``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'other'
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(PollError, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'PollError(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
PollError_validator = bv.Union(PollError)
|
||||
|
||||
AsyncJobId_validator = bv.String(min_length=1)
|
||||
LaunchResultBase._async_job_id_validator = AsyncJobId_validator
|
||||
LaunchResultBase._tagmap = {
|
||||
'async_job_id': LaunchResultBase._async_job_id_validator,
|
||||
}
|
||||
|
||||
LaunchEmptyResult._complete_validator = bv.Void()
|
||||
LaunchEmptyResult._tagmap = {
|
||||
'complete': LaunchEmptyResult._complete_validator,
|
||||
}
|
||||
LaunchEmptyResult._tagmap.update(LaunchResultBase._tagmap)
|
||||
|
||||
LaunchEmptyResult.complete = LaunchEmptyResult('complete')
|
||||
|
||||
PollArg._async_job_id_validator = AsyncJobId_validator
|
||||
PollArg._all_field_names_ = set(['async_job_id'])
|
||||
PollArg._all_fields_ = [('async_job_id', PollArg._async_job_id_validator)]
|
||||
|
||||
PollResultBase._in_progress_validator = bv.Void()
|
||||
PollResultBase._tagmap = {
|
||||
'in_progress': PollResultBase._in_progress_validator,
|
||||
}
|
||||
|
||||
PollResultBase.in_progress = PollResultBase('in_progress')
|
||||
|
||||
PollEmptyResult._complete_validator = bv.Void()
|
||||
PollEmptyResult._tagmap = {
|
||||
'complete': PollEmptyResult._complete_validator,
|
||||
}
|
||||
PollEmptyResult._tagmap.update(PollResultBase._tagmap)
|
||||
|
||||
PollEmptyResult.complete = PollEmptyResult('complete')
|
||||
|
||||
PollError._invalid_async_job_id_validator = bv.Void()
|
||||
PollError._internal_error_validator = bv.Void()
|
||||
PollError._other_validator = bv.Void()
|
||||
PollError._tagmap = {
|
||||
'invalid_async_job_id': PollError._invalid_async_job_id_validator,
|
||||
'internal_error': PollError._internal_error_validator,
|
||||
'other': PollError._other_validator,
|
||||
}
|
||||
|
||||
PollError.invalid_async_job_id = PollError('invalid_async_job_id')
|
||||
PollError.internal_error = PollError('internal_error')
|
||||
PollError.other = PollError('other')
|
||||
|
||||
ROUTES = {
|
||||
}
|
||||
|
||||
@@ -1,11 +1,12 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Auto-generated by Stone, do not modify.
|
||||
# @generated
|
||||
# flake8: noqa
|
||||
# pylint: skip-file
|
||||
try:
|
||||
from . import stone_validators as bv
|
||||
from . import stone_base as bb
|
||||
except (SystemError, ValueError):
|
||||
except (ImportError, SystemError, ValueError):
|
||||
# Catch errors raised when importing a relative module when not in a package.
|
||||
# This makes testing this file directly (outside of a package) easier.
|
||||
import stone_validators as bv
|
||||
@@ -20,10 +21,10 @@ class AccessError(bb.Union):
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar InvalidAccountTypeError invalid_account_type: Current account type
|
||||
cannot access the resource.
|
||||
:ivar PaperAccessError paper_access_denied: Current account cannot access
|
||||
Paper.
|
||||
:ivar InvalidAccountTypeError AccessError.invalid_account_type: Current
|
||||
account type cannot access the resource.
|
||||
:ivar PaperAccessError AccessError.paper_access_denied: Current account
|
||||
cannot access Paper.
|
||||
"""
|
||||
|
||||
_catch_all = 'other'
|
||||
@@ -100,6 +101,9 @@ class AccessError(bb.Union):
|
||||
raise AttributeError("tag 'paper_access_denied' not set")
|
||||
return self._value
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(AccessError, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'AccessError(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
@@ -113,12 +117,15 @@ class AuthError(bb.Union):
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar invalid_access_token: The access token is invalid.
|
||||
:ivar invalid_select_user: The user specified in 'Dropbox-API-Select-User'
|
||||
is no longer on the team.
|
||||
:ivar invalid_select_admin: The user specified in 'Dropbox-API-Select-Admin'
|
||||
is not a Dropbox Business team admin.
|
||||
:ivar user_suspended: The user has been suspended.
|
||||
:ivar auth.AuthError.invalid_access_token: The access token is invalid.
|
||||
:ivar auth.AuthError.invalid_select_user: The user specified in
|
||||
'Dropbox-API-Select-User' is no longer on the team.
|
||||
:ivar auth.AuthError.invalid_select_admin: The user specified in
|
||||
'Dropbox-API-Select-Admin' is not a Dropbox Business team admin.
|
||||
:ivar auth.AuthError.user_suspended: The user has been suspended.
|
||||
:ivar auth.AuthError.expired_access_token: The access token has expired.
|
||||
:ivar TokenScopeError AuthError.missing_scope: The access token does not
|
||||
have the required scope to access the route.
|
||||
"""
|
||||
|
||||
_catch_all = 'other'
|
||||
@@ -131,8 +138,21 @@ class AuthError(bb.Union):
|
||||
# Attribute is overwritten below the class definition
|
||||
user_suspended = None
|
||||
# Attribute is overwritten below the class definition
|
||||
expired_access_token = None
|
||||
# Attribute is overwritten below the class definition
|
||||
other = None
|
||||
|
||||
@classmethod
|
||||
def missing_scope(cls, val):
|
||||
"""
|
||||
Create an instance of this class set to the ``missing_scope`` tag with
|
||||
value ``val``.
|
||||
|
||||
:param TokenScopeError val:
|
||||
:rtype: AuthError
|
||||
"""
|
||||
return cls('missing_scope', val)
|
||||
|
||||
def is_invalid_access_token(self):
|
||||
"""
|
||||
Check if the union tag is ``invalid_access_token``.
|
||||
@@ -165,6 +185,22 @@ class AuthError(bb.Union):
|
||||
"""
|
||||
return self._tag == 'user_suspended'
|
||||
|
||||
def is_expired_access_token(self):
|
||||
"""
|
||||
Check if the union tag is ``expired_access_token``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'expired_access_token'
|
||||
|
||||
def is_missing_scope(self):
|
||||
"""
|
||||
Check if the union tag is ``missing_scope``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'missing_scope'
|
||||
|
||||
def is_other(self):
|
||||
"""
|
||||
Check if the union tag is ``other``.
|
||||
@@ -173,6 +209,21 @@ class AuthError(bb.Union):
|
||||
"""
|
||||
return self._tag == 'other'
|
||||
|
||||
def get_missing_scope(self):
|
||||
"""
|
||||
The access token does not have the required scope to access the route.
|
||||
|
||||
Only call this if :meth:`is_missing_scope` is true.
|
||||
|
||||
:rtype: TokenScopeError
|
||||
"""
|
||||
if not self.is_missing_scope():
|
||||
raise AttributeError("tag 'missing_scope' not set")
|
||||
return self._value
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(AuthError, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'AuthError(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
@@ -184,10 +235,10 @@ class InvalidAccountTypeError(bb.Union):
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar endpoint: Current account type doesn't have permission to access this
|
||||
route endpoint.
|
||||
:ivar feature: Current account type doesn't have permission to access this
|
||||
feature.
|
||||
:ivar auth.InvalidAccountTypeError.endpoint: Current account type doesn't
|
||||
have permission to access this route endpoint.
|
||||
:ivar auth.InvalidAccountTypeError.feature: Current account type doesn't
|
||||
have permission to access this feature.
|
||||
"""
|
||||
|
||||
_catch_all = 'other'
|
||||
@@ -222,6 +273,9 @@ class InvalidAccountTypeError(bb.Union):
|
||||
"""
|
||||
return self._tag == 'other'
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(InvalidAccountTypeError, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'InvalidAccountTypeError(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
@@ -233,8 +287,9 @@ class PaperAccessError(bb.Union):
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar paper_disabled: Paper is disabled.
|
||||
:ivar not_paper_user: The provided user has not used Paper yet.
|
||||
:ivar auth.PaperAccessError.paper_disabled: Paper is disabled.
|
||||
:ivar auth.PaperAccessError.not_paper_user: The provided user has not used
|
||||
Paper yet.
|
||||
"""
|
||||
|
||||
_catch_all = 'other'
|
||||
@@ -269,18 +324,22 @@ class PaperAccessError(bb.Union):
|
||||
"""
|
||||
return self._tag == 'other'
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(PaperAccessError, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'PaperAccessError(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
PaperAccessError_validator = bv.Union(PaperAccessError)
|
||||
|
||||
class RateLimitError(object):
|
||||
class RateLimitError(bb.Struct):
|
||||
"""
|
||||
Error occurred because the app is being rate limited.
|
||||
|
||||
:ivar reason: The reason why the app is being rate limited.
|
||||
:ivar retry_after: The number of seconds that the app should wait before
|
||||
making another request.
|
||||
:ivar auth.RateLimitError.reason: The reason why the app is being rate
|
||||
limited.
|
||||
:ivar auth.RateLimitError.retry_after: The number of seconds that the app
|
||||
should wait before making another request.
|
||||
"""
|
||||
|
||||
__slots__ = [
|
||||
@@ -333,7 +392,7 @@ class RateLimitError(object):
|
||||
The number of seconds that the app should wait before making another
|
||||
request.
|
||||
|
||||
:rtype: long
|
||||
:rtype: int
|
||||
"""
|
||||
if self._retry_after_present:
|
||||
return self._retry_after_value
|
||||
@@ -351,6 +410,9 @@ class RateLimitError(object):
|
||||
self._retry_after_value = None
|
||||
self._retry_after_present = False
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(RateLimitError, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'RateLimitError(reason={!r}, retry_after={!r})'.format(
|
||||
self._reason_value,
|
||||
@@ -365,10 +427,10 @@ class RateLimitReason(bb.Union):
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar too_many_requests: You are making too many requests in the past few
|
||||
minutes.
|
||||
:ivar too_many_write_operations: There are currently too many write
|
||||
operations happening in the user's Dropbox.
|
||||
:ivar auth.RateLimitReason.too_many_requests: You are making too many
|
||||
requests in the past few minutes.
|
||||
:ivar auth.RateLimitReason.too_many_write_operations: There are currently
|
||||
too many write operations happening in the user's Dropbox.
|
||||
"""
|
||||
|
||||
_catch_all = 'other'
|
||||
@@ -403,16 +465,20 @@ class RateLimitReason(bb.Union):
|
||||
"""
|
||||
return self._tag == 'other'
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(RateLimitReason, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'RateLimitReason(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
RateLimitReason_validator = bv.Union(RateLimitReason)
|
||||
|
||||
class TokenFromOAuth1Arg(object):
|
||||
class TokenFromOAuth1Arg(bb.Struct):
|
||||
"""
|
||||
:ivar oauth1_token: The supplied OAuth 1.0 access token.
|
||||
:ivar oauth1_token_secret: The token secret associated with the supplied
|
||||
access token.
|
||||
:ivar auth.TokenFromOAuth1Arg.oauth1_token: The supplied OAuth 1.0 access
|
||||
token.
|
||||
:ivar auth.TokenFromOAuth1Arg.oauth1_token_secret: The token secret
|
||||
associated with the supplied access token.
|
||||
"""
|
||||
|
||||
__slots__ = [
|
||||
@@ -482,6 +548,9 @@ class TokenFromOAuth1Arg(object):
|
||||
self._oauth1_token_secret_value = None
|
||||
self._oauth1_token_secret_present = False
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(TokenFromOAuth1Arg, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'TokenFromOAuth1Arg(oauth1_token={!r}, oauth1_token_secret={!r})'.format(
|
||||
self._oauth1_token_value,
|
||||
@@ -496,10 +565,10 @@ class TokenFromOAuth1Error(bb.Union):
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar invalid_oauth1_token_info: Part or all of the OAuth 1.0 access token
|
||||
info is invalid.
|
||||
:ivar app_id_mismatch: The authorized app does not match the app associated
|
||||
with the supplied access token.
|
||||
:ivar auth.TokenFromOAuth1Error.invalid_oauth1_token_info: Part or all of
|
||||
the OAuth 1.0 access token info is invalid.
|
||||
:ivar auth.TokenFromOAuth1Error.app_id_mismatch: The authorized app does not
|
||||
match the app associated with the supplied access token.
|
||||
"""
|
||||
|
||||
_catch_all = 'other'
|
||||
@@ -534,15 +603,18 @@ class TokenFromOAuth1Error(bb.Union):
|
||||
"""
|
||||
return self._tag == 'other'
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(TokenFromOAuth1Error, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'TokenFromOAuth1Error(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
TokenFromOAuth1Error_validator = bv.Union(TokenFromOAuth1Error)
|
||||
|
||||
class TokenFromOAuth1Result(object):
|
||||
class TokenFromOAuth1Result(bb.Struct):
|
||||
"""
|
||||
:ivar oauth2_token: The OAuth 2.0 token generated from the supplied OAuth
|
||||
1.0 token.
|
||||
:ivar auth.TokenFromOAuth1Result.oauth2_token: The OAuth 2.0 token generated
|
||||
from the supplied OAuth 1.0 token.
|
||||
"""
|
||||
|
||||
__slots__ = [
|
||||
@@ -582,6 +654,9 @@ class TokenFromOAuth1Result(object):
|
||||
self._oauth2_token_value = None
|
||||
self._oauth2_token_present = False
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(TokenFromOAuth1Result, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'TokenFromOAuth1Result(oauth2_token={!r})'.format(
|
||||
self._oauth2_token_value,
|
||||
@@ -589,6 +664,59 @@ class TokenFromOAuth1Result(object):
|
||||
|
||||
TokenFromOAuth1Result_validator = bv.Struct(TokenFromOAuth1Result)
|
||||
|
||||
class TokenScopeError(bb.Struct):
|
||||
"""
|
||||
:ivar auth.TokenScopeError.required_scope: The required scope to access the
|
||||
route.
|
||||
"""
|
||||
|
||||
__slots__ = [
|
||||
'_required_scope_value',
|
||||
'_required_scope_present',
|
||||
]
|
||||
|
||||
_has_required_fields = True
|
||||
|
||||
def __init__(self,
|
||||
required_scope=None):
|
||||
self._required_scope_value = None
|
||||
self._required_scope_present = False
|
||||
if required_scope is not None:
|
||||
self.required_scope = required_scope
|
||||
|
||||
@property
|
||||
def required_scope(self):
|
||||
"""
|
||||
The required scope to access the route.
|
||||
|
||||
:rtype: str
|
||||
"""
|
||||
if self._required_scope_present:
|
||||
return self._required_scope_value
|
||||
else:
|
||||
raise AttributeError("missing required field 'required_scope'")
|
||||
|
||||
@required_scope.setter
|
||||
def required_scope(self, val):
|
||||
val = self._required_scope_validator.validate(val)
|
||||
self._required_scope_value = val
|
||||
self._required_scope_present = True
|
||||
|
||||
@required_scope.deleter
|
||||
def required_scope(self):
|
||||
self._required_scope_value = None
|
||||
self._required_scope_present = False
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(TokenScopeError, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'TokenScopeError(required_scope={!r})'.format(
|
||||
self._required_scope_value,
|
||||
)
|
||||
|
||||
TokenScopeError_validator = bv.Struct(TokenScopeError)
|
||||
|
||||
AccessError._invalid_account_type_validator = InvalidAccountTypeError_validator
|
||||
AccessError._paper_access_denied_validator = PaperAccessError_validator
|
||||
AccessError._other_validator = bv.Void()
|
||||
@@ -604,12 +732,16 @@ AuthError._invalid_access_token_validator = bv.Void()
|
||||
AuthError._invalid_select_user_validator = bv.Void()
|
||||
AuthError._invalid_select_admin_validator = bv.Void()
|
||||
AuthError._user_suspended_validator = bv.Void()
|
||||
AuthError._expired_access_token_validator = bv.Void()
|
||||
AuthError._missing_scope_validator = TokenScopeError_validator
|
||||
AuthError._other_validator = bv.Void()
|
||||
AuthError._tagmap = {
|
||||
'invalid_access_token': AuthError._invalid_access_token_validator,
|
||||
'invalid_select_user': AuthError._invalid_select_user_validator,
|
||||
'invalid_select_admin': AuthError._invalid_select_admin_validator,
|
||||
'user_suspended': AuthError._user_suspended_validator,
|
||||
'expired_access_token': AuthError._expired_access_token_validator,
|
||||
'missing_scope': AuthError._missing_scope_validator,
|
||||
'other': AuthError._other_validator,
|
||||
}
|
||||
|
||||
@@ -617,6 +749,7 @@ AuthError.invalid_access_token = AuthError('invalid_access_token')
|
||||
AuthError.invalid_select_user = AuthError('invalid_select_user')
|
||||
AuthError.invalid_select_admin = AuthError('invalid_select_admin')
|
||||
AuthError.user_suspended = AuthError('user_suspended')
|
||||
AuthError.expired_access_token = AuthError('expired_access_token')
|
||||
AuthError.other = AuthError('other')
|
||||
|
||||
InvalidAccountTypeError._endpoint_validator = bv.Void()
|
||||
@@ -697,8 +830,13 @@ TokenFromOAuth1Result._oauth2_token_validator = bv.String(min_length=1)
|
||||
TokenFromOAuth1Result._all_field_names_ = set(['oauth2_token'])
|
||||
TokenFromOAuth1Result._all_fields_ = [('oauth2_token', TokenFromOAuth1Result._oauth2_token_validator)]
|
||||
|
||||
TokenScopeError._required_scope_validator = bv.String()
|
||||
TokenScopeError._all_field_names_ = set(['required_scope'])
|
||||
TokenScopeError._all_fields_ = [('required_scope', TokenScopeError._required_scope_validator)]
|
||||
|
||||
token_from_oauth1 = bb.Route(
|
||||
'token/from_oauth1',
|
||||
1,
|
||||
False,
|
||||
TokenFromOAuth1Arg_validator,
|
||||
TokenFromOAuth1Result_validator,
|
||||
@@ -708,6 +846,7 @@ token_from_oauth1 = bb.Route(
|
||||
)
|
||||
token_revoke = bb.Route(
|
||||
'token/revoke',
|
||||
1,
|
||||
False,
|
||||
bv.Void(),
|
||||
bv.Void(),
|
||||
|
||||
@@ -1,110 +1,50 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Auto-generated by Stone, do not modify.
|
||||
# @generated
|
||||
# flake8: noqa
|
||||
# pylint: skip-file
|
||||
try:
|
||||
from . import stone_validators as bv
|
||||
from . import stone_base as bb
|
||||
except (SystemError, ValueError):
|
||||
except (ImportError, SystemError, ValueError):
|
||||
# Catch errors raised when importing a relative module when not in a package.
|
||||
# This makes testing this file directly (outside of a package) easier.
|
||||
import stone_validators as bv
|
||||
import stone_base as bb
|
||||
|
||||
class InvalidPathRootError(object):
|
||||
"""
|
||||
:ivar path_root: The latest path root id for user's team if the user is
|
||||
still in a team.
|
||||
"""
|
||||
|
||||
__slots__ = [
|
||||
'_path_root_value',
|
||||
'_path_root_present',
|
||||
]
|
||||
|
||||
_has_required_fields = False
|
||||
|
||||
def __init__(self,
|
||||
path_root=None):
|
||||
self._path_root_value = None
|
||||
self._path_root_present = False
|
||||
if path_root is not None:
|
||||
self.path_root = path_root
|
||||
|
||||
@property
|
||||
def path_root(self):
|
||||
"""
|
||||
The latest path root id for user's team if the user is still in a team.
|
||||
|
||||
:rtype: str
|
||||
"""
|
||||
if self._path_root_present:
|
||||
return self._path_root_value
|
||||
else:
|
||||
return None
|
||||
|
||||
@path_root.setter
|
||||
def path_root(self, val):
|
||||
if val is None:
|
||||
del self.path_root
|
||||
return
|
||||
val = self._path_root_validator.validate(val)
|
||||
self._path_root_value = val
|
||||
self._path_root_present = True
|
||||
|
||||
@path_root.deleter
|
||||
def path_root(self):
|
||||
self._path_root_value = None
|
||||
self._path_root_present = False
|
||||
|
||||
def __repr__(self):
|
||||
return 'InvalidPathRootError(path_root={!r})'.format(
|
||||
self._path_root_value,
|
||||
)
|
||||
|
||||
InvalidPathRootError_validator = bv.Struct(InvalidPathRootError)
|
||||
|
||||
class PathRoot(bb.Union):
|
||||
"""
|
||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar home: Paths are relative to the authenticating user's home directory,
|
||||
whether or not that user belongs to a team.
|
||||
:ivar member_home: Paths are relative to the authenticating team member's
|
||||
home directory. (This results in ``PathRootError.invalid`` if the user
|
||||
does not belong to a team.).
|
||||
:ivar str team: Paths are relative to the given team directory. (This
|
||||
results in :field:`PathRootError.invalid` if the user is not a member of
|
||||
the team associated with that path root id.).
|
||||
:ivar user_home: Paths are relative to the user's home directory. (This
|
||||
results in ``PathRootError.invalid`` if the belongs to a team.).
|
||||
:ivar str namespace_id: Paths are relative to given namespace id (This
|
||||
results in :field:`PathRootError.no_permission` if you don't have access
|
||||
to this namespace.).
|
||||
:ivar common.PathRoot.home: Paths are relative to the authenticating user's
|
||||
home namespace, whether or not that user belongs to a team.
|
||||
:ivar str common.PathRoot.root: Paths are relative to the authenticating
|
||||
user's root namespace (This results in
|
||||
:field:`PathRootError.invalid_root` if the user's root namespace has
|
||||
changed.).
|
||||
:ivar str common.PathRoot.namespace_id: Paths are relative to given
|
||||
namespace id (This results in :field:`PathRootError.no_permission` if
|
||||
you don't have access to this namespace.).
|
||||
"""
|
||||
|
||||
_catch_all = 'other'
|
||||
# Attribute is overwritten below the class definition
|
||||
home = None
|
||||
# Attribute is overwritten below the class definition
|
||||
member_home = None
|
||||
# Attribute is overwritten below the class definition
|
||||
user_home = None
|
||||
# Attribute is overwritten below the class definition
|
||||
other = None
|
||||
|
||||
@classmethod
|
||||
def team(cls, val):
|
||||
def root(cls, val):
|
||||
"""
|
||||
Create an instance of this class set to the ``team`` tag with value
|
||||
Create an instance of this class set to the ``root`` tag with value
|
||||
``val``.
|
||||
|
||||
:param str val:
|
||||
:rtype: PathRoot
|
||||
"""
|
||||
return cls('team', val)
|
||||
return cls('root', val)
|
||||
|
||||
@classmethod
|
||||
def namespace_id(cls, val):
|
||||
@@ -125,29 +65,13 @@ class PathRoot(bb.Union):
|
||||
"""
|
||||
return self._tag == 'home'
|
||||
|
||||
def is_member_home(self):
|
||||
def is_root(self):
|
||||
"""
|
||||
Check if the union tag is ``member_home``.
|
||||
Check if the union tag is ``root``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'member_home'
|
||||
|
||||
def is_team(self):
|
||||
"""
|
||||
Check if the union tag is ``team``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'team'
|
||||
|
||||
def is_user_home(self):
|
||||
"""
|
||||
Check if the union tag is ``user_home``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'user_home'
|
||||
return self._tag == 'root'
|
||||
|
||||
def is_namespace_id(self):
|
||||
"""
|
||||
@@ -165,18 +89,18 @@ class PathRoot(bb.Union):
|
||||
"""
|
||||
return self._tag == 'other'
|
||||
|
||||
def get_team(self):
|
||||
def get_root(self):
|
||||
"""
|
||||
Paths are relative to the given team directory. (This results in
|
||||
``PathRootError.invalid`` if the user is not a member of the team
|
||||
associated with that path root id.).
|
||||
Paths are relative to the authenticating user's root namespace (This
|
||||
results in ``PathRootError.invalid_root`` if the user's root namespace
|
||||
has changed.).
|
||||
|
||||
Only call this if :meth:`is_team` is true.
|
||||
Only call this if :meth:`is_root` is true.
|
||||
|
||||
:rtype: str
|
||||
"""
|
||||
if not self.is_team():
|
||||
raise AttributeError("tag 'team' not set")
|
||||
if not self.is_root():
|
||||
raise AttributeError("tag 'root' not set")
|
||||
return self._value
|
||||
|
||||
def get_namespace_id(self):
|
||||
@@ -193,6 +117,9 @@ class PathRoot(bb.Union):
|
||||
raise AttributeError("tag 'namespace_id' not set")
|
||||
return self._value
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(PathRoot, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'PathRoot(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
@@ -204,10 +131,11 @@ class PathRootError(bb.Union):
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar InvalidPathRootError invalid: The path root id value in
|
||||
Dropbox-API-Path-Root header is no longer valid.
|
||||
:ivar no_permission: You don't have permission to access the path root id in
|
||||
Dropbox-API-Path-Root header.
|
||||
:ivar RootInfo PathRootError.invalid_root: The root namespace id in
|
||||
Dropbox-API-Path-Root header is not valid. The value of this error is
|
||||
use's latest root info.
|
||||
:ivar common.PathRootError.no_permission: You don't have permission to
|
||||
access the namespace id in Dropbox-API-Path-Root header.
|
||||
"""
|
||||
|
||||
_catch_all = 'other'
|
||||
@@ -217,23 +145,23 @@ class PathRootError(bb.Union):
|
||||
other = None
|
||||
|
||||
@classmethod
|
||||
def invalid(cls, val):
|
||||
def invalid_root(cls, val):
|
||||
"""
|
||||
Create an instance of this class set to the ``invalid`` tag with value
|
||||
``val``.
|
||||
Create an instance of this class set to the ``invalid_root`` tag with
|
||||
value ``val``.
|
||||
|
||||
:param InvalidPathRootError val:
|
||||
:param RootInfo val:
|
||||
:rtype: PathRootError
|
||||
"""
|
||||
return cls('invalid', val)
|
||||
return cls('invalid_root', val)
|
||||
|
||||
def is_invalid(self):
|
||||
def is_invalid_root(self):
|
||||
"""
|
||||
Check if the union tag is ``invalid``.
|
||||
Check if the union tag is ``invalid_root``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'invalid'
|
||||
return self._tag == 'invalid_root'
|
||||
|
||||
def is_no_permission(self):
|
||||
"""
|
||||
@@ -251,66 +179,239 @@ class PathRootError(bb.Union):
|
||||
"""
|
||||
return self._tag == 'other'
|
||||
|
||||
def get_invalid(self):
|
||||
def get_invalid_root(self):
|
||||
"""
|
||||
The path root id value in Dropbox-API-Path-Root header is no longer
|
||||
valid.
|
||||
The root namespace id in Dropbox-API-Path-Root header is not valid. The
|
||||
value of this error is use's latest root info.
|
||||
|
||||
Only call this if :meth:`is_invalid` is true.
|
||||
Only call this if :meth:`is_invalid_root` is true.
|
||||
|
||||
:rtype: InvalidPathRootError
|
||||
:rtype: RootInfo
|
||||
"""
|
||||
if not self.is_invalid():
|
||||
raise AttributeError("tag 'invalid' not set")
|
||||
if not self.is_invalid_root():
|
||||
raise AttributeError("tag 'invalid_root' not set")
|
||||
return self._value
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(PathRootError, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'PathRootError(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
PathRootError_validator = bv.Union(PathRootError)
|
||||
|
||||
class RootInfo(bb.Struct):
|
||||
"""
|
||||
Information about current user's root.
|
||||
|
||||
:ivar common.RootInfo.root_namespace_id: The namespace ID for user's root
|
||||
namespace. It will be the namespace ID of the shared team root if the
|
||||
user is member of a team with a separate team root. Otherwise it will be
|
||||
same as ``RootInfo.home_namespace_id``.
|
||||
:ivar common.RootInfo.home_namespace_id: The namespace ID for user's home
|
||||
namespace.
|
||||
"""
|
||||
|
||||
__slots__ = [
|
||||
'_root_namespace_id_value',
|
||||
'_root_namespace_id_present',
|
||||
'_home_namespace_id_value',
|
||||
'_home_namespace_id_present',
|
||||
]
|
||||
|
||||
_has_required_fields = True
|
||||
|
||||
def __init__(self,
|
||||
root_namespace_id=None,
|
||||
home_namespace_id=None):
|
||||
self._root_namespace_id_value = None
|
||||
self._root_namespace_id_present = False
|
||||
self._home_namespace_id_value = None
|
||||
self._home_namespace_id_present = False
|
||||
if root_namespace_id is not None:
|
||||
self.root_namespace_id = root_namespace_id
|
||||
if home_namespace_id is not None:
|
||||
self.home_namespace_id = home_namespace_id
|
||||
|
||||
@property
|
||||
def root_namespace_id(self):
|
||||
"""
|
||||
The namespace ID for user's root namespace. It will be the namespace ID
|
||||
of the shared team root if the user is member of a team with a separate
|
||||
team root. Otherwise it will be same as ``RootInfo.home_namespace_id``.
|
||||
|
||||
:rtype: str
|
||||
"""
|
||||
if self._root_namespace_id_present:
|
||||
return self._root_namespace_id_value
|
||||
else:
|
||||
raise AttributeError("missing required field 'root_namespace_id'")
|
||||
|
||||
@root_namespace_id.setter
|
||||
def root_namespace_id(self, val):
|
||||
val = self._root_namespace_id_validator.validate(val)
|
||||
self._root_namespace_id_value = val
|
||||
self._root_namespace_id_present = True
|
||||
|
||||
@root_namespace_id.deleter
|
||||
def root_namespace_id(self):
|
||||
self._root_namespace_id_value = None
|
||||
self._root_namespace_id_present = False
|
||||
|
||||
@property
|
||||
def home_namespace_id(self):
|
||||
"""
|
||||
The namespace ID for user's home namespace.
|
||||
|
||||
:rtype: str
|
||||
"""
|
||||
if self._home_namespace_id_present:
|
||||
return self._home_namespace_id_value
|
||||
else:
|
||||
raise AttributeError("missing required field 'home_namespace_id'")
|
||||
|
||||
@home_namespace_id.setter
|
||||
def home_namespace_id(self, val):
|
||||
val = self._home_namespace_id_validator.validate(val)
|
||||
self._home_namespace_id_value = val
|
||||
self._home_namespace_id_present = True
|
||||
|
||||
@home_namespace_id.deleter
|
||||
def home_namespace_id(self):
|
||||
self._home_namespace_id_value = None
|
||||
self._home_namespace_id_present = False
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(RootInfo, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'RootInfo(root_namespace_id={!r}, home_namespace_id={!r})'.format(
|
||||
self._root_namespace_id_value,
|
||||
self._home_namespace_id_value,
|
||||
)
|
||||
|
||||
RootInfo_validator = bv.StructTree(RootInfo)
|
||||
|
||||
class TeamRootInfo(RootInfo):
|
||||
"""
|
||||
Root info when user is member of a team with a separate root namespace ID.
|
||||
|
||||
:ivar common.TeamRootInfo.home_path: The path for user's home directory
|
||||
under the shared team root.
|
||||
"""
|
||||
|
||||
__slots__ = [
|
||||
'_home_path_value',
|
||||
'_home_path_present',
|
||||
]
|
||||
|
||||
_has_required_fields = True
|
||||
|
||||
def __init__(self,
|
||||
root_namespace_id=None,
|
||||
home_namespace_id=None,
|
||||
home_path=None):
|
||||
super(TeamRootInfo, self).__init__(root_namespace_id,
|
||||
home_namespace_id)
|
||||
self._home_path_value = None
|
||||
self._home_path_present = False
|
||||
if home_path is not None:
|
||||
self.home_path = home_path
|
||||
|
||||
@property
|
||||
def home_path(self):
|
||||
"""
|
||||
The path for user's home directory under the shared team root.
|
||||
|
||||
:rtype: str
|
||||
"""
|
||||
if self._home_path_present:
|
||||
return self._home_path_value
|
||||
else:
|
||||
raise AttributeError("missing required field 'home_path'")
|
||||
|
||||
@home_path.setter
|
||||
def home_path(self, val):
|
||||
val = self._home_path_validator.validate(val)
|
||||
self._home_path_value = val
|
||||
self._home_path_present = True
|
||||
|
||||
@home_path.deleter
|
||||
def home_path(self):
|
||||
self._home_path_value = None
|
||||
self._home_path_present = False
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(TeamRootInfo, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'TeamRootInfo(root_namespace_id={!r}, home_namespace_id={!r}, home_path={!r})'.format(
|
||||
self._root_namespace_id_value,
|
||||
self._home_namespace_id_value,
|
||||
self._home_path_value,
|
||||
)
|
||||
|
||||
TeamRootInfo_validator = bv.Struct(TeamRootInfo)
|
||||
|
||||
class UserRootInfo(RootInfo):
|
||||
"""
|
||||
Root info when user is not member of a team or the user is a member of a
|
||||
team and the team does not have a separate root namespace.
|
||||
"""
|
||||
|
||||
__slots__ = [
|
||||
]
|
||||
|
||||
_has_required_fields = True
|
||||
|
||||
def __init__(self,
|
||||
root_namespace_id=None,
|
||||
home_namespace_id=None):
|
||||
super(UserRootInfo, self).__init__(root_namespace_id,
|
||||
home_namespace_id)
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(UserRootInfo, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'UserRootInfo(root_namespace_id={!r}, home_namespace_id={!r})'.format(
|
||||
self._root_namespace_id_value,
|
||||
self._home_namespace_id_value,
|
||||
)
|
||||
|
||||
UserRootInfo_validator = bv.Struct(UserRootInfo)
|
||||
|
||||
Date_validator = bv.Timestamp(u'%Y-%m-%d')
|
||||
DisplayName_validator = bv.String(min_length=1, pattern=u'[^/:?*<>"|]*')
|
||||
DisplayNameLegacy_validator = bv.String(min_length=1)
|
||||
DisplayNameLegacy_validator = bv.String()
|
||||
DropboxTimestamp_validator = bv.Timestamp(u'%Y-%m-%dT%H:%M:%SZ')
|
||||
EmailAddress_validator = bv.String(max_length=255, pattern=u"^['&A-Za-z0-9._%+-]+@[A-Za-z0-9-][A-Za-z0-9.-]*.[A-Za-z]{2,15}$")
|
||||
EmailAddress_validator = bv.String(max_length=255, pattern=u"^['&A-Za-z0-9._%+-]+@[A-Za-z0-9-][A-Za-z0-9.-]*\\.[A-Za-z]{2,15}$")
|
||||
# A ISO639-1 code.
|
||||
LanguageCode_validator = bv.String(min_length=2)
|
||||
NamePart_validator = bv.String(min_length=1, max_length=100, pattern=u'[^/:?*<>"|]*')
|
||||
NamespaceId_validator = bv.String(pattern=u'[-_0-9a-zA-Z:]+')
|
||||
OptionalNamePart_validator = bv.String(max_length=100, pattern=u'[^/:?*<>"|]*')
|
||||
PathRootId_validator = NamespaceId_validator
|
||||
SessionId_validator = bv.String()
|
||||
SharedFolderId_validator = NamespaceId_validator
|
||||
InvalidPathRootError._path_root_validator = bv.Nullable(PathRootId_validator)
|
||||
InvalidPathRootError._all_field_names_ = set(['path_root'])
|
||||
InvalidPathRootError._all_fields_ = [('path_root', InvalidPathRootError._path_root_validator)]
|
||||
|
||||
PathRoot._home_validator = bv.Void()
|
||||
PathRoot._member_home_validator = bv.Void()
|
||||
PathRoot._team_validator = PathRootId_validator
|
||||
PathRoot._user_home_validator = bv.Void()
|
||||
PathRoot._namespace_id_validator = PathRootId_validator
|
||||
PathRoot._root_validator = NamespaceId_validator
|
||||
PathRoot._namespace_id_validator = NamespaceId_validator
|
||||
PathRoot._other_validator = bv.Void()
|
||||
PathRoot._tagmap = {
|
||||
'home': PathRoot._home_validator,
|
||||
'member_home': PathRoot._member_home_validator,
|
||||
'team': PathRoot._team_validator,
|
||||
'user_home': PathRoot._user_home_validator,
|
||||
'root': PathRoot._root_validator,
|
||||
'namespace_id': PathRoot._namespace_id_validator,
|
||||
'other': PathRoot._other_validator,
|
||||
}
|
||||
|
||||
PathRoot.home = PathRoot('home')
|
||||
PathRoot.member_home = PathRoot('member_home')
|
||||
PathRoot.user_home = PathRoot('user_home')
|
||||
PathRoot.other = PathRoot('other')
|
||||
|
||||
PathRootError._invalid_validator = InvalidPathRootError_validator
|
||||
PathRootError._invalid_root_validator = RootInfo_validator
|
||||
PathRootError._no_permission_validator = bv.Void()
|
||||
PathRootError._other_validator = bv.Void()
|
||||
PathRootError._tagmap = {
|
||||
'invalid': PathRootError._invalid_validator,
|
||||
'invalid_root': PathRootError._invalid_root_validator,
|
||||
'no_permission': PathRootError._no_permission_validator,
|
||||
'other': PathRootError._other_validator,
|
||||
}
|
||||
@@ -318,6 +419,40 @@ PathRootError._tagmap = {
|
||||
PathRootError.no_permission = PathRootError('no_permission')
|
||||
PathRootError.other = PathRootError('other')
|
||||
|
||||
RootInfo._root_namespace_id_validator = NamespaceId_validator
|
||||
RootInfo._home_namespace_id_validator = NamespaceId_validator
|
||||
RootInfo._field_names_ = set([
|
||||
'root_namespace_id',
|
||||
'home_namespace_id',
|
||||
])
|
||||
RootInfo._all_field_names_ = RootInfo._field_names_
|
||||
RootInfo._fields_ = [
|
||||
('root_namespace_id', RootInfo._root_namespace_id_validator),
|
||||
('home_namespace_id', RootInfo._home_namespace_id_validator),
|
||||
]
|
||||
RootInfo._all_fields_ = RootInfo._fields_
|
||||
|
||||
RootInfo._tag_to_subtype_ = {
|
||||
(u'team',): TeamRootInfo_validator,
|
||||
(u'user',): UserRootInfo_validator,
|
||||
}
|
||||
RootInfo._pytype_to_tag_and_subtype_ = {
|
||||
TeamRootInfo: ((u'team',), TeamRootInfo_validator),
|
||||
UserRootInfo: ((u'user',), UserRootInfo_validator),
|
||||
}
|
||||
RootInfo._is_catch_all_ = True
|
||||
|
||||
TeamRootInfo._home_path_validator = bv.String()
|
||||
TeamRootInfo._field_names_ = set(['home_path'])
|
||||
TeamRootInfo._all_field_names_ = RootInfo._all_field_names_.union(TeamRootInfo._field_names_)
|
||||
TeamRootInfo._fields_ = [('home_path', TeamRootInfo._home_path_validator)]
|
||||
TeamRootInfo._all_fields_ = RootInfo._all_fields_ + TeamRootInfo._fields_
|
||||
|
||||
UserRootInfo._field_names_ = set([])
|
||||
UserRootInfo._all_field_names_ = RootInfo._all_field_names_.union(UserRootInfo._field_names_)
|
||||
UserRootInfo._fields_ = []
|
||||
UserRootInfo._all_fields_ = RootInfo._all_fields_ + UserRootInfo._fields_
|
||||
|
||||
ROUTES = {
|
||||
}
|
||||
|
||||
|
||||
176
resources/lib/dropbox/contacts.py
Normal file
@@ -0,0 +1,176 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Auto-generated by Stone, do not modify.
|
||||
# @generated
|
||||
# flake8: noqa
|
||||
# pylint: skip-file
|
||||
try:
|
||||
from . import stone_validators as bv
|
||||
from . import stone_base as bb
|
||||
except (ImportError, SystemError, ValueError):
|
||||
# Catch errors raised when importing a relative module when not in a package.
|
||||
# This makes testing this file directly (outside of a package) easier.
|
||||
import stone_validators as bv
|
||||
import stone_base as bb
|
||||
|
||||
try:
|
||||
from . import (
|
||||
common,
|
||||
)
|
||||
except (ImportError, SystemError, ValueError):
|
||||
import common
|
||||
|
||||
class DeleteManualContactsArg(bb.Struct):
|
||||
"""
|
||||
:ivar contacts.DeleteManualContactsArg.email_addresses: List of manually
|
||||
added contacts to be deleted.
|
||||
"""
|
||||
|
||||
__slots__ = [
|
||||
'_email_addresses_value',
|
||||
'_email_addresses_present',
|
||||
]
|
||||
|
||||
_has_required_fields = True
|
||||
|
||||
def __init__(self,
|
||||
email_addresses=None):
|
||||
self._email_addresses_value = None
|
||||
self._email_addresses_present = False
|
||||
if email_addresses is not None:
|
||||
self.email_addresses = email_addresses
|
||||
|
||||
@property
|
||||
def email_addresses(self):
|
||||
"""
|
||||
List of manually added contacts to be deleted.
|
||||
|
||||
:rtype: list of [str]
|
||||
"""
|
||||
if self._email_addresses_present:
|
||||
return self._email_addresses_value
|
||||
else:
|
||||
raise AttributeError("missing required field 'email_addresses'")
|
||||
|
||||
@email_addresses.setter
|
||||
def email_addresses(self, val):
|
||||
val = self._email_addresses_validator.validate(val)
|
||||
self._email_addresses_value = val
|
||||
self._email_addresses_present = True
|
||||
|
||||
@email_addresses.deleter
|
||||
def email_addresses(self):
|
||||
self._email_addresses_value = None
|
||||
self._email_addresses_present = False
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(DeleteManualContactsArg, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'DeleteManualContactsArg(email_addresses={!r})'.format(
|
||||
self._email_addresses_value,
|
||||
)
|
||||
|
||||
DeleteManualContactsArg_validator = bv.Struct(DeleteManualContactsArg)
|
||||
|
||||
class DeleteManualContactsError(bb.Union):
    """
    This class acts as a tagged union. Only one of the ``is_*`` methods will
    return true. To get the associated value of a tag (if one exists), use the
    corresponding ``get_*`` method.

    :ivar list of [str] contacts.DeleteManualContactsError.contacts_not_found:
        Can't delete contacts from this list. Make sure the list only has
        manually added contacts. The deletion was cancelled.
    """

    # Tags this client does not recognize (e.g. from a newer API revision)
    # deserialize to the catch-all 'other' tag.
    _catch_all = 'other'
    # Attribute is overwritten below the class definition
    other = None

    @classmethod
    def contacts_not_found(cls, val):
        """
        Create an instance of this class set to the ``contacts_not_found`` tag
        with value ``val``.

        :param list of [str] val:
        :rtype: DeleteManualContactsError
        """
        return cls('contacts_not_found', val)

    def is_contacts_not_found(self):
        """
        Check if the union tag is ``contacts_not_found``.

        :rtype: bool
        """
        return self._tag == 'contacts_not_found'

    def is_other(self):
        """
        Check if the union tag is ``other``.

        :rtype: bool
        """
        return self._tag == 'other'

    def get_contacts_not_found(self):
        """
        Can't delete contacts from this list. Make sure the list only has
        manually added contacts. The deletion was cancelled.

        Only call this if :meth:`is_contacts_not_found` is true.

        :rtype: list of [str]
        """
        # Accessing the value under the wrong tag is a programming error.
        if not self.is_contacts_not_found():
            raise AttributeError("tag 'contacts_not_found' not set")
        return self._value

    def _process_custom_annotations(self, annotation_type, field_path, processor):
        # No union-specific annotation handling here; delegate to the base class.
        super(DeleteManualContactsError, self)._process_custom_annotations(annotation_type, field_path, processor)

    def __repr__(self):
        return 'DeleteManualContactsError(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
# Union validator used by the routes below to (de)serialize DeleteManualContactsError.
DeleteManualContactsError_validator = bv.Union(DeleteManualContactsError)

# Field/tag validators are attached AFTER the class definitions (late binding)
# so they can reference the fully defined classes; do not reorder.
DeleteManualContactsArg._email_addresses_validator = bv.List(common.EmailAddress_validator)
DeleteManualContactsArg._all_field_names_ = set(['email_addresses'])
DeleteManualContactsArg._all_fields_ = [('email_addresses', DeleteManualContactsArg._email_addresses_validator)]

DeleteManualContactsError._contacts_not_found_validator = bv.List(common.EmailAddress_validator)
DeleteManualContactsError._other_validator = bv.Void()
# Maps each union tag name to the validator for its associated value.
DeleteManualContactsError._tagmap = {
    'contacts_not_found': DeleteManualContactsError._contacts_not_found_validator,
    'other': DeleteManualContactsError._other_validator,
}

# Replace the class-level placeholder with a void-tagged singleton instance.
DeleteManualContactsError.other = DeleteManualContactsError('other')
|
||||
|
||||
# Route definition for contacts/delete_manual_contacts, version 1.
# Positional arguments are presumably (name, version, deprecated, arg_validator,
# result_validator, error_validator, attrs) -- confirm against stone_base.Route.
# This route takes no argument, returns nothing, and reports no typed error.
delete_manual_contacts = bb.Route(
    'delete_manual_contacts',
    1,
    False,
    bv.Void(),
    bv.Void(),
    bv.Void(),
    {'host': u'api',
     'style': u'rpc'},
)
|
||||
# Route definition for contacts/delete_manual_contacts_batch, version 1.
# Takes a DeleteManualContactsArg (list of email addresses), returns nothing,
# and may fail with DeleteManualContactsError.
delete_manual_contacts_batch = bb.Route(
    'delete_manual_contacts_batch',
    1,
    False,
    DeleteManualContactsArg_validator,
    bv.Void(),
    DeleteManualContactsError_validator,
    {'host': u'api',
     'style': u'rpc'},
)
|
||||
|
||||
# Registry of all routes in this namespace, keyed by route name; used by the
# generated client bases to dispatch requests.
ROUTES = {
    'delete_manual_contacts': delete_manual_contacts,
    'delete_manual_contacts_batch': delete_manual_contacts_batch,
}
|
||||
|
||||
@@ -6,7 +6,7 @@ __all__ = [
|
||||
|
||||
# This should always be 0.0.0 in master. Only update this after tagging
|
||||
# before release.
|
||||
__version__ = '0.0.0'
|
||||
__version__ = '9.4.0'
|
||||
|
||||
import contextlib
|
||||
import json
|
||||
@@ -22,6 +22,11 @@ from .auth import (
|
||||
AuthError_validator,
|
||||
RateLimitError_validator,
|
||||
)
|
||||
from .common import (
|
||||
PathRoot,
|
||||
PathRoot_validator,
|
||||
PathRootError_validator
|
||||
)
|
||||
from .base import DropboxBase
|
||||
from .base_team import DropboxTeamBase
|
||||
from .exceptions import (
|
||||
@@ -29,6 +34,7 @@ from .exceptions import (
|
||||
AuthError,
|
||||
BadInputError,
|
||||
HttpError,
|
||||
PathRootError,
|
||||
InternalServerError,
|
||||
RateLimitError,
|
||||
)
|
||||
@@ -42,6 +48,9 @@ from .session import (
|
||||
pinned_session,
|
||||
)
|
||||
|
||||
PATH_ROOT_HEADER = 'Dropbox-API-Path-Root'
|
||||
HTTP_STATUS_INVALID_PATH_ROOT = 422
|
||||
|
||||
class RouteResult(object):
|
||||
"""The successful result of a call to a route."""
|
||||
|
||||
@@ -182,6 +191,35 @@ class _DropboxTransport(object):
|
||||
|
||||
self._timeout = timeout
|
||||
|
||||
def clone(
        self,
        oauth2_access_token=None,
        max_retries_on_error=None,
        max_retries_on_rate_limit=None,
        user_agent=None,
        session=None,
        headers=None,
        timeout=None):
    """
    Creates a new copy of the Dropbox client with the same defaults unless
    modified by arguments to clone().

    Any argument left as ``None`` keeps the current instance's value.

    See constructor for original parameter descriptions.

    :return: New instance of Dropbox client
    :rtype: Dropbox
    """

    # NOTE(review): ``x or self._x`` treats any falsy override (0, {}, '') as
    # "not provided" -- e.g. an explicit timeout=0 or headers={} is silently
    # replaced by the current value. Confirm this is intended before relying
    # on falsy overrides.
    return self.__class__(
        oauth2_access_token or self._oauth2_access_token,
        max_retries_on_error or self._max_retries_on_error,
        max_retries_on_rate_limit or self._max_retries_on_rate_limit,
        user_agent or self._user_agent,
        session or self._session,
        headers or self._headers,
        timeout or self._timeout
    )
|
||||
|
||||
def request(self,
|
||||
route,
|
||||
namespace,
|
||||
@@ -211,6 +249,8 @@ class _DropboxTransport(object):
|
||||
"""
|
||||
host = route.attrs['host'] or 'api'
|
||||
route_name = namespace + '/' + route.name
|
||||
if route.version > 1:
|
||||
route_name += '_v{}'.format(route.version)
|
||||
route_style = route.attrs['style'] or 'rpc'
|
||||
serialized_arg = stone_serializers.json_encode(route.arg_type,
|
||||
request_arg)
|
||||
@@ -421,6 +461,10 @@ class _DropboxTransport(object):
|
||||
err = stone_serializers.json_compat_obj_decode(
|
||||
AuthError_validator, r.json()['error'])
|
||||
raise AuthError(request_id, err)
|
||||
elif r.status_code == HTTP_STATUS_INVALID_PATH_ROOT:
|
||||
err = stone_serializers.json_compat_obj_decode(
|
||||
PathRootError_validator, r.json()['error'])
|
||||
raise PathRootError(request_id, err)
|
||||
elif r.status_code == 429:
|
||||
err = None
|
||||
if r.headers.get('content-type') == 'application/json':
|
||||
@@ -479,6 +523,28 @@ class _DropboxTransport(object):
|
||||
for c in http_resp.iter_content(chunksize):
|
||||
f.write(c)
|
||||
|
||||
def with_path_root(self, path_root):
    """
    Creates a clone of the Dropbox instance with the Dropbox-API-Path-Root header
    as the appropriate serialized instance of PathRoot.

    For more information, see
    https://www.dropbox.com/developers/reference/namespace-guide#pathrootmodes

    :param PathRoot path_root: instance of PathRoot to serialize into the headers field
    :return: A :class: `Dropbox`
    :rtype: Dropbox
    """
    # Guard clause: only PathRoot instances can be serialized into the header.
    if not isinstance(path_root, PathRoot):
        raise ValueError("path_root must be an instance of PathRoot")

    serialized_root = stone_serializers.json_encode(PathRoot_validator, path_root)
    return self.clone(headers={PATH_ROOT_HEADER: serialized_root})
|
||||
|
||||
class Dropbox(_DropboxTransport, DropboxBase):
|
||||
"""
|
||||
Use this class to make requests to the Dropbox API using a user's access
|
||||
@@ -493,22 +559,50 @@ class DropboxTeam(_DropboxTransport, DropboxTeamBase):
|
||||
token. Methods of this class are meant to act on the team, but there is
|
||||
also an :meth:`as_user` method for assuming a team member's identity.
|
||||
"""
|
||||
def as_admin(self, team_member_id):
|
||||
"""
|
||||
Allows a team credential to assume the identity of an administrator on the team
|
||||
and perform operations on any team-owned content.
|
||||
|
||||
:param str team_member_id: team member id of administrator to perform actions with
|
||||
:return: A :class:`Dropbox` object that can be used to query on behalf
|
||||
of this admin of the team.
|
||||
:rtype: Dropbox
|
||||
"""
|
||||
return self._get_dropbox_client_with_select_header('Dropbox-API-Select-Admin',
|
||||
team_member_id)
|
||||
|
||||
def as_user(self, team_member_id):
|
||||
"""
|
||||
Allows a team credential to assume the identity of a member of the
|
||||
team.
|
||||
|
||||
:param str team_member_id: team member id of team member to perform actions with
|
||||
:return: A :class:`Dropbox` object that can be used to query on behalf
|
||||
of this member of the team.
|
||||
:rtype: Dropbox
|
||||
"""
|
||||
return self._get_dropbox_client_with_select_header('Dropbox-API-Select-User',
|
||||
team_member_id)
|
||||
|
||||
def _get_dropbox_client_with_select_header(self, select_header_name, team_member_id):
|
||||
"""
|
||||
Get Dropbox client with modified headers
|
||||
|
||||
:param str select_header_name: Header name used to select users
|
||||
:param str team_member_id: team member id of team member to perform actions with
|
||||
:return: A :class:`Dropbox` object that can be used to query on behalf
|
||||
of a member or admin of the team
|
||||
:rtype: Dropbox
|
||||
"""
|
||||
|
||||
new_headers = self._headers.copy() if self._headers else {}
|
||||
new_headers['Dropbox-API-Select-User'] = team_member_id
|
||||
new_headers[select_header_name] = team_member_id
|
||||
return Dropbox(
|
||||
self._oauth2_access_token,
|
||||
max_retries_on_error=self._max_retries_on_error,
|
||||
max_retries_on_rate_limit=self._max_retries_on_rate_limit,
|
||||
timeout=self._timeout,
|
||||
user_agent=self._raw_user_agent,
|
||||
session=self._session,
|
||||
headers=new_headers,
|
||||
|
||||
@@ -46,6 +46,17 @@ class HttpError(DropboxException):
|
||||
self.status_code, self.body)
|
||||
|
||||
|
||||
class PathRootError(HttpError):
    """Error caused by an invalid path root."""

    def __init__(self, request_id, error=None):
        # 422 is the HTTP status Dropbox returns for an invalid
        # Dropbox-API-Path-Root header; no response body is retained.
        super(PathRootError, self).__init__(request_id, 422, None)
        self.error = error

    def __repr__(self):
        return 'PathRootError(%r, %r)' % (self.request_id, self.error)
|
||||
|
||||
|
||||
class BadInputError(HttpError):
|
||||
"""Errors due to bad input parameters to an API Operation."""
|
||||
|
||||
|
||||
@@ -511,9 +511,5 @@ def _params_to_urlencoded(params):
|
||||
else:
|
||||
return str(o).encode('utf-8')
|
||||
|
||||
#fix for python 2.6
|
||||
utf8_params = {}
|
||||
for k,v in six.iteritems(params):
|
||||
utf8_params[encode(k)] = encode(v)
|
||||
|
||||
utf8_params = {encode(k): encode(v) for k, v in six.iteritems(params)}
|
||||
return url_encode(utf8_params)
|
||||
|
||||
@@ -1,4 +1,2 @@
|
||||
import resources.lib.utils as utils
|
||||
|
||||
def resource_filename(*args):
|
||||
return utils.addon_dir() + "/resources/lib/dropbox/trusted-certs.crt"
|
||||
def resource_filename(package_or_requirement, resource_name):
|
||||
return resource_name
|
||||
@@ -1,835 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Auto-generated by Stone, do not modify.
|
||||
# flake8: noqa
|
||||
# pylint: skip-file
|
||||
"""
|
||||
This namespace contains helper entities for property and property/template endpoints.
|
||||
"""
|
||||
|
||||
try:
|
||||
from . import stone_validators as bv
|
||||
from . import stone_base as bb
|
||||
except (SystemError, ValueError):
|
||||
# Catch errors raised when importing a relative module when not in a package.
|
||||
# This makes testing this file directly (outside of a package) easier.
|
||||
import stone_validators as bv
|
||||
import stone_base as bb
|
||||
|
||||
class GetPropertyTemplateArg(object):
|
||||
"""
|
||||
:ivar template_id: An identifier for property template added by route
|
||||
properties/template/add.
|
||||
"""
|
||||
|
||||
__slots__ = [
|
||||
'_template_id_value',
|
||||
'_template_id_present',
|
||||
]
|
||||
|
||||
_has_required_fields = True
|
||||
|
||||
def __init__(self,
|
||||
template_id=None):
|
||||
self._template_id_value = None
|
||||
self._template_id_present = False
|
||||
if template_id is not None:
|
||||
self.template_id = template_id
|
||||
|
||||
@property
|
||||
def template_id(self):
|
||||
"""
|
||||
An identifier for property template added by route
|
||||
properties/template/add.
|
||||
|
||||
:rtype: str
|
||||
"""
|
||||
if self._template_id_present:
|
||||
return self._template_id_value
|
||||
else:
|
||||
raise AttributeError("missing required field 'template_id'")
|
||||
|
||||
@template_id.setter
|
||||
def template_id(self, val):
|
||||
val = self._template_id_validator.validate(val)
|
||||
self._template_id_value = val
|
||||
self._template_id_present = True
|
||||
|
||||
@template_id.deleter
|
||||
def template_id(self):
|
||||
self._template_id_value = None
|
||||
self._template_id_present = False
|
||||
|
||||
def __repr__(self):
|
||||
return 'GetPropertyTemplateArg(template_id={!r})'.format(
|
||||
self._template_id_value,
|
||||
)
|
||||
|
||||
GetPropertyTemplateArg_validator = bv.Struct(GetPropertyTemplateArg)
|
||||
|
||||
class PropertyGroupTemplate(object):
|
||||
"""
|
||||
Describes property templates that can be filled and associated with a file.
|
||||
|
||||
:ivar name: A display name for the property template. Property template
|
||||
names can be up to 256 bytes.
|
||||
:ivar description: Description for new property template. Property template
|
||||
descriptions can be up to 1024 bytes.
|
||||
:ivar fields: This is a list of custom properties associated with a property
|
||||
template. There can be up to 64 properties in a single property
|
||||
template.
|
||||
"""
|
||||
|
||||
__slots__ = [
|
||||
'_name_value',
|
||||
'_name_present',
|
||||
'_description_value',
|
||||
'_description_present',
|
||||
'_fields_value',
|
||||
'_fields_present',
|
||||
]
|
||||
|
||||
_has_required_fields = True
|
||||
|
||||
def __init__(self,
|
||||
name=None,
|
||||
description=None,
|
||||
fields=None):
|
||||
self._name_value = None
|
||||
self._name_present = False
|
||||
self._description_value = None
|
||||
self._description_present = False
|
||||
self._fields_value = None
|
||||
self._fields_present = False
|
||||
if name is not None:
|
||||
self.name = name
|
||||
if description is not None:
|
||||
self.description = description
|
||||
if fields is not None:
|
||||
self.fields = fields
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""
|
||||
A display name for the property template. Property template names can be
|
||||
up to 256 bytes.
|
||||
|
||||
:rtype: str
|
||||
"""
|
||||
if self._name_present:
|
||||
return self._name_value
|
||||
else:
|
||||
raise AttributeError("missing required field 'name'")
|
||||
|
||||
@name.setter
|
||||
def name(self, val):
|
||||
val = self._name_validator.validate(val)
|
||||
self._name_value = val
|
||||
self._name_present = True
|
||||
|
||||
@name.deleter
|
||||
def name(self):
|
||||
self._name_value = None
|
||||
self._name_present = False
|
||||
|
||||
@property
|
||||
def description(self):
|
||||
"""
|
||||
Description for new property template. Property template descriptions
|
||||
can be up to 1024 bytes.
|
||||
|
||||
:rtype: str
|
||||
"""
|
||||
if self._description_present:
|
||||
return self._description_value
|
||||
else:
|
||||
raise AttributeError("missing required field 'description'")
|
||||
|
||||
@description.setter
|
||||
def description(self, val):
|
||||
val = self._description_validator.validate(val)
|
||||
self._description_value = val
|
||||
self._description_present = True
|
||||
|
||||
@description.deleter
|
||||
def description(self):
|
||||
self._description_value = None
|
||||
self._description_present = False
|
||||
|
||||
@property
|
||||
def fields(self):
|
||||
"""
|
||||
This is a list of custom properties associated with a property template.
|
||||
There can be up to 64 properties in a single property template.
|
||||
|
||||
:rtype: list of [PropertyFieldTemplate]
|
||||
"""
|
||||
if self._fields_present:
|
||||
return self._fields_value
|
||||
else:
|
||||
raise AttributeError("missing required field 'fields'")
|
||||
|
||||
@fields.setter
|
||||
def fields(self, val):
|
||||
val = self._fields_validator.validate(val)
|
||||
self._fields_value = val
|
||||
self._fields_present = True
|
||||
|
||||
@fields.deleter
|
||||
def fields(self):
|
||||
self._fields_value = None
|
||||
self._fields_present = False
|
||||
|
||||
def __repr__(self):
|
||||
return 'PropertyGroupTemplate(name={!r}, description={!r}, fields={!r})'.format(
|
||||
self._name_value,
|
||||
self._description_value,
|
||||
self._fields_value,
|
||||
)
|
||||
|
||||
PropertyGroupTemplate_validator = bv.Struct(PropertyGroupTemplate)
|
||||
|
||||
class GetPropertyTemplateResult(PropertyGroupTemplate):
|
||||
"""
|
||||
The Property template for the specified template.
|
||||
"""
|
||||
|
||||
__slots__ = [
|
||||
]
|
||||
|
||||
_has_required_fields = True
|
||||
|
||||
def __init__(self,
|
||||
name=None,
|
||||
description=None,
|
||||
fields=None):
|
||||
super(GetPropertyTemplateResult, self).__init__(name,
|
||||
description,
|
||||
fields)
|
||||
|
||||
def __repr__(self):
|
||||
return 'GetPropertyTemplateResult(name={!r}, description={!r}, fields={!r})'.format(
|
||||
self._name_value,
|
||||
self._description_value,
|
||||
self._fields_value,
|
||||
)
|
||||
|
||||
GetPropertyTemplateResult_validator = bv.Struct(GetPropertyTemplateResult)
|
||||
|
||||
class ListPropertyTemplateIds(object):
|
||||
"""
|
||||
:ivar template_ids: List of identifiers for templates added by route
|
||||
properties/template/add.
|
||||
"""
|
||||
|
||||
__slots__ = [
|
||||
'_template_ids_value',
|
||||
'_template_ids_present',
|
||||
]
|
||||
|
||||
_has_required_fields = True
|
||||
|
||||
def __init__(self,
|
||||
template_ids=None):
|
||||
self._template_ids_value = None
|
||||
self._template_ids_present = False
|
||||
if template_ids is not None:
|
||||
self.template_ids = template_ids
|
||||
|
||||
@property
|
||||
def template_ids(self):
|
||||
"""
|
||||
List of identifiers for templates added by route
|
||||
properties/template/add.
|
||||
|
||||
:rtype: list of [str]
|
||||
"""
|
||||
if self._template_ids_present:
|
||||
return self._template_ids_value
|
||||
else:
|
||||
raise AttributeError("missing required field 'template_ids'")
|
||||
|
||||
@template_ids.setter
|
||||
def template_ids(self, val):
|
||||
val = self._template_ids_validator.validate(val)
|
||||
self._template_ids_value = val
|
||||
self._template_ids_present = True
|
||||
|
||||
@template_ids.deleter
|
||||
def template_ids(self):
|
||||
self._template_ids_value = None
|
||||
self._template_ids_present = False
|
||||
|
||||
def __repr__(self):
|
||||
return 'ListPropertyTemplateIds(template_ids={!r})'.format(
|
||||
self._template_ids_value,
|
||||
)
|
||||
|
||||
ListPropertyTemplateIds_validator = bv.Struct(ListPropertyTemplateIds)
|
||||
|
||||
class PropertyTemplateError(bb.Union):
|
||||
"""
|
||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar str template_not_found: Property template does not exist for given
|
||||
identifier.
|
||||
:ivar restricted_content: You do not have the permissions to modify this
|
||||
property template.
|
||||
"""
|
||||
|
||||
_catch_all = 'other'
|
||||
# Attribute is overwritten below the class definition
|
||||
restricted_content = None
|
||||
# Attribute is overwritten below the class definition
|
||||
other = None
|
||||
|
||||
@classmethod
|
||||
def template_not_found(cls, val):
|
||||
"""
|
||||
Create an instance of this class set to the ``template_not_found`` tag
|
||||
with value ``val``.
|
||||
|
||||
:param str val:
|
||||
:rtype: PropertyTemplateError
|
||||
"""
|
||||
return cls('template_not_found', val)
|
||||
|
||||
def is_template_not_found(self):
|
||||
"""
|
||||
Check if the union tag is ``template_not_found``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'template_not_found'
|
||||
|
||||
def is_restricted_content(self):
|
||||
"""
|
||||
Check if the union tag is ``restricted_content``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'restricted_content'
|
||||
|
||||
def is_other(self):
|
||||
"""
|
||||
Check if the union tag is ``other``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'other'
|
||||
|
||||
def get_template_not_found(self):
|
||||
"""
|
||||
Property template does not exist for given identifier.
|
||||
|
||||
Only call this if :meth:`is_template_not_found` is true.
|
||||
|
||||
:rtype: str
|
||||
"""
|
||||
if not self.is_template_not_found():
|
||||
raise AttributeError("tag 'template_not_found' not set")
|
||||
return self._value
|
||||
|
||||
def __repr__(self):
|
||||
return 'PropertyTemplateError(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
PropertyTemplateError_validator = bv.Union(PropertyTemplateError)
|
||||
|
||||
class ModifyPropertyTemplateError(PropertyTemplateError):
|
||||
"""
|
||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar conflicting_property_names: A property field name already exists in
|
||||
the template.
|
||||
:ivar too_many_properties: There are too many properties in the changed
|
||||
template. The maximum number of properties per template is 32.
|
||||
:ivar too_many_templates: There are too many templates for the team.
|
||||
:ivar template_attribute_too_large: The template name, description or field
|
||||
names is too large.
|
||||
"""
|
||||
|
||||
# Attribute is overwritten below the class definition
|
||||
conflicting_property_names = None
|
||||
# Attribute is overwritten below the class definition
|
||||
too_many_properties = None
|
||||
# Attribute is overwritten below the class definition
|
||||
too_many_templates = None
|
||||
# Attribute is overwritten below the class definition
|
||||
template_attribute_too_large = None
|
||||
|
||||
def is_conflicting_property_names(self):
|
||||
"""
|
||||
Check if the union tag is ``conflicting_property_names``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'conflicting_property_names'
|
||||
|
||||
def is_too_many_properties(self):
|
||||
"""
|
||||
Check if the union tag is ``too_many_properties``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'too_many_properties'
|
||||
|
||||
def is_too_many_templates(self):
|
||||
"""
|
||||
Check if the union tag is ``too_many_templates``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'too_many_templates'
|
||||
|
||||
def is_template_attribute_too_large(self):
|
||||
"""
|
||||
Check if the union tag is ``template_attribute_too_large``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'template_attribute_too_large'
|
||||
|
||||
def __repr__(self):
|
||||
return 'ModifyPropertyTemplateError(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
ModifyPropertyTemplateError_validator = bv.Union(ModifyPropertyTemplateError)
|
||||
|
||||
class PropertyField(object):
|
||||
"""
|
||||
:ivar name: This is the name or key of a custom property in a property
|
||||
template. File property names can be up to 256 bytes.
|
||||
:ivar value: Value of a custom property attached to a file. Values can be up
|
||||
to 1024 bytes.
|
||||
"""
|
||||
|
||||
__slots__ = [
|
||||
'_name_value',
|
||||
'_name_present',
|
||||
'_value_value',
|
||||
'_value_present',
|
||||
]
|
||||
|
||||
_has_required_fields = True
|
||||
|
||||
def __init__(self,
|
||||
name=None,
|
||||
value=None):
|
||||
self._name_value = None
|
||||
self._name_present = False
|
||||
self._value_value = None
|
||||
self._value_present = False
|
||||
if name is not None:
|
||||
self.name = name
|
||||
if value is not None:
|
||||
self.value = value
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""
|
||||
This is the name or key of a custom property in a property template.
|
||||
File property names can be up to 256 bytes.
|
||||
|
||||
:rtype: str
|
||||
"""
|
||||
if self._name_present:
|
||||
return self._name_value
|
||||
else:
|
||||
raise AttributeError("missing required field 'name'")
|
||||
|
||||
@name.setter
|
||||
def name(self, val):
|
||||
val = self._name_validator.validate(val)
|
||||
self._name_value = val
|
||||
self._name_present = True
|
||||
|
||||
@name.deleter
|
||||
def name(self):
|
||||
self._name_value = None
|
||||
self._name_present = False
|
||||
|
||||
@property
|
||||
def value(self):
|
||||
"""
|
||||
Value of a custom property attached to a file. Values can be up to 1024
|
||||
bytes.
|
||||
|
||||
:rtype: str
|
||||
"""
|
||||
if self._value_present:
|
||||
return self._value_value
|
||||
else:
|
||||
raise AttributeError("missing required field 'value'")
|
||||
|
||||
@value.setter
|
||||
def value(self, val):
|
||||
val = self._value_validator.validate(val)
|
||||
self._value_value = val
|
||||
self._value_present = True
|
||||
|
||||
@value.deleter
|
||||
def value(self):
|
||||
self._value_value = None
|
||||
self._value_present = False
|
||||
|
||||
def __repr__(self):
|
||||
return 'PropertyField(name={!r}, value={!r})'.format(
|
||||
self._name_value,
|
||||
self._value_value,
|
||||
)
|
||||
|
||||
PropertyField_validator = bv.Struct(PropertyField)
|
||||
|
||||
class PropertyFieldTemplate(object):
|
||||
"""
|
||||
Describe a single property field type which that can be part of a property
|
||||
template.
|
||||
|
||||
:ivar name: This is the name or key of a custom property in a property
|
||||
template. File property names can be up to 256 bytes.
|
||||
:ivar description: This is the description for a custom property in a
|
||||
property template. File property description can be up to 1024 bytes.
|
||||
:ivar type: This is the data type of the value of this property. This type
|
||||
will be enforced upon property creation and modifications.
|
||||
"""
|
||||
|
||||
__slots__ = [
|
||||
'_name_value',
|
||||
'_name_present',
|
||||
'_description_value',
|
||||
'_description_present',
|
||||
'_type_value',
|
||||
'_type_present',
|
||||
]
|
||||
|
||||
_has_required_fields = True
|
||||
|
||||
def __init__(self,
|
||||
name=None,
|
||||
description=None,
|
||||
type=None):
|
||||
self._name_value = None
|
||||
self._name_present = False
|
||||
self._description_value = None
|
||||
self._description_present = False
|
||||
self._type_value = None
|
||||
self._type_present = False
|
||||
if name is not None:
|
||||
self.name = name
|
||||
if description is not None:
|
||||
self.description = description
|
||||
if type is not None:
|
||||
self.type = type
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""
|
||||
This is the name or key of a custom property in a property template.
|
||||
File property names can be up to 256 bytes.
|
||||
|
||||
:rtype: str
|
||||
"""
|
||||
if self._name_present:
|
||||
return self._name_value
|
||||
else:
|
||||
raise AttributeError("missing required field 'name'")
|
||||
|
||||
@name.setter
|
||||
def name(self, val):
|
||||
val = self._name_validator.validate(val)
|
||||
self._name_value = val
|
||||
self._name_present = True
|
||||
|
||||
@name.deleter
|
||||
def name(self):
|
||||
self._name_value = None
|
||||
self._name_present = False
|
||||
|
||||
@property
|
||||
def description(self):
|
||||
"""
|
||||
This is the description for a custom property in a property template.
|
||||
File property description can be up to 1024 bytes.
|
||||
|
||||
:rtype: str
|
||||
"""
|
||||
if self._description_present:
|
||||
return self._description_value
|
||||
else:
|
||||
raise AttributeError("missing required field 'description'")
|
||||
|
||||
@description.setter
|
||||
def description(self, val):
|
||||
val = self._description_validator.validate(val)
|
||||
self._description_value = val
|
||||
self._description_present = True
|
||||
|
||||
@description.deleter
|
||||
def description(self):
|
||||
self._description_value = None
|
||||
self._description_present = False
|
||||
|
||||
@property
|
||||
def type(self):
|
||||
"""
|
||||
This is the data type of the value of this property. This type will be
|
||||
enforced upon property creation and modifications.
|
||||
|
||||
:rtype: PropertyType
|
||||
"""
|
||||
if self._type_present:
|
||||
return self._type_value
|
||||
else:
|
||||
raise AttributeError("missing required field 'type'")
|
||||
|
||||
@type.setter
|
||||
def type(self, val):
|
||||
self._type_validator.validate_type_only(val)
|
||||
self._type_value = val
|
||||
self._type_present = True
|
||||
|
||||
@type.deleter
|
||||
def type(self):
|
||||
self._type_value = None
|
||||
self._type_present = False
|
||||
|
||||
def __repr__(self):
|
||||
return 'PropertyFieldTemplate(name={!r}, description={!r}, type={!r})'.format(
|
||||
self._name_value,
|
||||
self._description_value,
|
||||
self._type_value,
|
||||
)
|
||||
|
||||
PropertyFieldTemplate_validator = bv.Struct(PropertyFieldTemplate)
|
||||
|
||||
class PropertyGroup(object):
|
||||
"""
|
||||
Collection of custom properties in filled property templates.
|
||||
|
||||
:ivar template_id: A unique identifier for a property template type.
|
||||
:ivar fields: This is a list of custom properties associated with a file.
|
||||
There can be up to 32 properties for a template.
|
||||
"""
|
||||
|
||||
__slots__ = [
|
||||
'_template_id_value',
|
||||
'_template_id_present',
|
||||
'_fields_value',
|
||||
'_fields_present',
|
||||
]
|
||||
|
||||
_has_required_fields = True
|
||||
|
||||
def __init__(self,
|
||||
template_id=None,
|
||||
fields=None):
|
||||
self._template_id_value = None
|
||||
self._template_id_present = False
|
||||
self._fields_value = None
|
||||
self._fields_present = False
|
||||
if template_id is not None:
|
||||
self.template_id = template_id
|
||||
if fields is not None:
|
||||
self.fields = fields
|
||||
|
||||
@property
|
||||
def template_id(self):
|
||||
"""
|
||||
A unique identifier for a property template type.
|
||||
|
||||
:rtype: str
|
||||
"""
|
||||
if self._template_id_present:
|
||||
return self._template_id_value
|
||||
else:
|
||||
raise AttributeError("missing required field 'template_id'")
|
||||
|
||||
@template_id.setter
|
||||
def template_id(self, val):
|
||||
val = self._template_id_validator.validate(val)
|
||||
self._template_id_value = val
|
||||
self._template_id_present = True
|
||||
|
||||
@template_id.deleter
|
||||
def template_id(self):
|
||||
self._template_id_value = None
|
||||
self._template_id_present = False
|
||||
|
||||
@property
|
||||
def fields(self):
|
||||
"""
|
||||
This is a list of custom properties associated with a file. There can be
|
||||
up to 32 properties for a template.
|
||||
|
||||
:rtype: list of [PropertyField]
|
||||
"""
|
||||
if self._fields_present:
|
||||
return self._fields_value
|
||||
else:
|
||||
raise AttributeError("missing required field 'fields'")
|
||||
|
||||
@fields.setter
|
||||
def fields(self, val):
|
||||
val = self._fields_validator.validate(val)
|
||||
self._fields_value = val
|
||||
self._fields_present = True
|
||||
|
||||
@fields.deleter
|
||||
def fields(self):
|
||||
self._fields_value = None
|
||||
self._fields_present = False
|
||||
|
||||
def __repr__(self):
|
||||
return 'PropertyGroup(template_id={!r}, fields={!r})'.format(
|
||||
self._template_id_value,
|
||||
self._fields_value,
|
||||
)
|
||||
|
||||
PropertyGroup_validator = bv.Struct(PropertyGroup)
|
||||
|
||||
class PropertyType(bb.Union):
|
||||
"""
|
||||
Data type of the given property added. This endpoint is in beta and only
|
||||
properties of type strings is supported.
|
||||
|
||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar string: The associated property will be of type string. Unicode is
|
||||
supported.
|
||||
"""
|
||||
|
||||
_catch_all = 'other'
|
||||
# Attribute is overwritten below the class definition
|
||||
string = None
|
||||
# Attribute is overwritten below the class definition
|
||||
other = None
|
||||
|
||||
def is_string(self):
|
||||
"""
|
||||
Check if the union tag is ``string``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'string'
|
||||
|
||||
def is_other(self):
|
||||
"""
|
||||
Check if the union tag is ``other``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'other'
|
||||
|
||||
def __repr__(self):
|
||||
return 'PropertyType(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
PropertyType_validator = bv.Union(PropertyType)
|
||||
|
||||
TemplateId_validator = bv.String(min_length=1, pattern=u'(/|ptid:).*')
|
||||
GetPropertyTemplateArg._template_id_validator = TemplateId_validator
|
||||
GetPropertyTemplateArg._all_field_names_ = set(['template_id'])
|
||||
GetPropertyTemplateArg._all_fields_ = [('template_id', GetPropertyTemplateArg._template_id_validator)]
|
||||
|
||||
PropertyGroupTemplate._name_validator = bv.String()
|
||||
PropertyGroupTemplate._description_validator = bv.String()
|
||||
PropertyGroupTemplate._fields_validator = bv.List(PropertyFieldTemplate_validator)
|
||||
PropertyGroupTemplate._all_field_names_ = set([
|
||||
'name',
|
||||
'description',
|
||||
'fields',
|
||||
])
|
||||
PropertyGroupTemplate._all_fields_ = [
|
||||
('name', PropertyGroupTemplate._name_validator),
|
||||
('description', PropertyGroupTemplate._description_validator),
|
||||
('fields', PropertyGroupTemplate._fields_validator),
|
||||
]
|
||||
|
||||
GetPropertyTemplateResult._all_field_names_ = PropertyGroupTemplate._all_field_names_.union(set([]))
|
||||
GetPropertyTemplateResult._all_fields_ = PropertyGroupTemplate._all_fields_ + []
|
||||
|
||||
ListPropertyTemplateIds._template_ids_validator = bv.List(TemplateId_validator)
|
||||
ListPropertyTemplateIds._all_field_names_ = set(['template_ids'])
|
||||
ListPropertyTemplateIds._all_fields_ = [('template_ids', ListPropertyTemplateIds._template_ids_validator)]
|
||||
|
||||
PropertyTemplateError._template_not_found_validator = TemplateId_validator
|
||||
PropertyTemplateError._restricted_content_validator = bv.Void()
|
||||
PropertyTemplateError._other_validator = bv.Void()
|
||||
PropertyTemplateError._tagmap = {
|
||||
'template_not_found': PropertyTemplateError._template_not_found_validator,
|
||||
'restricted_content': PropertyTemplateError._restricted_content_validator,
|
||||
'other': PropertyTemplateError._other_validator,
|
||||
}
|
||||
|
||||
PropertyTemplateError.restricted_content = PropertyTemplateError('restricted_content')
|
||||
PropertyTemplateError.other = PropertyTemplateError('other')
|
||||
|
||||
ModifyPropertyTemplateError._conflicting_property_names_validator = bv.Void()
|
||||
ModifyPropertyTemplateError._too_many_properties_validator = bv.Void()
|
||||
ModifyPropertyTemplateError._too_many_templates_validator = bv.Void()
|
||||
ModifyPropertyTemplateError._template_attribute_too_large_validator = bv.Void()
|
||||
ModifyPropertyTemplateError._tagmap = {
|
||||
'conflicting_property_names': ModifyPropertyTemplateError._conflicting_property_names_validator,
|
||||
'too_many_properties': ModifyPropertyTemplateError._too_many_properties_validator,
|
||||
'too_many_templates': ModifyPropertyTemplateError._too_many_templates_validator,
|
||||
'template_attribute_too_large': ModifyPropertyTemplateError._template_attribute_too_large_validator,
|
||||
}
|
||||
ModifyPropertyTemplateError._tagmap.update(PropertyTemplateError._tagmap)
|
||||
|
||||
ModifyPropertyTemplateError.conflicting_property_names = ModifyPropertyTemplateError('conflicting_property_names')
|
||||
ModifyPropertyTemplateError.too_many_properties = ModifyPropertyTemplateError('too_many_properties')
|
||||
ModifyPropertyTemplateError.too_many_templates = ModifyPropertyTemplateError('too_many_templates')
|
||||
ModifyPropertyTemplateError.template_attribute_too_large = ModifyPropertyTemplateError('template_attribute_too_large')
|
||||
|
||||
PropertyField._name_validator = bv.String()
|
||||
PropertyField._value_validator = bv.String()
|
||||
PropertyField._all_field_names_ = set([
|
||||
'name',
|
||||
'value',
|
||||
])
|
||||
PropertyField._all_fields_ = [
|
||||
('name', PropertyField._name_validator),
|
||||
('value', PropertyField._value_validator),
|
||||
]
|
||||
|
||||
PropertyFieldTemplate._name_validator = bv.String()
|
||||
PropertyFieldTemplate._description_validator = bv.String()
|
||||
PropertyFieldTemplate._type_validator = PropertyType_validator
|
||||
PropertyFieldTemplate._all_field_names_ = set([
|
||||
'name',
|
||||
'description',
|
||||
'type',
|
||||
])
|
||||
PropertyFieldTemplate._all_fields_ = [
|
||||
('name', PropertyFieldTemplate._name_validator),
|
||||
('description', PropertyFieldTemplate._description_validator),
|
||||
('type', PropertyFieldTemplate._type_validator),
|
||||
]
|
||||
|
||||
PropertyGroup._template_id_validator = TemplateId_validator
|
||||
PropertyGroup._fields_validator = bv.List(PropertyField_validator)
|
||||
PropertyGroup._all_field_names_ = set([
|
||||
'template_id',
|
||||
'fields',
|
||||
])
|
||||
PropertyGroup._all_fields_ = [
|
||||
('template_id', PropertyGroup._template_id_validator),
|
||||
('fields', PropertyGroup._fields_validator),
|
||||
]
|
||||
|
||||
PropertyType._string_validator = bv.Void()
|
||||
PropertyType._other_validator = bv.Void()
|
||||
PropertyType._tagmap = {
|
||||
'string': PropertyType._string_validator,
|
||||
'other': PropertyType._other_validator,
|
||||
}
|
||||
|
||||
PropertyType.string = PropertyType('string')
|
||||
PropertyType.other = PropertyType('other')
|
||||
|
||||
ROUTES = {
|
||||
}
|
||||
|
||||
158
resources/lib/dropbox/seen_state.py
Normal file
@@ -0,0 +1,158 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Auto-generated by Stone, do not modify.
|
||||
# @generated
|
||||
# flake8: noqa
|
||||
# pylint: skip-file
|
||||
try:
|
||||
from . import stone_validators as bv
|
||||
from . import stone_base as bb
|
||||
except (ImportError, SystemError, ValueError):
|
||||
# Catch errors raised when importing a relative module when not in a package.
|
||||
# This makes testing this file directly (outside of a package) easier.
|
||||
import stone_validators as bv
|
||||
import stone_base as bb
|
||||
|
||||
class PlatformType(bb.Union):
|
||||
"""
|
||||
Possible platforms on which a user may view content.
|
||||
|
||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar seen_state.PlatformType.web: The content was viewed on the web.
|
||||
:ivar seen_state.PlatformType.desktop: The content was viewed on a desktop
|
||||
client.
|
||||
:ivar seen_state.PlatformType.mobile_ios: The content was viewed on a mobile
|
||||
iOS client.
|
||||
:ivar seen_state.PlatformType.mobile_android: The content was viewed on a
|
||||
mobile android client.
|
||||
:ivar seen_state.PlatformType.api: The content was viewed from an API
|
||||
client.
|
||||
:ivar seen_state.PlatformType.unknown: The content was viewed on an unknown
|
||||
platform.
|
||||
:ivar seen_state.PlatformType.mobile: The content was viewed on a mobile
|
||||
client. DEPRECATED: Use mobile_ios or mobile_android instead.
|
||||
"""
|
||||
|
||||
_catch_all = 'other'
|
||||
# Attribute is overwritten below the class definition
|
||||
web = None
|
||||
# Attribute is overwritten below the class definition
|
||||
desktop = None
|
||||
# Attribute is overwritten below the class definition
|
||||
mobile_ios = None
|
||||
# Attribute is overwritten below the class definition
|
||||
mobile_android = None
|
||||
# Attribute is overwritten below the class definition
|
||||
api = None
|
||||
# Attribute is overwritten below the class definition
|
||||
unknown = None
|
||||
# Attribute is overwritten below the class definition
|
||||
mobile = None
|
||||
# Attribute is overwritten below the class definition
|
||||
other = None
|
||||
|
||||
def is_web(self):
|
||||
"""
|
||||
Check if the union tag is ``web``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'web'
|
||||
|
||||
def is_desktop(self):
|
||||
"""
|
||||
Check if the union tag is ``desktop``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'desktop'
|
||||
|
||||
def is_mobile_ios(self):
|
||||
"""
|
||||
Check if the union tag is ``mobile_ios``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'mobile_ios'
|
||||
|
||||
def is_mobile_android(self):
|
||||
"""
|
||||
Check if the union tag is ``mobile_android``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'mobile_android'
|
||||
|
||||
def is_api(self):
|
||||
"""
|
||||
Check if the union tag is ``api``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'api'
|
||||
|
||||
def is_unknown(self):
|
||||
"""
|
||||
Check if the union tag is ``unknown``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'unknown'
|
||||
|
||||
def is_mobile(self):
|
||||
"""
|
||||
Check if the union tag is ``mobile``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'mobile'
|
||||
|
||||
def is_other(self):
|
||||
"""
|
||||
Check if the union tag is ``other``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'other'
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(PlatformType, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'PlatformType(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
PlatformType_validator = bv.Union(PlatformType)
|
||||
|
||||
PlatformType._web_validator = bv.Void()
|
||||
PlatformType._desktop_validator = bv.Void()
|
||||
PlatformType._mobile_ios_validator = bv.Void()
|
||||
PlatformType._mobile_android_validator = bv.Void()
|
||||
PlatformType._api_validator = bv.Void()
|
||||
PlatformType._unknown_validator = bv.Void()
|
||||
PlatformType._mobile_validator = bv.Void()
|
||||
PlatformType._other_validator = bv.Void()
|
||||
PlatformType._tagmap = {
|
||||
'web': PlatformType._web_validator,
|
||||
'desktop': PlatformType._desktop_validator,
|
||||
'mobile_ios': PlatformType._mobile_ios_validator,
|
||||
'mobile_android': PlatformType._mobile_android_validator,
|
||||
'api': PlatformType._api_validator,
|
||||
'unknown': PlatformType._unknown_validator,
|
||||
'mobile': PlatformType._mobile_validator,
|
||||
'other': PlatformType._other_validator,
|
||||
}
|
||||
|
||||
PlatformType.web = PlatformType('web')
|
||||
PlatformType.desktop = PlatformType('desktop')
|
||||
PlatformType.mobile_ios = PlatformType('mobile_ios')
|
||||
PlatformType.mobile_android = PlatformType('mobile_android')
|
||||
PlatformType.api = PlatformType('api')
|
||||
PlatformType.unknown = PlatformType('unknown')
|
||||
PlatformType.mobile = PlatformType('mobile')
|
||||
PlatformType.other = PlatformType('other')
|
||||
|
||||
ROUTES = {
|
||||
}
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import pkg_resources
|
||||
from . import pkg_resources
|
||||
import os
|
||||
import ssl
|
||||
|
||||
import requests
|
||||
from requests.adapters import HTTPAdapter
|
||||
from requests.packages.urllib3.poolmanager import PoolManager
|
||||
from urllib3.poolmanager import PoolManager
|
||||
|
||||
API_DOMAIN = os.environ.get('DROPBOX_API_DOMAIN',
|
||||
os.environ.get('DROPBOX_DOMAIN', '.dropboxapi.com'))
|
||||
|
||||
@@ -8,9 +8,11 @@ than being added to a project.
|
||||
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import functools
|
||||
|
||||
try:
|
||||
from . import stone_validators as bv
|
||||
except (SystemError, ValueError):
|
||||
except (ImportError, SystemError, ValueError):
|
||||
# Catch errors raised when importing a relative module when not in a package.
|
||||
# This makes testing this file directly (outside of a package) easier.
|
||||
import stone_validators as bv # type: ignore
|
||||
@@ -19,17 +21,34 @@ _MYPY = False
|
||||
if _MYPY:
|
||||
import typing # noqa: F401 # pylint: disable=import-error,unused-import,useless-suppression
|
||||
|
||||
class AnnotationType(object):
|
||||
# This is a base class for all annotation types.
|
||||
pass
|
||||
|
||||
if _MYPY:
|
||||
T = typing.TypeVar('T', bound=AnnotationType)
|
||||
U = typing.TypeVar('U')
|
||||
|
||||
class Struct(object):
|
||||
# This is a base class for all classes representing Stone structs.
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
# type: (typing.Type[T], typing.Text, typing.Callable[[T, U], U]) -> None
|
||||
pass
|
||||
|
||||
class Union(object):
|
||||
# TODO(kelkabany): Possible optimization is to remove _value if a
|
||||
# union is composed of only symbols.
|
||||
__slots__ = ['_tag', '_value']
|
||||
_tagmap = {} # type: typing.Dict[typing.Text, bv.Validator]
|
||||
_permissioned_tagmaps = set() # type: typing.Set[typing.Text]
|
||||
|
||||
def __init__(self, tag, value=None):
|
||||
# type: (typing.Text, typing.Optional[typing.Any]) -> None
|
||||
assert tag in self._tagmap, 'Invalid tag %r.' % tag
|
||||
validator = self._tagmap[tag]
|
||||
validator = None
|
||||
tagmap_names = ['_{}_tagmap'.format(map_name) for map_name in self._permissioned_tagmaps]
|
||||
for tagmap_name in ['_tagmap'] + tagmap_names:
|
||||
if tag in getattr(self, tagmap_name):
|
||||
validator = getattr(self, tagmap_name)[tag]
|
||||
assert validator is not None, 'Invalid tag %r.' % tag
|
||||
if isinstance(validator, bv.Void):
|
||||
assert value is None, 'Void type union member must have None value.'
|
||||
elif isinstance(validator, (bv.Struct, bv.Union)):
|
||||
@@ -54,10 +73,40 @@ class Union(object):
|
||||
def __hash__(self):
|
||||
return hash((self._tag, self._value))
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
# type: (typing.Type[T], typing.Text, typing.Callable[[T, U], U]) -> None
|
||||
pass
|
||||
|
||||
@classmethod
|
||||
def _is_tag_present(cls, tag, caller_permissions):
|
||||
assert tag, 'tag value should not be None'
|
||||
|
||||
if tag in cls._tagmap:
|
||||
return True
|
||||
|
||||
for extra_permission in caller_permissions.permissions:
|
||||
tagmap_name = '_{}_tagmap'.format(extra_permission)
|
||||
if hasattr(cls, tagmap_name) and tag in getattr(cls, tagmap_name):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
@classmethod
|
||||
def _get_val_data_type(cls, tag, caller_permissions):
|
||||
assert tag, 'tag value should not be None'
|
||||
|
||||
for extra_permission in caller_permissions.permissions:
|
||||
tagmap_name = '_{}_tagmap'.format(extra_permission)
|
||||
if hasattr(cls, tagmap_name) and tag in getattr(cls, tagmap_name):
|
||||
return getattr(cls, tagmap_name)[tag]
|
||||
|
||||
return cls._tagmap[tag]
|
||||
|
||||
class Route(object):
|
||||
|
||||
def __init__(self, name, deprecated, arg_type, result_type, error_type, attrs):
|
||||
def __init__(self, name, version, deprecated, arg_type, result_type, error_type, attrs):
|
||||
self.name = name
|
||||
self.version = version
|
||||
self.deprecated = deprecated
|
||||
self.arg_type = arg_type
|
||||
self.result_type = result_type
|
||||
@@ -66,10 +115,38 @@ class Route(object):
|
||||
self.attrs = attrs
|
||||
|
||||
def __repr__(self):
|
||||
return 'Route({!r}, {!r}, {!r}, {!r}, {!r}, {!r})'.format(
|
||||
return 'Route({!r}, {!r}, {!r}, {!r}, {!r}, {!r}, {!r})'.format(
|
||||
self.name,
|
||||
self.version,
|
||||
self.deprecated,
|
||||
self.arg_type,
|
||||
self.result_type,
|
||||
self.error_type,
|
||||
self.attrs)
|
||||
|
||||
# helper functions used when constructing custom annotation processors
|
||||
|
||||
# put this here so that every other file doesn't need to import functools
|
||||
partially_apply = functools.partial
|
||||
|
||||
def make_struct_annotation_processor(annotation_type, processor):
|
||||
def g(field_path, struct):
|
||||
if struct is None:
|
||||
return struct
|
||||
struct._process_custom_annotations(annotation_type, field_path, processor)
|
||||
return struct
|
||||
return g
|
||||
|
||||
def make_list_annotation_processor(processor):
|
||||
def g(field_path, list_):
|
||||
if list_ is None:
|
||||
return list_
|
||||
return [processor('{}[{}]'.format(field_path, idx), x) for idx, x in enumerate(list_)]
|
||||
return g
|
||||
|
||||
def make_map_value_annotation_processor(processor):
|
||||
def g(field_path, map_):
|
||||
if map_ is None:
|
||||
return map_
|
||||
return {k: processor('{}[{}]'.format(field_path, repr(k)), v) for k, v in map_.items()}
|
||||
return g
|
||||
|
||||
@@ -14,6 +14,7 @@ from __future__ import absolute_import, unicode_literals
|
||||
|
||||
from abc import ABCMeta, abstractmethod
|
||||
import datetime
|
||||
import hashlib
|
||||
import math
|
||||
import numbers
|
||||
import re
|
||||
@@ -73,7 +74,10 @@ class ValidationError(Exception):
|
||||
def generic_type_name(v):
|
||||
"""Return a descriptive type name that isn't Python specific. For example,
|
||||
an int value will return 'integer' rather than 'int'."""
|
||||
if isinstance(v, numbers.Integral):
|
||||
if isinstance(v, bool):
|
||||
# Must come before any numbers checks since booleans are integers too
|
||||
return 'boolean'
|
||||
elif isinstance(v, numbers.Integral):
|
||||
# Must come before real numbers check since integrals are reals too
|
||||
return 'integer'
|
||||
elif isinstance(v, numbers.Real):
|
||||
@@ -422,13 +426,10 @@ class Map(Composite):
|
||||
def validate(self, val):
|
||||
if not isinstance(val, dict):
|
||||
raise ValidationError('%r is not a valid dict' % val)
|
||||
|
||||
#fix for python 2.6
|
||||
result = {}
|
||||
for key, value in val.items():
|
||||
result[self.key_validator.validate(key)] = self.value_validator.validate(value)
|
||||
|
||||
return result
|
||||
return {
|
||||
self.key_validator.validate(key):
|
||||
self.value_validator.validate(value) for key, value in val.items()
|
||||
}
|
||||
|
||||
|
||||
class Struct(Composite):
|
||||
@@ -458,6 +459,16 @@ class Struct(Composite):
|
||||
self.validate_fields_only(val)
|
||||
return val
|
||||
|
||||
def validate_with_permissions(self, val, caller_permissions):
|
||||
"""
|
||||
For a val to pass validation, val must be of the correct type and have
|
||||
all required permissioned fields present. Should only be called
|
||||
for callers with extra permissions.
|
||||
"""
|
||||
self.validate(val)
|
||||
self.validate_fields_only_with_permissions(val, caller_permissions)
|
||||
return val
|
||||
|
||||
def validate_fields_only(self, val):
|
||||
"""
|
||||
To pass field validation, no required field should be missing.
|
||||
@@ -468,11 +479,27 @@ class Struct(Composite):
|
||||
FIXME(kelkabany): Since the definition object does not maintain a list
|
||||
of which fields are required, all fields are scanned.
|
||||
"""
|
||||
for field_name, _ in self.definition._all_fields_:
|
||||
for field_name in self.definition._all_field_names_:
|
||||
if not hasattr(val, field_name):
|
||||
raise ValidationError("missing required field '%s'" %
|
||||
field_name)
|
||||
|
||||
def validate_fields_only_with_permissions(self, val, caller_permissions):
|
||||
"""
|
||||
To pass field validation, no required field should be missing.
|
||||
This method assumes that the contents of each field have already been
|
||||
validated on assignment, so it's merely a presence check.
|
||||
Should only be called for callers with extra permissions.
|
||||
"""
|
||||
self.validate_fields_only(val)
|
||||
|
||||
# check if type has been patched
|
||||
for extra_permission in caller_permissions.permissions:
|
||||
all_field_names = '_all_{}_field_names_'.format(extra_permission)
|
||||
for field_name in getattr(self.definition, all_field_names, set()):
|
||||
if not hasattr(val, field_name):
|
||||
raise ValidationError("missing required field '%s'" % field_name)
|
||||
|
||||
def validate_type_only(self, val):
|
||||
"""
|
||||
Use this when you only want to validate that the type of an object
|
||||
@@ -593,3 +620,54 @@ class Nullable(Validator):
|
||||
|
||||
def get_default(self):
|
||||
return None
|
||||
|
||||
class Redactor(object):
|
||||
def __init__(self, regex):
|
||||
"""
|
||||
Args:
|
||||
regex: What parts of the field to redact.
|
||||
"""
|
||||
self.regex = regex
|
||||
|
||||
@abstractmethod
|
||||
def apply(self, val):
|
||||
"""Redacts information from annotated field.
|
||||
Returns: A redacted version of the string provided.
|
||||
"""
|
||||
pass
|
||||
|
||||
def _get_matches(self, val):
|
||||
if not self.regex:
|
||||
return None
|
||||
try:
|
||||
return re.search(self.regex, val)
|
||||
except TypeError:
|
||||
return None
|
||||
|
||||
|
||||
class HashRedactor(Redactor):
|
||||
def apply(self, val):
|
||||
matches = self._get_matches(val)
|
||||
|
||||
val_to_hash = str(val) if isinstance(val, int) or isinstance(val, float) else val
|
||||
|
||||
try:
|
||||
# add string literal to ensure unicode
|
||||
hashed = hashlib.md5(val_to_hash.encode('utf-8')).hexdigest() + ''
|
||||
except [AttributeError, ValueError]:
|
||||
hashed = None
|
||||
|
||||
if matches:
|
||||
blotted = '***'.join(matches.groups())
|
||||
if hashed:
|
||||
return '{} ({})'.format(hashed, blotted)
|
||||
return blotted
|
||||
return hashed
|
||||
|
||||
|
||||
class BlotRedactor(Redactor):
|
||||
def apply(self, val):
|
||||
matches = self._get_matches(val)
|
||||
if matches:
|
||||
return '***'.join(matches.groups())
|
||||
return '********'
|
||||
|
||||
@@ -1,11 +1,12 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Auto-generated by Stone, do not modify.
|
||||
# @generated
|
||||
# flake8: noqa
|
||||
# pylint: skip-file
|
||||
try:
|
||||
from . import stone_validators as bv
|
||||
from . import stone_base as bb
|
||||
except (SystemError, ValueError):
|
||||
except (ImportError, SystemError, ValueError):
|
||||
# Catch errors raised when importing a relative module when not in a package.
|
||||
# This makes testing this file directly (outside of a package) easier.
|
||||
import stone_validators as bv
|
||||
@@ -15,7 +16,7 @@ try:
|
||||
from . import (
|
||||
common,
|
||||
)
|
||||
except (SystemError, ValueError):
|
||||
except (ImportError, SystemError, ValueError):
|
||||
import common
|
||||
|
||||
class GroupManagementType(bb.Union):
|
||||
@@ -26,9 +27,12 @@ class GroupManagementType(bb.Union):
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar user_managed: A group which is managed by selected users.
|
||||
:ivar company_managed: A group which is managed by team admins only.
|
||||
:ivar system_managed: A group which is managed automatically by Dropbox.
|
||||
:ivar team_common.GroupManagementType.user_managed: A group which is managed
|
||||
by selected users.
|
||||
:ivar team_common.GroupManagementType.company_managed: A group which is
|
||||
managed by team admins only.
|
||||
:ivar team_common.GroupManagementType.system_managed: A group which is
|
||||
managed automatically by Dropbox.
|
||||
"""
|
||||
|
||||
_catch_all = 'other'
|
||||
@@ -73,19 +77,24 @@ class GroupManagementType(bb.Union):
|
||||
"""
|
||||
return self._tag == 'other'
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(GroupManagementType, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'GroupManagementType(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
GroupManagementType_validator = bv.Union(GroupManagementType)
|
||||
|
||||
class GroupSummary(object):
|
||||
class GroupSummary(bb.Struct):
|
||||
"""
|
||||
Information about a group.
|
||||
|
||||
:ivar group_external_id: External ID of group. This is an arbitrary ID that
|
||||
an admin can attach to a group.
|
||||
:ivar member_count: The number of members in the group.
|
||||
:ivar group_management_type: Who is allowed to manage the group.
|
||||
:ivar team_common.GroupSummary.group_external_id: External ID of group. This
|
||||
is an arbitrary ID that an admin can attach to a group.
|
||||
:ivar team_common.GroupSummary.member_count: The number of members in the
|
||||
group.
|
||||
:ivar team_common.GroupSummary.group_management_type: Who is allowed to
|
||||
manage the group.
|
||||
"""
|
||||
|
||||
__slots__ = [
|
||||
@@ -204,7 +213,7 @@ class GroupSummary(object):
|
||||
"""
|
||||
The number of members in the group.
|
||||
|
||||
:rtype: long
|
||||
:rtype: int
|
||||
"""
|
||||
if self._member_count_present:
|
||||
return self._member_count_value
|
||||
@@ -248,6 +257,9 @@ class GroupSummary(object):
|
||||
self._group_management_type_value = None
|
||||
self._group_management_type_present = False
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(GroupSummary, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'GroupSummary(group_name={!r}, group_id={!r}, group_management_type={!r}, group_external_id={!r}, member_count={!r})'.format(
|
||||
self._group_name_value,
|
||||
@@ -267,9 +279,11 @@ class GroupType(bb.Union):
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar team: A group to which team members are automatically added.
|
||||
Applicable to `team folders <https://www.dropbox.com/help/986>`_ only.
|
||||
:ivar user_managed: A group is created and managed by a user.
|
||||
:ivar team_common.GroupType.team: A group to which team members are
|
||||
automatically added. Applicable to `team folders
|
||||
<https://www.dropbox.com/help/986>`_ only.
|
||||
:ivar team_common.GroupType.user_managed: A group is created and managed by
|
||||
a user.
|
||||
"""
|
||||
|
||||
_catch_all = 'other'
|
||||
@@ -304,17 +318,88 @@ class GroupType(bb.Union):
|
||||
"""
|
||||
return self._tag == 'other'
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(GroupType, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'GroupType(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
GroupType_validator = bv.Union(GroupType)
|
||||
|
||||
class TimeRange(object):
|
||||
class MemberSpaceLimitType(bb.Union):
|
||||
"""
|
||||
The type of the space limit imposed on a team member.
|
||||
|
||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar team_common.MemberSpaceLimitType.off: The team member does not have
|
||||
imposed space limit.
|
||||
:ivar team_common.MemberSpaceLimitType.alert_only: The team member has soft
|
||||
imposed space limit - the limit is used for display and for
|
||||
notifications.
|
||||
:ivar team_common.MemberSpaceLimitType.stop_sync: The team member has hard
|
||||
imposed space limit - Dropbox file sync will stop after the limit is
|
||||
reached.
|
||||
"""
|
||||
|
||||
_catch_all = 'other'
|
||||
# Attribute is overwritten below the class definition
|
||||
off = None
|
||||
# Attribute is overwritten below the class definition
|
||||
alert_only = None
|
||||
# Attribute is overwritten below the class definition
|
||||
stop_sync = None
|
||||
# Attribute is overwritten below the class definition
|
||||
other = None
|
||||
|
||||
def is_off(self):
|
||||
"""
|
||||
Check if the union tag is ``off``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'off'
|
||||
|
||||
def is_alert_only(self):
|
||||
"""
|
||||
Check if the union tag is ``alert_only``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'alert_only'
|
||||
|
||||
def is_stop_sync(self):
|
||||
"""
|
||||
Check if the union tag is ``stop_sync``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'stop_sync'
|
||||
|
||||
def is_other(self):
|
||||
"""
|
||||
Check if the union tag is ``other``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'other'
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(MemberSpaceLimitType, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'MemberSpaceLimitType(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
MemberSpaceLimitType_validator = bv.Union(MemberSpaceLimitType)
|
||||
|
||||
class TimeRange(bb.Struct):
|
||||
"""
|
||||
Time range.
|
||||
|
||||
:ivar start_time: Optional starting time (inclusive).
|
||||
:ivar end_time: Optional ending time (exclusive).
|
||||
:ivar team_common.TimeRange.start_time: Optional starting time (inclusive).
|
||||
:ivar team_common.TimeRange.end_time: Optional ending time (exclusive).
|
||||
"""
|
||||
|
||||
__slots__ = [
|
||||
@@ -390,6 +475,9 @@ class TimeRange(object):
|
||||
self._end_time_value = None
|
||||
self._end_time_present = False
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(TimeRange, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'TimeRange(start_time={!r}, end_time={!r})'.format(
|
||||
self._start_time_value,
|
||||
@@ -452,6 +540,22 @@ GroupType.team = GroupType('team')
|
||||
GroupType.user_managed = GroupType('user_managed')
|
||||
GroupType.other = GroupType('other')
|
||||
|
||||
MemberSpaceLimitType._off_validator = bv.Void()
|
||||
MemberSpaceLimitType._alert_only_validator = bv.Void()
|
||||
MemberSpaceLimitType._stop_sync_validator = bv.Void()
|
||||
MemberSpaceLimitType._other_validator = bv.Void()
|
||||
MemberSpaceLimitType._tagmap = {
|
||||
'off': MemberSpaceLimitType._off_validator,
|
||||
'alert_only': MemberSpaceLimitType._alert_only_validator,
|
||||
'stop_sync': MemberSpaceLimitType._stop_sync_validator,
|
||||
'other': MemberSpaceLimitType._other_validator,
|
||||
}
|
||||
|
||||
MemberSpaceLimitType.off = MemberSpaceLimitType('off')
|
||||
MemberSpaceLimitType.alert_only = MemberSpaceLimitType('alert_only')
|
||||
MemberSpaceLimitType.stop_sync = MemberSpaceLimitType('stop_sync')
|
||||
MemberSpaceLimitType.other = MemberSpaceLimitType('other')
|
||||
|
||||
TimeRange._start_time_validator = bv.Nullable(common.DropboxTimestamp_validator)
|
||||
TimeRange._end_time_validator = bv.Nullable(common.DropboxTimestamp_validator)
|
||||
TimeRange._all_field_names_ = set([
|
||||
|
||||
@@ -1,25 +1,78 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Auto-generated by Stone, do not modify.
|
||||
# @generated
|
||||
# flake8: noqa
|
||||
# pylint: skip-file
|
||||
try:
|
||||
from . import stone_validators as bv
|
||||
from . import stone_base as bb
|
||||
except (SystemError, ValueError):
|
||||
except (ImportError, SystemError, ValueError):
|
||||
# Catch errors raised when importing a relative module when not in a package.
|
||||
# This makes testing this file directly (outside of a package) easier.
|
||||
import stone_validators as bv
|
||||
import stone_base as bb
|
||||
|
||||
class CameraUploadsPolicyState(bb.Union):
|
||||
"""
|
||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar team_policies.CameraUploadsPolicyState.disabled: Background camera
|
||||
uploads are disabled.
|
||||
:ivar team_policies.CameraUploadsPolicyState.enabled: Background camera
|
||||
uploads are allowed.
|
||||
"""
|
||||
|
||||
_catch_all = 'other'
|
||||
# Attribute is overwritten below the class definition
|
||||
disabled = None
|
||||
# Attribute is overwritten below the class definition
|
||||
enabled = None
|
||||
# Attribute is overwritten below the class definition
|
||||
other = None
|
||||
|
||||
def is_disabled(self):
|
||||
"""
|
||||
Check if the union tag is ``disabled``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'disabled'
|
||||
|
||||
def is_enabled(self):
|
||||
"""
|
||||
Check if the union tag is ``enabled``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'enabled'
|
||||
|
||||
def is_other(self):
|
||||
"""
|
||||
Check if the union tag is ``other``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'other'
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(CameraUploadsPolicyState, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'CameraUploadsPolicyState(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
CameraUploadsPolicyState_validator = bv.Union(CameraUploadsPolicyState)
|
||||
|
||||
class EmmState(bb.Union):
|
||||
"""
|
||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar disabled: Emm token is disabled.
|
||||
:ivar optional: Emm token is optional.
|
||||
:ivar required: Emm token is required.
|
||||
:ivar team_policies.EmmState.disabled: Emm token is disabled.
|
||||
:ivar team_policies.EmmState.optional: Emm token is optional.
|
||||
:ivar team_policies.EmmState.required: Emm token is required.
|
||||
"""
|
||||
|
||||
_catch_all = 'other'
|
||||
@@ -64,19 +117,64 @@ class EmmState(bb.Union):
|
||||
"""
|
||||
return self._tag == 'other'
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(EmmState, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'EmmState(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
EmmState_validator = bv.Union(EmmState)
|
||||
|
||||
class GroupCreation(bb.Union):
|
||||
"""
|
||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar team_policies.GroupCreation.admins_and_members: Team admins and
|
||||
members can create groups.
|
||||
:ivar team_policies.GroupCreation.admins_only: Only team admins can create
|
||||
groups.
|
||||
"""
|
||||
|
||||
_catch_all = None
|
||||
# Attribute is overwritten below the class definition
|
||||
admins_and_members = None
|
||||
# Attribute is overwritten below the class definition
|
||||
admins_only = None
|
||||
|
||||
def is_admins_and_members(self):
|
||||
"""
|
||||
Check if the union tag is ``admins_and_members``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'admins_and_members'
|
||||
|
||||
def is_admins_only(self):
|
||||
"""
|
||||
Check if the union tag is ``admins_only``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'admins_only'
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(GroupCreation, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'GroupCreation(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
GroupCreation_validator = bv.Union(GroupCreation)
|
||||
|
||||
class OfficeAddInPolicy(bb.Union):
|
||||
"""
|
||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar disabled: Office Add-In is disabled.
|
||||
:ivar enabled: Office Add-In is enabled.
|
||||
:ivar team_policies.OfficeAddInPolicy.disabled: Office Add-In is disabled.
|
||||
:ivar team_policies.OfficeAddInPolicy.enabled: Office Add-In is enabled.
|
||||
"""
|
||||
|
||||
_catch_all = 'other'
|
||||
@@ -111,20 +209,77 @@ class OfficeAddInPolicy(bb.Union):
|
||||
"""
|
||||
return self._tag == 'other'
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(OfficeAddInPolicy, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'OfficeAddInPolicy(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
OfficeAddInPolicy_validator = bv.Union(OfficeAddInPolicy)
|
||||
|
||||
class PaperDefaultFolderPolicy(bb.Union):
|
||||
"""
|
||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar team_policies.PaperDefaultFolderPolicy.everyone_in_team: Everyone in
|
||||
team will be the default option when creating a folder in Paper.
|
||||
:ivar team_policies.PaperDefaultFolderPolicy.invite_only: Invite only will
|
||||
be the default option when creating a folder in Paper.
|
||||
"""
|
||||
|
||||
_catch_all = 'other'
|
||||
# Attribute is overwritten below the class definition
|
||||
everyone_in_team = None
|
||||
# Attribute is overwritten below the class definition
|
||||
invite_only = None
|
||||
# Attribute is overwritten below the class definition
|
||||
other = None
|
||||
|
||||
def is_everyone_in_team(self):
|
||||
"""
|
||||
Check if the union tag is ``everyone_in_team``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'everyone_in_team'
|
||||
|
||||
def is_invite_only(self):
|
||||
"""
|
||||
Check if the union tag is ``invite_only``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'invite_only'
|
||||
|
||||
def is_other(self):
|
||||
"""
|
||||
Check if the union tag is ``other``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'other'
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(PaperDefaultFolderPolicy, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'PaperDefaultFolderPolicy(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
PaperDefaultFolderPolicy_validator = bv.Union(PaperDefaultFolderPolicy)
|
||||
|
||||
class PaperDeploymentPolicy(bb.Union):
|
||||
"""
|
||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar full: All team members have access to Paper.
|
||||
:ivar partial: Only whitelisted team members can access Paper. To see which
|
||||
user is whitelisted, check 'is_paper_whitelisted' on 'account/info'.
|
||||
:ivar team_policies.PaperDeploymentPolicy.full: All team members have access
|
||||
to Paper.
|
||||
:ivar team_policies.PaperDeploymentPolicy.partial: Only whitelisted team
|
||||
members can access Paper. To see which user is whitelisted, check
|
||||
'is_paper_whitelisted' on 'account/info'.
|
||||
"""
|
||||
|
||||
_catch_all = 'other'
|
||||
@@ -159,20 +314,75 @@ class PaperDeploymentPolicy(bb.Union):
|
||||
"""
|
||||
return self._tag == 'other'
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(PaperDeploymentPolicy, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'PaperDeploymentPolicy(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
PaperDeploymentPolicy_validator = bv.Union(PaperDeploymentPolicy)
|
||||
|
||||
class PaperDesktopPolicy(bb.Union):
|
||||
"""
|
||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar team_policies.PaperDesktopPolicy.disabled: Do not allow team members
|
||||
to use Paper Desktop.
|
||||
:ivar team_policies.PaperDesktopPolicy.enabled: Allow team members to use
|
||||
Paper Desktop.
|
||||
"""
|
||||
|
||||
_catch_all = 'other'
|
||||
# Attribute is overwritten below the class definition
|
||||
disabled = None
|
||||
# Attribute is overwritten below the class definition
|
||||
enabled = None
|
||||
# Attribute is overwritten below the class definition
|
||||
other = None
|
||||
|
||||
def is_disabled(self):
|
||||
"""
|
||||
Check if the union tag is ``disabled``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'disabled'
|
||||
|
||||
def is_enabled(self):
|
||||
"""
|
||||
Check if the union tag is ``enabled``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'enabled'
|
||||
|
||||
def is_other(self):
|
||||
"""
|
||||
Check if the union tag is ``other``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'other'
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(PaperDesktopPolicy, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'PaperDesktopPolicy(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
PaperDesktopPolicy_validator = bv.Union(PaperDesktopPolicy)
|
||||
|
||||
class PaperEnabledPolicy(bb.Union):
|
||||
"""
|
||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar disabled: Paper is disabled.
|
||||
:ivar enabled: Paper is enabled.
|
||||
:ivar unspecified: Unspecified policy.
|
||||
:ivar team_policies.PaperEnabledPolicy.disabled: Paper is disabled.
|
||||
:ivar team_policies.PaperEnabledPolicy.enabled: Paper is enabled.
|
||||
:ivar team_policies.PaperEnabledPolicy.unspecified: Unspecified policy.
|
||||
"""
|
||||
|
||||
_catch_all = 'other'
|
||||
@@ -217,6 +427,9 @@ class PaperEnabledPolicy(bb.Union):
|
||||
"""
|
||||
return self._tag == 'other'
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(PaperEnabledPolicy, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'PaperEnabledPolicy(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
@@ -228,12 +441,12 @@ class PasswordStrengthPolicy(bb.Union):
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar minimal_requirements: User passwords will adhere to the minimal
|
||||
password strength policy.
|
||||
:ivar moderate_password: User passwords will adhere to the moderate password
|
||||
strength policy.
|
||||
:ivar strong_password: User passwords will adhere to the very strong
|
||||
password strength policy.
|
||||
:ivar team_policies.PasswordStrengthPolicy.minimal_requirements: User
|
||||
passwords will adhere to the minimal password strength policy.
|
||||
:ivar team_policies.PasswordStrengthPolicy.moderate_password: User passwords
|
||||
will adhere to the moderate password strength policy.
|
||||
:ivar team_policies.PasswordStrengthPolicy.strong_password: User passwords
|
||||
will adhere to the very strong password strength policy.
|
||||
"""
|
||||
|
||||
_catch_all = 'other'
|
||||
@@ -278,6 +491,9 @@ class PasswordStrengthPolicy(bb.Union):
|
||||
"""
|
||||
return self._tag == 'other'
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(PasswordStrengthPolicy, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'PasswordStrengthPolicy(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
@@ -289,9 +505,11 @@ class RolloutMethod(bb.Union):
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar unlink_all: Unlink all.
|
||||
:ivar unlink_most_inactive: Unlink devices with the most inactivity.
|
||||
:ivar add_member_to_exceptions: Add member to Exceptions.
|
||||
:ivar team_policies.RolloutMethod.unlink_all: Unlink all.
|
||||
:ivar team_policies.RolloutMethod.unlink_most_inactive: Unlink devices with
|
||||
the most inactivity.
|
||||
:ivar team_policies.RolloutMethod.add_member_to_exceptions: Add member to
|
||||
Exceptions.
|
||||
"""
|
||||
|
||||
_catch_all = None
|
||||
@@ -326,6 +544,9 @@ class RolloutMethod(bb.Union):
|
||||
"""
|
||||
return self._tag == 'add_member_to_exceptions'
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(RolloutMethod, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'RolloutMethod(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
@@ -339,10 +560,11 @@ class SharedFolderJoinPolicy(bb.Union):
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar from_team_only: Team members can only join folders shared by
|
||||
teammates.
|
||||
:ivar from_anyone: Team members can join any shared folder, including those
|
||||
shared by users outside the team.
|
||||
:ivar team_policies.SharedFolderJoinPolicy.from_team_only: Team members can
|
||||
only join folders shared by teammates.
|
||||
:ivar team_policies.SharedFolderJoinPolicy.from_anyone: Team members can
|
||||
join any shared folder, including those shared by users outside the
|
||||
team.
|
||||
"""
|
||||
|
||||
_catch_all = 'other'
|
||||
@@ -377,6 +599,9 @@ class SharedFolderJoinPolicy(bb.Union):
|
||||
"""
|
||||
return self._tag == 'other'
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(SharedFolderJoinPolicy, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'SharedFolderJoinPolicy(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
@@ -390,9 +615,10 @@ class SharedFolderMemberPolicy(bb.Union):
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar team: Only a teammate can be a member of a folder shared by a team
|
||||
member.
|
||||
:ivar anyone: Anyone can be a member of a folder shared by a team member.
|
||||
:ivar team_policies.SharedFolderMemberPolicy.team: Only a teammate can be a
|
||||
member of a folder shared by a team member.
|
||||
:ivar team_policies.SharedFolderMemberPolicy.anyone: Anyone can be a member
|
||||
of a folder shared by a team member.
|
||||
"""
|
||||
|
||||
_catch_all = 'other'
|
||||
@@ -427,6 +653,9 @@ class SharedFolderMemberPolicy(bb.Union):
|
||||
"""
|
||||
return self._tag == 'other'
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(SharedFolderMemberPolicy, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'SharedFolderMemberPolicy(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
@@ -441,14 +670,15 @@ class SharedLinkCreatePolicy(bb.Union):
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar default_public: By default, anyone can access newly created shared
|
||||
links. No login will be required to access the shared links unless
|
||||
overridden.
|
||||
:ivar default_team_only: By default, only members of the same team can
|
||||
access newly created shared links. Login will be required to access the
|
||||
shared links unless overridden.
|
||||
:ivar team_only: Only members of the same team can access all shared links.
|
||||
Login will be required to access all shared links.
|
||||
:ivar team_policies.SharedLinkCreatePolicy.default_public: By default,
|
||||
anyone can access newly created shared links. No login will be required
|
||||
to access the shared links unless overridden.
|
||||
:ivar team_policies.SharedLinkCreatePolicy.default_team_only: By default,
|
||||
only members of the same team can access newly created shared links.
|
||||
Login will be required to access the shared links unless overridden.
|
||||
:ivar team_policies.SharedLinkCreatePolicy.team_only: Only members of the
|
||||
same team can access all shared links. Login will be required to access
|
||||
all shared links.
|
||||
"""
|
||||
|
||||
_catch_all = 'other'
|
||||
@@ -493,23 +723,232 @@ class SharedLinkCreatePolicy(bb.Union):
|
||||
"""
|
||||
return self._tag == 'other'
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(SharedLinkCreatePolicy, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'SharedLinkCreatePolicy(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
SharedLinkCreatePolicy_validator = bv.Union(SharedLinkCreatePolicy)
|
||||
|
||||
class ShowcaseDownloadPolicy(bb.Union):
|
||||
"""
|
||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar team_policies.ShowcaseDownloadPolicy.disabled: Do not allow files to
|
||||
be downloaded from Showcases.
|
||||
:ivar team_policies.ShowcaseDownloadPolicy.enabled: Allow files to be
|
||||
downloaded from Showcases.
|
||||
"""
|
||||
|
||||
_catch_all = 'other'
|
||||
# Attribute is overwritten below the class definition
|
||||
disabled = None
|
||||
# Attribute is overwritten below the class definition
|
||||
enabled = None
|
||||
# Attribute is overwritten below the class definition
|
||||
other = None
|
||||
|
||||
def is_disabled(self):
|
||||
"""
|
||||
Check if the union tag is ``disabled``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'disabled'
|
||||
|
||||
def is_enabled(self):
|
||||
"""
|
||||
Check if the union tag is ``enabled``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'enabled'
|
||||
|
||||
def is_other(self):
|
||||
"""
|
||||
Check if the union tag is ``other``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'other'
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(ShowcaseDownloadPolicy, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'ShowcaseDownloadPolicy(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
ShowcaseDownloadPolicy_validator = bv.Union(ShowcaseDownloadPolicy)
|
||||
|
||||
class ShowcaseEnabledPolicy(bb.Union):
|
||||
"""
|
||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar team_policies.ShowcaseEnabledPolicy.disabled: Showcase is disabled.
|
||||
:ivar team_policies.ShowcaseEnabledPolicy.enabled: Showcase is enabled.
|
||||
"""
|
||||
|
||||
_catch_all = 'other'
|
||||
# Attribute is overwritten below the class definition
|
||||
disabled = None
|
||||
# Attribute is overwritten below the class definition
|
||||
enabled = None
|
||||
# Attribute is overwritten below the class definition
|
||||
other = None
|
||||
|
||||
def is_disabled(self):
|
||||
"""
|
||||
Check if the union tag is ``disabled``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'disabled'
|
||||
|
||||
def is_enabled(self):
|
||||
"""
|
||||
Check if the union tag is ``enabled``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'enabled'
|
||||
|
||||
def is_other(self):
|
||||
"""
|
||||
Check if the union tag is ``other``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'other'
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(ShowcaseEnabledPolicy, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'ShowcaseEnabledPolicy(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
ShowcaseEnabledPolicy_validator = bv.Union(ShowcaseEnabledPolicy)
|
||||
|
||||
class ShowcaseExternalSharingPolicy(bb.Union):
|
||||
"""
|
||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar team_policies.ShowcaseExternalSharingPolicy.disabled: Do not allow
|
||||
showcases to be shared with people not on the team.
|
||||
:ivar team_policies.ShowcaseExternalSharingPolicy.enabled: Allow showcases
|
||||
to be shared with people not on the team.
|
||||
"""
|
||||
|
||||
_catch_all = 'other'
|
||||
# Attribute is overwritten below the class definition
|
||||
disabled = None
|
||||
# Attribute is overwritten below the class definition
|
||||
enabled = None
|
||||
# Attribute is overwritten below the class definition
|
||||
other = None
|
||||
|
||||
def is_disabled(self):
|
||||
"""
|
||||
Check if the union tag is ``disabled``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'disabled'
|
||||
|
||||
def is_enabled(self):
|
||||
"""
|
||||
Check if the union tag is ``enabled``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'enabled'
|
||||
|
||||
def is_other(self):
|
||||
"""
|
||||
Check if the union tag is ``other``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'other'
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(ShowcaseExternalSharingPolicy, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'ShowcaseExternalSharingPolicy(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
ShowcaseExternalSharingPolicy_validator = bv.Union(ShowcaseExternalSharingPolicy)
|
||||
|
||||
class SmartSyncPolicy(bb.Union):
|
||||
"""
|
||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar team_policies.SmartSyncPolicy.local: The specified content will be
|
||||
synced as local files by default.
|
||||
:ivar team_policies.SmartSyncPolicy.on_demand: The specified content will be
|
||||
synced as on-demand files by default.
|
||||
"""
|
||||
|
||||
_catch_all = 'other'
|
||||
# Attribute is overwritten below the class definition
|
||||
local = None
|
||||
# Attribute is overwritten below the class definition
|
||||
on_demand = None
|
||||
# Attribute is overwritten below the class definition
|
||||
other = None
|
||||
|
||||
def is_local(self):
|
||||
"""
|
||||
Check if the union tag is ``local``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'local'
|
||||
|
||||
def is_on_demand(self):
|
||||
"""
|
||||
Check if the union tag is ``on_demand``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'on_demand'
|
||||
|
||||
def is_other(self):
|
||||
"""
|
||||
Check if the union tag is ``other``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'other'
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(SmartSyncPolicy, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'SmartSyncPolicy(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
SmartSyncPolicy_validator = bv.Union(SmartSyncPolicy)
|
||||
|
||||
class SsoPolicy(bb.Union):
|
||||
"""
|
||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar disabled: Users will be able to sign in with their Dropbox
|
||||
credentials.
|
||||
:ivar optional: Users will be able to sign in with either their Dropbox or
|
||||
single sign-on credentials.
|
||||
:ivar required: Users will be required to sign in with their single sign-on
|
||||
credentials.
|
||||
:ivar team_policies.SsoPolicy.disabled: Users will be able to sign in with
|
||||
their Dropbox credentials.
|
||||
:ivar team_policies.SsoPolicy.optional: Users will be able to sign in with
|
||||
either their Dropbox or single sign-on credentials.
|
||||
:ivar team_policies.SsoPolicy.required: Users will be required to sign in
|
||||
with their single sign-on credentials.
|
||||
"""
|
||||
|
||||
_catch_all = 'other'
|
||||
@@ -554,24 +993,28 @@ class SsoPolicy(bb.Union):
|
||||
"""
|
||||
return self._tag == 'other'
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(SsoPolicy, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'SsoPolicy(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
SsoPolicy_validator = bv.Union(SsoPolicy)
|
||||
|
||||
class TeamMemberPolicies(object):
|
||||
class TeamMemberPolicies(bb.Struct):
|
||||
"""
|
||||
Policies governing team members.
|
||||
|
||||
:ivar sharing: Policies governing sharing.
|
||||
:ivar emm_state: This describes the Enterprise Mobility Management (EMM)
|
||||
state for this team. This information can be used to understand if an
|
||||
organization is integrating with a third-party EMM vendor to further
|
||||
manage and apply restrictions upon the team's Dropbox usage on mobile
|
||||
devices. This is a new feature and in the future we'll be adding more
|
||||
new fields and additional documentation.
|
||||
:ivar office_addin: The admin policy around the Dropbox Office Add-In for
|
||||
this team.
|
||||
:ivar team_policies.TeamMemberPolicies.sharing: Policies governing sharing.
|
||||
:ivar team_policies.TeamMemberPolicies.emm_state: This describes the
|
||||
Enterprise Mobility Management (EMM) state for this team. This
|
||||
information can be used to understand if an organization is integrating
|
||||
with a third-party EMM vendor to further manage and apply restrictions
|
||||
upon the team's Dropbox usage on mobile devices. This is a new feature
|
||||
and in the future we'll be adding more new fields and additional
|
||||
documentation.
|
||||
:ivar team_policies.TeamMemberPolicies.office_addin: The admin policy around
|
||||
the Dropbox Office Add-In for this team.
|
||||
"""
|
||||
|
||||
__slots__ = [
|
||||
@@ -676,6 +1119,9 @@ class TeamMemberPolicies(object):
|
||||
self._office_addin_value = None
|
||||
self._office_addin_present = False
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(TeamMemberPolicies, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'TeamMemberPolicies(sharing={!r}, emm_state={!r}, office_addin={!r})'.format(
|
||||
self._sharing_value,
|
||||
@@ -685,15 +1131,16 @@ class TeamMemberPolicies(object):
|
||||
|
||||
TeamMemberPolicies_validator = bv.Struct(TeamMemberPolicies)
|
||||
|
||||
class TeamSharingPolicies(object):
|
||||
class TeamSharingPolicies(bb.Struct):
|
||||
"""
|
||||
Policies governing sharing within and outside of the team.
|
||||
|
||||
:ivar shared_folder_member_policy: Who can join folders shared by team
|
||||
members.
|
||||
:ivar shared_folder_join_policy: Which shared folders team members can join.
|
||||
:ivar shared_link_create_policy: Who can view shared links owned by team
|
||||
members.
|
||||
:ivar team_policies.TeamSharingPolicies.shared_folder_member_policy: Who can
|
||||
join folders shared by team members.
|
||||
:ivar team_policies.TeamSharingPolicies.shared_folder_join_policy: Which
|
||||
shared folders team members can join.
|
||||
:ivar team_policies.TeamSharingPolicies.shared_link_create_policy: Who can
|
||||
view shared links owned by team members.
|
||||
"""
|
||||
|
||||
__slots__ = [
|
||||
@@ -793,6 +1240,9 @@ class TeamSharingPolicies(object):
|
||||
self._shared_link_create_policy_value = None
|
||||
self._shared_link_create_policy_present = False
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(TeamSharingPolicies, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'TeamSharingPolicies(shared_folder_member_policy={!r}, shared_folder_join_policy={!r}, shared_link_create_policy={!r})'.format(
|
||||
self._shared_folder_member_policy_value,
|
||||
@@ -808,8 +1258,10 @@ class TwoStepVerificationPolicy(bb.Union):
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar require_tfa_enable: Enabled require two factor authorization.
|
||||
:ivar require_tfa_disable: Disabled require two factor authorization.
|
||||
:ivar team_policies.TwoStepVerificationPolicy.require_tfa_enable: Enabled
|
||||
require two factor authorization.
|
||||
:ivar team_policies.TwoStepVerificationPolicy.require_tfa_disable: Disabled
|
||||
require two factor authorization.
|
||||
"""
|
||||
|
||||
_catch_all = 'other'
|
||||
@@ -844,11 +1296,79 @@ class TwoStepVerificationPolicy(bb.Union):
|
||||
"""
|
||||
return self._tag == 'other'
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(TwoStepVerificationPolicy, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'TwoStepVerificationPolicy(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
TwoStepVerificationPolicy_validator = bv.Union(TwoStepVerificationPolicy)
|
||||
|
||||
class TwoStepVerificationState(bb.Union):
|
||||
"""
|
||||
This class acts as a tagged union. Only one of the ``is_*`` methods will
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar team_policies.TwoStepVerificationState.required: Enabled require two
|
||||
factor authorization.
|
||||
:ivar team_policies.TwoStepVerificationState.optional: Optional require two
|
||||
factor authorization.
|
||||
"""
|
||||
|
||||
_catch_all = 'other'
|
||||
# Attribute is overwritten below the class definition
|
||||
required = None
|
||||
# Attribute is overwritten below the class definition
|
||||
optional = None
|
||||
# Attribute is overwritten below the class definition
|
||||
other = None
|
||||
|
||||
def is_required(self):
|
||||
"""
|
||||
Check if the union tag is ``required``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'required'
|
||||
|
||||
def is_optional(self):
|
||||
"""
|
||||
Check if the union tag is ``optional``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'optional'
|
||||
|
||||
def is_other(self):
|
||||
"""
|
||||
Check if the union tag is ``other``.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return self._tag == 'other'
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(TwoStepVerificationState, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'TwoStepVerificationState(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
TwoStepVerificationState_validator = bv.Union(TwoStepVerificationState)
|
||||
|
||||
CameraUploadsPolicyState._disabled_validator = bv.Void()
|
||||
CameraUploadsPolicyState._enabled_validator = bv.Void()
|
||||
CameraUploadsPolicyState._other_validator = bv.Void()
|
||||
CameraUploadsPolicyState._tagmap = {
|
||||
'disabled': CameraUploadsPolicyState._disabled_validator,
|
||||
'enabled': CameraUploadsPolicyState._enabled_validator,
|
||||
'other': CameraUploadsPolicyState._other_validator,
|
||||
}
|
||||
|
||||
CameraUploadsPolicyState.disabled = CameraUploadsPolicyState('disabled')
|
||||
CameraUploadsPolicyState.enabled = CameraUploadsPolicyState('enabled')
|
||||
CameraUploadsPolicyState.other = CameraUploadsPolicyState('other')
|
||||
|
||||
EmmState._disabled_validator = bv.Void()
|
||||
EmmState._optional_validator = bv.Void()
|
||||
EmmState._required_validator = bv.Void()
|
||||
@@ -865,6 +1385,16 @@ EmmState.optional = EmmState('optional')
|
||||
EmmState.required = EmmState('required')
|
||||
EmmState.other = EmmState('other')
|
||||
|
||||
GroupCreation._admins_and_members_validator = bv.Void()
|
||||
GroupCreation._admins_only_validator = bv.Void()
|
||||
GroupCreation._tagmap = {
|
||||
'admins_and_members': GroupCreation._admins_and_members_validator,
|
||||
'admins_only': GroupCreation._admins_only_validator,
|
||||
}
|
||||
|
||||
GroupCreation.admins_and_members = GroupCreation('admins_and_members')
|
||||
GroupCreation.admins_only = GroupCreation('admins_only')
|
||||
|
||||
OfficeAddInPolicy._disabled_validator = bv.Void()
|
||||
OfficeAddInPolicy._enabled_validator = bv.Void()
|
||||
OfficeAddInPolicy._other_validator = bv.Void()
|
||||
@@ -878,6 +1408,19 @@ OfficeAddInPolicy.disabled = OfficeAddInPolicy('disabled')
|
||||
OfficeAddInPolicy.enabled = OfficeAddInPolicy('enabled')
|
||||
OfficeAddInPolicy.other = OfficeAddInPolicy('other')
|
||||
|
||||
PaperDefaultFolderPolicy._everyone_in_team_validator = bv.Void()
|
||||
PaperDefaultFolderPolicy._invite_only_validator = bv.Void()
|
||||
PaperDefaultFolderPolicy._other_validator = bv.Void()
|
||||
PaperDefaultFolderPolicy._tagmap = {
|
||||
'everyone_in_team': PaperDefaultFolderPolicy._everyone_in_team_validator,
|
||||
'invite_only': PaperDefaultFolderPolicy._invite_only_validator,
|
||||
'other': PaperDefaultFolderPolicy._other_validator,
|
||||
}
|
||||
|
||||
PaperDefaultFolderPolicy.everyone_in_team = PaperDefaultFolderPolicy('everyone_in_team')
|
||||
PaperDefaultFolderPolicy.invite_only = PaperDefaultFolderPolicy('invite_only')
|
||||
PaperDefaultFolderPolicy.other = PaperDefaultFolderPolicy('other')
|
||||
|
||||
PaperDeploymentPolicy._full_validator = bv.Void()
|
||||
PaperDeploymentPolicy._partial_validator = bv.Void()
|
||||
PaperDeploymentPolicy._other_validator = bv.Void()
|
||||
@@ -891,6 +1434,19 @@ PaperDeploymentPolicy.full = PaperDeploymentPolicy('full')
|
||||
PaperDeploymentPolicy.partial = PaperDeploymentPolicy('partial')
|
||||
PaperDeploymentPolicy.other = PaperDeploymentPolicy('other')
|
||||
|
||||
PaperDesktopPolicy._disabled_validator = bv.Void()
|
||||
PaperDesktopPolicy._enabled_validator = bv.Void()
|
||||
PaperDesktopPolicy._other_validator = bv.Void()
|
||||
PaperDesktopPolicy._tagmap = {
|
||||
'disabled': PaperDesktopPolicy._disabled_validator,
|
||||
'enabled': PaperDesktopPolicy._enabled_validator,
|
||||
'other': PaperDesktopPolicy._other_validator,
|
||||
}
|
||||
|
||||
PaperDesktopPolicy.disabled = PaperDesktopPolicy('disabled')
|
||||
PaperDesktopPolicy.enabled = PaperDesktopPolicy('enabled')
|
||||
PaperDesktopPolicy.other = PaperDesktopPolicy('other')
|
||||
|
||||
PaperEnabledPolicy._disabled_validator = bv.Void()
|
||||
PaperEnabledPolicy._enabled_validator = bv.Void()
|
||||
PaperEnabledPolicy._unspecified_validator = bv.Void()
|
||||
@@ -978,6 +1534,58 @@ SharedLinkCreatePolicy.default_team_only = SharedLinkCreatePolicy('default_team_
|
||||
SharedLinkCreatePolicy.team_only = SharedLinkCreatePolicy('team_only')
|
||||
SharedLinkCreatePolicy.other = SharedLinkCreatePolicy('other')
|
||||
|
||||
ShowcaseDownloadPolicy._disabled_validator = bv.Void()
|
||||
ShowcaseDownloadPolicy._enabled_validator = bv.Void()
|
||||
ShowcaseDownloadPolicy._other_validator = bv.Void()
|
||||
ShowcaseDownloadPolicy._tagmap = {
|
||||
'disabled': ShowcaseDownloadPolicy._disabled_validator,
|
||||
'enabled': ShowcaseDownloadPolicy._enabled_validator,
|
||||
'other': ShowcaseDownloadPolicy._other_validator,
|
||||
}
|
||||
|
||||
ShowcaseDownloadPolicy.disabled = ShowcaseDownloadPolicy('disabled')
|
||||
ShowcaseDownloadPolicy.enabled = ShowcaseDownloadPolicy('enabled')
|
||||
ShowcaseDownloadPolicy.other = ShowcaseDownloadPolicy('other')
|
||||
|
||||
ShowcaseEnabledPolicy._disabled_validator = bv.Void()
|
||||
ShowcaseEnabledPolicy._enabled_validator = bv.Void()
|
||||
ShowcaseEnabledPolicy._other_validator = bv.Void()
|
||||
ShowcaseEnabledPolicy._tagmap = {
|
||||
'disabled': ShowcaseEnabledPolicy._disabled_validator,
|
||||
'enabled': ShowcaseEnabledPolicy._enabled_validator,
|
||||
'other': ShowcaseEnabledPolicy._other_validator,
|
||||
}
|
||||
|
||||
ShowcaseEnabledPolicy.disabled = ShowcaseEnabledPolicy('disabled')
|
||||
ShowcaseEnabledPolicy.enabled = ShowcaseEnabledPolicy('enabled')
|
||||
ShowcaseEnabledPolicy.other = ShowcaseEnabledPolicy('other')
|
||||
|
||||
ShowcaseExternalSharingPolicy._disabled_validator = bv.Void()
|
||||
ShowcaseExternalSharingPolicy._enabled_validator = bv.Void()
|
||||
ShowcaseExternalSharingPolicy._other_validator = bv.Void()
|
||||
ShowcaseExternalSharingPolicy._tagmap = {
|
||||
'disabled': ShowcaseExternalSharingPolicy._disabled_validator,
|
||||
'enabled': ShowcaseExternalSharingPolicy._enabled_validator,
|
||||
'other': ShowcaseExternalSharingPolicy._other_validator,
|
||||
}
|
||||
|
||||
ShowcaseExternalSharingPolicy.disabled = ShowcaseExternalSharingPolicy('disabled')
|
||||
ShowcaseExternalSharingPolicy.enabled = ShowcaseExternalSharingPolicy('enabled')
|
||||
ShowcaseExternalSharingPolicy.other = ShowcaseExternalSharingPolicy('other')
|
||||
|
||||
SmartSyncPolicy._local_validator = bv.Void()
|
||||
SmartSyncPolicy._on_demand_validator = bv.Void()
|
||||
SmartSyncPolicy._other_validator = bv.Void()
|
||||
SmartSyncPolicy._tagmap = {
|
||||
'local': SmartSyncPolicy._local_validator,
|
||||
'on_demand': SmartSyncPolicy._on_demand_validator,
|
||||
'other': SmartSyncPolicy._other_validator,
|
||||
}
|
||||
|
||||
SmartSyncPolicy.local = SmartSyncPolicy('local')
|
||||
SmartSyncPolicy.on_demand = SmartSyncPolicy('on_demand')
|
||||
SmartSyncPolicy.other = SmartSyncPolicy('other')
|
||||
|
||||
SsoPolicy._disabled_validator = bv.Void()
|
||||
SsoPolicy._optional_validator = bv.Void()
|
||||
SsoPolicy._required_validator = bv.Void()
|
||||
@@ -1035,6 +1643,19 @@ TwoStepVerificationPolicy.require_tfa_enable = TwoStepVerificationPolicy('requir
|
||||
TwoStepVerificationPolicy.require_tfa_disable = TwoStepVerificationPolicy('require_tfa_disable')
|
||||
TwoStepVerificationPolicy.other = TwoStepVerificationPolicy('other')
|
||||
|
||||
TwoStepVerificationState._required_validator = bv.Void()
|
||||
TwoStepVerificationState._optional_validator = bv.Void()
|
||||
TwoStepVerificationState._other_validator = bv.Void()
|
||||
TwoStepVerificationState._tagmap = {
|
||||
'required': TwoStepVerificationState._required_validator,
|
||||
'optional': TwoStepVerificationState._optional_validator,
|
||||
'other': TwoStepVerificationState._other_validator,
|
||||
}
|
||||
|
||||
TwoStepVerificationState.required = TwoStepVerificationState('required')
|
||||
TwoStepVerificationState.optional = TwoStepVerificationState('optional')
|
||||
TwoStepVerificationState.other = TwoStepVerificationState('other')
|
||||
|
||||
ROUTES = {
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Auto-generated by Stone, do not modify.
|
||||
# @generated
|
||||
# flake8: noqa
|
||||
# pylint: skip-file
|
||||
"""
|
||||
@@ -9,7 +10,7 @@ This namespace contains endpoints and data types for user management.
|
||||
try:
|
||||
from . import stone_validators as bv
|
||||
from . import stone_base as bb
|
||||
except (SystemError, ValueError):
|
||||
except (ImportError, SystemError, ValueError):
|
||||
# Catch errors raised when importing a relative module when not in a package.
|
||||
# This makes testing this file directly (outside of a package) easier.
|
||||
import stone_validators as bv
|
||||
@@ -17,27 +18,32 @@ except (SystemError, ValueError):
|
||||
|
||||
try:
|
||||
from . import (
|
||||
common,
|
||||
team_common,
|
||||
team_policies,
|
||||
users_common,
|
||||
)
|
||||
except (SystemError, ValueError):
|
||||
except (ImportError, SystemError, ValueError):
|
||||
import common
|
||||
import team_common
|
||||
import team_policies
|
||||
import users_common
|
||||
|
||||
class Account(object):
|
||||
class Account(bb.Struct):
|
||||
"""
|
||||
The amount of detail revealed about an account depends on the user being
|
||||
queried and the user making the query.
|
||||
|
||||
:ivar account_id: The user's unique Dropbox ID.
|
||||
:ivar name: Details of a user's name.
|
||||
:ivar email: The user's e-mail address. Do not rely on this without checking
|
||||
the ``email_verified`` field. Even then, it's possible that the user has
|
||||
since lost access to their e-mail.
|
||||
:ivar email_verified: Whether the user has verified their e-mail address.
|
||||
:ivar profile_photo_url: URL for the photo representing the user, if one is
|
||||
set.
|
||||
:ivar disabled: Whether the user has been disabled.
|
||||
:ivar users.Account.account_id: The user's unique Dropbox ID.
|
||||
:ivar users.Account.name: Details of a user's name.
|
||||
:ivar users.Account.email: The user's e-mail address. Do not rely on this
|
||||
without checking the ``email_verified`` field. Even then, it's possible
|
||||
that the user has since lost access to their e-mail.
|
||||
:ivar users.Account.email_verified: Whether the user has verified their
|
||||
e-mail address.
|
||||
:ivar users.Account.profile_photo_url: URL for the photo representing the
|
||||
user, if one is set.
|
||||
:ivar users.Account.disabled: Whether the user has been disabled.
|
||||
"""
|
||||
|
||||
__slots__ = [
|
||||
@@ -232,6 +238,9 @@ class Account(object):
|
||||
self._disabled_value = None
|
||||
self._disabled_present = False
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(Account, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'Account(account_id={!r}, name={!r}, email={!r}, email_verified={!r}, disabled={!r}, profile_photo_url={!r})'.format(
|
||||
self._account_id_value,
|
||||
@@ -248,11 +257,12 @@ class BasicAccount(Account):
|
||||
"""
|
||||
Basic information about any account.
|
||||
|
||||
:ivar is_teammate: Whether this user is a teammate of the current user. If
|
||||
this account is the current user's account, then this will be ``True``.
|
||||
:ivar team_member_id: The user's unique team member id. This field will only
|
||||
be present if the user is part of a team and ``is_teammate`` is
|
||||
``True``.
|
||||
:ivar users.BasicAccount.is_teammate: Whether this user is a teammate of the
|
||||
current user. If this account is the current user's account, then this
|
||||
will be ``True``.
|
||||
:ivar users.BasicAccount.team_member_id: The user's unique team member id.
|
||||
This field will only be present if the user is part of a team and
|
||||
``is_teammate`` is ``True``.
|
||||
"""
|
||||
|
||||
__slots__ = [
|
||||
@@ -339,6 +349,9 @@ class BasicAccount(Account):
|
||||
self._team_member_id_value = None
|
||||
self._team_member_id_present = False
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(BasicAccount, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'BasicAccount(account_id={!r}, name={!r}, email={!r}, email_verified={!r}, disabled={!r}, is_teammate={!r}, profile_photo_url={!r}, team_member_id={!r})'.format(
|
||||
self._account_id_value,
|
||||
@@ -357,21 +370,23 @@ class FullAccount(Account):
|
||||
"""
|
||||
Detailed information about the current user's account.
|
||||
|
||||
:ivar country: The user's two-letter country code, if available. Country
|
||||
codes are based on `ISO 3166-1
|
||||
:ivar users.FullAccount.country: The user's two-letter country code, if
|
||||
available. Country codes are based on `ISO 3166-1
|
||||
<http://en.wikipedia.org/wiki/ISO_3166-1>`_.
|
||||
:ivar locale: The language that the user specified. Locale tags will be
|
||||
`IETF language tags <http://en.wikipedia.org/wiki/IETF_language_tag>`_.
|
||||
:ivar referral_link: The user's `referral link
|
||||
:ivar users.FullAccount.locale: The language that the user specified. Locale
|
||||
tags will be `IETF language tags
|
||||
<http://en.wikipedia.org/wiki/IETF_language_tag>`_.
|
||||
:ivar users.FullAccount.referral_link: The user's `referral link
|
||||
<https://www.dropbox.com/referrals>`_.
|
||||
:ivar team: If this account is a member of a team, information about that
|
||||
team.
|
||||
:ivar team_member_id: This account's unique team member id. This field will
|
||||
only be present if ``team`` is present.
|
||||
:ivar is_paired: Whether the user has a personal and work account. If the
|
||||
current account is personal, then ``team`` will always be None, but
|
||||
``is_paired`` will indicate if a work account is linked.
|
||||
:ivar account_type: What type of account this user has.
|
||||
:ivar users.FullAccount.team: If this account is a member of a team,
|
||||
information about that team.
|
||||
:ivar users.FullAccount.team_member_id: This account's unique team member
|
||||
id. This field will only be present if ``team`` is present.
|
||||
:ivar users.FullAccount.is_paired: Whether the user has a personal and work
|
||||
account. If the current account is personal, then ``team`` will always
|
||||
be None, but ``is_paired`` will indicate if a work account is linked.
|
||||
:ivar users.FullAccount.account_type: What type of account this user has.
|
||||
:ivar users.FullAccount.root_info: The root info for this account.
|
||||
"""
|
||||
|
||||
__slots__ = [
|
||||
@@ -389,6 +404,8 @@ class FullAccount(Account):
|
||||
'_is_paired_present',
|
||||
'_account_type_value',
|
||||
'_account_type_present',
|
||||
'_root_info_value',
|
||||
'_root_info_present',
|
||||
]
|
||||
|
||||
_has_required_fields = True
|
||||
@@ -403,6 +420,7 @@ class FullAccount(Account):
|
||||
referral_link=None,
|
||||
is_paired=None,
|
||||
account_type=None,
|
||||
root_info=None,
|
||||
profile_photo_url=None,
|
||||
country=None,
|
||||
team=None,
|
||||
@@ -427,6 +445,8 @@ class FullAccount(Account):
|
||||
self._is_paired_present = False
|
||||
self._account_type_value = None
|
||||
self._account_type_present = False
|
||||
self._root_info_value = None
|
||||
self._root_info_present = False
|
||||
if country is not None:
|
||||
self.country = country
|
||||
if locale is not None:
|
||||
@@ -441,6 +461,8 @@ class FullAccount(Account):
|
||||
self.is_paired = is_paired
|
||||
if account_type is not None:
|
||||
self.account_type = account_type
|
||||
if root_info is not None:
|
||||
self.root_info = root_info
|
||||
|
||||
@property
|
||||
def country(self):
|
||||
@@ -599,7 +621,7 @@ class FullAccount(Account):
|
||||
"""
|
||||
What type of account this user has.
|
||||
|
||||
:rtype: users_common.AccountType_validator
|
||||
:rtype: users_common.AccountType
|
||||
"""
|
||||
if self._account_type_present:
|
||||
return self._account_type_value
|
||||
@@ -617,8 +639,34 @@ class FullAccount(Account):
|
||||
self._account_type_value = None
|
||||
self._account_type_present = False
|
||||
|
||||
@property
|
||||
def root_info(self):
|
||||
"""
|
||||
The root info for this account.
|
||||
|
||||
:rtype: common.RootInfo
|
||||
"""
|
||||
if self._root_info_present:
|
||||
return self._root_info_value
|
||||
else:
|
||||
raise AttributeError("missing required field 'root_info'")
|
||||
|
||||
@root_info.setter
|
||||
def root_info(self, val):
|
||||
self._root_info_validator.validate_type_only(val)
|
||||
self._root_info_value = val
|
||||
self._root_info_present = True
|
||||
|
||||
@root_info.deleter
|
||||
def root_info(self):
|
||||
self._root_info_value = None
|
||||
self._root_info_present = False
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(FullAccount, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'FullAccount(account_id={!r}, name={!r}, email={!r}, email_verified={!r}, disabled={!r}, locale={!r}, referral_link={!r}, is_paired={!r}, account_type={!r}, profile_photo_url={!r}, country={!r}, team={!r}, team_member_id={!r})'.format(
|
||||
return 'FullAccount(account_id={!r}, name={!r}, email={!r}, email_verified={!r}, disabled={!r}, locale={!r}, referral_link={!r}, is_paired={!r}, account_type={!r}, root_info={!r}, profile_photo_url={!r}, country={!r}, team={!r}, team_member_id={!r})'.format(
|
||||
self._account_id_value,
|
||||
self._name_value,
|
||||
self._email_value,
|
||||
@@ -628,6 +676,7 @@ class FullAccount(Account):
|
||||
self._referral_link_value,
|
||||
self._is_paired_value,
|
||||
self._account_type_value,
|
||||
self._root_info_value,
|
||||
self._profile_photo_url_value,
|
||||
self._country_value,
|
||||
self._team_value,
|
||||
@@ -636,12 +685,12 @@ class FullAccount(Account):
|
||||
|
||||
FullAccount_validator = bv.Struct(FullAccount)
|
||||
|
||||
class Team(object):
|
||||
class Team(bb.Struct):
|
||||
"""
|
||||
Information about a team.
|
||||
|
||||
:ivar id: The team's unique ID.
|
||||
:ivar name: The name of the team.
|
||||
:ivar users.Team.id: The team's unique ID.
|
||||
:ivar users.Team.name: The name of the team.
|
||||
"""
|
||||
|
||||
__slots__ = [
|
||||
@@ -711,6 +760,9 @@ class Team(object):
|
||||
self._name_value = None
|
||||
self._name_present = False
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(Team, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'Team(id={!r}, name={!r})'.format(
|
||||
self._id_value,
|
||||
@@ -723,9 +775,9 @@ class FullTeam(Team):
|
||||
"""
|
||||
Detailed information about a team.
|
||||
|
||||
:ivar sharing_policies: Team policies governing sharing.
|
||||
:ivar office_addin_policy: Team policy governing the use of the Office
|
||||
Add-In.
|
||||
:ivar users.FullTeam.sharing_policies: Team policies governing sharing.
|
||||
:ivar users.FullTeam.office_addin_policy: Team policy governing the use of
|
||||
the Office Add-In.
|
||||
"""
|
||||
|
||||
__slots__ = [
|
||||
@@ -758,7 +810,7 @@ class FullTeam(Team):
|
||||
"""
|
||||
Team policies governing sharing.
|
||||
|
||||
:rtype: team_policies.TeamSharingPolicies_validator
|
||||
:rtype: team_policies.TeamSharingPolicies
|
||||
"""
|
||||
if self._sharing_policies_present:
|
||||
return self._sharing_policies_value
|
||||
@@ -781,7 +833,7 @@ class FullTeam(Team):
|
||||
"""
|
||||
Team policy governing the use of the Office Add-In.
|
||||
|
||||
:rtype: team_policies.OfficeAddInPolicy_validator
|
||||
:rtype: team_policies.OfficeAddInPolicy
|
||||
"""
|
||||
if self._office_addin_policy_present:
|
||||
return self._office_addin_policy_value
|
||||
@@ -799,6 +851,9 @@ class FullTeam(Team):
|
||||
self._office_addin_policy_value = None
|
||||
self._office_addin_policy_present = False
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(FullTeam, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'FullTeam(id={!r}, name={!r}, sharing_policies={!r}, office_addin_policy={!r})'.format(
|
||||
self._id_value,
|
||||
@@ -809,9 +864,9 @@ class FullTeam(Team):
|
||||
|
||||
FullTeam_validator = bv.Struct(FullTeam)
|
||||
|
||||
class GetAccountArg(object):
|
||||
class GetAccountArg(bb.Struct):
|
||||
"""
|
||||
:ivar account_id: A user's account identifier.
|
||||
:ivar users.GetAccountArg.account_id: A user's account identifier.
|
||||
"""
|
||||
|
||||
__slots__ = [
|
||||
@@ -851,6 +906,9 @@ class GetAccountArg(object):
|
||||
self._account_id_value = None
|
||||
self._account_id_present = False
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(GetAccountArg, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'GetAccountArg(account_id={!r})'.format(
|
||||
self._account_id_value,
|
||||
@@ -858,10 +916,10 @@ class GetAccountArg(object):
|
||||
|
||||
GetAccountArg_validator = bv.Struct(GetAccountArg)
|
||||
|
||||
class GetAccountBatchArg(object):
|
||||
class GetAccountBatchArg(bb.Struct):
|
||||
"""
|
||||
:ivar account_ids: List of user account identifiers. Should not contain any
|
||||
duplicate account IDs.
|
||||
:ivar users.GetAccountBatchArg.account_ids: List of user account
|
||||
identifiers. Should not contain any duplicate account IDs.
|
||||
"""
|
||||
|
||||
__slots__ = [
|
||||
@@ -902,6 +960,9 @@ class GetAccountBatchArg(object):
|
||||
self._account_ids_value = None
|
||||
self._account_ids_present = False
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(GetAccountBatchArg, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'GetAccountBatchArg(account_ids={!r})'.format(
|
||||
self._account_ids_value,
|
||||
@@ -915,8 +976,9 @@ class GetAccountBatchError(bb.Union):
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar str no_account: The value is an account ID specified in
|
||||
:field:`GetAccountBatchArg.account_ids` that does not exist.
|
||||
:ivar str users.GetAccountBatchError.no_account: The value is an account ID
|
||||
specified in :field:`GetAccountBatchArg.account_ids` that does not
|
||||
exist.
|
||||
"""
|
||||
|
||||
_catch_all = 'other'
|
||||
@@ -963,6 +1025,9 @@ class GetAccountBatchError(bb.Union):
|
||||
raise AttributeError("tag 'no_account' not set")
|
||||
return self._value
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(GetAccountBatchError, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'GetAccountBatchError(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
@@ -974,7 +1039,8 @@ class GetAccountError(bb.Union):
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar no_account: The specified ``GetAccountArg.account_id`` does not exist.
|
||||
:ivar users.GetAccountError.no_account: The specified
|
||||
``GetAccountArg.account_id`` does not exist.
|
||||
"""
|
||||
|
||||
_catch_all = 'other'
|
||||
@@ -999,14 +1065,18 @@ class GetAccountError(bb.Union):
|
||||
"""
|
||||
return self._tag == 'other'
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(GetAccountError, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'GetAccountError(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
GetAccountError_validator = bv.Union(GetAccountError)
|
||||
|
||||
class IndividualSpaceAllocation(object):
|
||||
class IndividualSpaceAllocation(bb.Struct):
|
||||
"""
|
||||
:ivar allocated: The total space allocated to the user's account (bytes).
|
||||
:ivar users.IndividualSpaceAllocation.allocated: The total space allocated
|
||||
to the user's account (bytes).
|
||||
"""
|
||||
|
||||
__slots__ = [
|
||||
@@ -1028,7 +1098,7 @@ class IndividualSpaceAllocation(object):
|
||||
"""
|
||||
The total space allocated to the user's account (bytes).
|
||||
|
||||
:rtype: long
|
||||
:rtype: int
|
||||
"""
|
||||
if self._allocated_present:
|
||||
return self._allocated_value
|
||||
@@ -1046,6 +1116,9 @@ class IndividualSpaceAllocation(object):
|
||||
self._allocated_value = None
|
||||
self._allocated_present = False
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(IndividualSpaceAllocation, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'IndividualSpaceAllocation(allocated={!r})'.format(
|
||||
self._allocated_value,
|
||||
@@ -1053,19 +1126,19 @@ class IndividualSpaceAllocation(object):
|
||||
|
||||
IndividualSpaceAllocation_validator = bv.Struct(IndividualSpaceAllocation)
|
||||
|
||||
class Name(object):
|
||||
class Name(bb.Struct):
|
||||
"""
|
||||
Representations for a person's name to assist with internationalization.
|
||||
|
||||
:ivar given_name: Also known as a first name.
|
||||
:ivar surname: Also known as a last name or family name.
|
||||
:ivar familiar_name: Locale-dependent name. In the US, a person's familiar
|
||||
name is their ``given_name``, but elsewhere, it could be any combination
|
||||
of a person's ``given_name`` and ``surname``.
|
||||
:ivar display_name: A name that can be used directly to represent the name
|
||||
of a user's Dropbox account.
|
||||
:ivar abbreviated_name: An abbreviated form of the person's name. Their
|
||||
initials in most locales.
|
||||
:ivar users.Name.given_name: Also known as a first name.
|
||||
:ivar users.Name.surname: Also known as a last name or family name.
|
||||
:ivar users.Name.familiar_name: Locale-dependent name. In the US, a person's
|
||||
familiar name is their ``given_name``, but elsewhere, it could be any
|
||||
combination of a person's ``given_name`` and ``surname``.
|
||||
:ivar users.Name.display_name: A name that can be used directly to represent
|
||||
the name of a user's Dropbox account.
|
||||
:ivar users.Name.abbreviated_name: An abbreviated form of the person's name.
|
||||
Their initials in most locales.
|
||||
"""
|
||||
|
||||
__slots__ = [
|
||||
@@ -1229,6 +1302,9 @@ class Name(object):
|
||||
self._abbreviated_name_value = None
|
||||
self._abbreviated_name_present = False
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(Name, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'Name(given_name={!r}, surname={!r}, familiar_name={!r}, display_name={!r}, abbreviated_name={!r})'.format(
|
||||
self._given_name_value,
|
||||
@@ -1248,10 +1324,10 @@ class SpaceAllocation(bb.Union):
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar IndividualSpaceAllocation individual: The user's space allocation
|
||||
applies only to their individual account.
|
||||
:ivar TeamSpaceAllocation team: The user shares space with other members of
|
||||
their team.
|
||||
:ivar IndividualSpaceAllocation SpaceAllocation.individual: The user's space
|
||||
allocation applies only to their individual account.
|
||||
:ivar TeamSpaceAllocation SpaceAllocation.team: The user shares space with
|
||||
other members of their team.
|
||||
"""
|
||||
|
||||
_catch_all = 'other'
|
||||
@@ -1328,17 +1404,20 @@ class SpaceAllocation(bb.Union):
|
||||
raise AttributeError("tag 'team' not set")
|
||||
return self._value
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(SpaceAllocation, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'SpaceAllocation(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
SpaceAllocation_validator = bv.Union(SpaceAllocation)
|
||||
|
||||
class SpaceUsage(object):
|
||||
class SpaceUsage(bb.Struct):
|
||||
"""
|
||||
Information about a user's space usage and quota.
|
||||
|
||||
:ivar used: The user's total space usage (bytes).
|
||||
:ivar allocation: The user's space allocation.
|
||||
:ivar users.SpaceUsage.used: The user's total space usage (bytes).
|
||||
:ivar users.SpaceUsage.allocation: The user's space allocation.
|
||||
"""
|
||||
|
||||
__slots__ = [
|
||||
@@ -1367,7 +1446,7 @@ class SpaceUsage(object):
|
||||
"""
|
||||
The user's total space usage (bytes).
|
||||
|
||||
:rtype: long
|
||||
:rtype: int
|
||||
"""
|
||||
if self._used_present:
|
||||
return self._used_value
|
||||
@@ -1408,6 +1487,9 @@ class SpaceUsage(object):
|
||||
self._allocation_value = None
|
||||
self._allocation_present = False
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(SpaceUsage, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'SpaceUsage(used={!r}, allocation={!r})'.format(
|
||||
self._used_value,
|
||||
@@ -1416,10 +1498,18 @@ class SpaceUsage(object):
|
||||
|
||||
SpaceUsage_validator = bv.Struct(SpaceUsage)
|
||||
|
||||
class TeamSpaceAllocation(object):
|
||||
class TeamSpaceAllocation(bb.Struct):
|
||||
"""
|
||||
:ivar used: The total space currently used by the user's team (bytes).
|
||||
:ivar allocated: The total space allocated to the user's team (bytes).
|
||||
:ivar users.TeamSpaceAllocation.used: The total space currently used by the
|
||||
user's team (bytes).
|
||||
:ivar users.TeamSpaceAllocation.allocated: The total space allocated to the
|
||||
user's team (bytes).
|
||||
:ivar users.TeamSpaceAllocation.user_within_team_space_allocated: The total
|
||||
space allocated to the user within its team allocated space (0 means
|
||||
that no restriction is imposed on the user's quota within its team).
|
||||
:ivar users.TeamSpaceAllocation.user_within_team_space_limit_type: The type
|
||||
of the space limit imposed on the team member (off, alert_only,
|
||||
stop_sync).
|
||||
"""
|
||||
|
||||
__slots__ = [
|
||||
@@ -1427,28 +1517,42 @@ class TeamSpaceAllocation(object):
|
||||
'_used_present',
|
||||
'_allocated_value',
|
||||
'_allocated_present',
|
||||
'_user_within_team_space_allocated_value',
|
||||
'_user_within_team_space_allocated_present',
|
||||
'_user_within_team_space_limit_type_value',
|
||||
'_user_within_team_space_limit_type_present',
|
||||
]
|
||||
|
||||
_has_required_fields = True
|
||||
|
||||
def __init__(self,
|
||||
used=None,
|
||||
allocated=None):
|
||||
allocated=None,
|
||||
user_within_team_space_allocated=None,
|
||||
user_within_team_space_limit_type=None):
|
||||
self._used_value = None
|
||||
self._used_present = False
|
||||
self._allocated_value = None
|
||||
self._allocated_present = False
|
||||
self._user_within_team_space_allocated_value = None
|
||||
self._user_within_team_space_allocated_present = False
|
||||
self._user_within_team_space_limit_type_value = None
|
||||
self._user_within_team_space_limit_type_present = False
|
||||
if used is not None:
|
||||
self.used = used
|
||||
if allocated is not None:
|
||||
self.allocated = allocated
|
||||
if user_within_team_space_allocated is not None:
|
||||
self.user_within_team_space_allocated = user_within_team_space_allocated
|
||||
if user_within_team_space_limit_type is not None:
|
||||
self.user_within_team_space_limit_type = user_within_team_space_limit_type
|
||||
|
||||
@property
|
||||
def used(self):
|
||||
"""
|
||||
The total space currently used by the user's team (bytes).
|
||||
|
||||
:rtype: long
|
||||
:rtype: int
|
||||
"""
|
||||
if self._used_present:
|
||||
return self._used_value
|
||||
@@ -1471,7 +1575,7 @@ class TeamSpaceAllocation(object):
|
||||
"""
|
||||
The total space allocated to the user's team (bytes).
|
||||
|
||||
:rtype: long
|
||||
:rtype: int
|
||||
"""
|
||||
if self._allocated_present:
|
||||
return self._allocated_value
|
||||
@@ -1489,10 +1593,64 @@ class TeamSpaceAllocation(object):
|
||||
self._allocated_value = None
|
||||
self._allocated_present = False
|
||||
|
||||
@property
|
||||
def user_within_team_space_allocated(self):
|
||||
"""
|
||||
The total space allocated to the user within its team allocated space (0
|
||||
means that no restriction is imposed on the user's quota within its
|
||||
team).
|
||||
|
||||
:rtype: int
|
||||
"""
|
||||
if self._user_within_team_space_allocated_present:
|
||||
return self._user_within_team_space_allocated_value
|
||||
else:
|
||||
raise AttributeError("missing required field 'user_within_team_space_allocated'")
|
||||
|
||||
@user_within_team_space_allocated.setter
|
||||
def user_within_team_space_allocated(self, val):
|
||||
val = self._user_within_team_space_allocated_validator.validate(val)
|
||||
self._user_within_team_space_allocated_value = val
|
||||
self._user_within_team_space_allocated_present = True
|
||||
|
||||
@user_within_team_space_allocated.deleter
|
||||
def user_within_team_space_allocated(self):
|
||||
self._user_within_team_space_allocated_value = None
|
||||
self._user_within_team_space_allocated_present = False
|
||||
|
||||
@property
|
||||
def user_within_team_space_limit_type(self):
|
||||
"""
|
||||
The type of the space limit imposed on the team member (off, alert_only,
|
||||
stop_sync).
|
||||
|
||||
:rtype: team_common.MemberSpaceLimitType
|
||||
"""
|
||||
if self._user_within_team_space_limit_type_present:
|
||||
return self._user_within_team_space_limit_type_value
|
||||
else:
|
||||
raise AttributeError("missing required field 'user_within_team_space_limit_type'")
|
||||
|
||||
@user_within_team_space_limit_type.setter
|
||||
def user_within_team_space_limit_type(self, val):
|
||||
self._user_within_team_space_limit_type_validator.validate_type_only(val)
|
||||
self._user_within_team_space_limit_type_value = val
|
||||
self._user_within_team_space_limit_type_present = True
|
||||
|
||||
@user_within_team_space_limit_type.deleter
|
||||
def user_within_team_space_limit_type(self):
|
||||
self._user_within_team_space_limit_type_value = None
|
||||
self._user_within_team_space_limit_type_present = False
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(TeamSpaceAllocation, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'TeamSpaceAllocation(used={!r}, allocated={!r})'.format(
|
||||
return 'TeamSpaceAllocation(used={!r}, allocated={!r}, user_within_team_space_allocated={!r}, user_within_team_space_limit_type={!r})'.format(
|
||||
self._used_value,
|
||||
self._allocated_value,
|
||||
self._user_within_team_space_allocated_value,
|
||||
self._user_within_team_space_limit_type_value,
|
||||
)
|
||||
|
||||
TeamSpaceAllocation_validator = bv.Struct(TeamSpaceAllocation)
|
||||
@@ -1539,6 +1697,7 @@ FullAccount._team_validator = bv.Nullable(FullTeam_validator)
|
||||
FullAccount._team_member_id_validator = bv.Nullable(bv.String())
|
||||
FullAccount._is_paired_validator = bv.Boolean()
|
||||
FullAccount._account_type_validator = users_common.AccountType_validator
|
||||
FullAccount._root_info_validator = common.RootInfo_validator
|
||||
FullAccount._all_field_names_ = Account._all_field_names_.union(set([
|
||||
'country',
|
||||
'locale',
|
||||
@@ -1547,6 +1706,7 @@ FullAccount._all_field_names_ = Account._all_field_names_.union(set([
|
||||
'team_member_id',
|
||||
'is_paired',
|
||||
'account_type',
|
||||
'root_info',
|
||||
]))
|
||||
FullAccount._all_fields_ = Account._all_fields_ + [
|
||||
('country', FullAccount._country_validator),
|
||||
@@ -1556,6 +1716,7 @@ FullAccount._all_fields_ = Account._all_fields_ + [
|
||||
('team_member_id', FullAccount._team_member_id_validator),
|
||||
('is_paired', FullAccount._is_paired_validator),
|
||||
('account_type', FullAccount._account_type_validator),
|
||||
('root_info', FullAccount._root_info_validator),
|
||||
]
|
||||
|
||||
Team._id_validator = bv.String()
|
||||
@@ -1655,17 +1816,24 @@ SpaceUsage._all_fields_ = [
|
||||
|
||||
TeamSpaceAllocation._used_validator = bv.UInt64()
|
||||
TeamSpaceAllocation._allocated_validator = bv.UInt64()
|
||||
TeamSpaceAllocation._user_within_team_space_allocated_validator = bv.UInt64()
|
||||
TeamSpaceAllocation._user_within_team_space_limit_type_validator = team_common.MemberSpaceLimitType_validator
|
||||
TeamSpaceAllocation._all_field_names_ = set([
|
||||
'used',
|
||||
'allocated',
|
||||
'user_within_team_space_allocated',
|
||||
'user_within_team_space_limit_type',
|
||||
])
|
||||
TeamSpaceAllocation._all_fields_ = [
|
||||
('used', TeamSpaceAllocation._used_validator),
|
||||
('allocated', TeamSpaceAllocation._allocated_validator),
|
||||
('user_within_team_space_allocated', TeamSpaceAllocation._user_within_team_space_allocated_validator),
|
||||
('user_within_team_space_limit_type', TeamSpaceAllocation._user_within_team_space_limit_type_validator),
|
||||
]
|
||||
|
||||
get_account = bb.Route(
|
||||
'get_account',
|
||||
1,
|
||||
False,
|
||||
GetAccountArg_validator,
|
||||
BasicAccount_validator,
|
||||
@@ -1675,6 +1843,7 @@ get_account = bb.Route(
|
||||
)
|
||||
get_account_batch = bb.Route(
|
||||
'get_account_batch',
|
||||
1,
|
||||
False,
|
||||
GetAccountBatchArg_validator,
|
||||
GetAccountBatchResult_validator,
|
||||
@@ -1684,6 +1853,7 @@ get_account_batch = bb.Route(
|
||||
)
|
||||
get_current_account = bb.Route(
|
||||
'get_current_account',
|
||||
1,
|
||||
False,
|
||||
bv.Void(),
|
||||
FullAccount_validator,
|
||||
@@ -1693,6 +1863,7 @@ get_current_account = bb.Route(
|
||||
)
|
||||
get_space_usage = bb.Route(
|
||||
'get_space_usage',
|
||||
1,
|
||||
False,
|
||||
bv.Void(),
|
||||
SpaceUsage_validator,
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Auto-generated by Stone, do not modify.
|
||||
# @generated
|
||||
# flake8: noqa
|
||||
# pylint: skip-file
|
||||
"""
|
||||
@@ -9,7 +10,7 @@ This namespace contains common data types used within the users namespace.
|
||||
try:
|
||||
from . import stone_validators as bv
|
||||
from . import stone_base as bb
|
||||
except (SystemError, ValueError):
|
||||
except (ImportError, SystemError, ValueError):
|
||||
# Catch errors raised when importing a relative module when not in a package.
|
||||
# This makes testing this file directly (outside of a package) easier.
|
||||
import stone_validators as bv
|
||||
@@ -23,9 +24,9 @@ class AccountType(bb.Union):
|
||||
return true. To get the associated value of a tag (if one exists), use the
|
||||
corresponding ``get_*`` method.
|
||||
|
||||
:ivar basic: The basic account type.
|
||||
:ivar pro: The Dropbox Pro account type.
|
||||
:ivar business: The Dropbox Business account type.
|
||||
:ivar users_common.AccountType.basic: The basic account type.
|
||||
:ivar users_common.AccountType.pro: The Dropbox Pro account type.
|
||||
:ivar users_common.AccountType.business: The Dropbox Business account type.
|
||||
"""
|
||||
|
||||
_catch_all = None
|
||||
@@ -60,6 +61,9 @@ class AccountType(bb.Union):
|
||||
"""
|
||||
return self._tag == 'business'
|
||||
|
||||
def _process_custom_annotations(self, annotation_type, field_path, processor):
|
||||
super(AccountType, self)._process_custom_annotations(annotation_type, field_path, processor)
|
||||
|
||||
def __repr__(self):
|
||||
return 'AccountType(%r, %r)' % (self._tag, self._value)
|
||||
|
||||
|
||||
@@ -1,31 +1,31 @@
|
||||
import utils as utils
|
||||
from . import utils as utils
|
||||
|
||||
|
||||
class ZipExtractor:
|
||||
|
||||
def extract(self,zipFile,outLoc,progressBar):
|
||||
|
||||
def extract(self, zipFile, outLoc, progressBar):
|
||||
utils.log("extracting zip archive")
|
||||
|
||||
result = True #result is true unless we fail
|
||||
|
||||
#update the progress bar
|
||||
progressBar.updateProgress(0,utils.getString(30100))
|
||||
|
||||
#list the files
|
||||
|
||||
result = True # result is true unless we fail
|
||||
|
||||
# update the progress bar
|
||||
progressBar.updateProgress(0, utils.getString(30100))
|
||||
|
||||
# list the files
|
||||
fileCount = float(len(zipFile.listFiles()))
|
||||
currentFile = 0
|
||||
|
||||
|
||||
try:
|
||||
for aFile in zipFile.listFiles():
|
||||
#update the progress bar
|
||||
# update the progress bar
|
||||
currentFile += 1
|
||||
progressBar.updateProgress(int((currentFile/fileCount) * 100),utils.getString(30100))
|
||||
|
||||
#extract the file
|
||||
zipFile.extract(aFile,outLoc)
|
||||
|
||||
except Exception as e:
|
||||
progressBar.updateProgress(int((currentFile / fileCount) * 100), utils.getString(30100))
|
||||
|
||||
# extract the file
|
||||
zipFile.extract(aFile, outLoc)
|
||||
|
||||
except Exception:
|
||||
utils.log("Error extracting file")
|
||||
result = False
|
||||
|
||||
|
||||
return result
|
||||
|
||||
|
||||
@@ -1,99 +1,72 @@
|
||||
import utils as utils
|
||||
import json
|
||||
import xbmc
|
||||
import xbmcvfs
|
||||
from . import utils as utils
|
||||
from xml.dom import minidom
|
||||
from xml.parsers.expat import ExpatError
|
||||
import json
|
||||
import xbmc,xbmcvfs
|
||||
|
||||
|
||||
class GuiSettingsManager:
|
||||
settingsFile = None
|
||||
doc = None
|
||||
settings_allowed = list()
|
||||
found_settings = list()
|
||||
|
||||
def __init__(self,settingsFile):
|
||||
self._readFile(xbmc.translatePath(settingsFile))
|
||||
|
||||
|
||||
def __init__(self):
|
||||
# first make a copy of the file
|
||||
xbmcvfs.copy(xbmc.translatePath('special://home/userdata/guisettings.xml'), xbmc.translatePath("special://home/userdata/guisettings.xml.restored"))
|
||||
|
||||
# read in the copy
|
||||
self._readFile(xbmc.translatePath('special://home/userdata/guisettings.xml.restored'))
|
||||
|
||||
def run(self):
|
||||
#get a list of all the settings we can manipulate via json
|
||||
# get a list of all the settings we can manipulate via json
|
||||
json_response = json.loads(xbmc.executeJSONRPC('{"jsonrpc":"2.0", "id":1, "method":"Settings.GetSettings","params":{"level":"advanced"}}'))
|
||||
|
||||
|
||||
settings = json_response['result']['settings']
|
||||
|
||||
currentSettings = {}
|
||||
|
||||
for aSetting in settings:
|
||||
self.settings_allowed.append(aSetting['id'])
|
||||
|
||||
#parse the existing xml file and get all the settings
|
||||
root_nodes = self.__parseNodes(self.doc.documentElement)
|
||||
|
||||
for aNode in root_nodes:
|
||||
secondary_list = self.__parseNodes(self.doc.getElementsByTagName(aNode.name)[0])
|
||||
|
||||
for secondNode in secondary_list:
|
||||
#if the node does not have children and is not default
|
||||
if(not secondNode.hasChildren and not secondNode.isDefault):
|
||||
|
||||
if(secondNode.json_name() in self.settings_allowed):
|
||||
self.found_settings.append(secondNode)
|
||||
|
||||
#go through all the found settings and update them
|
||||
for aSetting in self.found_settings:
|
||||
utils.log("updating: " + aSetting.json_name() + ", value: " + aSetting.value)
|
||||
|
||||
#check for boolean and numeric values
|
||||
if(aSetting.value.isdigit() or (aSetting.value == 'true' or aSetting.value == 'false')):
|
||||
xbmc.executeJSONRPC('{"jsonrpc":"2.0", "id":1, "method":"Settings.SetSettingValue","params":{"setting":"' + aSetting.json_name() + '","value":' + aSetting.value + '}}')
|
||||
else:
|
||||
xbmc.executeJSONRPC('{"jsonrpc":"2.0", "id":1, "method":"Settings.SetSettingValue","params":{"setting":"' + aSetting.json_name() + '","value":"' + utils.encode(aSetting.value) + '"}}')
|
||||
|
||||
#make a copy of the guisettings file to make user based restores easier
|
||||
xbmcvfs.copy(self.settingsFile, xbmc.translatePath("special://home/userdata/guisettings.xml.restored"))
|
||||
|
||||
def __parseNodes(self,nodeList):
|
||||
result = []
|
||||
if('value' in aSetting):
|
||||
currentSettings[aSetting['id']] = aSetting['value']
|
||||
|
||||
for node in nodeList.childNodes:
|
||||
if(node.nodeType == self.doc.ELEMENT_NODE):
|
||||
aSetting = SettingNode(node.nodeName)
|
||||
# parse the existing xml file and get all the settings we need to restore
|
||||
restoreSettings = self.__parseNodes(self.doc.getElementsByTagName('setting'))
|
||||
|
||||
#detect if there are any element nodes
|
||||
if(len(node.childNodes) > 0):
|
||||
for child_node in node.childNodes:
|
||||
if(child_node.nodeType == self.doc.ELEMENT_NODE):
|
||||
aSetting.hasChildren = True
|
||||
# get a list where the restore setting value != the current value
|
||||
updateSettings = {k: v for k, v in list(restoreSettings.items()) if (k in currentSettings and currentSettings[k] != v)}
|
||||
|
||||
# go through all the found settings and update them
|
||||
jsonObj = {"jsonrpc": "2.0", "id": 1, "method": "Settings.SetSettingValue", "params": {"setting": "", "value": ""}}
|
||||
for anId, aValue in list(updateSettings.items()):
|
||||
utils.log("updating: " + anId + ", value: " + str(aValue))
|
||||
|
||||
jsonObj['params']['setting'] = anId
|
||||
jsonObj['params']['value'] = aValue
|
||||
|
||||
xbmc.executeJSONRPC(json.dumps(jsonObj))
|
||||
|
||||
def __parseNodes(self, nodeList):
|
||||
result = {}
|
||||
|
||||
for node in nodeList:
|
||||
nodeValue = ''
|
||||
if(node.firstChild is not None):
|
||||
nodeValue = node.firstChild.nodeValue
|
||||
|
||||
# check for numbers and booleans
|
||||
if(nodeValue.isdigit()):
|
||||
nodeValue = int(nodeValue)
|
||||
elif(nodeValue == 'true'):
|
||||
nodeValue = True
|
||||
elif(nodeValue == 'false'):
|
||||
nodeValue = False
|
||||
|
||||
result[node.getAttribute('id')] = nodeValue
|
||||
|
||||
if(not aSetting.hasChildren and len(node.childNodes) > 0):
|
||||
aSetting.value = node.firstChild.nodeValue
|
||||
|
||||
if('default' not in node.attributes.keys()):
|
||||
aSetting.isDefault = False
|
||||
|
||||
aSetting.parent = node.parentNode.nodeName
|
||||
|
||||
result.append(aSetting)
|
||||
return result
|
||||
|
||||
|
||||
def _readFile(self,fileLoc):
|
||||
|
||||
|
||||
def _readFile(self, fileLoc):
|
||||
|
||||
if(xbmcvfs.exists(fileLoc)):
|
||||
try:
|
||||
self.doc = minidom.parse(fileLoc)
|
||||
self.settingsFile = fileLoc
|
||||
except ExpatError:
|
||||
utils.log("Can't read " + fileLoc)
|
||||
|
||||
class SettingNode:
|
||||
name = ''
|
||||
value = ''
|
||||
hasChildren = False
|
||||
isDefault = True
|
||||
parent = ''
|
||||
|
||||
def __init__(self,name):
|
||||
self.name = name
|
||||
|
||||
def json_name(self):
|
||||
return self.parent + "." + self.name
|
||||
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import utils as utils
|
||||
import xbmcgui
|
||||
from . import utils as utils
|
||||
|
||||
|
||||
class BackupProgressBar:
|
||||
NONE = 2
|
||||
@@ -9,13 +10,13 @@ class BackupProgressBar:
|
||||
mode = 2
|
||||
progressBar = None
|
||||
override = False
|
||||
|
||||
def __init__(self,progressOverride):
|
||||
|
||||
def __init__(self, progressOverride):
|
||||
self.override = progressOverride
|
||||
|
||||
#check if we should use the progress bar
|
||||
|
||||
# check if we should use the progress bar
|
||||
if(int(utils.getSetting('progress_mode')) != 2):
|
||||
#check if background or normal
|
||||
# check if background or normal
|
||||
if(int(utils.getSetting('progress_mode')) == 0 and not self.override):
|
||||
self.mode = self.DIALOG
|
||||
self.progressBar = xbmcgui.DialogProgress()
|
||||
@@ -23,20 +24,20 @@ class BackupProgressBar:
|
||||
self.mode = self.BACKGROUND
|
||||
self.progressBar = xbmcgui.DialogProgressBG()
|
||||
|
||||
def create(self,heading,message):
|
||||
def create(self, heading, message):
|
||||
if(self.mode != self.NONE):
|
||||
self.progressBar.create(heading,message)
|
||||
self.progressBar.create(heading, message)
|
||||
|
||||
def updateProgress(self,percent,message=None):
|
||||
|
||||
#update the progress bar
|
||||
def updateProgress(self, percent, message=None):
|
||||
|
||||
# update the progress bar
|
||||
if(self.mode != self.NONE):
|
||||
if(message != None):
|
||||
#need different calls for dialog and background bars
|
||||
if(message is not None):
|
||||
# need different calls for dialog and background bars
|
||||
if(self.mode == self.DIALOG):
|
||||
self.progressBar.update(percent,message)
|
||||
self.progressBar.update(percent, message)
|
||||
else:
|
||||
self.progressBar.update(percent,message=message)
|
||||
self.progressBar.update(percent, message=message)
|
||||
else:
|
||||
self.progressBar.update(percent)
|
||||
|
||||
|
||||
@@ -1,185 +0,0 @@
|
||||
Copyright 2013 Google Inc. All Rights Reserved.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
@@ -1,174 +0,0 @@
|
||||
class ApiAttribute(object):
|
||||
"""A data descriptor that sets and returns values."""
|
||||
|
||||
def __init__(self, name):
|
||||
"""Create an instance of ApiAttribute.
|
||||
|
||||
:param name: name of this attribute.
|
||||
:type name: str.
|
||||
"""
|
||||
self.name = name
|
||||
|
||||
def __get__(self, obj, type=None):
|
||||
"""Accesses value of this attribute."""
|
||||
return obj.attr.get(self.name)
|
||||
|
||||
def __set__(self, obj, value):
|
||||
"""Write value of this attribute."""
|
||||
obj.attr[self.name] = value
|
||||
if obj.dirty.get(self.name) is not None:
|
||||
obj.dirty[self.name] = True
|
||||
|
||||
def __del__(self, obj=None):
|
||||
"""Delete value of this attribute."""
|
||||
if(obj != None):
|
||||
del obj.attr[self.name]
|
||||
if obj.dirty.get(self.name) is not None:
|
||||
del obj.dirty[self.name]
|
||||
|
||||
|
||||
class ApiAttributeMixin(object):
|
||||
"""Mixin to initialize required global variables to use ApiAttribute."""
|
||||
|
||||
def __init__(self):
|
||||
self.attr = {}
|
||||
self.dirty = {}
|
||||
|
||||
|
||||
class ApiResource(dict):
|
||||
"""Super class of all api resources.
|
||||
|
||||
Inherits and behaves as a python dictionary to handle api resources.
|
||||
Save clean copy of metadata in self.metadata as a dictionary.
|
||||
Provides changed metadata elements to efficiently update api resources.
|
||||
"""
|
||||
auth = ApiAttribute('auth')
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
"""Create an instance of ApiResource."""
|
||||
self.update(*args, **kwargs)
|
||||
|
||||
def __getitem__(self, key):
|
||||
"""Overwritten method of dictionary.
|
||||
|
||||
:param key: key of the query.
|
||||
:type key: str.
|
||||
:returns: value of the query.
|
||||
"""
|
||||
return dict.__getitem__(self, key)
|
||||
|
||||
def __setitem__(self, key, val):
|
||||
"""Overwritten method of dictionary.
|
||||
|
||||
:param key: key of the query.
|
||||
:type key: str.
|
||||
:param val: value of the query.
|
||||
"""
|
||||
dict.__setitem__(self, key, val)
|
||||
|
||||
def __repr__(self):
|
||||
"""Overwritten method of dictionary."""
|
||||
dictrepr = dict.__repr__(self)
|
||||
return '%s(%s)' % (type(self).__name__, dictrepr)
|
||||
|
||||
def update(self, *args, **kwargs):
|
||||
"""Overwritten method of dictionary."""
|
||||
for k, v in dict(*args, **kwargs).iteritems():
|
||||
self[k] = v
|
||||
|
||||
def UpdateMetadata(self, metadata=None):
|
||||
"""Update metadata and mark all of them to be clean."""
|
||||
if metadata:
|
||||
self.update(metadata)
|
||||
self.metadata = dict(self)
|
||||
|
||||
def GetChanges(self):
|
||||
"""Returns changed metadata elements to update api resources efficiently.
|
||||
|
||||
:returns: dict -- changed metadata elements.
|
||||
"""
|
||||
dirty = {}
|
||||
for key in self:
|
||||
if self.metadata.get(key) is None:
|
||||
dirty[key] = self[key]
|
||||
elif self.metadata[key] != self[key]:
|
||||
dirty[key] = self[key]
|
||||
return dirty
|
||||
|
||||
|
||||
class ApiResourceList(ApiAttributeMixin, ApiResource):
|
||||
"""Abstract class of all api list resources.
|
||||
|
||||
Inherits ApiResource and builds iterator to list any API resource.
|
||||
"""
|
||||
metadata = ApiAttribute('metadata')
|
||||
|
||||
def __init__(self, auth=None, metadata=None):
|
||||
"""Create an instance of ApiResourceList.
|
||||
|
||||
:param auth: authorized GoogleAuth instance.
|
||||
:type auth: GoogleAuth.
|
||||
:param metadata: parameter to send to list command.
|
||||
:type metadata: dict.
|
||||
"""
|
||||
ApiAttributeMixin.__init__(self)
|
||||
ApiResource.__init__(self)
|
||||
self.auth = auth
|
||||
self.UpdateMetadata()
|
||||
if metadata:
|
||||
self.update(metadata)
|
||||
|
||||
def __iter__(self):
|
||||
"""Returns iterator object.
|
||||
|
||||
:returns: ApiResourceList -- self
|
||||
"""
|
||||
return self
|
||||
|
||||
def next(self):
|
||||
"""Make API call to list resources and return them.
|
||||
|
||||
Auto updates 'pageToken' everytime it makes API call and
|
||||
raises StopIteration when it reached the end of iteration.
|
||||
|
||||
:returns: list -- list of API resources.
|
||||
:raises: StopIteration
|
||||
"""
|
||||
if 'pageToken' in self and self['pageToken'] is None:
|
||||
raise StopIteration
|
||||
result = self._GetList()
|
||||
self['pageToken'] = self.metadata.get('nextPageToken')
|
||||
return result
|
||||
|
||||
def GetList(self):
|
||||
"""Get list of API resources.
|
||||
|
||||
If 'maxResults' is not specified, it will automatically iterate through
|
||||
every resources available. Otherwise, it will make API call once and
|
||||
update 'pageToken'.
|
||||
|
||||
:returns: list -- list of API resources.
|
||||
"""
|
||||
if self.get('maxResults') is None:
|
||||
self['maxResults'] = 1000
|
||||
result = []
|
||||
for x in self:
|
||||
result.extend(x)
|
||||
del self['maxResults']
|
||||
return result
|
||||
else:
|
||||
return self.next()
|
||||
|
||||
def _GetList(self):
|
||||
"""Helper function which actually makes API call.
|
||||
|
||||
Should be overwritten.
|
||||
|
||||
:raises: NotImplementedError
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def Reset(self):
    """Restart iteration by discarding the stored page token."""
    try:
        del self['pageToken']
    except KeyError:
        pass  # Nothing stored yet -- already at the beginning.
|
||||
@@ -1,415 +0,0 @@
|
||||
import socket
|
||||
import webbrowser
|
||||
import httplib2
|
||||
import oauth2client.clientsecrets as clientsecrets
|
||||
|
||||
from googleapiclient.discovery import build
|
||||
from functools import wraps
|
||||
from oauth2client.client import FlowExchangeError
|
||||
from oauth2client.client import AccessTokenRefreshError
|
||||
from oauth2client.client import OAuth2WebServerFlow
|
||||
from oauth2client.client import OOB_CALLBACK_URN
|
||||
from oauth2client.file import Storage
|
||||
from oauth2client.tools import ClientRedirectHandler
|
||||
from oauth2client.tools import ClientRedirectServer
|
||||
from oauth2client._helpers import scopes_to_string
|
||||
from .apiattr import ApiAttribute
|
||||
from .apiattr import ApiAttributeMixin
|
||||
from .settings import LoadSettingsFile
|
||||
from .settings import ValidateSettings
|
||||
from .settings import SettingsError
|
||||
from .settings import InvalidConfigError
|
||||
|
||||
|
||||
class AuthError(Exception):
    """Base error for authentication/authorization errors.

    Subclassed by AuthenticationRejected, AuthenticationError and
    RefreshError.
    """
|
||||
|
||||
|
||||
class InvalidCredentialsError(IOError):
    """Error trying to read or write the credentials file.

    Note: derives from IOError (not AuthError) since it is a file issue.
    """
|
||||
|
||||
|
||||
class AuthenticationRejected(AuthError):
    """User rejected authentication during the OAuth2 flow."""
|
||||
|
||||
|
||||
class AuthenticationError(AuthError):
    """General authentication error (no redirect code, no free port, ...)."""
|
||||
|
||||
|
||||
class RefreshError(AuthError):
    """Access token refresh error (no/invalid refresh token)."""
|
||||
|
||||
def LoadAuth(decoratee):
    """Decorator ensuring self.auth is present, fresh, and authorized.

    Creates a default GoogleAuth when missing, runs the local-webserver
    flow when the token is expired, and builds the Drive service if it
    has not been built yet, before delegating to the wrapped method.
    """
    @wraps(decoratee)
    def wrapper(self, *args, **kwargs):
        if self.auth is None:
            self.auth = GoogleAuth()  # Lazily create default auth.
        if self.auth.access_token_expired:
            self.auth.LocalWebserverAuth()
        if self.auth.service is None:
            self.auth.Authorize()
        return decoratee(self, *args, **kwargs)
    return wrapper
|
||||
|
||||
def CheckAuth(decoratee):
    """Decorator to check if it requires OAuth2 flow request.

    Runs the decorated interactive flow only when no usable credentials
    exist (or they expired with no refresh token); otherwise refreshes
    silently. Note the wrapper intentionally returns None rather than the
    decoratee's result: the auth code is consumed here via self.Auth(code).
    """
    @wraps(decoratee)
    def _decorated(self, *args, **kwargs):
        # 'dirty' tracks whether credentials changed and must be persisted.
        dirty = False
        code = None
        save_credentials = self.settings.get('save_credentials')
        if self.credentials is None and save_credentials:
            self.LoadCredentials()
        if self.flow is None:
            self.GetFlow()
        if self.credentials is None:
            # No credentials at all: run the interactive flow.
            code = decoratee(self, *args, **kwargs)
            dirty = True
        else:
            if self.access_token_expired:
                if self.credentials.refresh_token is not None:
                    # A refresh token lets us renew without user action.
                    self.Refresh()
                else:
                    # Expired and not refreshable: rerun the flow.
                    code = decoratee(self, *args, **kwargs)
                    dirty = True
        if code is not None:
            self.Auth(code)
        if dirty and save_credentials:
            self.SaveCredentials()
    return _decorated
|
||||
|
||||
|
||||
class GoogleAuth(ApiAttributeMixin, object):
    """Wrapper class for oauth2client library in google-api-python-client.

    Loads all settings and credentials from one 'settings.yaml' file
    and performs common OAuth2.0 related functionality such as
    authentication and authorization.
    """
    # Used when no (valid) settings file can be loaded.
    DEFAULT_SETTINGS = {
        'client_config_backend': 'file',
        'client_config_file': 'client_secrets.json',
        'save_credentials': False,
        'oauth_scope': ['https://www.googleapis.com/auth/drive']
    }
    # Keys that must all be present in client_config before GetFlow()
    # can build an OAuth2 flow.
    CLIENT_CONFIGS_LIST = ['client_id', 'client_secret', 'auth_uri',
                           'token_uri', 'revoke_uri', 'redirect_uri']
    settings = ApiAttribute('settings')
    client_config = ApiAttribute('client_config')
    flow = ApiAttribute('flow')
    credentials = ApiAttribute('credentials')
    http = ApiAttribute('http')
    service = ApiAttribute('service')

    def __init__(self, settings_file='settings.yaml'):
        """Create an instance of GoogleAuth.

        This constructor only loads and validates the settings file. It
        does not run any OAuth2 flow.

        :param settings_file: path of settings file. 'settings.yaml' by default.
        :type settings_file: str.
        """
        ApiAttributeMixin.__init__(self)
        self.client_config = {}
        try:
            self.settings = LoadSettingsFile(settings_file)
        except SettingsError:
            # Missing/unreadable settings file: fall back to defaults.
            self.settings = self.DEFAULT_SETTINGS
        else:
            if self.settings is None:
                # An empty settings file parses to None.
                self.settings = self.DEFAULT_SETTINGS
            else:
                ValidateSettings(self.settings)

    @property
    def access_token_expired(self):
        """Checks if access token doesn't exist or is expired.

        :returns: bool -- True if access token doesn't exist or is expired.
        """
        if self.credentials is None:
            return True
        return self.credentials.access_token_expired

    @CheckAuth
    def LocalWebserverAuth(self, host_name='localhost',
                           port_numbers=[8080, 8090]):
        """Authenticate and authorize from user by creating local webserver
        and retrieving authentication code.

        This function is not for webserver applications. It creates a local
        webserver for the user from a standalone application.

        :param host_name: host name of the local webserver.
        :type host_name: str.
        :param port_numbers: list of port numbers to be tried to used.
        :type port_numbers: list.
        :returns: str -- code returned from local webserver.
        :raises: AuthenticationRejected, AuthenticationError
        """
        success = False
        port_number = 0
        for port in port_numbers:
            port_number = port
            try:
                httpd = ClientRedirectServer((host_name, port),
                                             ClientRedirectHandler)
            except socket.error:
                # Port unavailable -- try the next candidate.
                pass
            else:
                success = True
                break
        if success:
            oauth_callback = 'http://%s:%s/' % (host_name, port_number)
        else:
            raise AuthenticationError()
        self.flow.redirect_uri = oauth_callback
        authorize_url = self.GetAuthUrl()
        webbrowser.open(authorize_url, new=1, autoraise=True)
        # Block until the browser redirect delivers exactly one request.
        httpd.handle_request()
        if 'error' in httpd.query_params:
            raise AuthenticationRejected('User rejected authentication')
        if 'code' in httpd.query_params:
            return httpd.query_params['code']
        else:
            raise AuthenticationError('No code found in redirect')

    @CheckAuth
    def CommandLineAuth(self):
        """Authenticate and authorize from user by printing authentication url
        retrieving authentication code from command-line.

        :returns: str -- code returned from commandline.
        """
        self.flow.redirect_uri = OOB_CALLBACK_URN
        authorize_url = self.GetAuthUrl()
        # Bug fix: the authorization URL was computed but never shown,
        # leaving the user with no way to obtain a verification code.
        print('Go to the following link in your browser:')
        print(authorize_url)
        return raw_input('Enter verification code: ').strip()

    def LoadCredentials(self, backend=None):
        """Loads credentials or create empty credentials if it doesn't exist.

        :param backend: target backend to load credentials from.
        :type backend: str.
        :raises: InvalidConfigError
        """
        if backend is None:
            backend = self.settings.get('save_credentials_backend')
            if backend is None:
                raise InvalidConfigError('Please specify credential backend')
        if backend == 'file':
            self.LoadCredentialsFile()
        else:
            raise InvalidConfigError('Unknown save_credentials_backend')

    def LoadCredentialsFile(self, credentials_file=None):
        """Loads credentials or create empty credentials if it doesn't exist.

        Loads credentials file from path in settings if not specified.

        :param credentials_file: path of credentials file to read.
        :type credentials_file: str.
        :raises: InvalidConfigError, InvalidCredentialsError
        """
        if credentials_file is None:
            credentials_file = self.settings.get('save_credentials_file')
            if credentials_file is None:
                raise InvalidConfigError('Please specify credentials file to read')
        try:
            storage = Storage(credentials_file)
            self.credentials = storage.get()
        except IOError:
            raise InvalidCredentialsError('Credentials file cannot be symbolic link')

    def SaveCredentials(self, backend=None):
        """Saves credentials according to specified backend.

        If you have any specific credentials backend in mind, don't use this
        function and use the corresponding function you want.

        :param backend: backend to save credentials.
        :type backend: str.
        :raises: InvalidConfigError
        """
        if backend is None:
            backend = self.settings.get('save_credentials_backend')
            if backend is None:
                raise InvalidConfigError('Please specify credential backend')
        if backend == 'file':
            self.SaveCredentialsFile()
        else:
            raise InvalidConfigError('Unknown save_credentials_backend')

    def SaveCredentialsFile(self, credentials_file=None):
        """Saves credentials to the file in JSON format.

        :param credentials_file: destination to save file to.
        :type credentials_file: str.
        :raises: InvalidConfigError, InvalidCredentialsError
        """
        if self.credentials is None:
            raise InvalidCredentialsError('No credentials to save')
        if credentials_file is None:
            credentials_file = self.settings.get('save_credentials_file')
            if credentials_file is None:
                raise InvalidConfigError('Please specify credentials file to read')
        try:
            storage = Storage(credentials_file)
            storage.put(self.credentials)
            self.credentials.set_store(storage)
        except IOError:
            # Bug fix: previously caught the undefined name
            # 'CredentialsFileSymbolicLinkError', which itself raised
            # NameError whenever Storage failed. Catch IOError, matching
            # LoadCredentialsFile.
            raise InvalidCredentialsError('Credentials file cannot be symbolic link')

    def LoadClientConfig(self, backend=None):
        """Loads client configuration according to specified backend.

        If you have any specific backend to load client configuration from in
        mind, don't use this function and use the corresponding function you
        want.

        :param backend: backend to load client configuration from.
        :type backend: str.
        :raises: InvalidConfigError
        """
        if backend is None:
            backend = self.settings.get('client_config_backend')
            if backend is None:
                raise InvalidConfigError('Please specify client config backend')
        if backend == 'file':
            self.LoadClientConfigFile()
        elif backend == 'settings':
            self.LoadClientConfigSettings()
        else:
            raise InvalidConfigError('Unknown client_config_backend')

    def LoadClientConfigFile(self, client_config_file=None):
        """Loads client configuration file downloaded from APIs console.

        Loads client config file from path in settings if not specified.

        :param client_config_file: path of client config file to read.
        :type client_config_file: str.
        :raises: InvalidConfigError
        """
        if client_config_file is None:
            client_config_file = self.settings['client_config_file']
        try:
            client_type, client_info = clientsecrets.loadfile(client_config_file)
        except clientsecrets.InvalidClientSecretsError as error:
            raise InvalidConfigError('Invalid client secrets file %s' % error)
        if client_type not in (clientsecrets.TYPE_WEB,
                               clientsecrets.TYPE_INSTALLED):
            raise InvalidConfigError('Unknown client_type of client config file')
        try:
            config_index = ['client_id', 'client_secret', 'auth_uri', 'token_uri']
            for config in config_index:
                self.client_config[config] = client_info[config]
            # revoke_uri is optional in the secrets file; redirect_uris is not.
            self.client_config['revoke_uri'] = client_info.get('revoke_uri')
            self.client_config['redirect_uri'] = client_info['redirect_uris'][0]
        except KeyError:
            raise InvalidConfigError('Insufficient client config in file')

    def LoadClientConfigSettings(self):
        """Loads client configuration from settings file.

        :raises: InvalidConfigError
        """
        for config in self.CLIENT_CONFIGS_LIST:
            try:
                self.client_config[config] = self.settings['client_config'][config]
            except KeyError:
                raise InvalidConfigError('Insufficient client config in settings')

    def GetFlow(self):
        """Gets Flow object from client configuration.

        :raises: InvalidConfigError
        """
        if not all(config in self.client_config
                   for config in self.CLIENT_CONFIGS_LIST):
            self.LoadClientConfig()
        constructor_kwargs = {
            'redirect_uri': self.client_config['redirect_uri'],
            'auth_uri': self.client_config['auth_uri'],
            'token_uri': self.client_config['token_uri'],
        }
        if self.client_config['revoke_uri'] is not None:
            constructor_kwargs['revoke_uri'] = self.client_config['revoke_uri']
        self.flow = OAuth2WebServerFlow(
            self.client_config['client_id'],
            self.client_config['client_secret'],
            scopes_to_string(self.settings['oauth_scope']),
            **constructor_kwargs)
        if self.settings.get('get_refresh_token'):
            # Offline access makes Google return a refresh token.
            self.flow.params.update({'access_type': 'offline'})

    def Refresh(self):
        """Refreshes the access_token.

        :raises: RefreshError
        """
        if self.credentials is None:
            raise RefreshError('No credential to refresh.')
        if self.credentials.refresh_token is None:
            raise RefreshError('No refresh_token found.'
                               'Please set access_type of OAuth to offline.')
        if self.http is None:
            self.http = httplib2.Http()
        try:
            self.credentials.refresh(self.http)
        except AccessTokenRefreshError as error:
            raise RefreshError('Access token refresh failed: %s' % error)

    def GetAuthUrl(self, keys=None):
        """Creates authentication url where user visits to grant access.

        :param keys: optional dict with 'client_id' and 'client_secret'
            overriding the loaded client configuration.
        :type keys: dict.
        :returns: str -- Authentication url.
        """
        if keys is not None:
            # Override client identity before (possibly) building the flow.
            # NOTE(review): if self.flow is already built these overrides
            # are ignored -- confirm callers pass keys before any flow use.
            self.client_config['client_id'] = keys['client_id']
            self.client_config['client_secret'] = keys['client_secret']
        if self.flow is None:
            self.GetFlow()
        return self.flow.step1_get_authorize_url()

    def Auth(self, code):
        """Authenticate, authorize, and build service.

        :param code: Code for authentication.
        :type code: str.
        :raises: AuthenticationError
        """
        self.Authenticate(code)
        self.Authorize()

    def Authenticate(self, code):
        """Authenticates given authentication code back from user.

        :param code: Code for authentication.
        :type code: str.
        :raises: AuthenticationError
        """
        if self.flow is None:
            self.GetFlow()
        try:
            self.credentials = self.flow.step2_exchange(code)
        except FlowExchangeError as e:
            raise AuthenticationError('OAuth2 code exchange failed: %s' % e)

    def Authorize(self):
        """Authorizes and builds service.

        :raises: AuthenticationError
        """
        if self.http is None:
            self.http = httplib2.Http()
        if self.access_token_expired:
            raise AuthenticationError('No valid credentials provided to authorize')
        self.http = self.credentials.authorize(self.http)
        self.service = build('drive', 'v2', http=self.http)
|
||||
@@ -1,38 +0,0 @@
|
||||
from .apiattr import ApiAttributeMixin
|
||||
from .files import GoogleDriveFile
|
||||
from .files import GoogleDriveFileList
|
||||
|
||||
|
||||
class GoogleDrive(ApiAttributeMixin, object):
    """Main Google Drive class.

    Holds an authorized GoogleAuth instance and hands it to every file
    object it creates.
    """

    def __init__(self, auth=None):
        """Create an instance of GoogleDrive.

        :param auth: authorized GoogleAuth instance.
        :type auth: pydrive.auth.GoogleAuth.
        """
        ApiAttributeMixin.__init__(self)
        self.auth = auth

    def CreateFile(self, metadata=None):
        """Build a GoogleDriveFile bound to this drive's auth.

        Nothing is uploaded to Google Drive by this call.

        :param metadata: file resource to initialize GoogleDriveFile with.
        :type metadata: dict.
        :returns: pydrive.files.GoogleDriveFile -- initialized with auth of
            this instance.
        """
        return GoogleDriveFile(auth=self.auth, metadata=metadata)

    def ListFile(self, param=None):
        """Build a GoogleDriveFileList bound to this drive's auth.

        This method does not fetch from Files.List() yet.

        :param param: parameter to be sent to Files.List().
        :type param: dict.
        :returns: pydrive.files.GoogleDriveFileList -- initialized with auth
            of this instance.
        """
        return GoogleDriveFileList(auth=self.auth, param=param)
|
||||
@@ -1,322 +0,0 @@
|
||||
import io
|
||||
import mimetypes
|
||||
|
||||
from googleapiclient import errors
|
||||
from googleapiclient.http import MediaIoBaseUpload
|
||||
from functools import wraps
|
||||
|
||||
from .apiattr import ApiAttribute
|
||||
from .apiattr import ApiAttributeMixin
|
||||
from .apiattr import ApiResource
|
||||
from .apiattr import ApiResourceList
|
||||
from .auth import LoadAuth
|
||||
|
||||
|
||||
class FileNotUploadedError(RuntimeError):
    """Error trying to access metadata of a file that is not uploaded."""
|
||||
|
||||
|
||||
class ApiRequestError(IOError):
    """Error while making any API request (wraps googleapiclient HttpError)."""
|
||||
|
||||
|
||||
class FileNotDownloadableError(RuntimeError):
    """Error trying to download a file that has no download/export link."""
|
||||
|
||||
|
||||
def LoadMetadata(decoratee):
    """Decorator that fetches file metadata first when it is missing.

    If the file has not been confirmed as uploaded yet, FetchMetadata()
    is invoked before the wrapped method runs.

    :raises: ApiRequestError, FileNotUploadedError
    """
    @wraps(decoratee)
    def wrapper(self, *args, **kwargs):
        if not self.uploaded:
            self.FetchMetadata()
        return decoratee(self, *args, **kwargs)
    return wrapper
|
||||
|
||||
|
||||
class GoogleDriveFileList(ApiResourceList):
    """Google Drive FileList instance.

    Equivalent to Files.list() in Drive APIs.
    """

    def __init__(self, auth=None, param=None):
        """Create an instance of GoogleDriveFileList.

        :param auth: authorized GoogleAuth instance.
        :param param: parameter dict to send to Files.list().
        """
        super(GoogleDriveFileList, self).__init__(auth=auth, metadata=param)

    @LoadAuth
    def _GetList(self):
        """Overwritten method which actually makes the API call to list files.

        Stores the raw response as self.metadata and wraps each entry in a
        GoogleDriveFile marked as uploaded.

        :returns: list -- list of pydrive.files.GoogleDriveFile.
        """
        self.metadata = self.auth.service.files().list(**dict(self)).execute()
        return [
            GoogleDriveFile(auth=self.auth, metadata=item, uploaded=True)
            for item in self.metadata['items']
        ]
|
||||
|
||||
|
||||
class GoogleDriveFile(ApiAttributeMixin, ApiResource):
    """Google Drive File instance.

    Inherits ApiResource which inherits dict.
    Can access and modify metadata like dictionary.
    """
    content = ApiAttribute('content')
    uploaded = ApiAttribute('uploaded')
    metadata = ApiAttribute('metadata')

    def __init__(self, auth=None, metadata=None, uploaded=False):
        """Create an instance of GoogleDriveFile.

        :param auth: authorized GoogleAuth instance.
        :type auth: pydrive.auth.GoogleAuth
        :param metadata: file resource to initialize GoogleDriveFile with.
        :type metadata: dict.
        :param uploaded: True if this file is confirmed to be uploaded.
        :type uploaded: bool.
        """
        ApiAttributeMixin.__init__(self)
        ApiResource.__init__(self)
        self.metadata = {}
        self.dirty = {'content': False}
        self.auth = auth
        self.uploaded = uploaded
        if uploaded:
            # Metadata of an uploaded file is authoritative.
            self.UpdateMetadata(metadata)
        elif metadata:
            self.update(metadata)

    def __getitem__(self, key):
        """Overwrites manner of accessing Files resource.

        If this file instance is not uploaded and an id is specified, it
        will try to look up metadata with Files.get().

        :param key: key of dictionary query.
        :type key: str.
        :returns: value of Files resource.
        :raises: KeyError, FileNotUploadedError
        """
        try:
            return dict.__getitem__(self, key)
        except KeyError as e:
            if self.uploaded:
                # Metadata is complete -- the key genuinely doesn't exist.
                raise KeyError(e)
            if self.get('id'):
                self.FetchMetadata()
                return dict.__getitem__(self, key)
            else:
                raise FileNotUploadedError()

    def SetContentString(self, content):
        """Set content of this file to be a string.

        Creates an io.BytesIO instance of the utf-8 encoded string.
        Sets mimeType to 'text/plain' if not specified.

        :param content: content of the file in string.
        :type content: str.
        """
        self.content = io.BytesIO(content.encode('utf-8'))
        if self.get('mimeType') is None:
            self['mimeType'] = 'text/plain'

    def SetContentFile(self, filename):
        """Set content of this file from a file.

        Opens the file specified by this method. It will be read, uploaded,
        and closed by the Upload() method.
        Sets metadata 'title' and 'mimeType' automatically if not specified.

        :param filename: name of the file to be uploaded.
        :type filename: str.
        """
        self.content = open(filename, 'rb')
        if self.get('title') is None:
            self['title'] = filename
        if self.get('mimeType') is None:
            self['mimeType'] = mimetypes.guess_type(filename)[0]

    def GetContentString(self):
        """Get content of this file as a string.

        :returns: str -- utf-8 decoded content of the file.
        :raises: ApiRequestError, FileNotUploadedError, FileNotDownloadableError
        """
        if self.content is None or not isinstance(self.content, io.BytesIO):
            self.FetchContent()
        return self.content.getvalue().decode('utf-8')

    def GetContentFile(self, filename, mimetype=None):
        """Save content of this file as a local file.

        :param filename: name of the file to write to.
        :type filename: str.
        :param mimetype: preferred export mimetype for Google Docs files.
        :type mimetype: str.
        :raises: ApiRequestError, FileNotUploadedError, FileNotDownloadableError
        """
        if self.content is None or not isinstance(self.content, io.BytesIO):
            self.FetchContent(mimetype)
        # Bug fix: use a context manager so the handle is closed even when
        # the write fails (previously the file object leaked on error).
        with open(filename, 'wb') as f:
            f.write(self.content.getvalue())

    @LoadAuth
    def FetchMetadata(self):
        """Download file's metadata from id using Files.get().

        :raises: ApiRequestError, FileNotUploadedError
        """
        file_id = self.metadata.get('id') or self.get('id')
        if file_id:
            try:
                metadata = self.auth.service.files().get(fileId=file_id).execute()
            except errors.HttpError as error:
                raise ApiRequestError(error)
            else:
                self.uploaded = True
                self.UpdateMetadata(metadata)
        else:
            raise FileNotUploadedError()

    @LoadMetadata
    def FetchContent(self, mimetype=None):
        """Download file's content from downloadUrl or an exportLinks entry.

        :param mimetype: preferred export mimetype for Google Docs files.
        :type mimetype: str.
        :raises: ApiRequestError, FileNotUploadedError, FileNotDownloadableError
        """
        download_url = self.metadata.get('downloadUrl')
        if download_url:
            self.content = io.BytesIO(self._DownloadFromUrl(download_url))
            self.dirty['content'] = False
            return

        export_links = self.metadata.get('exportLinks')
        if export_links and export_links.get(mimetype):
            self.content = io.BytesIO(
                self._DownloadFromUrl(export_links.get(mimetype)))
            self.dirty['content'] = False
            return

        raise FileNotDownloadableError(
            'No downloadLink/exportLinks for mimetype found in metadata')

    def Upload(self, param=None):
        """Upload/update file by choosing the most efficient method.

        :param param: additional parameter to upload file.
        :type param: dict.
        :raises: ApiRequestError
        """
        if self.uploaded or self.get('id') is not None:
            if self.dirty['content']:
                self._FilesUpdate(param=param)
            else:
                # Metadata-only change: Patch avoids re-sending content.
                self._FilesPatch(param=param)
        else:
            self._FilesInsert(param=param)

    @LoadAuth
    def Delete(self):
        """Delete this file from Drive using Files.delete().

        No-op when the file has no 'id' (i.e. was never uploaded).

        :raises: ApiRequestError
        """
        if self.get('id') is not None:
            # Consistency fix: use @LoadAuth and wrap HttpError like every
            # other API-calling method in this class.
            try:
                self.auth.service.files().delete(fileId=self.get('id')).execute()
            except errors.HttpError as error:
                raise ApiRequestError(error)

    @LoadAuth
    def _FilesInsert(self, param=None):
        """Upload a new file using Files.insert().

        :param param: additional parameter to upload file.
        :type param: dict.
        :raises: ApiRequestError
        """
        if param is None:
            param = {}
        param['body'] = self.GetChanges()
        try:
            if self.dirty['content']:
                param['media_body'] = self._BuildMediaBody()
            metadata = self.auth.service.files().insert(**param).execute()
        except errors.HttpError as error:
            raise ApiRequestError(error)
        else:
            self.uploaded = True
            self.dirty['content'] = False
            self.UpdateMetadata(metadata)

    @LoadAuth
    @LoadMetadata
    def _FilesUpdate(self, param=None):
        """Update metadata and/or content using Files.Update().

        :param param: additional parameter to upload file.
        :type param: dict.
        :raises: ApiRequestError, FileNotUploadedError
        """
        if param is None:
            param = {}
        param['body'] = self.GetChanges()
        param['fileId'] = self.metadata.get('id')
        try:
            if self.dirty['content']:
                param['media_body'] = self._BuildMediaBody()
            metadata = self.auth.service.files().update(**param).execute()
        except errors.HttpError as error:
            raise ApiRequestError(error)
        else:
            self.uploaded = True
            self.dirty['content'] = False
            self.UpdateMetadata(metadata)

    @LoadAuth
    @LoadMetadata
    def _FilesPatch(self, param=None):
        """Update metadata only using Files.Patch().

        :param param: additional parameter to upload file.
        :type param: dict.
        :raises: ApiRequestError, FileNotUploadedError
        """
        if param is None:
            param = {}
        param['body'] = self.GetChanges()
        param['fileId'] = self.metadata.get('id')
        try:
            metadata = self.auth.service.files().patch(**param).execute()
        except errors.HttpError as error:
            raise ApiRequestError(error)
        else:
            self.UpdateMetadata(metadata)

    def _BuildMediaBody(self):
        """Build MediaIoBaseUpload to get prepared to upload content of the file.

        Sets mimeType to 'application/octet-stream' if not specified.

        :returns: MediaIoBaseUpload -- instance that will be used to upload
            content.
        """
        if self.get('mimeType') is None:
            self['mimeType'] = 'application/octet-stream'
        return MediaIoBaseUpload(self.content, self['mimeType'])

    @LoadAuth
    def _DownloadFromUrl(self, url):
        """Download file from url using the provided credential.

        :param url: link of the file to download.
        :type url: str.
        :returns: str -- content of downloaded file in string.
        :raises: ApiRequestError
        """
        # NOTE(review): relies on the private _http attribute of the built
        # service object -- confirm against the googleapiclient version used.
        resp, content = self.auth.service._http.request(url)
        if resp.status != 200:
            raise ApiRequestError('Cannot download file: %s' % resp)
        return content
|
||||
@@ -1,192 +0,0 @@
|
||||
from yaml import load
|
||||
from yaml import YAMLError
|
||||
try:
|
||||
from yaml import CLoader as Loader
|
||||
except ImportError:
|
||||
from yaml import Loader
|
||||
|
||||
# Default path of the settings file read by GoogleAuth.
SETTINGS_FILE = 'settings.yaml'
# Schema consumed by _ValidateSettingsStruct/_ValidateSettingsElement.
# Each entry supports:
#   'type'       -- expected Python type of the setting value.
#   'required'   -- bool; required settings are always validated.
#   'default'    -- value filled in when the setting is absent.
#   'struct'     -- nested schema (for dicts) or element type (for lists).
#   'dependency' -- list of {'value', 'attribute'}: when the setting equals
#                   'value', every key in 'attribute' is validated too.
SETTINGS_STRUCT = {
  'client_config_backend': {
    'type': str,
    'required': True,
    'default': 'file',
    'dependency': [
      {
        'value': 'file',
        'attribute': ['client_config_file']
      },
      {
        'value': 'settings',
        'attribute': ['client_config']
      }
    ]
  },
  'save_credentials': {
    'type': bool,
    'required': True,
    'default': False,
    'dependency': [
      {
        'value': True,
        'attribute': ['save_credentials_backend']
      }
    ]
  },
  'get_refresh_token': {
    'type': bool,
    'required': False,
    'default': False
  },
  'client_config_file': {
    'type': str,
    'required': False,
    'default': 'client_secrets.json'
  },
  'save_credentials_backend': {
    'type': str,
    'required': False,
    'dependency': [
      {
        'value': 'file',
        'attribute': ['save_credentials_file']
      }
    ]
  },
  'client_config': {
    'type': dict,
    'required': False,
    'struct': {
      'client_id': {
        'type': str,
        'required': True,
        'default':'blank'
      },
      'client_secret': {
        'type': str,
        'required': True,
        'default':'blank'
      },
      'auth_uri': {
        'type': str,
        'required': True,
        'default': 'https://accounts.google.com/o/oauth2/auth'
      },
      'token_uri': {
        'type': str,
        'required': True,
        'default': 'https://accounts.google.com/o/oauth2/token'
      },
      'redirect_uri': {
        'type': str,
        'required': True,
        'default': 'urn:ietf:wg:oauth:2.0:oob'
      },
      'revoke_uri': {
        'type': str,
        'required': True,
        'default': None
      }
    }
  },
  'oauth_scope': {
    'type': list,
    'required': True,
    'struct': str,
    'default': ['https://www.googleapis.com/auth/drive']
  },
  'save_credentials_file': {
    'type': str,
    'required': False,
  }
}
|
||||
|
||||
|
||||
class SettingsError(IOError):
    """Error while loading/saving settings (missing file or bad YAML)."""
|
||||
|
||||
|
||||
class InvalidConfigError(IOError):
    """Error trying to read client configuration or validate settings."""
|
||||
|
||||
|
||||
def LoadSettingsFile(filename=SETTINGS_FILE):
    """Loads settings file in yaml format given file name.

    :param filename: path for settings file. 'settings.yaml' by default.
    :type filename: str.
    :returns: dict -- parsed settings (None for an empty file).
    :raises: SettingsError
    """
    try:
        # Bug fix: use open() instead of the Python-2-only file() builtin,
        # and a 'with' block so the handle is closed (it previously leaked).
        with open(filename, 'r') as stream:
            data = load(stream, Loader=Loader)
    except (YAMLError, IOError) as e:
        raise SettingsError(e)
    return data
|
||||
|
||||
|
||||
def ValidateSettings(data):
    """Validates if current settings is valid.

    Thin entry point delegating to _ValidateSettingsStruct with the
    module-level SETTINGS_STRUCT schema. May fill defaults into data
    in-place.

    :param data: dictionary containing all settings.
    :type data: dict.
    :raises: InvalidConfigError
    """
    _ValidateSettingsStruct(data, SETTINGS_STRUCT)
|
||||
|
||||
|
||||
def _ValidateSettingsStruct(data, struct):
    """Validates if provided data fits provided structure.

    Only entries flagged 'required' are validated directly; optional
    entries are reached through 'dependency' links in the schema.

    :param data: dictionary containing settings.
    :type data: dict.
    :param struct: dictionary containing structure information of settings.
    :type struct: dict.
    :raises: InvalidConfigError
    """
    for key, spec in struct.items():
        if spec['required']:
            _ValidateSettingsElement(data, struct, key)
|
||||
|
||||
|
||||
def _ValidateSettingsElement(data, struct, key):
    """Validates if provided element of settings data fits provided structure.

    Mutates ``data`` in place: when the element is absent, its declared
    default is written into ``data``.

    :param data: dictionary containing settings.
    :type data: dict.
    :param struct: dictionary containing structure information of settings.
    :type struct: dict.
    :param key: key of the settings element to validate.
    :type key: str.
    :raises: InvalidConfigError
    """
    # Check if data exists. If not, check if default value exists.
    value = data.get(key)
    data_type = struct[key]['type']
    if value is None:
        try:
            default = struct[key]['default']
        except KeyError:
            raise InvalidConfigError('Missing required setting %s' % key)
        else:
            data[key] = default
    # If data exists, Check type of the data
    elif type(value) is not data_type:
        raise InvalidConfigError('Setting %s should be type %s' % (key, data_type))
    # If type of this data is dict, check if structure of the data is valid.
    # NOTE: deliberately `if`, not `elif` — this runs even when the default
    # was just substituted above, so defaults are validated too.
    if data_type is dict:
        _ValidateSettingsStruct(data[key], struct[key]['struct'])
    # If type of this data is list, check if all values in the list is valid.
    elif data_type is list:
        for element in data[key]:
            if type(element) is not struct[key]['struct']:
                raise InvalidConfigError('Setting %s should be list of %s' %
                                         (key, struct[key]['struct']))
    # Check dependency of this attribute: when this element holds a
    # triggering value, the attributes it depends on become required
    # and are validated recursively.
    dependencies = struct[key].get('dependency')
    if dependencies:
        for dependency in dependencies:
            if value == dependency['value']:
                for reqkey in dependency['attribute']:
                    _ValidateSettingsElement(data, struct, reqkey)
|
||||
@@ -1,7 +0,0 @@
|
||||
client_config_backend: 'settings'
|
||||
client_config:
|
||||
client_id: "blank"
|
||||
client_secret: "blank"
|
||||
get_refresh_token: True
|
||||
oauth_scope:
|
||||
- "https://www.googleapis.com/auth/drive.file"
|
||||
@@ -1,430 +0,0 @@
|
||||
"""
|
||||
Copyright (c) 2003-2010 Gustavo Niemeyer <gustavo@niemeyer.net>
|
||||
|
||||
This module offers extensions to the standard python 2.3+
|
||||
datetime module.
|
||||
"""
|
||||
__author__ = "Gustavo Niemeyer <gustavo@niemeyer.net>"
|
||||
__license__ = "PSF License"
|
||||
|
||||
import datetime
|
||||
import calendar
|
||||
|
||||
__all__ = ["relativedelta", "MO", "TU", "WE", "TH", "FR", "SA", "SU"]
|
||||
|
||||
class weekday(object):
    """Marker for a day of the week, optionally qualified with an
    occurrence index ``n`` (e.g. MO(+1) for "first Monday")."""

    __slots__ = ["weekday", "n"]

    def __init__(self, weekday, n=None):
        self.weekday = weekday
        self.n = n

    def __call__(self, n):
        # Calling an instance yields a qualified copy; reuse self when
        # the qualifier is unchanged.
        return self if n == self.n else self.__class__(self.weekday, n)

    def __eq__(self, other):
        try:
            return self.weekday == other.weekday and self.n == other.n
        except AttributeError:
            # Anything without weekday/n attributes never compares equal.
            return False

    def __repr__(self):
        name = ("MO", "TU", "WE", "TH", "FR", "SA", "SU")[self.weekday]
        return "%s(%+d)" % (name, self.n) if self.n else name
|
||||
|
||||
MO, TU, WE, TH, FR, SA, SU = weekdays = tuple([weekday(x) for x in range(7)])
|
||||
|
||||
class relativedelta:
    """
    The relativedelta type is based on the specification of the excellent
    work done by M.-A. Lemburg in his mx.DateTime extension. However,
    notice that this type does *NOT* implement the same algorithm as
    his work. Do *NOT* expect it to behave like mx.DateTime's counterpart.

    There's two different ways to build a relativedelta instance. The
    first one is passing it two date/datetime classes:

        relativedelta(datetime1, datetime2)

    And the other way is to use the following keyword arguments:

        year, month, day, hour, minute, second, microsecond:
            Absolute information.

        years, months, weeks, days, hours, minutes, seconds, microseconds:
            Relative information, may be negative.

        weekday:
            One of the weekday instances (MO, TU, etc). These instances may
            receive a parameter N, specifying the Nth weekday, which could
            be positive or negative (like MO(+1) or MO(-2). Not specifying
            it is the same as specifying +1. You can also use an integer,
            where 0=MO.

        leapdays:
            Will add given days to the date found, if year is a leap
            year, and the date found is post 28 of february.

        yearday, nlyearday:
            Set the yearday or the non-leap year day (jump leap days).
            These are converted to day/month/leapdays information.

    Here is the behavior of operations with relativedelta:

    1) Calculate the absolute year, using the 'year' argument, or the
       original datetime year, if the argument is not present.

    2) Add the relative 'years' argument to the absolute year.

    3) Do steps 1 and 2 for month/months.

    4) Calculate the absolute day, using the 'day' argument, or the
       original datetime day, if the argument is not present. Then,
       subtract from the day until it fits in the year and month
       found after their operations.

    5) Add the relative 'days' argument to the absolute day. Notice
       that the 'weeks' argument is multiplied by 7 and added to
       'days'.

    6) Do steps 1 and 2 for hour/hours, minute/minutes, second/seconds,
       microsecond/microseconds.

    7) If the 'weekday' argument is present, calculate the weekday,
       with the given (wday, nth) tuple. wday is the index of the
       weekday (0-6, 0=Mon), and nth is the number of weeks to add
       forward or backward, depending on its signal. Notice that if
       the calculated date is already Monday, for example, using
       (0, 1) or (0, -1) won't change the day.
    """

    def __init__(self, dt1=None, dt2=None,
                 years=0, months=0, days=0, leapdays=0, weeks=0,
                 hours=0, minutes=0, seconds=0, microseconds=0,
                 year=None, month=None, day=None, weekday=None,
                 yearday=None, nlyearday=None,
                 hour=None, minute=None, second=None, microsecond=None):
        if dt1 and dt2:
            # Diff mode: express dt1 - dt2 as a relativedelta.
            if not isinstance(dt1, datetime.date) or \
               not isinstance(dt2, datetime.date):
                raise TypeError("relativedelta only diffs datetime/date")
            if type(dt1) is not type(dt2):
                # Promote the plain date so both operands are datetimes.
                if not isinstance(dt1, datetime.datetime):
                    dt1 = datetime.datetime.fromordinal(dt1.toordinal())
                elif not isinstance(dt2, datetime.datetime):
                    dt2 = datetime.datetime.fromordinal(dt2.toordinal())
            self.years = 0
            self.months = 0
            self.days = 0
            self.leapdays = 0
            self.hours = 0
            self.minutes = 0
            self.seconds = 0
            self.microseconds = 0
            self.year = None
            self.month = None
            self.day = None
            self.weekday = None
            self.hour = None
            self.minute = None
            self.second = None
            self.microsecond = None
            self._has_time = 0

            # Start from the month difference, then step it until adding
            # the delta to dt2 no longer overshoots dt1; the remainder is
            # carried as seconds/microseconds.
            months = (dt1.year*12+dt1.month)-(dt2.year*12+dt2.month)
            self._set_months(months)
            dtm = self.__radd__(dt2)
            if dt1 < dt2:
                while dt1 > dtm:
                    months += 1
                    self._set_months(months)
                    dtm = self.__radd__(dt2)
            else:
                while dt1 < dtm:
                    months -= 1
                    self._set_months(months)
                    dtm = self.__radd__(dt2)
            delta = dt1 - dtm
            self.seconds = delta.seconds+delta.days*86400
            self.microseconds = delta.microseconds
        else:
            # Keyword mode: copy the relative/absolute fields directly.
            self.years = years
            self.months = months
            self.days = days+weeks*7
            self.leapdays = leapdays
            self.hours = hours
            self.minutes = minutes
            self.seconds = seconds
            self.microseconds = microseconds
            self.year = year
            self.month = month
            self.day = day
            self.hour = hour
            self.minute = minute
            self.second = second
            self.microsecond = microsecond

            if type(weekday) is int:
                self.weekday = weekdays[weekday]
            else:
                self.weekday = weekday

            # yearday/nlyearday are converted into month/day(/leapdays).
            yday = 0
            if nlyearday:
                yday = nlyearday
            elif yearday:
                yday = yearday
                if yearday > 59:
                    self.leapdays = -1
            if yday:
                ydayidx = [31,59,90,120,151,181,212,243,273,304,334,366]
                for idx, ydays in enumerate(ydayidx):
                    if yday <= ydays:
                        self.month = idx+1
                        if idx == 0:
                            self.day = yday
                        else:
                            self.day = yday-ydayidx[idx-1]
                        break
                else:
                    raise ValueError("invalid year day (%d)" % yday)

        self._fix()

    def _fix(self):
        """Normalize fields so each relative component stays in range,
        carrying overflow into the next larger unit."""
        if abs(self.microseconds) > 999999:
            s = self.microseconds//abs(self.microseconds)
            div, mod = divmod(self.microseconds*s, 1000000)
            self.microseconds = mod*s
            self.seconds += div*s
        if abs(self.seconds) > 59:
            s = self.seconds//abs(self.seconds)
            div, mod = divmod(self.seconds*s, 60)
            self.seconds = mod*s
            self.minutes += div*s
        if abs(self.minutes) > 59:
            s = self.minutes//abs(self.minutes)
            div, mod = divmod(self.minutes*s, 60)
            self.minutes = mod*s
            self.hours += div*s
        if abs(self.hours) > 23:
            s = self.hours//abs(self.hours)
            div, mod = divmod(self.hours*s, 24)
            self.hours = mod*s
            self.days += div*s
        if abs(self.months) > 11:
            s = self.months//abs(self.months)
            div, mod = divmod(self.months*s, 12)
            self.months = mod*s
            self.years += div*s
        if (self.hours or self.minutes or self.seconds or self.microseconds or
            self.hour is not None or self.minute is not None or
            self.second is not None or self.microsecond is not None):
            self._has_time = 1
        else:
            self._has_time = 0

    def _set_months(self, months):
        """Set the months field, normalizing overflow into years."""
        self.months = months
        if abs(self.months) > 11:
            s = self.months//abs(self.months)
            div, mod = divmod(self.months*s, 12)
            self.months = mod*s
            self.years = div*s
        else:
            self.years = 0

    def __radd__(self, other):
        """Apply this delta to a date/datetime (``date + relativedelta``)."""
        if not isinstance(other, datetime.date):
            raise TypeError("unsupported type for add operation")
        elif self._has_time and not isinstance(other, datetime.datetime):
            # Time components require a datetime result.
            other = datetime.datetime.fromordinal(other.toordinal())
        year = (self.year or other.year)+self.years
        month = self.month or other.month
        if self.months:
            assert 1 <= abs(self.months) <= 12
            month += self.months
            if month > 12:
                year += 1
                month -= 12
            elif month < 1:
                year -= 1
                month += 12
        # Clamp the day so it fits in the resulting month.
        day = min(calendar.monthrange(year, month)[1],
                  self.day or other.day)
        repl = {"year": year, "month": month, "day": day}
        for attr in ["hour", "minute", "second", "microsecond"]:
            value = getattr(self, attr)
            if value is not None:
                repl[attr] = value
        days = self.days
        if self.leapdays and month > 2 and calendar.isleap(year):
            days += self.leapdays
        ret = (other.replace(**repl)
               + datetime.timedelta(days=days,
                                    hours=self.hours,
                                    minutes=self.minutes,
                                    seconds=self.seconds,
                                    microseconds=self.microseconds))
        if self.weekday:
            # Jump to the Nth given weekday forward (n > 0) or back.
            weekday, nth = self.weekday.weekday, self.weekday.n or 1
            jumpdays = (abs(nth)-1)*7
            if nth > 0:
                jumpdays += (7-ret.weekday()+weekday)%7
            else:
                jumpdays += (ret.weekday()-weekday)%7
                jumpdays *= -1
            ret += datetime.timedelta(days=jumpdays)
        return ret

    def __rsub__(self, other):
        return self.__neg__().__radd__(other)

    def __add__(self, other):
        """Combine two relativedeltas; other's absolute fields win."""
        if not isinstance(other, relativedelta):
            raise TypeError("unsupported type for add operation")
        return relativedelta(years=other.years+self.years,
                             months=other.months+self.months,
                             days=other.days+self.days,
                             hours=other.hours+self.hours,
                             minutes=other.minutes+self.minutes,
                             seconds=other.seconds+self.seconds,
                             microseconds=other.microseconds+self.microseconds,
                             leapdays=other.leapdays or self.leapdays,
                             year=other.year or self.year,
                             month=other.month or self.month,
                             day=other.day or self.day,
                             weekday=other.weekday or self.weekday,
                             hour=other.hour or self.hour,
                             minute=other.minute or self.minute,
                             second=other.second or self.second,
                             # BUGFIX: was `other.second`, dropping the
                             # absolute microsecond of the right operand.
                             microsecond=other.microsecond or self.microsecond)

    def __sub__(self, other):
        if not isinstance(other, relativedelta):
            raise TypeError("unsupported type for sub operation")
        return relativedelta(years=other.years-self.years,
                             months=other.months-self.months,
                             days=other.days-self.days,
                             hours=other.hours-self.hours,
                             minutes=other.minutes-self.minutes,
                             seconds=other.seconds-self.seconds,
                             microseconds=other.microseconds-self.microseconds,
                             leapdays=other.leapdays or self.leapdays,
                             year=other.year or self.year,
                             month=other.month or self.month,
                             day=other.day or self.day,
                             weekday=other.weekday or self.weekday,
                             hour=other.hour or self.hour,
                             minute=other.minute or self.minute,
                             second=other.second or self.second,
                             # BUGFIX: was `other.second` (copy/paste typo).
                             microsecond=other.microsecond or self.microsecond)

    def __neg__(self):
        return relativedelta(years=-self.years,
                             months=-self.months,
                             days=-self.days,
                             hours=-self.hours,
                             minutes=-self.minutes,
                             seconds=-self.seconds,
                             microseconds=-self.microseconds,
                             leapdays=self.leapdays,
                             year=self.year,
                             month=self.month,
                             day=self.day,
                             weekday=self.weekday,
                             hour=self.hour,
                             minute=self.minute,
                             second=self.second,
                             microsecond=self.microsecond)

    def __nonzero__(self):
        return not (not self.years and
                    not self.months and
                    not self.days and
                    not self.hours and
                    not self.minutes and
                    not self.seconds and
                    not self.microseconds and
                    not self.leapdays and
                    self.year is None and
                    self.month is None and
                    self.day is None and
                    self.weekday is None and
                    self.hour is None and
                    self.minute is None and
                    self.second is None and
                    self.microsecond is None)

    # Python 3 spelling of the truth-value hook.
    __bool__ = __nonzero__

    def __mul__(self, other):
        f = float(other)
        return relativedelta(years=self.years*f,
                             months=self.months*f,
                             days=self.days*f,
                             hours=self.hours*f,
                             minutes=self.minutes*f,
                             seconds=self.seconds*f,
                             microseconds=self.microseconds*f,
                             leapdays=self.leapdays,
                             year=self.year,
                             month=self.month,
                             day=self.day,
                             weekday=self.weekday,
                             hour=self.hour,
                             minute=self.minute,
                             second=self.second,
                             microsecond=self.microsecond)

    def __eq__(self, other):
        if not isinstance(other, relativedelta):
            return False
        if self.weekday or other.weekday:
            if not self.weekday or not other.weekday:
                return False
            if self.weekday.weekday != other.weekday.weekday:
                return False
            # An unset n is equivalent to +1.
            n1, n2 = self.weekday.n, other.weekday.n
            if n1 != n2 and not ((not n1 or n1 == 1) and (not n2 or n2 == 1)):
                return False
        return (self.years == other.years and
                self.months == other.months and
                self.days == other.days and
                self.hours == other.hours and
                self.minutes == other.minutes and
                self.seconds == other.seconds and
                self.leapdays == other.leapdays and
                self.year == other.year and
                self.month == other.month and
                self.day == other.day and
                self.hour == other.hour and
                self.minute == other.minute and
                self.second == other.second and
                self.microsecond == other.microsecond)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __div__(self, other):
        return self.__mul__(1/float(other))

    # Python 3 spelling of division.
    __truediv__ = __div__

    def __repr__(self):
        parts = []
        for attr in ["years", "months", "days", "leapdays",
                     "hours", "minutes", "seconds", "microseconds"]:
            value = getattr(self, attr)
            if value:
                parts.append("%s=%+d" % (attr, value))
        for attr in ["year", "month", "day", "weekday",
                     "hour", "minute", "second", "microsecond"]:
            value = getattr(self, attr)
            if value is not None:
                parts.append("%s=%s" % (attr, value))
        return "%s(%s)" % (self.__class__.__name__, ", ".join(parts))
|
||||
@@ -1,11 +1,12 @@
|
||||
import urllib2
|
||||
# this is duplicated in snipppets of code from all over the web, credit to no one
|
||||
# in particular - to all those that have gone before me!
|
||||
from future.moves.urllib.request import urlopen
|
||||
|
||||
|
||||
#this is duplicated in snipppets of code from all over the web, credit to no one
|
||||
#in particular - to all those that have gone before me!
|
||||
def shorten(aUrl):
    """Return a tinyurl.com short link for aUrl.

    Performs one HTTP request to the TinyURL create API; the response
    body is the shortened URL.
    """
    tinyurl = 'http://tinyurl.com/api-create.php?url='
    # Single request via the module-level urlopen import; the duplicate
    # Python 2 urllib2.urlopen call was merged-diff residue and caused a
    # redundant second request (and a Py3 NameError).
    req = urlopen(tinyurl + aUrl)
    data = req.read()

    # should be a tiny url
    return str(data)
|
||||
|
||||
@@ -2,39 +2,46 @@ import xbmc
|
||||
import xbmcgui
|
||||
import xbmcaddon
|
||||
|
||||
__addon_id__= 'script.xbmcbackup'
|
||||
__addon_id__ = 'script.xbmcbackup'
|
||||
__Addon = xbmcaddon.Addon(__addon_id__)
|
||||
|
||||
|
||||
def data_dir():
    """Return the add-on's user data ('profile') directory path."""
    return __Addon.getAddonInfo('profile')
|
||||
|
||||
|
||||
def addon_dir():
    """Return the add-on's installation ('path') directory."""
    return __Addon.getAddonInfo('path')
|
||||
|
||||
|
||||
def openSettings():
    """Open this add-on's settings dialog in Kodi."""
    __Addon.openSettings()
|
||||
|
||||
def log(message, loglevel=xbmc.LOGDEBUG):
    """Write a message to the Kodi log, tagged with the add-on id and version.

    :param message: text to log (already a str; no extra encoding step).
    :param loglevel: xbmc log level constant, LOGDEBUG by default.
    """
    # The merged diff left two conflicting definitions (an old
    # LOGNOTICE/encode() variant and this one); keep the current form.
    xbmc.log(__addon_id__ + "-" + __Addon.getAddonInfo('version') + ": " + message, level=loglevel)
|
||||
|
||||
|
||||
def showNotification(message):
    """Pop up a 4-second Kodi notification with the add-on title and icon.

    :param message: notification body text.
    """
    # Resolved from merged diff residue: current side uses the localized
    # title directly (no encode()) and the resources/images icon path.
    xbmcgui.Dialog().notification(getString(30010), message, time=4000, icon=xbmc.translatePath(__Addon.getAddonInfo('path') + "/resources/images/icon.png"))
|
||||
|
||||
|
||||
def getSetting(name):
    """Return the add-on setting value for *name* (Kodi returns a string)."""
    return __Addon.getSetting(name)
|
||||
|
||||
def setSetting(name, value):
    """Persist a single add-on setting.

    :param name: setting id as declared in settings.xml.
    :param value: value to store.
    """
    # The merged diff contained duplicate old/new definitions of this
    # function; this is the single surviving form.
    __Addon.setSetting(name, value)
|
||||
|
||||
|
||||
def getString(string_id):
    """Return the localized string for *string_id* from the add-on's language files."""
    return __Addon.getLocalizedString(string_id)
|
||||
|
||||
def getRegionalTimestamp(date_time, dateformat=['dateshort']):
    """Format a date/datetime using the user's Kodi regional settings.

    :param date_time: datetime/date object to format (must support strftime).
    :param dateformat: list of xbmc region format ids (e.g. 'dateshort',
        'time'); each is applied in order and results are space-separated.
    :returns: str -- the formatted, stripped timestamp.
    """
    # NOTE: the mutable default list is safe here because it is only
    # iterated, never mutated.
    # (The removed legacy encode() helper's lines were interleaved into
    # this function by the merged diff; this is the clean current body.)
    result = ''

    for aFormat in dateformat:
        result = result + ("%s " % date_time.strftime(xbmc.getRegion(aFormat)))

    return result.strip()
|
||||
|
||||
@@ -1,130 +1,131 @@
|
||||
import utils as utils
|
||||
import tinyurl as tinyurl
|
||||
from __future__ import unicode_literals
|
||||
import zipfile
|
||||
import os.path
|
||||
import sys
|
||||
import xbmc
|
||||
import xbmcvfs
|
||||
import xbmcgui
|
||||
import zipfile
|
||||
import zlib
|
||||
import os
|
||||
import os.path
|
||||
import sys
|
||||
import dropbox
|
||||
from dropbox.files import WriteMode,CommitInfo,UploadSessionCursor
|
||||
from pydrive.drive import GoogleDrive
|
||||
from authorizers import DropboxAuthorizer,GoogleDriveAuthorizer
|
||||
from . import dropbox
|
||||
from . import utils as utils
|
||||
from .dropbox.files import WriteMode, CommitInfo, UploadSessionCursor
|
||||
from . authorizers import DropboxAuthorizer
|
||||
|
||||
|
||||
class Vfs:
    """Abstract virtual-filesystem interface.

    Concrete backends (Kodi vfs, zip archive, Dropbox, Google Drive)
    override these operations; the base implementations are permissive
    no-ops. The merged diff contained duplicate old/new method
    signatures; this is the deduplicated current form.
    """

    root_path = None

    def __init__(self, rootString):
        self.set_root(rootString)

    def set_root(self, rootString):
        """Set and normalize the backend root path; returns the old root."""
        old_root = self.root_path
        self.root_path = rootString

        # fix slashes
        self.root_path = self.root_path.replace("\\", "/")

        # check if trailing slash is included
        if(self.root_path[-1:] != "/"):
            self.root_path = self.root_path + "/"

        # return the old root
        return old_root

    def listdir(self, directory):
        return {}

    def mkdir(self, directory):
        return True

    def put(self, source, dest):
        return True

    def rmdir(self, directory):
        return True

    def rmfile(self, aFile):
        return True

    def exists(self, aFile):
        return True

    def rename(self, aFile, newName):
        return True

    def cleanup(self):
        return True
||||
|
||||
|
||||
|
||||
class XBMCFileSystem(Vfs):
    """Vfs backend that delegates to the Kodi (xbmcvfs) virtual filesystem.

    Deduplicated from merged old/new diff lines; each method forwards to
    the corresponding xbmcvfs call, translating special paths first
    where Kodi requires it.
    """

    def listdir(self, directory):
        return xbmcvfs.listdir(directory)

    def mkdir(self, directory):
        return xbmcvfs.mkdir(xbmc.translatePath(directory))

    def put(self, source, dest):
        return xbmcvfs.copy(xbmc.translatePath(source), xbmc.translatePath(dest))

    def rmdir(self, directory):
        # True -> remove recursively
        return xbmcvfs.rmdir(directory, True)

    def rmfile(self, aFile):
        return xbmcvfs.delete(aFile)

    def rename(self, aFile, newName):
        return xbmcvfs.rename(aFile, newName)

    def exists(self, aFile):
        return xbmcvfs.exists(aFile)
|
||||
|
||||
|
||||
class ZipFileSystem(Vfs):
    """Vfs backend that reads/writes a single zip archive.

    Deduplicated from merged old/new diff lines; the current side writes
    bytes via xbmcvfs.File.readBytes() with no manual encode step.
    """

    zip = None

    def __init__(self, rootString, mode):
        self.root_path = ""
        self.zip = zipfile.ZipFile(rootString, mode=mode, compression=zipfile.ZIP_DEFLATED, allowZip64=True)

    def listdir(self, directory):
        # Archives are written, not browsed.
        return [[], []]

    def mkdir(self, directory):
        # Directories are implicit in zip archives.
        return False

    def put(self, source, dest):

        aFile = xbmcvfs.File(xbmc.translatePath(source), 'r')

        self.zip.writestr(dest, aFile.readBytes())

        return True

    def rmdir(self, directory):
        return False

    def exists(self, aFile):
        return False

    def cleanup(self):
        # Closing flushes the archive's central directory to disk.
        self.zip.close()

    def extract(self, aFile, path):
        # extract zip file to path
        self.zip.extract(aFile, path)

    def listFiles(self):
        return self.zip.infolist()
|
||||
|
||||
|
||||
class DropboxFileSystem(Vfs):
    """Vfs backend backed by the Dropbox v2 API.

    Deduplicated from merged old/new diff lines; the current side uses
    `is not None` comparisons and passes filenames through without the
    removed encode() helper.
    """

    MAX_CHUNK = 50 * 1000 * 1000  # dropbox uses 150, reduced to 50 for small mem systems
    client = None
    APP_KEY = ''
    APP_SECRET = ''

    def __init__(self, rootString):
        self.set_root(rootString)

        authorizer = DropboxAuthorizer()

        if(authorizer.isAuthorized()):
            self.client = authorizer.getClient()
        else:
            # tell the user to go back and run the authorizer
            xbmcgui.Dialog().ok(utils.getString(30010), utils.getString(30105))
            sys.exit()

    def listdir(self, directory):
        directory = self._fix_slashes(directory)

        if(self.client is not None and self.exists(directory)):
            files = []
            dirs = []
            metadata = self.client.files_list_folder(directory)

            for aFile in metadata.entries:
                if(isinstance(aFile, dropbox.files.FolderMetadata)):
                    dirs.append(aFile.name)
                else:
                    files.append(aFile.name)

            return [dirs, files]
        else:
            return [[], []]

    def mkdir(self, directory):
        directory = self._fix_slashes(directory)
        if(self.client is not None):
            # sort of odd but always return true, folder create is implicit with file upload
            return True
        else:
            return False

    def rmdir(self, directory):
        directory = self._fix_slashes(directory)
        if(self.client is not None and self.exists(directory)):
            # dropbox is stupid and will refuse to do this sometimes, need to delete recursively
            dirs, files = self.listdir(directory)

            for aDir in dirs:
                self.rmdir(aDir)

            # finally remove the root directory
            self.client.files_delete(directory)

            return True
        else:
            return False

    def rmfile(self, aFile):
        aFile = self._fix_slashes(aFile)

        if(self.client is not None and self.exists(aFile)):
            self.client.files_delete(aFile)
            return True
        else:
            return False

    def exists(self, aFile):
        aFile = self._fix_slashes(aFile)

        if(self.client is not None):
            # can't list root metadata
            if(aFile == ''):
                return True

            try:
                self.client.files_get_metadata(aFile)
                # if we make it here the file does exist
                return True
            except:
                # NOTE(review): bare except intentionally preserved — any
                # API error is treated as "does not exist".
                return False
        else:
            return False

    def put(self, source, dest, retry=True):
        dest = self._fix_slashes(dest)

        if(self.client is not None):
            # open the file and get its size
            f = open(source, 'rb')
            f_size = os.path.getsize(source)

            try:
                if(f_size < self.MAX_CHUNK):
                    # use the regular upload
                    self.client.files_upload(f.read(), dest, mode=WriteMode('overwrite'))
                else:
                    # start the upload session
                    upload_session = self.client.files_upload_session_start(f.read(self.MAX_CHUNK))
                    upload_cursor = UploadSessionCursor(upload_session.session_id, f.tell())

                    while(f.tell() < f_size):
                        # check if we should finish the upload
                        if((f_size - f.tell()) <= self.MAX_CHUNK):
                            # upload and close
                            self.client.files_upload_session_finish(f.read(self.MAX_CHUNK), upload_cursor, CommitInfo(dest, mode=WriteMode('overwrite')))
                        else:
                            # upload a part and store the offset
                            self.client.files_upload_session_append_v2(f.read(self.MAX_CHUNK), upload_cursor)
                            upload_cursor.offset = f.tell()

                # if no errors we're good!
                return True
            except Exception as anError:
                utils.log(str(anError))

                # if we have an exception retry
                if(retry):
                    return self.put(source, dest, False)
                else:
                    # tried once already, just quit
                    return False
        else:
            return False

    def get_file(self, source, dest):
        if(self.client is not None):
            # write the file locally
            self.client.files_download_to_file(dest, source)
            return True
        else:
            return False

    def _fix_slashes(self, filename):
        result = filename.replace('\\', '/')

        # root needs to be a blank string
        if(result == '/'):
            result = ""

        # if dir ends in slash, remove it
        if(result[-1:] == "/"):
            result = result[:-1]

        return result
|
||||
|
||||
|
||||
class GoogleDriveFilesystem(Vfs):
    """Vfs implementation backed by Google Drive via the PyDrive client.

    Paths are '/'-separated; every lookup is resolved recursively to a
    Drive file object and cached in ``history`` to limit API round trips.
    """

    drive = None
    # cache of normalized path -> Drive file object (NOTE: class-level,
    # shared across instances; the addon only ever creates one)
    history = {}
    FOLDER_TYPE = 'application/vnd.google-apps.folder'

    def __init__(self, rootString):
        self.set_root(rootString)

        authorizer = GoogleDriveAuthorizer()

        if(authorizer.isAuthorized()):
            self.drive = authorizer.getClient()
        else:
            # tell the user to go back and run the authorizer
            xbmcgui.Dialog().ok(utils.getString(30010), utils.getString(30105))
            sys.exit()

        # make sure we have the folder we need
        xbmc_folder = self._getGoogleFile(self.root_path)
        if(xbmc_folder is None):
            self.mkdir(self.root_path)

    def listdir(self, directory):
        """Return [dirs, files] title lists for the given remote directory."""
        files = []
        dirs = []

        if(not directory.startswith('/')):
            directory = '/' + directory

        # get the id of this folder
        parentFolder = self._getGoogleFile(directory)

        # need to do this after
        if(not directory.endswith('/')):
            directory = directory + '/'

        if(parentFolder is not None):

            fileList = self.drive.ListFile({'q': "'" + parentFolder['id'] + "' in parents and trashed = false"}).GetList()

            for aFile in fileList:
                if(aFile['mimeType'] == self.FOLDER_TYPE):
                    dirs.append(utils.encode(aFile['title']))
                else:
                    files.append(utils.encode(aFile['title']))

        return [dirs, files]

    def mkdir(self, directory):
        """Create a remote folder; returns False if the parent can't be found."""
        result = True

        if(not directory.startswith('/')):
            directory = '/' + directory

        if(directory.endswith('/')):
            directory = directory[:-1]

        # split the string by the directory separator
        pathList = os.path.split(directory)

        if(pathList[0] == '/'):

            # we're at the root, just make the folder
            newFolder = self.drive.CreateFile({'title': pathList[1], 'parent': 'root', 'mimeType': self.FOLDER_TYPE})
            newFolder.Upload()
        else:
            # get the id of the parent folder
            parentFolder = self._getGoogleFile(pathList[0])

            if(parentFolder is not None):
                newFolder = self.drive.CreateFile({'title': pathList[1], "parents": [{'kind': 'drive#fileLink', 'id': parentFolder['id']}], 'mimeType': self.FOLDER_TYPE})
                newFolder.Upload()
            else:
                result = False

        return result

    def put(self, source, dest):
        """Upload local file *source* to remote path *dest*; returns success flag."""
        result = True

        # make the name separate from the path
        if(not dest.startswith('/')):
            dest = '/' + dest

        pathList = os.path.split(dest)

        # get the parent location
        parentFolder = self._getGoogleFile(pathList[0])

        if(parentFolder is not None):
            # create a new file in this folder
            newFile = self.drive.CreateFile({"title": pathList[1], "parents": [{'kind': 'drive#fileLink', 'id': parentFolder['id']}]})
            newFile.SetContentFile(source)
            newFile.Upload()
        else:
            result = False

        return result

    def get_file(self, source, dest):
        """Download remote file *source* to local path *dest*; returns success flag."""
        result = True

        # get the id of this file
        file = self._getGoogleFile(source)

        if(file is not None):
            file.GetContentFile(dest)
        else:
            result = False

        return result

    def rmdir(self, directory):
        """Delete a remote folder; returns False if it does not exist."""
        result = True

        # check that the folder exists
        folder = self._getGoogleFile(directory)

        if(folder is not None):
            # delete the folder
            folder.Delete()
        else:
            result = False

        return result

    def rmfile(self, aFile):
        # really just the same as the remove directory function
        return self.rmdir(aFile)

    def exists(self, aFile):
        # attempt to get this file
        foundFile = self._getGoogleFile(aFile)

        if(foundFile is not None):
            return True
        else:
            return False

    def rename(self, aFile, newName):
        # renaming is not implemented for Google Drive; report success
        return True

    def _getGoogleFile(self, file):
        """Resolve a path to its Drive file object, caching results.

        The path is normalized to start with '/' and not end with one,
        then resolved recursively from the root. Returns None when any
        component of the path cannot be found.
        """
        result = None

        # file must start with / and not end with one (even directory)
        if(not file.startswith('/')):
            file = '/' + file

        if(file.endswith('/')):
            file = file[:-1]

        # 'has_key' is Python-2-only; membership test works everywhere
        if(file in self.history):

            result = self.history[file]
        else:
            pathList = os.path.split(file)

            # end of recursion, we got the root
            if(pathList[0] == '/'):
                # get the id of this file (if it exists)
                file_list = self.drive.ListFile({'q': "title='" + pathList[1] + "' and 'root' in parents and trashed=false"}).GetList()

                if(len(file_list) > 0):
                    result = file_list[0]
                    # cache under the full normalized path so later
                    # lookups (which key on the '/'-prefixed path) hit
                    self.history[file] = result
            else:
                # recurse down the tree
                current_file = pathList[1]

                parentId = self._getGoogleFile(pathList[0])

                if(parentId is not None):
                    self.history[pathList[0]] = parentId

                    # attempt to get the id of this file, with this parent
                    file_list = self.drive.ListFile({'q': "title='" + current_file + "' and '" + parentId['id'] + "' in parents and trashed=false"}).GetList()

                    if(len(file_list) > 0):
                        result = file_list[0]
                        self.history[file] = result

        return result
|
||||
|
||||
|
||||
@@ -4,31 +4,32 @@
|
||||
<setting id="compress_backups" type="bool" label="30087" default="false" />
|
||||
<setting id="backup_rotation" type="number" label="30026" default="0" />
|
||||
<setting id="progress_mode" type="enum" label="30022" lvalues="30082|30083|30084" default="0" />
|
||||
<setting id="upgrade_notes" type="number" label="upgrade_notes" visible="false" default="1" />
|
||||
</category>
|
||||
<category id="backup_path" label="30048">
|
||||
<setting id="remote_selection" type="enum" lvalues="30018|30019|30027|30098" default="0" label="30025"/>
|
||||
<setting id="remote_selection" type="enum" lvalues="30018|30019|30027" default="0" label="30025"/>
|
||||
<setting id="remote_path_2" type="text" label="30024" default="" visible="eq(-1,1)" />
|
||||
<setting id="remote_path" type="folder" label="30020" visible="eq(-2,0)" />
|
||||
<setting id="dropbox_key" type="text" label="30028" visible="eq(-3,2)" default="" />
|
||||
<setting id="dropbox_secret" type="text" label="30029" visible="eq(-4,2)" default="" />
|
||||
<setting id="google_drive_id" type="text" label="Client ID" visible="eq(-5,3)" default="" />
|
||||
<setting id="google_drive_secret" type="text" label="Client Secret" visible="eq(-6,3)" default="" />
|
||||
<setting id="auth_dropbox_button" type="action" label="30104" action="RunScript(special://home/addons/script.xbmcbackup/authorize_cloud.py,type=dropbox)" visible="eq(-7,2)"/>
|
||||
<setting id="auth_google_button" type="action" label="30104" action="RunScript(special://home/addons/script.xbmcbackup/authorize_cloud.py,type=google_drive)" visible="eq(-8,3)"/>
|
||||
<setting id="remove_auth_button" type="action" label="30093" action="RunScript(special://home/addons/script.xbmcbackup/remove_auth.py)" visible="gt(-9,1)"/>
|
||||
<setting id="auth_dropbox_button" type="action" label="30104" action="RunScript(special://home/addons/script.xbmcbackup/launcher.py,action=authorize_cloud,provider=dropbox)" visible="eq(-7,2)"/>
|
||||
<setting id="auth_google_button" type="action" label="30104" action="RunScript(special://home/addons/script.xbmcbackup/launcher.py,action=authorize_cloud,provider=google_drive)" visible="eq(-8,3)"/>
|
||||
<setting id="remove_auth_button" type="action" label="30093" action="RunScript(special://home/addons/script.xbmcbackup/launcher.py,action=remove_auth)" visible="gt(-9,1)"/>
|
||||
</category>
|
||||
<category id="selection" label="30012">
|
||||
<setting id="backup_addons" type="bool" label="30030" default="true" />
|
||||
<setting id="backup_addon_data" type="bool" label="30031" default="false" />
|
||||
<setting id="backup_database" type="bool" label="30032" default="true" />
|
||||
<setting id="backup_playlists" type="bool" label="30033" default="true" />
|
||||
<setting id="backup_profiles" type="bool" label="30080" default="false" />
|
||||
<setting id="backup_thumbnails" type="bool" label="30034" default="true" />
|
||||
<setting id="backup_config" type="bool" label="30035" default="true" />
|
||||
<setting id="custom_dir_1_enable" type="bool" label="30036" default="false" />
|
||||
<setting id="backup_custom_dir_1" type="folder" label="30018" default="" visible="eq(-1,true)"/>
|
||||
<setting id="custom_dir_2_enable" type="bool" label="30037" default="false" />
|
||||
<setting id="backup_custom_dir_2" type="folder" label="30018" default="" visible="eq(-1,true)"/>
|
||||
<setting id="backup_selection_type" type="enum" lvalues="30014|30015" default="0" label="30023" />
|
||||
<setting id="backup_addon_data" type="bool" label="30031" default="false" visible="eq(-1,0)"/>
|
||||
<setting id="backup_config" type="bool" label="30035" default="true" visible="eq(-2,0)"/>
|
||||
<setting id="backup_database" type="bool" label="30032" default="true" visible="eq(-3,0)"/>
|
||||
<setting id="backup_game_saves" type="bool" label="30133" default="false" visible="eq(-4,0)" />
|
||||
<setting id="backup_playlists" type="bool" label="30033" default="true" visible="eq(-5,0)"/>
|
||||
<setting id="backup_profiles" type="bool" label="30080" default="false" visible="eq(-6,0)"/>
|
||||
<setting id="backup_thumbnails" type="bool" label="30034" default="true" visible="eq(-7,0)"/>
|
||||
<setting id="backup_addons" type="bool" label="30030" default="true" visible="eq(-8,0)" />
|
||||
<setting id="advanced_button" type="action" label="30125" visible="eq(-9,1)" action="RunScript(special://home/addons/script.xbmcbackup/launcher.py,action=advanced_editor)" />
|
||||
<setting id="advanced_defaults" type="action" label="30139" visible="eq(-10,1)" action="RunScript(special://home/addons/script.xbmcbackup/launcher.py,action=advanced_copy_config)" />
|
||||
</category>
|
||||
<category id="scheduling" label="30013">
|
||||
<setting id="enable_scheduler" type="bool" label="30060" default="false" />
|
||||
|
||||
391
scheduler.py
@@ -1,191 +1,200 @@
|
||||
import xbmc
|
||||
import xbmcvfs
|
||||
import xbmcgui
|
||||
import datetime
|
||||
import time
|
||||
import resources.lib.utils as utils
|
||||
from resources.lib.croniter import croniter
|
||||
from resources.lib.backup import XbmcBackup
|
||||
|
||||
class BackupScheduler:
    """Runs scheduled backups for the xbmcbackup addon.

    Polls the clock in a loop, fires a backup when the cron-derived run
    time passes, and persists the next run time to next_run.txt so a
    missed run can be caught up after Kodi restarts.
    """

    #monitor for settings changes (UpdateMonitor below)
    monitor = None
    #mirrors the "enable_scheduler" setting ("true"/"false" strings)
    enabled = "false"
    #epoch timestamp of the next scheduled run
    next_run = 0
    #file where the next run time is persisted between sessions
    next_run_path = None
    #restore point name read from resume.txt, if a restore was interrupted
    restore_point = None

    def __init__(self):
        self.monitor = UpdateMonitor(update_method = self.settingsChanged)
        self.enabled = utils.getSetting("enable_scheduler")
        self.next_run_path = xbmc.translatePath(utils.data_dir()) + 'next_run.txt'

        if(self.enabled == "true"):

            #sleep for 2 minutes so Kodi can start and time can update correctly
            xbmc.Monitor().waitForAbort(120)

            nr = 0
            if(xbmcvfs.exists(self.next_run_path)):

                fh = xbmcvfs.File(self.next_run_path)
                try:
                    #check if we saved a run time from the last run
                    nr = float(fh.read())
                except ValueError:
                    nr = 0

                fh.close()

            #if we missed and the user wants to play catch-up
            if(0 < nr <= time.time() and utils.getSetting('schedule_miss') == 'true'):
                utils.log("scheduled backup was missed, doing it now...")
                progress_mode = int(utils.getSetting('progress_mode'))

                if(progress_mode == 0):
                    progress_mode = 1 # Kodi just started, don't block it with a foreground progress bar

                self.doScheduledBackup(progress_mode)

        self.setup()

    def setup(self):
        """Compute the next run time from the current schedule settings."""
        #scheduler was turned on, find next run time
        utils.log("scheduler enabled, finding next run time")
        self.findNextRun(time.time())

    def start(self):
        """Main loop: resume any interrupted restore, then wait for run times."""

        #check if a backup should be resumed
        resumeRestore = self._resumeCheck()

        if(resumeRestore):
            restore = XbmcBackup()
            restore.selectRestore(self.restore_point)
            #skip the advanced settings check
            restore.skipAdvanced()
            restore.run(XbmcBackup.Restore)

        while(not self.monitor.abortRequested()):

            if(self.enabled == "true"):
                #scheduler is still on
                now = time.time()

                if(self.next_run <= now):
                    progress_mode = int(utils.getSetting('progress_mode'))
                    self.doScheduledBackup(progress_mode)

                    #check if we should shut the computer down
                    if(utils.getSetting("cron_shutdown") == 'true'):
                        #wait 10 seconds to make sure all backup processes and files are completed
                        time.sleep(10)
                        xbmc.executebuiltin('ShutDown()')
                    else:
                        #find the next run time like normal
                        self.findNextRun(now)

            xbmc.sleep(500)

        #delete monitor to free up memory
        del self.monitor

    def doScheduledBackup(self,progress_mode):
        """Run one backup, honoring the progress display mode (2 = silent)."""
        if(progress_mode != 2):
            utils.showNotification(utils.getString(30053))

        backup = XbmcBackup()

        if(backup.remoteConfigured()):

            if(int(utils.getSetting('progress_mode')) in [0,1]):
                backup.run(XbmcBackup.Backup,True)
            else:
                backup.run(XbmcBackup.Backup,False)

            #check if this is a "one-off"
            if(int(utils.getSetting("schedule_interval")) == 0):
                #disable the scheduler after this run
                self.enabled = "false"
                utils.setSetting('enable_scheduler','false')
        else:
            utils.showNotification(utils.getString(30045))

    def findNextRun(self,now):
        """Update self.next_run from the cron expression and persist it."""
        progress_mode = int(utils.getSetting('progress_mode'))

        #find the cron expression and get the next run time
        cron_exp = self.parseSchedule()

        cron_ob = croniter(cron_exp,datetime.datetime.fromtimestamp(now))
        new_run_time = cron_ob.get_next(float)

        if(new_run_time != self.next_run):
            self.next_run = new_run_time
            utils.log("scheduler will run again on " + datetime.datetime.fromtimestamp(self.next_run).strftime('%m-%d-%Y %H:%M'))

            #write the next time to a file
            fh = xbmcvfs.File(self.next_run_path, 'w')
            fh.write(str(self.next_run))
            fh.close()

            #only show when not in silent mode
            if(progress_mode != 2):
                utils.showNotification(utils.getString(30081) + " " + datetime.datetime.fromtimestamp(self.next_run).strftime('%m-%d-%Y %H:%M'))

    def settingsChanged(self):
        """Callback from UpdateMonitor: track the enable flag and reschedule."""
        current_enabled = utils.getSetting("enable_scheduler")

        if(current_enabled == "true" and self.enabled == "false"):
            #scheduler was just turned on
            self.enabled = current_enabled
            self.setup()
        elif (current_enabled == "false" and self.enabled == "true"):
            #scheduler was turned off
            self.enabled = current_enabled

        if(self.enabled == "true"):
            #always recheck the next run time after an update
            self.findNextRun(time.time())

    def parseSchedule(self):
        """Translate the schedule settings into a cron expression string."""
        schedule_type = int(utils.getSetting("schedule_interval"))
        cron_exp = utils.getSetting("cron_schedule")

        hour_of_day = utils.getSetting("schedule_time")
        hour_of_day = int(hour_of_day[0:2])
        if(schedule_type == 0 or schedule_type == 1):
            #every day
            cron_exp = "0 " + str(hour_of_day) + " * * *"
        elif(schedule_type == 2):
            #once a week
            day_of_week = utils.getSetting("day_of_week")
            cron_exp = "0 " + str(hour_of_day) + " * * " + day_of_week
        elif(schedule_type == 3):
            #first day of month
            cron_exp = "0 " + str(hour_of_day) + " 1 * *"

        return cron_exp

    def _resumeCheck(self):
        """Return True if resume.txt exists and the user agrees to resume."""
        shouldContinue = False
        if(xbmcvfs.exists(xbmc.translatePath(utils.data_dir() + "resume.txt"))):
            rFile = xbmcvfs.File(xbmc.translatePath(utils.data_dir() + "resume.txt"),'r')
            self.restore_point = rFile.read()
            rFile.close()
            xbmcvfs.delete(xbmc.translatePath(utils.data_dir() + "resume.txt"))
            shouldContinue = xbmcgui.Dialog().yesno(utils.getString(30042),utils.getString(30043),utils.getString(30044))

        return shouldContinue
|
||||
|
||||
|
||||
class UpdateMonitor(xbmc.Monitor):
    """Kodi monitor that invokes a callback when addon settings change."""

    #callback supplied by the owner via the 'update_method' kwarg
    update_method = None

    def __init__(self,*args, **kwargs):
        xbmc.Monitor.__init__(self)
        self.update_method = kwargs['update_method']

    def onSettingsChanged(self):
        self.update_method()
|
||||
|
||||
# entry point: build the scheduler and block in its monitoring loop
BackupScheduler().start()
|
||||
import time
|
||||
from datetime import datetime
|
||||
import xbmc
|
||||
import xbmcvfs
|
||||
import xbmcgui
|
||||
import resources.lib.utils as utils
|
||||
from resources.lib.croniter import croniter
|
||||
from resources.lib.backup import XbmcBackup
|
||||
|
||||
UPGRADE_INT = 2 # to keep track of any upgrade notifications
|
||||
|
||||
|
||||
class BackupScheduler:
    """Drives scheduled backups for the xbmcbackup addon.

    Polls the clock in a loop, fires a backup when the cron-derived run
    time passes, and persists the next run time to next_run.txt so a
    missed run can be caught up after Kodi restarts.
    """

    # monitor for settings changes (UpdateMonitor below)
    monitor = None
    # mirrors the "enable_scheduler" setting ("true"/"false" strings)
    enabled = "false"
    # epoch timestamp of the next scheduled run
    next_run = 0
    # file where the next run time is persisted between sessions
    next_run_path = None
    # restore point name read from resume.txt, if a restore was interrupted
    restore_point = None

    def __init__(self):
        self.monitor = UpdateMonitor(update_method=self.settingsChanged)
        self.enabled = utils.getSetting("enable_scheduler")
        self.next_run_path = xbmc.translatePath(utils.data_dir()) + 'next_run.txt'

        if(self.enabled == "true"):

            # sleep for 2 minutes so Kodi can start and time can update correctly
            xbmc.Monitor().waitForAbort(120)

            nr = 0
            if(xbmcvfs.exists(self.next_run_path)):

                fh = xbmcvfs.File(self.next_run_path)
                try:
                    # check if we saved a run time from the last run
                    nr = float(fh.read())
                except ValueError:
                    nr = 0

                fh.close()

            # if we missed and the user wants to play catch-up
            if(0 < nr <= time.time() and utils.getSetting('schedule_miss') == 'true'):
                utils.log("scheduled backup was missed, doing it now...")
                progress_mode = int(utils.getSetting('progress_mode'))

                if(progress_mode == 0):
                    progress_mode = 1  # Kodi just started, don't block it with a foreground progress bar

                self.doScheduledBackup(progress_mode)

        self.setup()

    def setup(self):
        """Compute the next run time from the current schedule settings."""
        # scheduler was turned on, find next run time
        utils.log("scheduler enabled, finding next run time")
        self.findNextRun(time.time())

    def start(self):
        """Main loop: show upgrade notes, resume any interrupted restore, then wait for run times."""

        # display upgrade messages if they exist
        if(int(utils.getSetting('upgrade_notes')) < UPGRADE_INT):
            xbmcgui.Dialog().ok(utils.getString(30010), utils.getString(30132))
            utils.setSetting('upgrade_notes', str(UPGRADE_INT))

        # check if a backup should be resumed
        resumeRestore = self._resumeCheck()

        if(resumeRestore):
            restore = XbmcBackup()
            restore.selectRestore(self.restore_point)
            # skip the advanced settings check
            restore.skipAdvanced()
            restore.restore()

        while(not self.monitor.abortRequested()):

            if(self.enabled == "true"):
                # scheduler is still on
                now = time.time()

                if(self.next_run <= now):
                    progress_mode = int(utils.getSetting('progress_mode'))
                    self.doScheduledBackup(progress_mode)

                    # check if we should shut the computer down
                    if(utils.getSetting("cron_shutdown") == 'true'):
                        # wait 10 seconds to make sure all backup processes and files are completed
                        time.sleep(10)
                        xbmc.executebuiltin('ShutDown()')
                    else:
                        # find the next run time like normal
                        self.findNextRun(now)

            xbmc.sleep(500)

        # delete monitor to free up memory
        del self.monitor

    def doScheduledBackup(self, progress_mode):
        """Run one backup, honoring the progress display mode (2 = silent)."""
        if(progress_mode != 2):
            utils.showNotification(utils.getString(30053))

        backup = XbmcBackup()

        if(backup.remoteConfigured()):

            if(int(utils.getSetting('progress_mode')) in [0, 1]):
                backup.backup(True)
            else:
                backup.backup(False)

            # check if this is a "one-off"
            if(int(utils.getSetting("schedule_interval")) == 0):
                # disable the scheduler after this run
                self.enabled = "false"
                utils.setSetting('enable_scheduler', 'false')
        else:
            utils.showNotification(utils.getString(30045))

    def findNextRun(self, now):
        """Update self.next_run from the cron expression and persist it."""
        progress_mode = int(utils.getSetting('progress_mode'))

        # find the cron expression and get the next run time
        cron_exp = self.parseSchedule()

        cron_ob = croniter(cron_exp, datetime.fromtimestamp(now))
        new_run_time = cron_ob.get_next(float)

        if(new_run_time != self.next_run):
            self.next_run = new_run_time
            utils.log("scheduler will run again on " + utils.getRegionalTimestamp(datetime.fromtimestamp(self.next_run), ['dateshort', 'time']))

            # write the next time to a file
            fh = xbmcvfs.File(self.next_run_path, 'w')
            fh.write(str(self.next_run))
            fh.close()

            # only show when not in silent mode
            if(progress_mode != 2):
                utils.showNotification(utils.getString(30081) + " " + utils.getRegionalTimestamp(datetime.fromtimestamp(self.next_run), ['dateshort', 'time']))

    def settingsChanged(self):
        """Callback from UpdateMonitor: track the enable flag and reschedule."""
        current_enabled = utils.getSetting("enable_scheduler")

        if(current_enabled == "true" and self.enabled == "false"):
            # scheduler was just turned on
            self.enabled = current_enabled
            self.setup()
        elif (current_enabled == "false" and self.enabled == "true"):
            # scheduler was turned off
            self.enabled = current_enabled

        if(self.enabled == "true"):
            # always recheck the next run time after an update
            self.findNextRun(time.time())

    def parseSchedule(self):
        """Translate the schedule settings into a cron expression string."""
        schedule_type = int(utils.getSetting("schedule_interval"))
        cron_exp = utils.getSetting("cron_schedule")

        hour_of_day = utils.getSetting("schedule_time")
        hour_of_day = int(hour_of_day[0:2])
        if(schedule_type == 0 or schedule_type == 1):
            # every day
            cron_exp = "0 " + str(hour_of_day) + " * * *"
        elif(schedule_type == 2):
            # once a week
            day_of_week = utils.getSetting("day_of_week")
            cron_exp = "0 " + str(hour_of_day) + " * * " + day_of_week
        elif(schedule_type == 3):
            # first day of month
            cron_exp = "0 " + str(hour_of_day) + " 1 * *"

        return cron_exp

    def _resumeCheck(self):
        """Return True if resume.txt exists and the user agrees to resume."""
        shouldContinue = False
        if(xbmcvfs.exists(xbmc.translatePath(utils.data_dir() + "resume.txt"))):
            rFile = xbmcvfs.File(xbmc.translatePath(utils.data_dir() + "resume.txt"), 'r')
            self.restore_point = rFile.read()
            rFile.close()
            xbmcvfs.delete(xbmc.translatePath(utils.data_dir() + "resume.txt"))
            shouldContinue = xbmcgui.Dialog().yesno(utils.getString(30042), utils.getString(30043), utils.getString(30044))

        return shouldContinue
|
||||
|
||||
|
||||
class UpdateMonitor(xbmc.Monitor):
    """Kodi monitor that routes settings-change events to a callback.

    The callback is supplied at construction time via the
    'update_method' keyword argument and invoked with no arguments
    whenever Kodi reports that addon settings changed.
    """

    update_method = None

    def __init__(self, *args, **kwargs):
        xbmc.Monitor.__init__(self)
        # stash the caller-supplied handler for later dispatch
        callback = kwargs['update_method']
        self.update_method = callback

    def onSettingsChanged(self):
        # delegate to the handler supplied at construction time
        self.update_method()
|
||||
|
||||
|
||||
# entry point: build the scheduler and block in its monitoring loop
BackupScheduler().start()
|
||||
|
||||