Merge branch 'master' into krypton

Rob Weber 2017-12-29 13:24:45 -06:00
commit d9d6c1ed42
6 changed files with 64 additions and 29 deletions

View File

@@ -1,6 +1,6 @@
 <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
 <addon id="script.xbmcbackup"
-       name="Backup" version="1.1.0" provider-name="robweber">
+       name="Backup" version="1.1.2" provider-name="robweber">
   <requires>
     <!-- jarvis -->
     <import addon="xbmc.python" version="2.25.0"/>

View File

@@ -1,3 +1,7 @@
+Version 1.1.2
+
+added fixes to the Dropbox lib for python 2.6
+
 Version 1.1.1
 
 fixed error on authorizers (missing secret/key)

View File

@@ -511,5 +511,9 @@ def _params_to_urlencoded(params):
         else:
             return str(o).encode('utf-8')
 
-    utf8_params = {encode(k): encode(v) for k, v in six.iteritems(params)}
+    #fix for python 2.6
+    utf8_params = {}
+    for k,v in six.iteritems(params):
+        utf8_params[encode(k)] = encode(v)
+
     return url_encode(utf8_params)
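
The loop rewrite above works around the fact that dict comprehension syntax ({key: value for ...}) was only added in Python 2.7; on Python 2.6 it raises a SyntaxError at import time. A minimal illustration of the two forms (hypothetical example, not code from the add-on):

    # Python 2.7+ only: dict comprehension, a SyntaxError on Python 2.6
    squares = {n: n * n for n in range(5)}

    # Python 2.6-compatible equivalent, the pattern applied throughout this commit
    squares = {}
    for n in range(5):
        squares[n] = n * n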

View File

@@ -237,11 +237,12 @@ class StoneToPythonPrimitiveSerializer(StoneSerializerBase):
     def encode_map(self, validator, value):
         validated_value = validator.validate(value)
 
-        return {
-            self.encode_sub(validator.key_validator, key):
-                self.encode_sub(validator.value_validator, value) for
-            key, value in validated_value.items()
-        }
+        #fix for python 2.6
+        result = {}
+        for key, value in validated_value.items():
+            result[self.encode_sub(validator.key_validator,key)] = self.encode_sub(validator.value_validator, value)
+
+        return result
 
     def encode_nullable(self, validator, value):
         if value is None:
@@ -830,11 +831,12 @@ def _decode_list(
     if not isinstance(obj, list):
         raise bv.ValidationError(
             'expected list, got %s' % bv.generic_type_name(obj))
-    return [
-        _json_compat_obj_decode_helper(
-            data_type.item_validator, item, alias_validators, strict,
-            old_style, for_msgpack)
-        for item in obj]
+
+    result = []
+    for item in obj:
+        result.append(_json_compat_obj_decode_helper(data_type.item_validator, item, alias_validators, strict,old_style, for_msgpack))
+
+    return result
 
 
 def _decode_map(
@@ -846,15 +848,12 @@ def _decode_map(
     if not isinstance(obj, dict):
         raise bv.ValidationError(
             'expected dict, got %s' % bv.generic_type_name(obj))
-    return {
-        _json_compat_obj_decode_helper(
-            data_type.key_validator, key, alias_validators, strict,
-            old_style, for_msgpack):
-            _json_compat_obj_decode_helper(
-                data_type.value_validator, value, alias_validators, strict,
-                old_style, for_msgpack)
-        for key, value in obj.items()
-    }
+
+    result = {}
+    for key, value in obj.items():
+        result[_json_compat_obj_decode_helper(data_type.key_validator, key, alias_validators, strict,old_style, for_msgpack)] = _json_compat_obj_decode_helper(data_type.value_validator, value, alias_validators, strict,old_style, for_msgpack)
+
+    return result
 
 
 def _decode_nullable(

View File

@@ -422,10 +422,13 @@ class Map(Composite):
     def validate(self, val):
         if not isinstance(val, dict):
             raise ValidationError('%r is not a valid dict' % val)
-        return {
-            self.key_validator.validate(key):
-            self.value_validator.validate(value) for key, value in val.items()
-        }
+
+        #fix for python 2.6
+        result = {}
+        for key, value in val.items():
+            result[self.key_validator.validate(key)] = self.value_validator.validate(value)
+
+        return result
 
 
 class Struct(Composite):

View File

@@ -6,9 +6,10 @@ import xbmcgui
 import zipfile
 import zlib
 import os
+import os.path
 import sys
 import dropbox
-from dropbox.files import WriteMode
+from dropbox.files import WriteMode,CommitInfo,UploadSessionCursor
 from pydrive.drive import GoogleDrive
 from authorizers import DropboxAuthorizer,GoogleDriveAuthorizer
@@ -118,6 +119,7 @@ class ZipFileSystem(Vfs):
         return self.zip.infolist()
 
 class DropboxFileSystem(Vfs):
+    MAX_CHUNK = 50 * 1000 * 1000   #dropbox uses 150, reduced to 50 for small mem systems
     client = None
     APP_KEY = ''
     APP_SECRET = ''
@@ -188,7 +190,7 @@ class DropboxFileSystem(Vfs):
     def exists(self,aFile):
         aFile = self._fix_slashes(aFile)
-
+        utils.log('check exists:' + aFile)
         if(self.client != None):
             #can't list root metadata
             if(aFile == ''):
@@ -207,11 +209,34 @@ class DropboxFileSystem(Vfs):
         dest = self._fix_slashes(dest)
 
         if(self.client != None):
+            #open the file and get its size
             f = open(source,'rb')
+            f_size = os.path.getsize(source)
 
             try:
-                response = self.client.files_upload(f.read(),dest,mode=WriteMode('overwrite'))
+                if(f_size < self.MAX_CHUNK):
+                    #use the regular upload
+                    response = self.client.files_upload(f.read(),dest,mode=WriteMode('overwrite'))
+                else:
+                    #start the upload session
+                    upload_session = self.client.files_upload_session_start(f.read(self.MAX_CHUNK))
+                    upload_cursor = UploadSessionCursor(upload_session.session_id,f.tell())
+
+                    while(f.tell() < f_size):
+                        #check if we should finish the upload
+                        if((f_size - f.tell()) <= self.MAX_CHUNK):
+                            #upload and close
+                            self.client.files_upload_session_finish(f.read(self.MAX_CHUNK),upload_cursor,CommitInfo(dest,mode=WriteMode('overwrite')))
+                        else:
+                            #upload a part and store the offset
+                            self.client.files_upload_session_append_v2(f.read(self.MAX_CHUNK),upload_cursor)
+                            upload_cursor.offset = f.tell()
+
+                #if no errors we're good!
                 return True
-            except:
+            except Exception as anError:
+                utils.log(str(anError))
+
                 #if we have an exception retry
                 if(retry):
                     return self.put(source,dest,False)
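
For reference, a condensed sketch of the chunked upload flow added above, using the same Dropbox SDK v2 calls (files_upload, files_upload_session_start, files_upload_session_append_v2, files_upload_session_finish); the standalone helper name chunked_put and the bare client argument are illustrative only, not part of the add-on:

    import os
    from dropbox.files import WriteMode, CommitInfo, UploadSessionCursor

    CHUNK = 50 * 1000 * 1000   # mirrors DropboxFileSystem.MAX_CHUNK

    def chunked_put(client, source, dest):
        f_size = os.path.getsize(source)
        f = open(source, 'rb')

        if f_size < CHUNK:
            # small files go up in a single files_upload request
            return client.files_upload(f.read(), dest, mode=WriteMode('overwrite'))

        # large files: open an upload session and append one chunk at a time
        session = client.files_upload_session_start(f.read(CHUNK))
        cursor = UploadSessionCursor(session.session_id, f.tell())
        commit = CommitInfo(dest, mode=WriteMode('overwrite'))

        while f.tell() < f_size:
            if (f_size - f.tell()) <= CHUNK:
                # last chunk: append the remainder and commit the file in one call
                return client.files_upload_session_finish(f.read(CHUNK), cursor, commit)
            client.files_upload_session_append_v2(f.read(CHUNK), cursor)
            cursor.offset = f.tell()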