Merge branch 'master' into krypton

Rob Weber 2017-12-29 13:24:45 -06:00
commit d9d6c1ed42
6 changed files with 64 additions and 29 deletions

View File

@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<addon id="script.xbmcbackup"
       name="Backup" version="1.1.0" provider-name="robweber">
       name="Backup" version="1.1.2" provider-name="robweber">
    <requires>
        <!-- jarvis -->
        <import addon="xbmc.python" version="2.25.0"/>

View File

@@ -1,3 +1,7 @@
Version 1.1.2
added fixes to the Dropbox lib for python 2.6
Version 1.1.1
fixed error on authorizers (missing secret/key)

View File

@@ -511,5 +511,9 @@ def _params_to_urlencoded(params):
        else:
            return str(o).encode('utf-8')
    utf8_params = {encode(k): encode(v) for k, v in six.iteritems(params)}
    #fix for python 2.6
    utf8_params = {}
    for k,v in six.iteritems(params):
        utf8_params[encode(k)] = encode(v)
    return url_encode(utf8_params)
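
The change above is the same fix that repeats in the hunks below: dict comprehension syntax ({key: value for ...}) was only added in Python 2.7, so on a Python 2.6 interpreter it fails with a SyntaxError before any code runs, and each comprehension is unrolled into an explicit loop. A minimal sketch of the equivalence, separate from the Dropbox code and using a made-up example dict:

# Dict comprehension: valid on Python 2.7+, a SyntaxError on Python 2.6
# squares = {n: n * n for n in range(5)}

# Equivalent loop form that Python 2.6 accepts
squares = {}
for n in range(5):
    squares[n] = n * n
print(squares)  # {0: 0, 1: 1, 2: 4, 3: 9, 4: 16}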

View File

@@ -237,11 +237,12 @@ class StoneToPythonPrimitiveSerializer(StoneSerializerBase):
    def encode_map(self, validator, value):
        validated_value = validator.validate(value)
        return {
            self.encode_sub(validator.key_validator, key):
                self.encode_sub(validator.value_validator, value) for
            key, value in validated_value.items()
        }
        #fix for python 2.6
        result = {}
        for key, value in validated_value.items():
            result[self.encode_sub(validator.key_validator,key)] = self.encode_sub(validator.value_validator, value)
        return result

    def encode_nullable(self, validator, value):
        if value is None:
@@ -830,11 +831,12 @@ def _decode_list(
    if not isinstance(obj, list):
        raise bv.ValidationError(
            'expected list, got %s' % bv.generic_type_name(obj))
    return [
        _json_compat_obj_decode_helper(
            data_type.item_validator, item, alias_validators, strict,
            old_style, for_msgpack)
        for item in obj]
    result = []
    for item in obj:
        result.append(_json_compat_obj_decode_helper(data_type.item_validator, item, alias_validators, strict,old_style, for_msgpack))
    return result
def _decode_map(
@@ -846,15 +848,12 @@ def _decode_map(
    if not isinstance(obj, dict):
        raise bv.ValidationError(
            'expected dict, got %s' % bv.generic_type_name(obj))
    return {
        _json_compat_obj_decode_helper(
            data_type.key_validator, key, alias_validators, strict,
            old_style, for_msgpack):
        _json_compat_obj_decode_helper(
            data_type.value_validator, value, alias_validators, strict,
            old_style, for_msgpack)
        for key, value in obj.items()
    }
    result = {}
    for key, value in obj.items():
        result[_json_compat_obj_decode_helper(data_type.key_validator, key, alias_validators, strict,old_style, for_msgpack)] = _json_compat_obj_decode_helper(data_type.value_validator, value, alias_validators, strict,old_style, for_msgpack)
    return result
def _decode_nullable(

View File

@@ -422,10 +422,13 @@ class Map(Composite):
    def validate(self, val):
        if not isinstance(val, dict):
            raise ValidationError('%r is not a valid dict' % val)
        return {
            self.key_validator.validate(key):
                self.value_validator.validate(value) for key, value in val.items()
        }
        #fix for python 2.6
        result = {}
        for key, value in val.items():
            result[self.key_validator.validate(key)] = self.value_validator.validate(value)
        return result
class Struct(Composite):

View File

@@ -6,9 +6,10 @@ import xbmcgui
import zipfile
import zlib
import os
import os.path
import sys
import dropbox
from dropbox.files import WriteMode
from dropbox.files import WriteMode,CommitInfo,UploadSessionCursor
from pydrive.drive import GoogleDrive
from authorizers import DropboxAuthorizer,GoogleDriveAuthorizer
@@ -118,6 +119,7 @@ class ZipFileSystem(Vfs):
        return self.zip.infolist()

class DropboxFileSystem(Vfs):
    MAX_CHUNK = 50 * 1000 * 1000 #dropbox uses 150, reduced to 50 for small mem systems
    client = None
    APP_KEY = ''
    APP_SECRET = ''
@@ -188,7 +190,7 @@ class DropboxFileSystem(Vfs):
    def exists(self,aFile):
        aFile = self._fix_slashes(aFile)
        utils.log('check exists:' + aFile)
        if(self.client != None):
            #can't list root metadata
            if(aFile == ''):
@@ -207,11 +209,34 @@
        dest = self._fix_slashes(dest)
        if(self.client != None):
            #open the file and get its size
            f = open(source,'rb')
            f_size = os.path.getsize(source)
            try:
                if(f_size < self.MAX_CHUNK):
                    #use the regular upload
                    response = self.client.files_upload(f.read(),dest,mode=WriteMode('overwrite'))
                else:
                    #start the upload session
                    upload_session = self.client.files_upload_session_start(f.read(self.MAX_CHUNK))
                    upload_cursor = UploadSessionCursor(upload_session.session_id,f.tell())
                    while(f.tell() < f_size):
                        #check if we should finish the upload
                        if((f_size - f.tell()) <= self.MAX_CHUNK):
                            #upload and close
                            self.client.files_upload_session_finish(f.read(self.MAX_CHUNK),upload_cursor,CommitInfo(dest,mode=WriteMode('overwrite')))
                        else:
                            #upload a part and store the offset
                            self.client.files_upload_session_append_v2(f.read(self.MAX_CHUNK),upload_cursor)
                            upload_cursor.offset = f.tell()
                #if no errors we're good!
                return True
            except:
            except Exception as anError:
                utils.log(str(anError))
                #if we have an exception retry
                if(retry):
                    return self.put(source,dest,False)
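
For reference, the chunked transfer added here follows the Dropbox Python SDK v2 upload-session pattern: files_upload_session_start opens a session, files_upload_session_append_v2 adds chunks while the cursor offset tracks how much has been sent, and files_upload_session_finish commits the file with a CommitInfo. A minimal standalone sketch of that flow, assuming an already-authorized dropbox.Dropbox client and a hypothetical local file backup.zip (in the addon the client comes from DropboxAuthorizer, and put() additionally retries once by calling itself with retry=False):

import os
import dropbox
from dropbox.files import WriteMode, CommitInfo, UploadSessionCursor

CHUNK = 50 * 1000 * 1000  # mirrors MAX_CHUNK above (Dropbox uses 150, reduced for small-memory systems)

dbx = dropbox.Dropbox('ACCESS_TOKEN')  # hypothetical token; the addon obtains its client via DropboxAuthorizer
f_size = os.path.getsize('backup.zip')

with open('backup.zip', 'rb') as f:
    if f_size < CHUNK:
        # small file - a single upload call is enough
        dbx.files_upload(f.read(), '/backup.zip', mode=WriteMode('overwrite'))
    else:
        # large file - start a session, append chunks, then finish with a commit
        session = dbx.files_upload_session_start(f.read(CHUNK))
        cursor = UploadSessionCursor(session.session_id, f.tell())
        while f.tell() < f_size:
            if (f_size - f.tell()) <= CHUNK:
                # last chunk - finish the session and commit the file
                dbx.files_upload_session_finish(f.read(CHUNK), cursor,
                                                CommitInfo('/backup.zip', mode=WriteMode('overwrite')))
            else:
                dbx.files_upload_session_append_v2(f.read(CHUNK), cursor)
                cursor.offset = f.tell()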