diff --git a/addon.xml b/addon.xml
index 4784cdd..943a165 100644
--- a/addon.xml
+++ b/addon.xml
@@ -8,6 +8,7 @@
+
executable
diff --git a/resources/lib/dropbox/.gitignore b/resources/lib/dropbox/.gitignore
deleted file mode 100644
index 1670801..0000000
--- a/resources/lib/dropbox/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-
-*.pyc
\ No newline at end of file
diff --git a/resources/lib/dropbox/CHANGELOG.mdown b/resources/lib/dropbox/CHANGELOG.mdown
new file mode 100644
index 0000000..4377589
--- /dev/null
+++ b/resources/lib/dropbox/CHANGELOG.mdown
@@ -0,0 +1,103 @@
+Changes to the Python SDK
+=========================
+
+Changes in 2.2.0 (2014-09-16)
+-----------------------------
+- Support for Datastore sharing
+- `DropboxClient.get_file()`: Add support for `start`, `length` parameters.
+- Add support for the /longpoll_delta API
+- Add direct support for /chunked_upload and /commit_chunked_upload APIs
+- Fix error handling in ChunkedUploader
+- Make tests compatible with py.test
+- Various docstring updates (including OAuth 1->2 example)
+- Fix encoding issues in example/cli_client.py
+- Fix unicode handling for URL parameters
+
+Changes in 2.1.0 (2014-06-03)
+-----------------------------
+- The datastore API now includes size accessors to allow you to check the size of your data and avoid the syncing limits.
+- The datastore Date() constructor now truncates timestamps to the supported resolution, i.e. milliseconds.
+- The datastore `await*()` calls now use POST instead of GET.
+- Datastore IDs, table IDs, record IDs and field names may be 64 characters (increased from 32 characters). Before taking advantage of the new size limits, ensure your application is fully upgraded to SDKs with this support.
+- Option to `include_media_info` has been added to `DropboxClient.metadata()` and `DropboxClient.delta()`.
+
+Changes in 2.0.0 (2013-12-19)
+-----------------------------
+- Add the Datastore API.
+- Upgrade OAuth 1 tokens with `DropboxClient.create_oauth2_access_token` and `DropboxClient.disable_oauth2_access_token`.
+- `DropboxClient.thumbnail()`: Fix `size` identifiers.
+- `DropboxClient.delta()`: Add support for `path_prefix` parameter.
+- Connection reuse/pooling using urllib3.
+- Updated SSL settings.
+- Various documentation reformatting.
+
+Changes in 1.6 (2013-07-07)
+----------------
+- Added OAuth 2 support (use DropboxOAuth2Flow). OAuth 1 still works.
+- Added a Flask-based example.
+- Fixed many minor bugs.
+
+Changes in 1.5.1 (2012-8-20)
+-----------------
+- Fixed packaging.
+- Got rid of debug prints.
+
+Changes in 1.5 (2012-8-15)
+--------------------------
+- Support for uploading large files via /chunked_upload
+
+Changes in 1.4.1 (2012-5-16)
+----------------------------
+- Increase metadata() file list limit to 25,000 (used to be 10,000).
+- Removed debug prints from search() call. Oops.
+- Cleanup to make more compatible with Python 3.
+
+Changes in 1.4 (2012-3-26)
+--------------------------
+- Add support for the /delta API.
+- Add support for the "copy ref" API.
+
+Changes in 1.3 (2012-1-11)
+--------------------------
+- Adds a method to the SDK that returns the file metadata when downloading a
+ file or its thumbnail.
+- Validate server's SSL certificate against CAs in included certificate file.
+
+Changes in 1.2 (2011-10-17)
+---------------------------
+- Fixes for bugs found during beta period
+- Improved README to include steps to remove the v0 SDK if upgrading
+
+Changes in 1.1 (2011-8-16)
+--------------------------
+- Fixed version number
+- Updated CHANGELOG to be more detailed
+
+Changes in 1.0 (2011-7-11)
+--------------------------
+- Backwards compatibility broken
+ - Completely removed 'callback' and 'status\_in\_response' parameters
+ - Change 'sandbox' references to 'app\_folder'
+ - Refactored auth.py and renamed it session.py
+- Updated SDK to Dropbox API Version 1, supporting all calls
+ - Added 'rev' parameter to metadata and get\_file
+ - Added 'parent\_rev' parameter to put\_file
+ - Added search, share, media, revisions, and restore
+ - put\_file uses /files\_put instead of multipart POST and now takes a full path
+ - Removed methods for calls that were removed from v1 of the REST API
+- Removed 'root' input parameter for all calls
+- Changed return format for calls
+ - On error (non-200 response), an exception is raised
+ - On success, the JSON is parsed and a Python dict or list is returned
+- Updated examples
+ - Renamed 'bin' directory to 'example'
+ - Heavily tweaked the CLI example
+ - Added a web app example
+- Removed reliance on config files
+- Assorted bugfixes and improvements
+ - Buffers large file uploads better in put\_file
+ - Improved path normalization
+- All calls are now made over SSL
+- Fully documented code for Pydoc generation
+- Added a CHANGELOG
+- Changed the distribution name from 'dropbox-client' to 'dropbox-python-sdk'
diff --git a/resources/lib/dropbox/client.py b/resources/lib/dropbox/client.py
index ab3ef23..1f93aa8 100644
--- a/resources/lib/dropbox/client.py
+++ b/resources/lib/dropbox/client.py
@@ -1,55 +1,27 @@
-"""
-The main client API you'll be working with most often. You'll need to
-configure a dropbox.session.DropboxSession for this to work, but otherwise
-it's fairly self-explanatory.
-
-Before you can begin making requests to the dropbox API, you have to
-authenticate your application with Dropbox and get the user to
-authorize your application to use dropbox on his behalf. A typical
-progam, from the initial imports to making a simple request (``account_info``),
-looks like this:
-
-.. code-block:: python
-
- # Include the Dropbox SDK libraries
- from dropbox import client, rest, session
-
- # Get your app key and secret from the Dropbox developer website
- APP_KEY = 'INSERT_APP_KEY_HERE'
- APP_SECRET = 'INSERT_SECRET_HERE'
-
- # ACCESS_TYPE should be 'dropbox' or 'app_folder' as configured for your app
- ACCESS_TYPE = 'INSERT_ACCESS_TYPE_HERE'
-
- sess = session.DropboxSession(APP_KEY, APP_SECRET, ACCESS_TYPE)
-
- request_token = sess.obtain_request_token()
-
- url = sess.build_authorize_url(request_token)
-
- # Make the user sign in and authorize this token
- print "url:", url
- print "Please visit this website and press the 'Allow' button, then hit 'Enter' here."
- raw_input()
-
- # This will fail if the user didn't visit the above URL and hit 'Allow'
- access_token = sess.obtain_access_token(request_token)
-
- client = client.DropboxClient(sess)
- print "linked account:", client.account_info()
-
-"""
from __future__ import absolute_import
+import base64
import re
import os
-from StringIO import StringIO
+import sys
+import urllib
+
+PY3 = sys.version_info[0] == 3
+
+if PY3:
+ from io import StringIO
+ basestring = str
+else:
+ from StringIO import StringIO
+
try:
import json
except ImportError:
import simplejson as json
-from .rest import ErrorResponse, RESTClient
+from .rest import ErrorResponse, RESTClient, params_to_urlencoded
+from .session import BaseSession, DropboxSession, DropboxOAuth2Session
+
def format_path(path):
"""Normalize path for use with the Dropbox API.
@@ -68,54 +40,91 @@ def format_path(path):
else:
return '/' + path.strip('/')
+
class DropboxClient(object):
"""
- The main access point of doing REST calls on Dropbox. You should
- first create and configure a dropbox.session.DropboxSession object,
- and then pass it into DropboxClient's constructor. DropboxClient
- then does all the work of properly calling each API method
- with the correct OAuth authentication.
+ This class lets you make Dropbox API calls. You'll need to obtain an
+ OAuth 2 access token first. You can get an access token using either
+ :class:`DropboxOAuth2Flow` or :class:`DropboxOAuth2FlowNoRedirect`.
- You should be aware that any of these methods can raise a
- rest.ErrorResponse exception if the server returns a non-200
- or invalid HTTP response. Note that a 401 return status at any
- point indicates that the user needs to be reauthenticated.
+ All of the API call methods can raise a :class:`dropbox.rest.ErrorResponse` exception if
+ the server returns a non-200 or invalid HTTP response. Note that a 401
+ return status at any point indicates that the access token you're using
+ is no longer valid and the user must be put through the OAuth 2
+ authorization flow again.
"""
- def __init__(self, session, rest_client=RESTClient):
- """Initialize the DropboxClient object.
+ def __init__(self, oauth2_access_token, locale=None, rest_client=None):
+ """Construct a ``DropboxClient`` instance.
- Args:
- ``session``: A dropbox.session.DropboxSession object to use for making requests.
- ``rest_client``: A dropbox.rest.RESTClient-like object to use for making requests. [optional]
+ Parameters
+ oauth2_access_token
+ An OAuth 2 access token (string). For backwards compatibility this may
+ also be a DropboxSession object (see :meth:`create_oauth2_access_token()`).
+ locale
+ The locale of the user of your application. For example "en" or "en_US".
+ Some API calls return localized data and error messages; this setting
+ tells the server which locale to use. By default, the server uses "en_US".
+ rest_client
+ Optional :class:`dropbox.rest.RESTClient`-like object to use for making
+ requests.
"""
- self.session = session
+ if rest_client is None: rest_client = RESTClient
+ if isinstance(oauth2_access_token, basestring):
+ if not _OAUTH2_ACCESS_TOKEN_PATTERN.match(oauth2_access_token):
+ raise ValueError("invalid format for oauth2_access_token: %r"
+ % (oauth2_access_token,))
+ self.session = DropboxOAuth2Session(oauth2_access_token, locale)
+ elif isinstance(oauth2_access_token, DropboxSession):
+ # Backwards compatibility with OAuth 1
+ if locale is not None:
+ raise ValueError("The 'locale' parameter to DropboxClient is only useful "
+ "when also passing in an OAuth 2 access token")
+ self.session = oauth2_access_token
+ else:
+ raise ValueError("'oauth2_access_token' must either be a string or a DropboxSession")
self.rest_client = rest_client
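For orientation, constructing a client with the new OAuth 2 style boils down to something like the following sketch (the token value and locale are placeholder assumptions)::

    from dropbox.client import DropboxClient

    access_token = 'OAUTH2_ACCESS_TOKEN_HERE'   # placeholder; obtain via DropboxOAuth2Flow
    client = DropboxClient(access_token, locale='en_US')
    print(client.account_info())                # simple smoke test of the linked account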
- def request(self, target, params=None, method='POST', content_server=False):
- """Make an HTTP request to a target API method.
+ def request(self, target, params=None, method='POST',
+ content_server=False, notification_server=False):
+ """
+ An internal method that builds the url, headers, and params for a Dropbox API request.
+ It is exposed if you need to make API calls not implemented in this library or if you
+ need to debug requests.
- This is an internal method used to properly craft the url, headers, and
- params for a Dropbox API request. It is exposed for you in case you
- need craft other API calls not in this library or if you want to debug it.
+ Parameters
+ target
+ The target URL with leading slash (e.g. '/files').
+ params
+ A dictionary of parameters to add to the request.
+ method
+ An HTTP method (e.g. 'GET' or 'POST').
+ content_server
+ A boolean indicating whether the request is to the
+ API content server, for example to fetch the contents of a file
+ rather than its metadata.
+ notification_server
+ A boolean indicating whether the request is to the API notification
+ server, for example for longpolling.
- Args:
- - ``target``: The target URL with leading slash (e.g. '/files')
- - ``params``: A dictionary of parameters to add to the request
- - ``method``: An HTTP method (e.g. 'GET' or 'POST')
- - ``content_server``: A boolean indicating whether the request is to the
- API content server, for example to fetch the contents of a file
- rather than its metadata.
-
- Returns:
- - A tuple of (url, params, headers) that should be used to make the request.
- OAuth authentication information will be added as needed within these fields.
+ Returns
+ A tuple of ``(url, params, headers)`` that should be used to make the request.
+ OAuth will be added as needed within these fields.
"""
assert method in ['GET','POST', 'PUT'], "Only 'GET', 'POST', and 'PUT' are allowed."
+ assert not (content_server and notification_server), \
+ "Cannot construct request simultaneously for content and notification servers."
+
if params is None:
params = {}
- host = self.session.API_CONTENT_HOST if content_server else self.session.API_HOST
+ if content_server:
+ host = self.session.API_CONTENT_HOST
+ elif notification_server:
+ host = self.session.API_NOTIFICATION_HOST
+ else:
+ host = self.session.API_HOST
+
base = self.session.build_url(host, target)
headers, params = self.session.build_access_headers(method, base, params)
@@ -126,31 +135,65 @@ class DropboxClient(object):
return url, params, headers
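As a rough illustration only, a caller could combine ``request()`` with the configured ``rest_client`` to hit an endpoint by hand; ``/account/info`` is used here purely because it is known to exist, so this is equivalent in effect to ``account_info()``::

    url, params, headers = client.request('/account/info', method='GET')
    info = client.rest_client.GET(url, headers)   # dict with the account details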
-
def account_info(self):
"""Retrieve information about the user's account.
- Returns:
- - A dictionary containing account information.
+ Returns
+ A dictionary containing account information.
For a detailed description of what this call returns, visit:
- https://www.dropbox.com/developers/reference/api#account-info
+ https://www.dropbox.com/developers/core/docs#account-info
"""
url, params, headers = self.request("/account/info", method='GET')
return self.rest_client.GET(url, headers)
+ def disable_access_token(self):
+ """
+ Disable the access token that this ``DropboxClient`` is using. If this call
+ succeeds, further API calls using this object will fail.
+ """
+ url, params, headers = self.request("/disable_access_token", method='POST')
+
+ return self.rest_client.POST(url, params, headers)
+
+ def create_oauth2_access_token(self):
+ """
+ If this ``DropboxClient`` was created with an OAuth 1 access token, this method
+ can be used to create an equivalent OAuth 2 access token. This can be used to
+ upgrade your app's existing access tokens from OAuth 1 to OAuth 2.
+
+ Example::
+
+ from dropbox.client import DropboxClient
+ from dropbox.session import DropboxSession
+ session = DropboxSession(APP_KEY, APP_SECRET)
+ access_key, access_secret = '123abc', 'xyz456' # Previously obtained OAuth 1 credentials
+ session.set_token(access_key, access_secret)
+ client = DropboxClient(session)
+ token = client.create_oauth2_access_token()
+ # Optionally, create a new client using the new token
+ new_client = DropboxClient(token)
+ """
+ if not isinstance(self.session, DropboxSession):
+ raise ValueError("This call requires a DropboxClient that is configured with an "
+ "OAuth 1 access token.")
+ url, params, headers = self.request("/oauth2/token_from_oauth1", method='POST')
+
+ r = self.rest_client.POST(url, params, headers)
+ return r['access_token']
+
def get_chunked_uploader(self, file_obj, length):
- """Creates a ChunkedUploader to upload the given file-like object.
+ """Creates a :class:`ChunkedUploader` to upload the given file-like object.
- Args:
- - ``file_obj``: The file-like object which is the source of the data
+ Parameters
+ file_obj
+ The file-like object which is the source of the data
being uploaded.
- - ``length``: The number of bytes to upload.
+ length
+ The number of bytes to upload.
- The expected use of this function is as follows:
-
- .. code-block:: python
+ The expected use of this function is as follows::
bigFile = open("data.txt", 'rb')
@@ -161,102 +204,40 @@ class DropboxClient(object):
upload = uploader.upload_chunked()
except rest.ErrorResponse, e:
# perform error handling and retry logic
+ uploader.finish('/bigFile.txt')
The SDK leaves the error handling and retry logic to the developer
to implement, as the exact requirements will depend on the application
involved.
"""
- return DropboxClient.ChunkedUploader(self, file_obj, length)
+ return ChunkedUploader(self, file_obj, length)
-
-
-
- class ChunkedUploader(object):
- """Contains the logic around a chunked upload, which uploads a
- large file to Dropbox via the /chunked_upload endpoint
- """
- def __init__(self, client, file_obj, length):
- self.client = client
- self.offset = 0
- self.upload_id = None
-
- self.last_block = None
- self.file_obj = file_obj
- self.target_length = length
-
-
- def upload_chunked(self, chunk_size = 4 * 1024 * 1024):
- """Uploads data from this ChunkedUploader's file_obj in chunks, until
- an error occurs. Throws an exception when an error occurs, and can
- be called again to resume the upload.
-
- Args:
- - ``chunk_size``: The number of bytes to put in each chunk. [default 4 MB]
- """
-
- while self.offset < self.target_length:
- next_chunk_size = min(chunk_size, self.target_length - self.offset)
- if self.last_block == None:
- self.last_block = self.file_obj.read(next_chunk_size)
-
- try:
- (self.offset, self.upload_id) = self.client.upload_chunk(StringIO(self.last_block), next_chunk_size, self.offset, self.upload_id)
- self.last_block = None
- except ErrorResponse, e:
- reply = e.body
- if "offset" in reply and reply['offset'] != 0:
- if reply['offset'] > self.offset:
- self.last_block = None
- self.offset = reply['offset']
-
- def finish(self, path, overwrite=False, parent_rev=None):
- """Commits the bytes uploaded by this ChunkedUploader to a file
- in the users dropbox.
-
- Args:
- - ``path``: The full path of the file in the Dropbox.
- - ``overwrite``: Whether to overwrite an existing file at the given path. [default False]
- If overwrite is False and a file already exists there, Dropbox
- will rename the upload to make sure it doesn't overwrite anything.
- You need to check the metadata returned for the new name.
- This field should only be True if your intent is to potentially
- clobber changes to a file that you don't know about.
- - ``parent_rev``: The rev field from the 'parent' of this upload. [optional]
- If your intent is to update the file at the given path, you should
- pass the parent_rev parameter set to the rev value from the most recent
- metadata you have of the existing file at that path. If the server
- has a more recent version of the file at the specified path, it will
- automatically rename your uploaded file, spinning off a conflict.
- Using this parameter effectively causes the overwrite parameter to be ignored.
- The file will always be overwritten if you send the most-recent parent_rev,
- and it will never be overwritten if you send a less-recent one.
- """
-
- path = "/commit_chunked_upload/%s%s" % (self.client.session.root, format_path(path))
-
- params = dict(
- overwrite = bool(overwrite),
- upload_id = self.upload_id
- )
-
- if parent_rev is not None:
- params['parent_rev'] = parent_rev
-
- url, params, headers = self.client.request(path, params, content_server=True)
-
- return self.client.rest_client.POST(url, params, headers)
-
- def upload_chunk(self, file_obj, length, offset=0, upload_id=None):
- """Uploads a single chunk of data from the given file like object. The majority of users
- should use the ChunkedUploader object, which provides a simpler interface to the
+ def upload_chunk(self, file_obj, length=None, offset=0, upload_id=None):
+ """Uploads a single chunk of data from a string or file-like object. The majority of users
+ should use the :class:`ChunkedUploader` object, which provides a simpler interface to the
chunked_upload API endpoint.
- Args:
- - ``file_obj``: The source of the data to upload
- - ``length``: The number of bytes to upload in one chunk.
+ Parameters
+ file_obj
+ The source of the chunk to upload; a file-like object or a string.
+ length
+ This argument is ignored but still present for backward compatibility reasons.
+ offset
+ The byte offset to which this source data corresponds in the original file.
+ upload_id
+ The upload identifier for which this chunk should be uploaded,
+ returned by a previous call, or None to start a new upload.
- Returns:
- - The reply from the server, as a dictionary
+ Returns
+ A dictionary containing the keys:
+
+ upload_id
+ A string used to identify the upload for subsequent calls to :meth:`upload_chunk()`
+ and :meth:`commit_chunked_upload()`.
+ offset
+ The offset at which the next upload should be applied.
+ expires
+ The time after which this partial upload is invalid.
"""
params = dict()
@@ -265,29 +246,72 @@ class DropboxClient(object):
params['upload_id'] = upload_id
params['offset'] = offset
- url, ignored_params, headers = self.request("/chunked_upload", params, method='PUT', content_server=True)
+ url, ignored_params, headers = self.request("/chunked_upload", params,
+ method='PUT', content_server=True)
try:
reply = self.rest_client.PUT(url, file_obj, headers)
return reply['offset'], reply['upload_id']
- except ErrorResponse, e:
+ except ErrorResponse as e:
raise e
+ def commit_chunked_upload(self, full_path, upload_id, overwrite=False, parent_rev=None):
+ """Commit the previously uploaded chunks for the given path.
+
+ Parameters
+ full_path
+ The full path to which the chunks are uploaded, *including the file name*.
+ If the destination folder does not yet exist, it will be created.
+ upload_id
+ The chunked upload identifier, previously returned from upload_chunk.
+ overwrite
+ Whether to overwrite an existing file at the given path. (Default ``False``.)
+ If overwrite is False and a file already exists there, Dropbox
+ will rename the upload to make sure it doesn't overwrite anything.
+ You need to check the metadata returned for the new name.
+ This field should only be True if your intent is to potentially
+ clobber changes to a file that you don't know about.
+ parent_rev
+ Optional rev field from the 'parent' of this upload.
+ If your intent is to update the file at the given path, you should
+ pass the parent_rev parameter set to the rev value from the most recent
+ metadata you have of the existing file at that path. If the server
+ has a more recent version of the file at the specified path, it will
+ automatically rename your uploaded file, spinning off a conflict.
+ Using this parameter effectively causes the overwrite parameter to be ignored.
+ The file will always be overwritten if you send the most recent parent_rev,
+ and it will never be overwritten if you send a less recent one.
+
+ Returns
+ A dictionary containing the metadata of the newly committed file.
+
+ For a detailed description of what this call returns, visit:
+ https://www.dropbox.com/developers/core/docs#commit-chunked-upload
+ """
+
+ params = {
+ 'upload_id': upload_id,
+ 'overwrite': overwrite,
+ }
+
+ if parent_rev is not None:
+ params['parent_rev'] = parent_rev
+
+ url, params, headers = self.request("/commit_chunked_upload/%s" % full_path,
+ params, content_server=True)
+
+ return self.rest_client.POST(url, params, headers)
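A minimal sketch of driving these two low-level calls by hand follows; most code should prefer ``get_chunked_uploader()``, and the local file name and the ``'auto'`` root prefix below are assumptions for illustration::

    offset, upload_id = 0, None
    with open('big.bin', 'rb') as f:              # hypothetical local file
        while True:
            chunk = f.read(4 * 1024 * 1024)       # 4 MB per chunk
            if not chunk:
                break
            offset, upload_id = client.upload_chunk(chunk, offset=offset,
                                                    upload_id=upload_id)
    # full_path includes the root ('auto', 'dropbox' or 'sandbox'); 'auto' is assumed here
    metadata = client.commit_chunked_upload('auto/big.bin', upload_id, overwrite=True)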
def put_file(self, full_path, file_obj, overwrite=False, parent_rev=None):
"""Upload a file.
-
- A typical use case would be as follows:
- .. code-block:: python
+ A typical use case would be as follows::
- f = open('working-draft.txt')
+ f = open('working-draft.txt', 'rb')
response = client.put_file('/magnum-opus.txt', f)
print "uploaded:", response
- which would return the metadata of the uploaded file, similar to:
-
- .. code-block:: python
+ which would return the metadata of the uploaded file, similar to::
{
'bytes': 77,
@@ -303,39 +327,41 @@ class DropboxClient(object):
'thumb_exists': False
}
- Args:
- - ``full_path``: The full path to upload the file to, *including the file name*.
- If the destination directory does not yet exist, it will be created.
- - ``file_obj``: A file-like object to upload. If you would like, you can pass a string as file_obj.
- - ``overwrite``: Whether to overwrite an existing file at the given path. [default False]
+ Parameters
+ full_path
+ The full path to upload the file to, *including the file name*.
+ If the destination folder does not yet exist, it will be created.
+ file_obj
+ A file-like object to upload. If you would like, you can pass a string as file_obj.
+ overwrite
+ Whether to overwrite an existing file at the given path. (Default ``False``.)
If overwrite is False and a file already exists there, Dropbox
will rename the upload to make sure it doesn't overwrite anything.
You need to check the metadata returned for the new name.
This field should only be True if your intent is to potentially
clobber changes to a file that you don't know about.
- - ``parent_rev``: The rev field from the 'parent' of this upload. [optional]
+ parent_rev
+ Optional rev field from the 'parent' of this upload.
If your intent is to update the file at the given path, you should
pass the parent_rev parameter set to the rev value from the most recent
metadata you have of the existing file at that path. If the server
has a more recent version of the file at the specified path, it will
automatically rename your uploaded file, spinning off a conflict.
Using this parameter effectively causes the overwrite parameter to be ignored.
- The file will always be overwritten if you send the most-recent parent_rev,
- and it will never be overwritten if you send a less-recent one.
+ The file will always be overwritten if you send the most recent parent_rev,
+ and it will never be overwritten if you send a less recent one.
- Returns:
- - A dictionary containing the metadata of the newly uploaded file.
+ Returns
+ A dictionary containing the metadata of the newly uploaded file.
For a detailed description of what this call returns, visit:
- https://www.dropbox.com/developers/reference/api#files-put
+ https://www.dropbox.com/developers/core/docs#files-put
- Raises:
- - A dropbox.rest.ErrorResponse with an HTTP status of
- - 400: Bad request (may be due to many things; check e.error for details)
- - 503: User over quota
+ Raises
+ A :class:`dropbox.rest.ErrorResponse` with an HTTP status of:
- Note: In Python versions below version 2.6, httplib doesn't handle file-like objects.
- In that case, this code will read the entire file into memory (!).
+ - 400: Bad request (may be due to many things; check e.error for details).
+ - 503: User over quota.
"""
path = "/files_put/%s%s" % (self.session.root, format_path(full_path))
@@ -346,38 +372,41 @@ class DropboxClient(object):
if parent_rev is not None:
params['parent_rev'] = parent_rev
-
url, params, headers = self.request(path, params, method='PUT', content_server=True)
return self.rest_client.PUT(url, file_obj, headers)
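Building on the ``parent_rev`` description above, a safe update of an existing file might look like this sketch (paths are illustrative)::

    meta = client.metadata('/magnum-opus.txt')            # grab the current rev first
    with open('working-draft.txt', 'rb') as f:
        new_meta = client.put_file('/magnum-opus.txt', f, parent_rev=meta['rev'])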
- def get_file(self, from_path, rev=None):
+ def get_file(self, from_path, rev=None, start=None, length=None):
"""Download a file.
- Unlike most other calls, get_file returns a raw HTTPResponse with the connection open.
- You should call .read() and perform any processing you need, then close the HTTPResponse.
+ Example::
- A typical usage looks like this:
-
- .. code-block:: python
-
- out = open('magnum-opus.txt', 'w')
- f, metadata = client.get_file_and_metadata('/magnum-opus.txt').read()
- out.write(f)
+ out = open('magnum-opus.txt', 'wb')
+ with client.get_file('/magnum-opus.txt') as f:
+ out.write(f.read())
which would download the file ``magnum-opus.txt`` and write the contents into
the file ``magnum-opus.txt`` on the local filesystem.
- Args:
- - ``from_path``: The path to the file to be downloaded.
- - ``rev``: A previous rev value of the file to be downloaded. [optional]
+ Parameters
+ from_path
+ The path to the file to be downloaded.
+ rev
+ Optional previous rev value of the file to be downloaded.
+ start
+ Optional byte value from which to start downloading.
+ length
+ Optional length in bytes for partially downloading the file. If ``length`` is
+ specified but ``start`` is not, then the last ``length`` bytes will be downloaded.
+ Returns
+ A :class:`dropbox.rest.RESTResponse` that is the HTTP response for
+ the API request. It is a file-like object that can be read from. You
+ must call ``close()`` when you're done.
- Returns:
- - An httplib.HTTPResponse that is the result of the request.
+ Raises
+ A :class:`dropbox.rest.ErrorResponse` with an HTTP status of:
- Raises:
- - A dropbox.rest.ErrorResponse with an HTTP status of
- - 400: Bad request (may be due to many things; check e.error for details)
+ - 400: Bad request (may be due to many things; check e.error for details).
- 404: No file was found at the given path, or the file that was there was deleted.
- 200: Request was okay but response was malformed in some way.
"""
@@ -388,26 +417,49 @@ class DropboxClient(object):
params['rev'] = rev
url, params, headers = self.request(path, params, method='GET', content_server=True)
+ if start is not None:
+ if length:
+ headers['Range'] = 'bytes=%s-%s' % (start, start + length - 1)
+ else:
+ headers['Range'] = 'bytes=%s-' % start
+ elif length is not None:
+ headers['Range'] = 'bytes=-%s' % length
return self.rest_client.request("GET", url, headers=headers, raw_response=True)
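For example, the new ``start``/``length`` parameters map onto an HTTP Range request, so a partial download can be sketched as follows (path is illustrative)::

    resp = client.get_file('/magnum-opus.txt', start=0, length=1024)
    try:
        first_kb = resp.read()    # only the first 1024 bytes are transferred
    finally:
        resp.close()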
def get_file_and_metadata(self, from_path, rev=None):
"""Download a file along with its metadata.
- Acts as a thin wrapper around get_file() (see get_file() comments for
+ Acts as a thin wrapper around get_file() (see :meth:`get_file()` comments for
more details)
- Args:
- - ``from_path``: The path to the file to be downloaded.
- - ``rev``: A previous rev value of the file to be downloaded. [optional]
+ A typical usage looks like this::
- Returns:
- - An httplib.HTTPResponse that is the result of the request.
- - A dictionary containing the metadata of the file (see
- https://www.dropbox.com/developers/reference/api#metadata for details).
+ out = open('magnum-opus.txt', 'wb')
+ f, metadata = client.get_file_and_metadata('/magnum-opus.txt')
+ with f:
+ out.write(f.read())
- Raises:
- - A dropbox.rest.ErrorResponse with an HTTP status of
- - 400: Bad request (may be due to many things; check e.error for details)
+ Parameters
+ from_path
+ The path to the file to be downloaded.
+ rev
+ Optional previous rev value of the file to be downloaded.
+
+ Returns
+ A pair of ``(response, metadata)``:
+
+ response
+ A :class:`dropbox.rest.RESTResponse` that is the HTTP response for
+ the API request. It is a file-like object that can be read from. You
+ must call ``close()`` when you're done.
+ metadata
+ A dictionary containing the metadata of the file (see
+ https://www.dropbox.com/developers/core/docs#metadata for details).
+
+ Raises
+ A :class:`dropbox.rest.ErrorResponse` with an HTTP status of:
+
+ - 400: Bad request (may be due to many things; check e.error for details).
- 404: No file was found at the given path, or the file that was there was deleted.
- 200: Request was okay but response was malformed in some way.
"""
@@ -418,11 +470,10 @@ class DropboxClient(object):
@staticmethod
def __parse_metadata_as_dict(dropbox_raw_response):
- """Parses file metadata from a raw dropbox HTTP response, raising a
- dropbox.rest.ErrorResponse if parsing fails.
- """
+ # Parses file metadata from a raw dropbox HTTP response, raising a
+ # dropbox.rest.ErrorResponse if parsing fails.
metadata = None
- for header, header_val in dropbox_raw_response.getheaders():
+ for header, header_val in dropbox_raw_response.getheaders().iteritems():
if header.lower() == 'x-dropbox-metadata':
try:
metadata = json.loads(header_val)
@@ -431,30 +482,51 @@ class DropboxClient(object):
if not metadata: raise ErrorResponse(dropbox_raw_response)
return metadata
- def delta(self, cursor=None):
+ def delta(self, cursor=None, path_prefix=None, include_media_info=False):
"""A way of letting you keep up with changes to files and folders in a
user's Dropbox. You can periodically call delta() to get a list of "delta
entries", which are instructions on how to update your local state to
match the server's state.
- Arguments:
- - ``cursor``: On the first call, omit this argument (or pass in ``None``). On
+ Parameters
+ cursor
+ On the first call, omit this argument (or pass in ``None``). On
subsequent calls, pass in the ``cursor`` string returned by the previous
call.
+ path_prefix
+ If provided, results will be limited to files and folders
+ whose paths are equal to or under ``path_prefix``. The ``path_prefix`` is
+ fixed for a given cursor. Whatever ``path_prefix`` you use on the first
+ ``delta()`` must also be passed in on subsequent calls that use the returned
+ cursor.
+ include_media_info
+ If True, delta will return additional media info for photos and videos
+ (the time a photo was taken, the GPS coordinates of a photo, etc.). There
+ is a delay between when a file is uploaded to Dropbox and when this
+ information is available; delta will only include a file in the changelist
+ once its media info is ready. The value you use on the first ``delta()`` must
+ also be passed in on subsequent calls that use the returned cursor.
- Returns: A dict with three fields.
- - ``entries``: A list of "delta entries" (described below)
- - ``reset``: If ``True``, you should your local state to be an empty folder
+ Returns
+ A dict with four keys:
+
+ entries
+ A list of "delta entries" (described below).
+ reset
+ If ``True``, you should reset your local state to be an empty folder
before processing the list of delta entries. This is ``True`` only
in rare situations.
- - ``cursor``: A string that is used to keep track of your current state.
+ cursor
+ A string that is used to keep track of your current state.
On the next call to delta(), pass in this value to return entries
that were recorded since the cursor was returned.
- - ``has_more``: If ``True``, then there are more entries available; you can
+ has_more
+ If ``True``, then there are more entries available; you can
call delta() again immediately to retrieve those entries. If ``False``,
then wait at least 5 minutes (preferably longer) before checking again.
Delta Entries: Each entry is a 2-item list of one of the following forms:
+
- [*path*, *metadata*]: Indicates that there is a file/folder at the given
path. You should add the entry to your local path. (The *metadata*
value is the same as what would be returned by the ``metadata()`` call.)
@@ -468,7 +540,7 @@ class DropboxClient(object):
*path*. If it's a file, replace it with the new entry. If it's a
folder, apply the new *metadata* to the folder, but do not modify
the folder's children.
- - [*path*, ``nil``]: Indicates that there is no file/folder at the *path* on
+ - [*path*, ``None``]: Indicates that there is no file/folder at the *path* on
Dropbox. To update your local state to match, delete whatever is at *path*,
including any children (you will sometimes also get "delete" delta entries
for the children, but this is not guaranteed). If your local state doesn't
@@ -480,26 +552,75 @@ class DropboxClient(object):
"""
path = "/delta"
- params = {}
+ params = {'include_media_info': include_media_info}
if cursor is not None:
params['cursor'] = cursor
+ if path_prefix is not None:
+ params['path_prefix'] = path_prefix
url, params, headers = self.request(path, params)
return self.rest_client.POST(url, params, headers)
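Put together, a catch-up loop over ``delta()`` typically looks something like this sketch, with a plain dict standing in for whatever local storage the application actually uses::

    local_state, cursor = {}, None
    while True:
        page = client.delta(cursor)
        if page['reset']:
            local_state.clear()                        # start from an empty folder
        for path, metadata in page['entries']:
            if metadata is not None:
                local_state[path.lower()] = metadata   # file/folder added or changed
            else:
                local_state.pop(path.lower(), None)    # deleted on Dropbox
        cursor = page['cursor']
        if not page['has_more']:
            break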
+ def longpoll_delta(self, cursor, timeout=None):
+ """A long-poll endpoint to wait for changes on an account. In conjunction with
+ :meth:`delta()`, this call gives you a low-latency way to monitor an account for
+ file changes.
+
+ Note that this call goes to ``api-notify.dropbox.com`` instead of ``api.dropbox.com``.
+
+ Unlike most other API endpoints, this call does not require OAuth authentication.
+ The passed-in cursor can only be acquired via an authenticated call to :meth:`delta()`.
+
+ Parameters
+ cursor
+ A delta cursor as returned from a call to :meth:`delta()`. Note that a cursor
+ returned from a call to :meth:`delta()` with ``include_media_info=True`` is
+ incompatible with ``longpoll_delta()`` and an error will be returned.
+ timeout
+ An optional integer indicating a timeout, in seconds. The default value is
+ 30 seconds, which is also the minimum allowed value. The maximum is 480
+ seconds. The request will block for at most this length of time, plus up
+ to 90 seconds of random jitter added to avoid the thundering herd problem.
+ Care should be taken when using this parameter, as some network
+ infrastructure does not support long timeouts.
+
+ Returns
+ The connection will block until there are changes available or a timeout occurs.
+ The response will be a dictionary that looks like the following example::
+
+ {"changes": false, "backoff": 60}
+
+ For a detailed description of what this call returns, visit:
+ https://www.dropbox.com/developers/core/docs#longpoll-delta
+
+ Raises
+ A :class:`dropbox.rest.ErrorResponse` with an HTTP status of:
+
+ - 400: Bad request (generally due to an invalid parameter; check e.error for details).
+ """
+ path = "/longpoll_delta"
+
+ params = {'cursor': cursor}
+ if timeout is not None:
+ params['timeout'] = timeout
+
+ url, params, headers = self.request(path, params, method='GET', notification_server=True)
+
+ return self.rest_client.GET(url, headers)
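The intended pairing with ``delta()`` is roughly the following sketch: block on the notification server, then fetch the actual changes (error handling omitted)::

    import time

    cursor = client.delta()['cursor']        # establish a baseline cursor first
    while True:
        note = client.longpoll_delta(cursor)
        if note.get('changes'):
            page = client.delta(cursor)
            cursor = page['cursor']
            # apply page['entries'] to local state here
        time.sleep(note.get('backoff', 0))   # honour any server-requested backoff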
def create_copy_ref(self, from_path):
"""Creates and returns a copy ref for a specific file. The copy ref can be
used to instantly copy that file to the Dropbox of another account.
- Args:
- - ``path``: The path to the file for a copy ref to be created on.
+ Parameters
+ path
+ The path to the file for a copy ref to be created on.
- Returns:
- - A dictionary that looks like the following example:
+ Returns
+ A dictionary that looks like the following example::
- ``{"expires":"Fri, 31 Jan 2042 21:01:05 +0000", "copy_ref":"z1X6ATl6aWtzOGq0c3g5Ng"}``
+ {"expires": "Fri, 31 Jan 2042 21:01:05 +0000", "copy_ref": "z1X6ATl6aWtzOGq0c3g5Ng"}
"""
path = "/copy_ref/%s%s" % (self.session.root, format_path(from_path))
@@ -511,13 +632,15 @@ class DropboxClient(object):
def add_copy_ref(self, copy_ref, to_path):
"""Adds the file referenced by the copy ref to the specified path
- Args:
- - ``copy_ref``: A copy ref string that was returned from a create_copy_ref call.
- The copy_ref can be created from any other Dropbox account, or from the same account.
- - ``path``: The path to where the file will be created.
+ Parameters
+ copy_ref
+ A copy ref string that was returned from a create_copy_ref call.
+ The copy_ref can be created from any other Dropbox account, or from the same account.
+ path
+ The path to where the file will be created.
- Returns:
- - A dictionary containing the metadata of the new copy of the file.
+ Returns
+ A dictionary containing the metadata of the new copy of the file.
"""
path = "/fileops/copy"
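Taken together, copying a file from one linked account to another with a copy ref is roughly the following; both client objects are hypothetical ``DropboxClient`` instances::

    ref = source_client.create_copy_ref('/report.pdf')['copy_ref']
    dest_client.add_copy_ref(ref, '/shared/report.pdf')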
@@ -532,24 +655,28 @@ class DropboxClient(object):
def file_copy(self, from_path, to_path):
"""Copy a file or folder to a new location.
- Args:
- - ``from_path``: The path to the file or folder to be copied.
- - ``to_path``: The destination path of the file or folder to be copied.
+ Parameters
+ from_path
+ The path to the file or folder to be copied.
+ to_path
+ The destination path of the file or folder to be copied.
This parameter should include the destination filename (e.g.
from_path: '/test.txt', to_path: '/dir/test.txt'). If there's
- already a file at the to_path, this copy will be renamed to
- be unique.
+ already a file at the to_path, it will raise an ErrorResponse.
- Returns:
- - A dictionary containing the metadata of the new copy of the file or folder.
+ Returns
+ A dictionary containing the metadata of the new copy of the file or folder.
For a detailed description of what this call returns, visit:
- https://www.dropbox.com/developers/reference/api#fileops-copy
+ https://www.dropbox.com/developers/core/docs#fileops-copy
- Raises:
- - A dropbox.rest.ErrorResponse with an HTTP status of:
+ Raises
+ A :class:`dropbox.rest.ErrorResponse` with an HTTP status of:
- - 400: Bad request (may be due to many things; check e.error for details)
+ - 400: Bad request (may be due to many things; check e.error for details).
+ - 403: An invalid copy operation was attempted
+ (e.g. there is already a file at the given destination,
+ or trying to copy a shared folder).
- 404: No file was found at given from_path.
- 503: User over storage quota.
"""
@@ -562,22 +689,23 @@ class DropboxClient(object):
return self.rest_client.POST(url, params, headers)
-
def file_create_folder(self, path):
"""Create a folder.
- Args:
- - ``path``: The path of the new folder.
+ Parameters
+ path
+ The path of the new folder.
- Returns:
- - A dictionary containing the metadata of the newly created folder.
+ Returns
+ A dictionary containing the metadata of the newly created folder.
For a detailed description of what this call returns, visit:
- https://www.dropbox.com/developers/reference/api#fileops-create-folder
+ https://www.dropbox.com/developers/core/docs#fileops-create-folder
- Raises:
- - A dropbox.rest.ErrorResponse with an HTTP status of
- - 400: Bad request (may be due to many things; check e.error for details)
+ Raises
+ A :class:`dropbox.rest.ErrorResponse` with an HTTP status of:
+
+ - 400: Bad request (may be due to many things; check e.error for details).
- 403: A folder at that path already exists.
"""
params = {'root': self.session.root, 'path': format_path(path)}
@@ -586,23 +714,23 @@ class DropboxClient(object):
return self.rest_client.POST(url, params, headers)
-
def file_delete(self, path):
"""Delete a file or folder.
- Args:
- - ``path``: The path of the file or folder.
+ Parameters
+ path
+ The path of the file or folder.
- Returns:
- - A dictionary containing the metadata of the just deleted file.
+ Returns
+ A dictionary containing the metadata of the just deleted file.
For a detailed description of what this call returns, visit:
- https://www.dropbox.com/developers/reference/api#fileops-delete
+ https://www.dropbox.com/developers/core/docs#fileops-delete
- Raises:
- - A dropbox.rest.ErrorResponse with an HTTP status of
+ Raises
+ A :class:`dropbox.rest.ErrorResponse` with an HTTP status of:
- - 400: Bad request (may be due to many things; check e.error for details)
+ - 400: Bad request (may be due to many things; check e.error for details).
- 404: No file was found at the given path.
"""
params = {'root': self.session.root, 'path': format_path(path)}
@@ -611,52 +739,54 @@ class DropboxClient(object):
return self.rest_client.POST(url, params, headers)
-
def file_move(self, from_path, to_path):
"""Move a file or folder to a new location.
- Args:
- - ``from_path``: The path to the file or folder to be moved.
- - ``to_path``: The destination path of the file or folder to be moved.
- This parameter should include the destination filename (e.g.
- - ``from_path``: '/test.txt', to_path: '/dir/test.txt'). If there's
- already a file at the to_path, this file or folder will be renamed to
- be unique.
+ Parameters
+ from_path
+ The path to the file or folder to be moved.
+ to_path
+ The destination path of the file or folder to be moved.
+ This parameter should include the destination filename (e.g. if
+ ``from_path`` is ``'/test.txt'``, ``to_path`` might be
+ ``'/dir/test.txt'``). If there's already a file at the
+ ``to_path``, it will raise an ErrorResponse.
- Returns:
- - A dictionary containing the metadata of the new copy of the file or folder.
+ Returns
+ A dictionary containing the metadata of the new copy of the file or folder.
For a detailed description of what this call returns, visit:
- https://www.dropbox.com/developers/reference/api#fileops-move
+ https://www.dropbox.com/developers/core/docs#fileops-move
- Raises:
- - A dropbox.rest.ErrorResponse with an HTTP status of
+ Raises
+ A :class:`dropbox.rest.ErrorResponse` with an HTTP status of:
- - 400: Bad request (may be due to many things; check e.error for details)
+ - 400: Bad request (may be due to many things; check e.error for details).
+ - 403: An invalid move operation was attempted
+ (e.g. there is already a file at the given destination,
+ or moving a shared folder into a shared folder).
- 404: No file was found at given from_path.
- 503: User over storage quota.
"""
- params = {'root': self.session.root, 'from_path': format_path(from_path), 'to_path': format_path(to_path)}
+ params = {'root': self.session.root,
+ 'from_path': format_path(from_path),
+ 'to_path': format_path(to_path)}
url, params, headers = self.request("/fileops/move", params)
return self.rest_client.POST(url, params, headers)
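Given the 403 case now called out above, a defensive move might be sketched as follows (assuming the package is importable as ``dropbox``)::

    from dropbox.rest import ErrorResponse

    try:
        client.file_move('/test.txt', '/dir/test.txt')
    except ErrorResponse as e:
        if e.status == 403:
            print('destination exists or move not allowed:', e.error)
        else:
            raise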
-
- def metadata(self, path, list=True, file_limit=25000, hash=None, rev=None, include_deleted=False):
+ def metadata(self, path, list=True, file_limit=25000, hash=None,
+ rev=None, include_deleted=False, include_media_info=False):
"""Retrieve metadata for a file or folder.
- A typical use would be:
-
- .. code-block:: python
+ A typical use would be::
folder_metadata = client.metadata('/')
print "metadata:", folder_metadata
-
- which would return the metadata of the root directory. This
- will look something like:
- .. code-block:: python
+ which would return the metadata of the root folder. This
+ will look something like::
{
'bytes': 0,
@@ -672,7 +802,7 @@ class DropboxClient(object):
'root': 'dropbox',
'size': '0 bytes',
'thumb_exists': False
- },
+ },
{
'bytes': 77,
'icon': 'page_white_text',
@@ -696,37 +826,48 @@ class DropboxClient(object):
'thumb_exists': False
}
- In this example, the root directory contains two things: ``Sample Folder``,
+ In this example, the root folder contains two things: ``Sample Folder``,
which is a folder, and ``/magnum-opus.txt``, which is a text file 77 bytes long
-
- Args:
- - ``path``: The path to the file or folder.
- - ``list``: Whether to list all contained files (only applies when
+
+ Parameters
+ path
+ The path to the file or folder.
+ list
+ Whether to list all contained files (only applies when
path refers to a folder).
- - ``file_limit``: The maximum number of file entries to return within
- a folder. If the number of files in the directory exceeds this
+ file_limit
+ The maximum number of file entries to return within
+ a folder. If the number of files in the folder exceeds this
limit, an exception is raised. The server will return at max
25,000 files within a folder.
- - ``hash``: Every directory listing has a hash parameter attached that
- can then be passed back into this function later to save on\
- bandwidth. Rather than returning an unchanged folder's contents,\
- the server will instead return a 304.\
- - ``rev``: The revision of the file to retrieve the metadata for. [optional]
+ hash
+ Every folder listing has a hash parameter attached that
+ can then be passed back into this function later to save on
+ bandwidth. Rather than returning an unchanged folder's contents,
+ the server will instead return a 304.
+ rev
+ Optional revision of the file to retrieve the metadata for.
This parameter only applies for files. If omitted, you'll receive
the most recent revision metadata.
+ include_deleted
+ When listing contained files, include files that have been deleted.
+ include_media_info
+ If True, includes additional media info for photos and videos if
+ available (the time a photo was taken, the GPS coordinates of a photo,
+ etc.).
- Returns:
- - A dictionary containing the metadata of the file or folder
+ Returns
+ A dictionary containing the metadata of the file or folder
(and contained files if appropriate).
For a detailed description of what this call returns, visit:
- https://www.dropbox.com/developers/reference/api#metadata
+ https://www.dropbox.com/developers/core/docs#metadata
- Raises:
- - A dropbox.rest.ErrorResponse with an HTTP status of
+ Raises
+ A :class:`dropbox.rest.ErrorResponse` with an HTTP status of:
- - 304: Current directory hash matches hash parameters, so contents are unchanged.
- - 400: Bad request (may be due to many things; check e.error for details)
+ - 304: Current folder hash matches hash parameters, so contents are unchanged.
+ - 400: Bad request (may be due to many things; check e.error for details).
- 404: No file was found at given path.
- 406: Too many file entries to return.
"""
@@ -735,6 +876,7 @@ class DropboxClient(object):
params = {'file_limit': file_limit,
'list': 'true',
'include_deleted': include_deleted,
+ 'include_media_info': include_media_info,
}
if not list:
@@ -748,59 +890,78 @@ class DropboxClient(object):
return self.rest_client.GET(url, headers)
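As a concrete example of the ``hash`` parameter described above, refreshing a cached folder listing can be sketched as follows (folder path is illustrative)::

    from dropbox.rest import ErrorResponse

    listing = client.metadata('/Photos')
    folder_hash = listing['hash']             # present on folder listings
    try:
        listing = client.metadata('/Photos', hash=folder_hash)
    except ErrorResponse as e:
        if e.status == 304:
            pass                              # unchanged; keep the cached listing
        else:
            raise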
- def thumbnail(self, from_path, size='large', format='JPEG'):
+ def thumbnail(self, from_path, size='m', format='JPEG'):
"""Download a thumbnail for an image.
- Unlike most other calls, thumbnail returns a raw HTTPResponse with the connection open.
- You should call .read() and perform any processing you need, then close the HTTPResponse.
-
- Args:
- - ``from_path``: The path to the file to be thumbnailed.
- - ``size``: A string describing the desired thumbnail size.
- At this time, 'small', 'medium', and 'large' are
- officially supported sizes (32x32, 64x64, and 128x128
- respectively), though others may be available. Check
- https://www.dropbox.com/developers/reference/api#thumbnails for
+ Parameters
+ from_path
+ The path to the file to be thumbnailed.
+ size
+ A string specifying the desired thumbnail size. Currently
+ supported sizes: ``"xs"`` (32x32), ``"s"`` (64x64), ``"m"`` (128x128),
+ ``"l"`` (640x480), ``"xl"`` (1024x768).
+ Check https://www.dropbox.com/developers/core/docs#thumbnails for
more details.
+ format
+ The image format the server should use for the returned
+ thumbnail data. Either ``"JPEG"`` or ``"PNG"``.
- Returns:
- - An httplib.HTTPResponse that is the result of the request.
+ Returns
+ A :class:`dropbox.rest.RESTResponse` that is the HTTP response for
+ the API request. It is a file-like object that can be read from. You
+ must call ``close()`` when you're done.
- Raises:
- - A dropbox.rest.ErrorResponse with an HTTP status of
- - 400: Bad request (may be due to many things; check e.error for details)
- - 404: No file was found at the given from_path, or files of that type cannot be thumbnailed.
+ Raises
+ A :class:`dropbox.rest.ErrorResponse` with an HTTP status of:
+
+ - 400: Bad request (may be due to many things; check e.error for details).
+ - 404: No file was found at the given from_path,
+ or files of that type cannot be thumbnailed.
- 415: Image is invalid and cannot be thumbnailed.
"""
- assert format in ['JPEG', 'PNG'], "expected a thumbnail format of 'JPEG' or 'PNG', got %s" % format
+ assert format in ['JPEG', 'PNG'], \
+ "expected a thumbnail format of 'JPEG' or 'PNG', got %s" % format
path = "/thumbnails/%s%s" % (self.session.root, format_path(from_path))
- url, params, headers = self.request(path, {'size': size, 'format': format}, method='GET', content_server=True)
+ url, params, headers = self.request(path, {'size': size, 'format': format},
+ method='GET', content_server=True)
return self.rest_client.request("GET", url, headers=headers, raw_response=True)
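For instance, saving a 128x128 JPEG thumbnail to disk could look like this sketch (paths are illustrative)::

    thumb = client.thumbnail('/photos/beach.jpg', size='m', format='JPEG')
    try:
        with open('beach_thumb.jpg', 'wb') as out:
            out.write(thumb.read())
    finally:
        thumb.close()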
- def thumbnail_and_metadata(self, from_path, size='large', format='JPEG'):
+ def thumbnail_and_metadata(self, from_path, size='m', format='JPEG'):
"""Download a thumbnail for an image along with its metadata.
- Acts as a thin wrapper around thumbnail() (see thumbnail() comments for
+ Acts as a thin wrapper around thumbnail() (see :meth:`thumbnail()` comments for
more details)
- Args:
- - ``from_path``: The path to the file to be thumbnailed.
- - ``size``: A string describing the desired thumbnail size. See thumbnail()
- for details.
+ Parameters
+ from_path
+ The path to the file to be thumbnailed.
+ size
+ A string specifying the desired thumbnail size. See :meth:`thumbnail()`
+ for details.
+ format
+ The image format the server should use for the returned
+ thumbnail data. Either ``"JPEG"`` or ``"PNG"``.
- Returns:
- - An httplib.HTTPResponse that is the result of the request.
- - A dictionary containing the metadata of the file whose thumbnail
- was downloaded (see https://www.dropbox.com/developers/reference/api#metadata
- for details).
+ Returns
+ A pair of ``(response, metadata)``:
- Raises:
- - A dropbox.rest.ErrorResponse with an HTTP status of
+ response
+ A :class:`dropbox.rest.RESTResponse` that is the HTTP response for
+ the API request. It is a file-like object that can be read from. You
+ must call ``close()`` when you're done.
+ metadata
+ A dictionary containing the metadata of the file whose thumbnail
+ was downloaded (see https://www.dropbox.com/developers/core/docs#metadata
+ for details).
- - 400: Bad request (may be due to many things; check e.error for details)
- - 404: No file was found at the given from_path, or files of that type cannot be thumbnailed.
+ Raises
+ A :class:`dropbox.rest.ErrorResponse` with an HTTP status of:
+
+ - 400: Bad request (may be due to many things; check e.error for details).
+ - 404: No file was found at the given from_path,
+ or files of that type cannot be thumbnailed.
- 415: Image is invalid and cannot be thumbnailed.
- 200: Request was okay but response was malformed in some way.
"""
@@ -810,24 +971,29 @@ class DropboxClient(object):
return thumbnail_res, metadata
def search(self, path, query, file_limit=1000, include_deleted=False):
- """Search directory for filenames matching query.
+ """Search folder for filenames matching query.
- Args:
- - ``path``: The directory to search within.
- - ``query``: The query to search on (minimum 3 characters).
- - ``file_limit``: The maximum number of file entries to return within a folder.
+ Parameters
+ path
+ The folder to search within.
+ query
+ The query to search on (minimum 3 characters).
+ file_limit
+ The maximum number of file entries to return within a folder.
The server will return at max 1,000 files.
- - ``include_deleted``: Whether to include deleted files in search results.
+ include_deleted
+ Whether to include deleted files in search results.
- Returns:
- - A list of the metadata of all matching files (up to
+ Returns
+ A list of the metadata of all matching files (up to
file_limit entries). For a detailed description of what
this call returns, visit:
- https://www.dropbox.com/developers/reference/api#search
+ https://www.dropbox.com/developers/core/docs#search
- Raises:
- - A dropbox.rest.ErrorResponse with an HTTP status of
- - 400: Bad request (may be due to many things; check e.error for details)
+ Raises
+ A :class:`dropbox.rest.ErrorResponse` with an HTTP status of:
+
+ - 400: Bad request (may be due to many things; check e.error for details).
"""
path = "/search/%s%s" % (self.session.root, format_path(path))
@@ -844,22 +1010,24 @@ class DropboxClient(object):
def revisions(self, path, rev_limit=1000):
"""Retrieve revisions of a file.
- Args:
- - ``path``: The file to fetch revisions for. Note that revisions
+ Parameters
+ path
+ The file to fetch revisions for. Note that revisions
are not available for folders.
- - ``rev_limit``: The maximum number of file entries to return within
+ rev_limit
+ The maximum number of file entries to return within
a folder. The server will return at max 1,000 revisions.
- Returns:
- - A list of the metadata of all matching files (up to rev_limit entries).
+ Returns
+ A list of the metadata of all matching files (up to rev_limit entries).
For a detailed description of what this call returns, visit:
- https://www.dropbox.com/developers/reference/api#revisions
+ https://www.dropbox.com/developers/core/docs#revisions
- Raises:
- - A dropbox.rest.ErrorResponse with an HTTP status of
+ Raises
+ A :class:`dropbox.rest.ErrorResponse` with an HTTP status of:
- - 400: Bad request (may be due to many things; check e.error for details)
+ - 400: Bad request (may be due to many things; check e.error for details).
- 404: No revisions were found at the given path.
"""
path = "/revisions/%s%s" % (self.session.root, format_path(path))
@@ -875,20 +1043,22 @@ class DropboxClient(object):
def restore(self, path, rev):
"""Restore a file to a previous revision.
- Args:
- - ``path``: The file to restore. Note that folders can't be restored.
- - ``rev``: A previous rev value of the file to be restored to.
+ Parameters
+ path
+ The file to restore. Note that folders can't be restored.
+ rev
+ A previous rev value of the file to be restored to.
- Returns:
- - A dictionary containing the metadata of the newly restored file.
+ Returns
+ A dictionary containing the metadata of the newly restored file.
For a detailed description of what this call returns, visit:
- https://www.dropbox.com/developers/reference/api#restore
+ https://www.dropbox.com/developers/core/docs#restore
- Raises:
- - A dropbox.rest.ErrorResponse with an HTTP status of
+ Raises
+ A :class:`dropbox.rest.ErrorResponse` with an HTTP status of:
- - 400: Bad request (may be due to many things; check e.error for details)
+ - 400: Bad request (may be due to many things; check e.error for details).
- 404: Unable to find the file at the given revision.
"""
path = "/restore/%s%s" % (self.session.root, format_path(path))
@@ -910,22 +1080,24 @@ class DropboxClient(object):
creates a time-limited URL that can be accessed without any authentication,
and returns that to you, along with an expiration time.
- Args:
- - ``path``: The file to return a URL for. Folders are not supported.
+ Parameters
+ path
+ The file to return a URL for. Folders are not supported.
- Returns:
- - A dictionary that looks like the following example:
+ Returns
+ A dictionary that looks like the following example::
- ``{'url': 'https://dl.dropbox.com/0/view/wvxv1fw6on24qw7/file.mov', 'expires': 'Thu, 16 Sep 2011 01:01:25 +0000'}``
+ {'url': 'https://dl.dropboxusercontent.com/1/view/abcdefghijk/example',
+ 'expires': 'Thu, 16 Sep 2011 01:01:25 +0000'}
- For a detailed description of what this call returns, visit:
- https://www.dropbox.com/developers/reference/api#media
+ For a detailed description of what this call returns, visit:
+ https://www.dropbox.com/developers/core/docs#media
- Raises:
- - A dropbox.rest.ErrorResponse with an HTTP status of
+ Raises
+ A :class:`dropbox.rest.ErrorResponse` with an HTTP status of:
- - 400: Bad request (may be due to many things; check e.error for details)
- - 404: Unable to find the file at the given path.
+ - 400: Bad request (may be due to many things; check e.error for details).
+ - 404: Unable to find the file at the given path.
"""
path = "/media/%s%s" % (self.session.root, format_path(path))
@@ -933,7 +1105,7 @@ class DropboxClient(object):
return self.rest_client.GET(url, headers)
- def share(self, path):
+ def share(self, path, short_url=True):
"""Create a shareable link to a file or folder.
Shareable links created on Dropbox are time-limited, but don't require any
@@ -941,25 +1113,541 @@ class DropboxClient(object):
at least a day of shareability, though users have the ability to disable
a link from their account if they like.
- Args:
- - ``path``: The file or folder to share.
+ Parameters
+            path
+              The file or folder to share.
+            short_url
+              When True (the default), the returned URL will be shortened using the
+              Dropbox URL shortener. When False, the URL will link directly to the
+              file's preview page.
- Returns:
- - A dictionary that looks like the following example:
+ Returns
+ A dictionary that looks like the following example::
- ``{'url': 'http://www.dropbox.com/s/m/a2mbDa2', 'expires': 'Thu, 16 Sep 2011 01:01:25 +0000'}``
+ {'url': u'https://db.tt/c0mFuu1Y', 'expires': 'Tue, 01 Jan 2030 00:00:00 +0000'}
For a detailed description of what this call returns, visit:
- https://www.dropbox.com/developers/reference/api#shares
+ https://www.dropbox.com/developers/core/docs#shares
- Raises:
- - A dropbox.rest.ErrorResponse with an HTTP status of
+ Raises
+ A :class:`dropbox.rest.ErrorResponse` with an HTTP status of:
- - 400: Bad request (may be due to many things; check e.error for details)
+ - 400: Bad request (may be due to many things; check e.error for details).
- 404: Unable to find the file at the given path.
"""
path = "/shares/%s%s" % (self.session.root, format_path(path))
- url, params, headers = self.request(path, method='GET')
+ params = {
+ 'short_url': short_url,
+ }
+
+ url, params, headers = self.request(path, params, method='GET')
return self.rest_client.GET(url, headers)
+
+
+class ChunkedUploader(object):
+ """Contains the logic around a chunked upload, which uploads a
+ large file to Dropbox via the /chunked_upload endpoint.
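+
+    A minimal sketch, assuming ``client`` is an existing, authorized
+    :class:`DropboxClient` and ``big.bin`` is a local file::
+
+        import os
+        from dropbox import rest as dbrest
+
+        size = os.path.getsize('big.bin')
+        with open('big.bin', 'rb') as f:
+            uploader = ChunkedUploader(client, f, size)
+            while uploader.offset < size:
+                try:
+                    uploader.upload_chunked()
+                except dbrest.ErrorResponse:
+                    pass  # the next iteration resumes from the last accepted offset
+            metadata = uploader.finish('/big.bin')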
+ """
+
+ def __init__(self, client, file_obj, length):
+ self.client = client
+ self.offset = 0
+ self.upload_id = None
+
+ self.last_block = None
+ self.file_obj = file_obj
+ self.target_length = length
+
+ def upload_chunked(self, chunk_size = 4 * 1024 * 1024):
+ """Uploads data from this ChunkedUploader's file_obj in chunks, until
+ an error occurs. Throws an exception when an error occurs, and can
+ be called again to resume the upload.
+
+ Parameters
+ chunk_size
+ The number of bytes to put in each chunk. (Default 4 MB.)
+ """
+
+ while self.offset < self.target_length:
+ next_chunk_size = min(chunk_size, self.target_length - self.offset)
+            if self.last_block is None:
+ self.last_block = self.file_obj.read(next_chunk_size)
+
+ try:
+ (self.offset, self.upload_id) = self.client.upload_chunk(
+ StringIO(self.last_block), next_chunk_size, self.offset, self.upload_id)
+ self.last_block = None
+ except ErrorResponse as e:
+ # Handle the case where the server tells us our offset is wrong.
+ must_reraise = True
+ if e.status == 400:
+ reply = e.body
+ if "offset" in reply and reply['offset'] != 0 and reply['offset'] > self.offset:
+ self.last_block = None
+ self.offset = reply['offset']
+ must_reraise = False
+ if must_reraise:
+ raise
+
+ def finish(self, path, overwrite=False, parent_rev=None):
+ """Commits the bytes uploaded by this ChunkedUploader to a file
+        in the user's Dropbox.
+
+ Parameters
+ path
+ The full path of the file in the Dropbox.
+ overwrite
+ Whether to overwrite an existing file at the given path. (Default ``False``.)
+ If overwrite is False and a file already exists there, Dropbox
+ will rename the upload to make sure it doesn't overwrite anything.
+ You need to check the metadata returned for the new name.
+ This field should only be True if your intent is to potentially
+ clobber changes to a file that you don't know about.
+ parent_rev
+ Optional rev field from the 'parent' of this upload.
+ If your intent is to update the file at the given path, you should
+ pass the parent_rev parameter set to the rev value from the most recent
+ metadata you have of the existing file at that path. If the server
+ has a more recent version of the file at the specified path, it will
+ automatically rename your uploaded file, spinning off a conflict.
+ Using this parameter effectively causes the overwrite parameter to be ignored.
+ The file will always be overwritten if you send the most recent parent_rev,
+ and it will never be overwritten if you send a less recent one.
+ """
+
+ path = "/commit_chunked_upload/%s%s" % (self.client.session.root, format_path(path))
+
+ params = dict(
+ overwrite = bool(overwrite),
+ upload_id = self.upload_id
+ )
+
+ if parent_rev is not None:
+ params['parent_rev'] = parent_rev
+
+ url, params, headers = self.client.request(path, params, content_server=True)
+
+ return self.client.rest_client.POST(url, params, headers)
+
+
+# Allow access of ChunkedUploader via DropboxClient for backwards compatibility.
+DropboxClient.ChunkedUploader = ChunkedUploader
+
+
+class DropboxOAuth2FlowBase(object):
+
+ def __init__(self, consumer_key, consumer_secret, locale=None, rest_client=RESTClient):
+ self.consumer_key = consumer_key
+ self.consumer_secret = consumer_secret
+ self.locale = locale
+ self.rest_client = rest_client
+
+ def _get_authorize_url(self, redirect_uri, state):
+ params = dict(response_type='code',
+ client_id=self.consumer_key)
+ if redirect_uri is not None:
+ params['redirect_uri'] = redirect_uri
+ if state is not None:
+ params['state'] = state
+
+ return self.build_url(BaseSession.WEB_HOST, '/oauth2/authorize', params)
+
+ def _finish(self, code, redirect_uri):
+ url = self.build_url(BaseSession.API_HOST, '/oauth2/token')
+ params = {'grant_type': 'authorization_code',
+ 'code': code,
+ 'client_id': self.consumer_key,
+ 'client_secret': self.consumer_secret,
+ }
+ if self.locale is not None:
+ params['locale'] = self.locale
+ if redirect_uri is not None:
+ params['redirect_uri'] = redirect_uri
+
+ response = self.rest_client.POST(url, params=params)
+ access_token = response["access_token"]
+ user_id = response["uid"]
+ return access_token, user_id
+
+ def build_path(self, target, params=None):
+ """Build the path component for an API URL.
+
+ This method urlencodes the parameters, adds them
+ to the end of the target url, and puts a marker for the API
+ version in front.
+
+ Parameters
+ target
+ A target url (e.g. '/files') to build upon.
+ params
+ Optional dictionary of parameters (name to value).
+
+ Returns
+ The path and parameters components of an API URL.
+ """
+ if sys.version_info < (3,) and type(target) == unicode:
+ target = target.encode("utf8")
+
+ target_path = urllib.quote(target)
+
+ params = params or {}
+ params = params.copy()
+
+ if self.locale:
+ params['locale'] = self.locale
+
+ if params:
+ query_string = params_to_urlencoded(params)
+ return "/%s%s?%s" % (BaseSession.API_VERSION, target_path, query_string)
+ else:
+ return "/%s%s" % (BaseSession.API_VERSION, target_path)
+
+ def build_url(self, host, target, params=None):
+ """Build an API URL.
+
+ This method adds scheme and hostname to the path
+ returned from build_path.
+
+ Parameters
+ target
+ A target url (e.g. '/files') to build upon.
+ params
+ Optional dictionary of parameters (name to value).
+
+ Returns
+ The full API URL.
+ """
+ return "https://%s%s" % (host, self.build_path(target, params))
+
+
+class DropboxOAuth2FlowNoRedirect(DropboxOAuth2FlowBase):
+ """
+ OAuth 2 authorization helper for apps that can't provide a redirect URI
+ (such as the command-line example apps).
+
+ Example::
+
+ from dropbox.client import DropboxOAuth2FlowNoRedirect, DropboxClient
+ from dropbox import rest as dbrest
+
+ auth_flow = DropboxOAuth2FlowNoRedirect(APP_KEY, APP_SECRET)
+
+ authorize_url = auth_flow.start()
+ print "1. Go to: " + authorize_url
+ print "2. Click \\"Allow\\" (you might have to log in first)."
+ print "3. Copy the authorization code."
+ auth_code = raw_input("Enter the authorization code here: ").strip()
+
+ try:
+ access_token, user_id = auth_flow.finish(auth_code)
+ except dbrest.ErrorResponse, e:
+ print('Error: %s' % (e,))
+ return
+
+ c = DropboxClient(access_token)
+ """
+
+ def __init__(self, consumer_key, consumer_secret, locale=None, rest_client=None):
+ """
+ Construct an instance.
+
+ Parameters
+ consumer_key
+ Your API app's "app key"
+ consumer_secret
+ Your API app's "app secret"
+ locale
+ The locale of the user of your application. For example "en" or "en_US".
+ Some API calls return localized data and error messages; this setting
+ tells the server which locale to use. By default, the server uses "en_US".
+ rest_client
+ Optional :class:`dropbox.rest.RESTClient`-like object to use for making
+ requests.
+ """
+ if rest_client is None: rest_client = RESTClient
+ super(DropboxOAuth2FlowNoRedirect, self).__init__(consumer_key, consumer_secret,
+ locale, rest_client)
+
+ def start(self):
+ """
+ Starts the OAuth 2 authorization process.
+
+ Returns
+ The URL for a page on Dropbox's website. This page will let the user "approve"
+ your app, which gives your app permission to access the user's Dropbox account.
+ Tell the user to visit this URL and approve your app.
+ """
+ return self._get_authorize_url(None, None)
+
+ def finish(self, code):
+ """
+ If the user approves your app, they will be presented with an "authorization code". Have
+ the user copy/paste that authorization code into your app and then call this method to
+ get an access token.
+
+ Parameters
+ code
+ The authorization code shown to the user when they approved your app.
+
+ Returns
+ A pair of ``(access_token, user_id)``. ``access_token`` is a string that
+ can be passed to DropboxClient. ``user_id`` is the Dropbox user ID (string) of the
+ user that just approved your app.
+
+ Raises
+ The same exceptions as :meth:`DropboxOAuth2Flow.finish()`.
+ """
+ return self._finish(code, None)
+
+
+class DropboxOAuth2Flow(DropboxOAuth2FlowBase):
+ """
+ OAuth 2 authorization helper. Use this for web apps.
+
+ OAuth 2 has a two-step authorization process. The first step is having the user authorize
+ your app. The second involves getting an OAuth 2 access token from Dropbox.
+
+ Example::
+
+ from dropbox.client import DropboxOAuth2Flow, DropboxClient
+
+ def get_dropbox_auth_flow(web_app_session):
+            redirect_uri = "https://my-web-server.org/dropbox-auth-finish"
+ return DropboxOAuth2Flow(APP_KEY, APP_SECRET, redirect_uri,
+ web_app_session, "dropbox-auth-csrf-token")
+
+ # URL handler for /dropbox-auth-start
+ def dropbox_auth_start(web_app_session, request):
+ authorize_url = get_dropbox_auth_flow(web_app_session).start()
+ redirect_to(authorize_url)
+
+ # URL handler for /dropbox-auth-finish
+ def dropbox_auth_finish(web_app_session, request):
+ try:
+ access_token, user_id, url_state = \\
+ get_dropbox_auth_flow(web_app_session).finish(request.query_params)
+ except DropboxOAuth2Flow.BadRequestException, e:
+ http_status(400)
+ except DropboxOAuth2Flow.BadStateException, e:
+ # Start the auth flow again.
+ redirect_to("/dropbox-auth-start")
+ except DropboxOAuth2Flow.CsrfException, e:
+ http_status(403)
+ except DropboxOAuth2Flow.NotApprovedException, e:
+ flash('Not approved? Why not?')
+ return redirect_to("/home")
+ except DropboxOAuth2Flow.ProviderException, e:
+ logger.log("Auth error: %s" % (e,))
+ http_status(403)
+
+ """
+
+ def __init__(self, consumer_key, consumer_secret, redirect_uri, session,
+ csrf_token_session_key, locale=None, rest_client=None):
+ """
+ Construct an instance.
+
+ Parameters
+ consumer_key
+ Your API app's "app key".
+ consumer_secret
+ Your API app's "app secret".
+ redirect_uri
+ The URI that the Dropbox server will redirect the user to after the user
+ finishes authorizing your app. This URI must be HTTPS-based and pre-registered with
+ the Dropbox servers, though localhost URIs are allowed without pre-registration and can
+ be either HTTP or HTTPS.
+ session
+ A dict-like object that represents the current user's web session (will be
+ used to save the CSRF token).
+ csrf_token_session_key
+ The key to use when storing the CSRF token in the session (for
+ example: "dropbox-auth-csrf-token").
+ locale
+ The locale of the user of your application. For example "en" or "en_US".
+ Some API calls return localized data and error messages; this setting
+ tells the server which locale to use. By default, the server uses "en_US".
+ rest_client
+ Optional :class:`dropbox.rest.RESTClient`-like object to use for making
+ requests.
+ """
+ if rest_client is None: rest_client = RESTClient
+ super(DropboxOAuth2Flow, self).__init__(consumer_key, consumer_secret, locale, rest_client)
+ self.redirect_uri = redirect_uri
+ self.session = session
+ self.csrf_token_session_key = csrf_token_session_key
+
+ def start(self, url_state=None):
+ """
+ Starts the OAuth 2 authorization process.
+
+ This function builds an "authorization URL". You should redirect your user's browser to
+ this URL, which will give them an opportunity to grant your app access to their Dropbox
+ account. When the user completes this process, they will be automatically redirected to
+ the ``redirect_uri`` you passed in to the constructor.
+
+ This function will also save a CSRF token to ``session[csrf_token_session_key]`` (as
+ provided to the constructor). This CSRF token will be checked on :meth:`finish()` to
+ prevent request forgery.
+
+ Parameters
+ url_state
+ Any data that you would like to keep in the URL through the
+ authorization process. This exact value will be returned to you by :meth:`finish()`.
+
+ Returns
+ The URL for a page on Dropbox's website. This page will let the user "approve"
+ your app, which gives your app permission to access the user's Dropbox account.
+ Tell the user to visit this URL and approve your app.
+ """
+ csrf_token = base64.urlsafe_b64encode(os.urandom(16))
+ state = csrf_token
+ if url_state is not None:
+ state += "|" + url_state
+ self.session[self.csrf_token_session_key] = csrf_token
+
+ return self._get_authorize_url(self.redirect_uri, state)
+
+ def finish(self, query_params):
+ """
+ Call this after the user has visited the authorize URL (see :meth:`start()`), approved your
+ app and was redirected to your redirect URI.
+
+ Parameters
+ query_params
+ The query parameters on the GET request to your redirect URI.
+
+ Returns
+ A tuple of ``(access_token, user_id, url_state)``. ``access_token`` can be used to
+ construct a :class:`DropboxClient`. ``user_id`` is the Dropbox user ID (string) of the
+ user that just approved your app. ``url_state`` is the value you originally passed in to
+ :meth:`start()`.
+
+ Raises
+ :class:`BadRequestException`
+ If the redirect URL was missing parameters or if the given parameters were not valid.
+ :class:`BadStateException`
+ If there's no CSRF token in the session.
+ :class:`CsrfException`
+ If the ``'state'`` query parameter doesn't contain the CSRF token from the user's
+ session.
+ :class:`NotApprovedException`
+ If the user chose not to approve your app.
+ :class:`ProviderException`
+ If Dropbox redirected to your redirect URI with some unexpected error identifier
+ and error message.
+ """
+ csrf_token_from_session = self.session[self.csrf_token_session_key]
+
+ # Check well-formedness of request.
+
+ state = query_params.get('state')
+ if state is None:
+ raise self.BadRequestException("Missing query parameter 'state'.")
+
+ error = query_params.get('error')
+ error_description = query_params.get('error_description')
+ code = query_params.get('code')
+
+ if error is not None and code is not None:
+ raise self.BadRequestException("Query parameters 'code' and 'error' are both set; "
+                                           "only one must be set.")
+        if error is None and code is None:
+            raise self.BadRequestException("Neither query parameter 'code' nor 'error' is set.")
+
+ # Check CSRF token
+
+ if csrf_token_from_session is None:
+            raise self.BadStateException("Missing CSRF token in session.")
+ if len(csrf_token_from_session) <= 20:
+ raise AssertionError("CSRF token unexpectedly short: %r" % (csrf_token_from_session,))
+
+ split_pos = state.find('|')
+ if split_pos < 0:
+ given_csrf_token = state
+ url_state = None
+ else:
+ given_csrf_token = state[0:split_pos]
+ url_state = state[split_pos+1:]
+
+ if not _safe_equals(csrf_token_from_session, given_csrf_token):
+ raise self.CsrfException("expected %r, got %r" % (csrf_token_from_session,
+ given_csrf_token))
+
+ del self.session[self.csrf_token_session_key]
+
+ # Check for error identifier
+
+ if error is not None:
+ if error == 'access_denied':
+ # The user clicked "Deny"
+ if error_description is None:
+ raise self.NotApprovedException("No additional description from Dropbox")
+ else:
+ raise self.NotApprovedException("Additional description from Dropbox: " +
+ error_description)
+ else:
+ # All other errors
+ full_message = error
+ if error_description is not None:
+ full_message += ": " + error_description
+                raise self.ProviderException(full_message)
+
+ # If everything went ok, make the network call to get an access token.
+
+ access_token, user_id = self._finish(code, self.redirect_uri)
+ return access_token, user_id, url_state
+
+ class BadRequestException(Exception):
+ """
+ Thrown if the redirect URL was missing parameters or if the
+ given parameters were not valid.
+
+ The recommended action is to show an HTTP 400 error page.
+ """
+ pass
+
+ class BadStateException(Exception):
+ """
+ Thrown if all the parameters are correct, but there's no CSRF token in the session. This
+ probably means that the session expired.
+
+ The recommended action is to redirect the user's browser to try the approval process again.
+ """
+ pass
+
+ class CsrfException(Exception):
+ """
+ Thrown if the given 'state' parameter doesn't contain the CSRF
+ token from the user's session.
+ This is blocked to prevent CSRF attacks.
+
+ The recommended action is to respond with an HTTP 403 error page.
+ """
+ pass
+
+ class NotApprovedException(Exception):
+ """
+ The user chose not to approve your app.
+ """
+ pass
+
+ class ProviderException(Exception):
+ """
+ Dropbox redirected to your redirect URI with some unexpected error identifier and error
+ message.
+
+ The recommended action is to log the error, tell the user something went wrong, and let
+ them try again.
+ """
+ pass
+
+
+def _safe_equals(a, b):
+    # Compare two strings without short-circuiting on the first mismatched
+    # character, so CSRF token checks do not leak timing information.
+    if len(a) != len(b): return False
+    res = 0
+    for ca, cb in zip(a, b):
+        res |= ord(ca) ^ ord(cb)
+    return res == 0
+
+
+_OAUTH2_ACCESS_TOKEN_PATTERN = re.compile(r'\A[-_~/A-Za-z0-9\.\+]+=*\Z')
+ # From the "Bearer" token spec, RFC 6750.
diff --git a/resources/lib/dropbox/datastore.py b/resources/lib/dropbox/datastore.py
new file mode 100644
index 0000000..a3059dd
--- /dev/null
+++ b/resources/lib/dropbox/datastore.py
@@ -0,0 +1,2723 @@
+"""
+Datastores are an easy way to keep an app's per-user data -- like
+settings, bookmarks, or game state -- in sync across multiple devices
+and operating systems. Datastores are simple embedded databases,
+which are synced to Dropbox.
+
+This reference details the full set of classes needed when working
+with datastores. You can also read the Datastore API tutorial for a
+detailed example of how to use them.
+
+Unlike the client-side datastore APIs (e.g. for iOS, Android and
+JavaScript), the Python datastore API does not implement automatic
+conflict resolution. Instead, if commit() fails, you must start over.
+You can use the transaction() method for this, which allows you to
+retry the transaction several times before giving up.
+
+The Python API is not thread-safe. If you want to use the same
+Datastore object from multiple threads you should manage your own
+locking. The exception is the :class:`DatastoreManager` class; all
+its methods are thread-safe. Also, static methods are thread-safe.
+"""
+
+__all__ = ['DatastoreError', 'DatastoreNotFoundError', 'DatastoreConflictError',
+ 'DatastorePermissionError',
+ 'DatastoreManager', 'DatastoreInfo', 'Datastore', 'Table', 'Record',
+ 'Date', 'Bytes', 'List',
+ ]
+
+import array
+import base64
+import collections
+import datetime
+import hashlib
+import json
+import math
+import re
+import sys
+import time
+import uuid
+
+# The port to Python 3 is not yet finished.
+PY3 = sys.version_info[0] == 3
+
+# Polyfill a few things for Bytes().
+if PY3: # pragma: no cover
+ buffer = memoryview
+ basestring = str
+ long = int
+else:
+ bytearray = bytes
+
+# Internal values for roles, used by the HTTP protocol.
+ROLE_OWNER = 3000
+ROLE_EDITOR = 2000
+ROLE_VIEWER = 1000
+ROLE_NONE = 0
+
+
+def _dbase64_encode(b):
+ """Internal helper to encode bytes using our base64 variant.
+
+ This is like urlsafe base64 encode but strips the trailing '='
+ padding. Also, it returns a string, not a bytes object.
+ """
+ bb = base64.urlsafe_b64encode(b)
+ ss = str(bb.decode('ascii'))
+ s = ss.rstrip('=') # Remove padding.
+ return s
+
+
+def _dbase64_decode(s):
+ """Internal helper to decode bytes using our base64 variant.
+
+ This is the inverse of _dbase64_encode(), taking a string,
+ returning bytes.
+ """
+ ss = s + '=' * (4 - len(s) % 4) # Add padding back.
+ bb = ss.encode('ascii')
+ b = base64.urlsafe_b64decode(bb)
+ return b
+
+
+def _generate_shareable_dsid():
+ """Internal helper to generate a random shareable (dsid, key) pair."""
+ # Start with 32 random bytes so the encoded key will be at least 32 characters in length.
+ bkey = uuid.uuid4().bytes + uuid.uuid4().bytes
+ key = _dbase64_encode(bkey)
+ # Use the sha256 of the *encoded* key.
+ keyhash = hashlib.sha256(key.encode('ascii')).digest()
+ dsid = '.' + _dbase64_encode(keyhash)
+ return dsid, key
+
+class DatastoreError(Exception):
+ """Exception raised for datastore-specific error conditions.
+
+ This is the base class for more specific exception classes.
+ """
+
+ _resp__doc__ = """
+ The JSON dict that was returned by the server.
+ """
+
+ def __init__(self, message, resp=None):
+ super(DatastoreError, self).__init__(message)
+ self.resp = resp
+
+
+class DatastoreNotFoundError(DatastoreError):
+ """Exception raised when attempting to open a non-existent datastore.
+
+ Derives from :class:`DatastoreError`.
+ """
+
+
+class DatastoreConflictError(DatastoreError):
+ """Exception raised when the server reports a conflict.
+
+ Derives from :class:`DatastoreError`.
+ """
+
+
+class DatastorePermissionError(DatastoreError):
+ """Exception raised when the server denies access.
+
+ Derives from :class:`DatastoreError`.
+ """
+
+
+class _DatastoreOperations(object):
+ """Low-level datastore operations.
+
+ The methods here map 1:1 to endpoints in the HTTP API.
+
+ Also, the parameter names exactly match the HTTP protocol, and the
+ return value is the JSON dict returned by the request.
+
+ The exception is create_datastore(), which takes no parameters and
+ adds the generated datastore ID to the JSON dict.
+
+ Exceptions that may be raised:
+
+ - :class:`dropbox.rest.ErrorResponse` if the server returned an
+ error
+ - :class:`dropbox.rest.HTTPSocketError` if there was a
+ network problem
+ - :class:`DatastoreNotFoundError` if a specified datastore
+ does not exist
+ - :class:`DatastoreConflictError` if the server reports a write
+ conflict
+ - :class:`DatastoreError` if an unanticipated JSON response is
+ received
+ """
+
+ def __init__(self, client):
+ self._client = client
+
+ def _check_access_errors(self, resp):
+ if 'access_denied' in resp:
+ raise DatastorePermissionError(resp['access_denied'], resp)
+ if 'notfound' in resp:
+ raise DatastoreNotFoundError(resp['notfound'], resp)
+ return resp
+
+ def _check_rev(self, resp):
+ resp = self._check_access_errors(resp)
+ if 'rev' not in resp:
+ raise DatastoreError('rev missing from response: %r' % (resp,), resp)
+ return resp
+
+ def _check_handle(self, resp):
+ resp = self._check_rev(resp)
+ if 'handle' not in resp:
+ raise DatastoreError('handle missing from response: %r' % (resp,), resp)
+ return resp
+
+ def _check_ok(self, resp):
+ resp = self._check_access_errors(resp)
+ if 'ok' not in resp:
+ raise DatastoreError('ok missing from response: %r' % (resp,), resp)
+ return resp
+
+ def _check_conflict(self, resp):
+ if 'conflict' in resp:
+ raise DatastoreConflictError(resp['conflict'], resp)
+ resp = self._check_rev(resp)
+ return resp
+
+ def _check_list_datastores(self, resp):
+ if 'datastores' not in resp or 'token' not in resp:
+ raise DatastoreError('token or datastores missing from response: %r' % (resp,),
+ resp)
+ return resp
+
+ def _check_get_snapshot(self, resp):
+ resp = self._check_rev(resp)
+ if 'rows' not in resp:
+ raise DatastoreError('rows missing from response: %r' % (resp,), resp)
+ return resp
+
+ def _check_await(self, resp):
+ # Nothing to do here -- it may or may not have keys 'list_datastores' and 'get_deltas'.
+ return resp
+
+ def _check_get_deltas(self, resp):
+ resp = self._check_access_errors(resp)
+ # If there are no new deltas the response is empty.
+ if resp and 'deltas' not in resp:
+ raise DatastoreError('deltas missing from response: %r' % (resp,), resp)
+ return resp
+
+ def get_datastore(self, dsid):
+ url, params, headers = self._client.request('/datastores/get_datastore',
+ {'dsid': dsid}, method='GET')
+ resp = self._client.rest_client.GET(url, headers)
+ return self._check_handle(resp)
+
+ def get_or_create_datastore(self, dsid):
+ url, params, headers = self._client.request('/datastores/get_or_create_datastore',
+ {'dsid': dsid})
+ resp = self._client.rest_client.POST(url, params, headers)
+ return self._check_handle(resp)
+
+ def create_datastore(self):
+ # NOTE: This generates a dsid locally and adds it to the returned response.
+ dsid, key = _generate_shareable_dsid()
+ url, params, headers = self._client.request('/datastores/create_datastore',
+ {'dsid': dsid, 'key': key})
+ resp = self._client.rest_client.POST(url, params, headers)
+ resp = self._check_handle(resp)
+ if 'dsid' not in resp:
+ resp['dsid'] = dsid
+ return resp
+
+ def delete_datastore(self, handle):
+ url, params, headers = self._client.request('/datastores/delete_datastore',
+ {'handle': handle})
+ resp = self._client.rest_client.POST(url, params, headers)
+ return self._check_ok(resp)
+
+ def list_datastores(self):
+ url, params, headers = self._client.request('/datastores/list_datastores', method='GET')
+ resp = self._client.rest_client.GET(url, headers)
+ return self._check_list_datastores(resp)
+
+ def get_snapshot(self, handle):
+ url, params, headers = self._client.request('/datastores/get_snapshot',
+ {'handle': handle}, method='GET')
+ resp = self._client.rest_client.GET(url, headers)
+ return self._check_get_snapshot(resp)
+
+ def get_deltas(self, handle, rev):
+ url, params, headers = self._client.request('/datastores/get_deltas',
+ {'handle': handle, 'rev': rev},
+ method='GET')
+ resp = self._client.rest_client.GET(url, headers)
+ return self._check_get_deltas(resp)
+
+ def put_delta(self, handle, rev, changes, nonce=None):
+ args = {'handle': handle,
+ 'rev': str(rev),
+ 'changes': json.dumps(changes),
+ }
+ if nonce:
+ args['nonce'] = nonce
+ url, params, headers = self._client.request('/datastores/put_delta', args)
+ resp = self._client.rest_client.POST(url, params, headers)
+ return self._check_conflict(resp)
+
+ def await(self, token=None, cursors=None):
+ params = {}
+ if token:
+ params['list_datastores'] = json.dumps({'token': token})
+ if cursors:
+ params['get_deltas'] = json.dumps({'cursors': cursors})
+ url, params, headers = self._client.request('/datastores/await', params, method='POST')
+ resp = self._client.rest_client.POST(url, params, headers)
+ return self._check_await(resp)
+
+ def get_client(self):
+ return self._client
+
+class DatastoreManager(object):
+ """A manager for datastores.
+
+ In order to work with datastores you must first create an instance
+ of this class, passing its constructor a
+ :class:`dropbox.client.DropboxClient` instance.
+
+ The methods here let you open or create datastores and retrieve
+ the list of datastores.
+
+ This class has no state except for a reference to the
+ :class:`dropbox.client.DropboxClient`, which itself is thread-safe;
+ hence, all methods of this class are thread-safe.
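+
+    A minimal quick-start sketch, assuming the SDK is importable as the
+    ``dropbox`` package and ``ACCESS_TOKEN`` is a valid OAuth 2 token::
+
+        from dropbox.client import DropboxClient
+        from dropbox.datastore import DatastoreManager
+
+        client = DropboxClient(ACCESS_TOKEN)
+        manager = DatastoreManager(client)
+        datastore = manager.open_default_datastore()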
+ """
+
+ DEFAULT_DATASTORE_ID = 'default' #: The default datastore ID.
+ _DEFAULT_DATASTORE_ID__doc__ = """
+ The default datastore ID used by :meth:`open_default_datastore()`.
+ """
+
+ def __init__(self, client):
+ """Construct a ``DatastoreManager`` using a :class:`dropbox.client.DropboxClient`."""
+ self._dsops = _DatastoreOperations(client)
+
+ def __repr__(self):
+ return '%s(%r)' % (self.__class__.__name__, self._dsops._client if self._dsops else None)
+
+ def close(self):
+ # This doesn't do anything to the _DatastoreOperations object.
+ self._dsops = None
+
+ def get_client(self):
+ """Return the :class:`dropbox.client.DropboxClient` object used."""
+ return self._dsops.get_client()
+
+ def open_default_datastore(self):
+ """Open the default datastore for this account, creating it if needed.
+
+ This is a shorthand for :meth:`open_or_create_datastore`\(:const:`DEFAULT_DATASTORE_ID`).
+
+ Returns
+ A :class:`Datastore` instance.
+ """
+ return self.open_or_create_datastore(self.DEFAULT_DATASTORE_ID)
+
+ def open_datastore(self, id):
+ """Open an existing datastore given its ID (a string).
+
+ Returns
+ A :class:`Datastore` instance.
+ """
+ if not Datastore.is_valid_id(id):
+ raise ValueError('Invalid datastore ID %r' % (id,))
+ resp = self._dsops.get_datastore(id)
+ return self._datastore_from_response(resp, id)
+
+ def open_or_create_datastore(self, id):
+ """Open a private datastore, creating it if it does not yet exist.
+
+ The ID must not start with a dot.
+
+ Returns
+ A :class:`Datastore` instance.
+ """
+ if not Datastore.is_valid_id(id):
+ raise ValueError('Invalid datastore ID %r' % (id,))
+ if id.startswith('.'):
+ raise ValueError('Datastore ID must not start with dot')
+ resp = self._dsops.get_or_create_datastore(id)
+ return self._datastore_from_response(resp, id)
+
+ def create_datastore(self):
+ """Create a new datastore with a randomly assigned ID.
+
+ The assigned ID will start with a dot.
+
+ Returns
+ A :class:`Datastore` instance.
+ """
+ resp = self._dsops.create_datastore()
+ return self._datastore_from_response(resp, resp['dsid'])
+
+ def open_raw_datastore(self, id, handle, role=None):
+ """Create a new :class:`Datastore` object without going to the server.
+
+ You can use this to save a server roundtrip when opening a
+ datastore given a :class:`DatastoreInfo` object returned by
+ :meth:`list_datastores()`::
+
+ def open_from_info(mgr, info):
+ ds = mgr.open_raw_datastore(info.id, info.handle, info.role)
+ ds.load_snapshot()
+ return ds
+ """
+ if role is None:
+ role = Datastore.OWNER # Our best guess.
+ else:
+ if not isinstance(role, basestring):
+ raise TypeError('Role must be a string: %r' % (role,))
+ if role not in (Datastore.OWNER, Datastore.EDITOR, Datastore.VIEWER):
+ raise ValueError('invalid role (%r)' % (role,))
+ if not id.startswith('.') and role != Datastore.OWNER:
+ raise ValueError('private datastore role must be owner: %r' % (role,))
+ return Datastore(self, id=id, handle=handle, role=role)
+
+ def delete_datastore(self, id):
+ """Delete a datastore given its ID."""
+ resp = self._dsops.get_datastore(id)
+ self._dsops.delete_datastore(resp['handle'])
+
+ def _datastore_from_response(self, resp, id):
+ handle = resp['handle']
+ rev = resp['rev']
+ role = _make_role(resp.get('role'))
+ ds = Datastore(self, id=id, handle=handle, role=role)
+ if rev > 0:
+ ds.load_snapshot()
+ return ds
+
+ def list_datastores(self):
+ """List the existing datastores for this account.
+
+ Returns
+ A list of :class:`DatastoreInfo` objects.
+ """
+ resp = self._dsops.list_datastores()
+ return [_make_dsinfo(item) for item in resp['datastores']]
+
+ def await(self, token=None, datastores=None):
+ """Wait until certain changes occur.
+
+        This method implements a flexible and efficient long-polling
+ mechanism that can be used to be notified of changes to
+ specific datastores and/or to the list of datastores itself
+ (for the current account).
+
+ Parameters
+ token
+ An optional token that represents a hash of the list of
+ datastores, computed by the server. If this parameter is
+ present and non-empty, ``await()`` will return when the
+ list of datastores has changed in a way that would cause a
+ different token to be computed, such as when a datastore
+ is created or deleted. The token should be obtained from
+ the previous ``await()`` call; as a special case, the
+ value ``'.'`` forces the call to return immediately with a
+ fresh token (as does any outdated token).
+
+ datastores
+ An optional list of :class:`Datastore` instances or dict
+ mapping such instances to revision numbers. The instances
+              represent currently open datastores for which you are
+ interested in receiving updates. If this parameter is a
+ list of instances, the revision to compare is retrieved
+ from each instance using :meth:`Datastore.get_rev()`. If
+ this parameter is present and non-empty, ``await()`` will
+ return whenever a new revision is available for any of
+ those datastores.
+
+ The call also returns after a certain amount of time passes
+ without activity. The timeout is controlled by the server; it
+ is currently approximately one minute.
+
+ Returns
+ A ``(token, dsinfos, deltamap)`` tuple. The items are as follows:
+
+ token
+ A new token, or the value of the ``token`` parameter if
+ there are no changes to the list of datastores. You
+ should pass this to the next ``await()`` call.
+
+            dsinfos
+ The full list of :class:`DatastoreInfo` objects (as
+ returned by :meth:`list_datastores()`) if there is a new
+ token, otherwise ``None``.
+
+ deltamap
+ Either a mapping indicating which of the given datastores
+ were changed, or ``None`` if there are no changes to
+ datastores to report. If it is a mapping, each key is a
+                :class:`Datastore`, and the corresponding value is either a
+ non-empty list of deltas, or ``None`` if that datastore is
+ deleted or is otherwise invalid. Datastores that were
+ not changed (and are still valid) will not be present.
+
+ Unlike :meth:`Datastore.load_deltas()` and
+ :meth:`Datastore.await_deltas()`, ``await()`` does not apply
+ the deltas returned in ``deltamap`` to the respective
+ datastores; that is the caller's responsibility. For
+ example::
+
+ for ds, deltas in deltamap.items():
+ if deltas is not None:
+ ds.apply_deltas(deltas)
+ else:
+ # ds has been deleted
+ """
+ cursors = None
+ if datastores is not None:
+ cursors = {}
+ if isinstance(datastores, collections.Mapping):
+ for ds, rev in datastores.items():
+ cursors[ds._handle] = rev
+ else:
+ for ds in datastores:
+ cursors[ds._handle] = ds._rev
+ resp = self._dsops.await(token=token, cursors=cursors)
+ dsinfos = None
+ deltamap = None
+ if 'list_datastores' in resp:
+ subresp = resp['list_datastores']
+ token = subresp['token']
+ dsinfos = [_make_dsinfo(item) for item in subresp['datastores']]
+ if 'get_deltas' in resp:
+ subresp = resp['get_deltas']
+ rawmap = subresp['deltas']
+ deltamap = {}
+ for ds in datastores:
+ if ds._handle in rawmap:
+ value = rawmap[ds._handle]
+ # If this assert triggers, the server sent us bogus data.
+ assert isinstance(value, dict), repr(value)
+ if 'deltas' in value:
+ deltamap[ds] = value['deltas']
+ elif 'notfound' in value:
+ # This datastore is invalid.
+ deltamap[ds] = None
+ # Else the server sent us a response we don't
+ # understand; ignore it.
+ return token, dsinfos, deltamap
+
+ @staticmethod
+ def make_cursor_map(datastores, deltamap):
+ """Utility to construct a ``datastores`` argument for :meth:`await()`.
+
+ Parameters
+ datastores
+ A list of :class:`Datastore` objects.
+ deltamap
+              A data structure as returned by :meth:`await()` in its
+ ``deltamap`` return value. This may be None or it may be
+ a dict mapping :class:`Datastore` objects to values that
+ are either lists of deltas or ``None``.
+
+ Returns
+ A dict mapping :class:`Datastore` objects to revisions,
+ suitable to pass as the ``datastores`` parameter to
+ :meth:`await()`. This will normally just map the datastores
+ from the ``datastores`` parameter to their current revision;
+ however, datastores that are deleted or invalid according to
+ ``deltamap`` are excluded from the dict, and for datastores
+ that have one or more deltas in ``deltamap``, the revision
+ is set to one higher than the revision of the last delta.
+
+ Using this function will reduce redundant server roundtrips in
+ multi-threaded apps that call :meth:`await()` in a background
+ thread and then pass the received deltas to the main thread
+ through some kind of queue.
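+
+        A minimal sketch of such a background loop, assuming ``mgr`` and the
+        list ``open_datastores`` already exist (queue handling is elided)::
+
+            token = None
+            cursor_map = dict((ds, ds.get_rev()) for ds in open_datastores)
+            while True:
+                token, dsinfos, deltamap = mgr.await(token=token, datastores=cursor_map)
+                # ... hand deltamap (and dsinfos, if present) to the main thread ...
+                cursor_map = DatastoreManager.make_cursor_map(open_datastores, deltamap)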
+ """
+ if deltamap is None:
+ deltamap = {}
+ cursor_map = {}
+ for ds in datastores:
+ if ds not in deltamap:
+ cursor_map[ds] = ds._rev
+ else:
+ deltas = deltamap[ds]
+ # If this is None, the datastore is known to be
+ # invalid, and we do not put it in the map.
+ if deltas is not None:
+ assert deltas, 'Unexpected empty list of deltas in deltamap'
+ cursor_map[ds] = deltas[-1]['rev'] + 1
+ return cursor_map
+
+
+DatastoreInfo = collections.namedtuple('DatastoreInfo', 'id handle rev title mtime effective_role')
+
+# Dummy class for docstrings, see doco.py.
+class _DatastoreInfo__doc__(object):
+ """A read-only record of information about a :class:`Datastore`.
+
+ Instances of this class are returned by
+ :meth:`DatastoreManager.list_datastores()`.
+ """
+ _id__doc__ = """The datastore ID (a string)."""
+ _handle__doc__ = """The datastore handle (a string)."""
+ _rev__doc__ = """The datastore revision (an integer >= 0)."""
+ _title__doc__ = """The datastore title (string or None)."""
+ _mtime__doc__ = """The time of last modification (:class:`Date` or None)."""
+ _effective_role__doc__ = """
+ The current user's effective role (:const:`Datastore.OWNER`,
+ :const:`Datastore.EDITOR` or :const:`Datastore.VIEWER`).
+ """
+
+
+def _make_dsinfo(item):
+ title = mtime = None
+ info = item.get('info')
+ if info:
+ title = info.get('title')
+ raw_mtime = info.get('mtime')
+ if raw_mtime is not None:
+ mtime = Date.from_json(raw_mtime)
+ dsid = item['dsid']
+ role = _make_role(item.get('role'))
+ assert role is not None, repr(role)
+ return DatastoreInfo(id=dsid, handle=item['handle'], rev=item['rev'],
+ title=title, mtime=mtime, effective_role=role)
+
+
+def _make_role(irole):
+ if irole is None:
+ return Datastore.OWNER # Backward compatible default.
+ if not isinstance(irole, (int, long)):
+        raise TypeError('irole must be an integer: %r' % (irole,))
+ # Unknown roles are truncated down to the nearest known role.
+ if irole >= ROLE_OWNER:
+ return Datastore.OWNER
+ if irole >= ROLE_EDITOR:
+ return Datastore.EDITOR
+ if irole >= ROLE_VIEWER:
+ return Datastore.VIEWER
+ return Datastore.NONE
+
+
+def _parse_role(role, owner_ok=False):
+ if role == Datastore.OWNER and owner_ok:
+ return ROLE_OWNER
+ if role == Datastore.EDITOR:
+ return ROLE_EDITOR
+ if role == Datastore.VIEWER:
+ return ROLE_VIEWER
+ if role == Datastore.NONE:
+ return ROLE_NONE
+ if not isinstance(role, basestring):
+ raise TypeError('invalid role type: %r' % (role,))
+ raise ValueError('invalid role: %r' % (role,))
+
+
+_DBASE64_VALID_CHARS = '-_A-Za-z0-9'
+_VALID_PRIVATE_DSID_RE = r'[a-z0-9_-]([a-z0-9._-]{0,62}[a-z0-9_-])?'
+_VALID_SHAREABLE_DSID_RE = r'\.[%s]{1,63}' % _DBASE64_VALID_CHARS
+_VALID_DSID_RE = r'\A(%s|%s)\Z' % (_VALID_PRIVATE_DSID_RE, _VALID_SHAREABLE_DSID_RE)
+
+
+class Principal(object):
+ """A principal used in the access control list (ACL).
+
+ Currently the only valid principals are the predefined objects
+ :const:`Datastore.TEAM` and :const:`Datastore.PUBLIC`.
+ """
+
+ def __init__(self, key):
+ assert self.__class__ is not Principal, 'Cannot directly instantiate Principal'
+ self._key = key
+
+ @property
+ def key(self):
+ return self._key
+
+ def __hash__(self):
+ return hash(self._key)
+
+ def __eq__(self, other):
+ if not isinstance(other, Principal):
+ return NotImplemented
+ return self._key == other._key
+
+ def __ne__(self, other):
+ if not isinstance(other, Principal):
+ return NotImplemented
+ return self._key != other._key
+
+
+class User(Principal):
+ """A user is identified by a numeric user ID (uid).
+
+ The uid may be either an integer or a string of digits.
+ """
+
+ def __init__(self, uid):
+ if not isinstance(uid, (int, long, basestring)):
+ raise TypeError('Invalid uid type: %r' % (uid,))
+ if not str(uid).isdigit():
+ raise ValueError('Invalid uid: %r' % (uid,))
+ if str(int(uid)) != str(uid):
+ raise ValueError('Leading zeros or sign not allowed in uid: %r' % (uid,))
+ if int(uid) <= 0:
+ raise ValueError('Zero or negative uid not allowed: %r' % (uid,))
+ super(User, self).__init__('u%s' % uid)
+
+ def __repr__(self):
+ return 'User(%s)' % self._key[1:]
+
+
+class TeamPrincipal(Principal):
+ """:const:`Datastore.TEAM` is a special principal to set team permissions.
+
+ Don't instantiate this class, use the predefined :const:`Datastore.TEAM` variable.
+ """
+
+ def __init__(self):
+ super(TeamPrincipal, self).__init__('team')
+
+ def __repr__(self):
+ return 'TEAM'
+
+
+class PublicPrincipal(Principal):
+ """:const:`Datastore.PUBLIC` is a special principal to set public permissions.
+
+ Don't instantiate this class, use the predefined :const:`Datastore.PUBLIC` variable.
+ """
+
+ def __init__(self):
+ super(PublicPrincipal, self).__init__('public')
+
+ def __repr__(self):
+ return 'PUBLIC'
+
+
+class Datastore(object):
+ """An object representing a datastore.
+
+ A datastore holds a set of tables identified by table IDs, each of
+ which holds a set of records identified by record IDs. A record
+ holds a set of field values identified by field names. The
+ ``Datastore`` object keeps a snapshot of the current content (all
+ tables, records and fields) in memory and supports simple queries.
+
+ Changes to a datastore are made through methods on the
+ :class:`Table` and :class:`Record` classes, as well as the
+ :class:`List` class (which represents a composite field value).
+
+ Changes are not immediately sent to the server. Instead, the
+ datastore keeps a list of changes in memory; these are sent to the
+ server by the :meth:`commit()` method. The :meth:`load_deltas()`
+ method retrieves new changes from the server and incorporates them
+ into the current snapshot. Those changes that have not yet been
+ sent to the server can be undone using the :meth:`rollback()`
+ method. Finally, the :meth:`transaction()` method combines the
+ functionality of these into a more powerful operation that can
+ retry sets of changes specified by a callback function.
+
+ **Do not instantiate this class directly**. Use the methods on
+ :class:`DatastoreManager` instead.
+ """
+
+ DATASTORE_SIZE_LIMIT = 10 * 1024 * 1024 #: Datastore size limit placeholder for sphinx.
+ _DATASTORE_SIZE_LIMIT__doc__ = """
+ The maximum size in bytes of a datastore.
+ """
+
+ PENDING_CHANGES_SIZE_LIMIT = 2 * 1024 * 1024 #: Delta size limit placeholder for sphinx.
+ _PENDING_CHANGES_SIZE_LIMIT__doc__ = """
+ The maximum size in bytes of changes that can be queued up between calls to
+ :meth:`commit()`.
+ """
+
+ RECORD_COUNT_LIMIT = 100000 #: Record count limit placeholder for sphinx.
+ _RECORD_COUNT_LIMIT__doc__ = """
+ The maximum number of records in a datastore.
+ """
+
+ BASE_DATASTORE_SIZE = 1000 #: Base datastore size placeholder for sphinx.
+ _BASE_DATASTORE_SIZE__doc__ = """
+ The size in bytes of a datastore before accounting for the size of its records.
+
+ The overall size of a datastore is this value plus the size of all records.
+ """
+
+ BASE_DELTA_SIZE = 100 #: Base delta size placeholder for sphinx.
+ _BASE_DELTA_SIZE__doc__ = """
+ The size in bytes of a delta before accounting for the size of each change.
+
+ The overall size of a delta is this value plus the size of each change.
+ """
+
+ BASE_CHANGE_SIZE = 100 #: Base change size placeholder for sphinx.
+ _BASE_CHANGE_SIZE__doc__ = """
+ The size in bytes of a change before including the size of its values.
+
+ The overall size of a change is this value plus the size of the values in the change.
+ """
+
+ TEAM = TeamPrincipal() #: Team placeholder for sphinx.
+ _TEAM__doc__ = """
+ The principal used to get or modify the team role for a datastore.
+ """
+
+ PUBLIC = PublicPrincipal() #: Public placeholder for sphinx.
+ _PUBLIC__doc__ = """
+ The principal used to get or modify the public role for a datastore.
+ """
+
+ OWNER = 'owner' #: Owner placeholder for sphinx.
+ _OWNER__doc__ = """
+ The role indicating ownership of a datastore. Owners have
+ full access and their role cannot be changed or removed.
+ """
+
+ EDITOR = 'editor' #: Editor placeholder for sphinx.
+ _EDITOR__doc__ = """
+ The role indicating edit (i.e., read-write) access. Editors
+ can also modify the role for other principals (except owners).
+ """
+
+ VIEWER = 'viewer' #: Viewer placeholder for sphinx.
+ _VIEWER__doc__ = """
+ The role indicating view (i.e. read-only) access. Viewers
+ cannot change any aspect of a datastore.
+ """
+
+    NONE = 'none' #: None placeholder for sphinx.
+ _NONE__doc__ = """
+ The role indicating no access at all.
+ """
+
+ def __init__(self, manager, id=None, handle=None, role=None):
+ if role is not None:
+ # Should've been caught earlier.
+ assert isinstance(role, str), repr(role)
+ assert role in (Datastore.OWNER, Datastore.EDITOR, Datastore.VIEWER), repr(role)
+ self._manager = manager
+ self._id = id
+ self._handle = handle
+ self._role = role
+ self._rev = 0
+ self._tables = {}
+ self._changes = []
+ self._record_count = 0
+ self._size = self.BASE_DATASTORE_SIZE
+ self._pending_changes_size = 0
+
+ def __repr__(self):
+        return 'Datastore(<rev=%d>, id=%r, handle=%r, role=%r)' % (self._rev, self._id,
+                                                                    self._handle, self._role)
+
+ def _check_edit_permission(self):
+ if self.is_shareable() and self._role not in (Datastore.OWNER, Datastore.EDITOR):
+ raise DatastorePermissionError('This datastore is read-only')
+
+ def _check_shareable(self):
+ if not self.is_shareable():
+ raise DatastoreError('Access control is only supported for shareable datastores')
+
+ def _check_principal(self, principal):
+ if not isinstance(principal, Principal):
+ raise TypeError('A Principal is expected')
+
+ @staticmethod
+ def is_valid_id(id):
+ """A helper method to check for a valid datastore ID.
+
+ There are actually two types of datastore IDs, which
+ are called private IDs and shareable IDs.
+
+ Private datastores are created with
+ :meth:`DatastoreManager.open_default_datastore()` or
+ :meth:`DatastoreManager.open_or_create_datastore()`,
+ and the app has control over the name.
+ Valid private datastore IDs are 1-64 characters long and
+ may contain the following characters: ``a-z 0-9 . - _`` .
+ However the first and last character cannot be dots. Note
+ that upper case is not allowed.
+
+ Shareable datastores are created with
+ :meth:`DatastoreManager.create_datastore()`; the
+ name is a dot followed by a random-looking sequence of
+ characters assigned by the SDK. Valid shareable datastore IDs
+ are a dot followed by 1-63 dbase64 characters (which are
+ ``a-z A-Z 0-9 - _``). Note that upper case *is* allowed.
+
+ The :meth:`DatastoreManager.open_datastore()` and
+ :meth:`DatastoreManager.open_raw_datastore()` methods
+        can open either type of datastore.
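+
+        For example (illustrative IDs)::
+
+            Datastore.is_valid_id('default')      # True: a private ID
+            Datastore.is_valid_id('.mgjJbfGwQ')   # True: a shareable ID
+            Datastore.is_valid_id('Default')      # False: upper case is not allowed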
+ """
+ return bool(re.match(_VALID_DSID_RE, id))
+
+ @staticmethod
+ def is_valid_shareable_id(id):
+ """A helper method to check for a valid shareable datastore ID.
+
+ This is a valid datastore ID starting with a '.'.
+ """
+ return Datastore.is_valid_id(id) and id.startswith('.')
+
+ def get_id(self):
+ """Return the ID of this datastore (a string)."""
+ return self._id
+
+ def is_shareable(self):
+ """Return whether this is a shareable datastore."""
+ return self._id.startswith('.')
+
+ def is_writable(self):
+ """Return whether this datastore is writable.
+
+ Always true for private datastores.
+ False iff role==:const:`VIEWER` for shareable datastores.
+ """
+ return self._role != Datastore.VIEWER
+
+ def get_handle(self):
+ """Return the handle of this datastore (a string)."""
+ return self._handle
+
+ def get_rev(self):
+ """Return the current revision of this datastore (an integer >= 0)."""
+ return self._rev
+
+ def get_manager(self):
+ """Return the :class:`DatastoreManager` to which this datastore belongs."""
+ return self._manager
+
+ def get_mtime(self):
+        """Return the time this datastore was last modified, if known.
+
+ This value is automatically set to the current time by
+ :meth:`commit()`.
+
+ Returns
+ A :class:`Date` or None.
+ """
+ return self._get_info_field('mtime')
+
+ def get_title(self):
+ """Return the title of this datastore (a string or None).
+
+ The title is primarily useful for apps that use shareable
+ datastores to represent documents created by the user. Using
+ :meth:`set_title()` the title can be set to a string chosen by
+ the user, and :meth:`DatastoreManager.list_datastores()` will
+ return the titles (see :class:`DatastoreInfo`). The app can
+ then show the user a list of documents containing the title
+ and time of last modification for each document without
+ needing to open all datastores.
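+
+        A minimal sketch of such a document list, assuming ``manager`` is an
+        existing :class:`DatastoreManager`::
+
+            for info in manager.list_datastores():
+                print '%s (last modified %s)' % (info.title, info.mtime)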
+ """
+ return self._get_info_field('title')
+
+ def set_title(self, title):
+ """Set the title of this datastore (a string or None).
+
+ Since this operation is implemented by updating a reserved
+ table, you must call :meth:`commit()` to send this change to
+ the server.
+ """
+ if title is not None and not isinstance(title, basestring):
+ raise TypeError('Title must be a string, not %s' % type(title).__name__)
+ self._set_info_field('title', title)
+
+ def _set_mtime(self):
+ now = time.time()
+ mtime = Date(now)
+ self._set_info_field('mtime', mtime)
+
+ def _get_info_field(self, field):
+ info_table = self.get_table(':info')
+ info_record = info_table.get('info')
+ if info_record is None:
+ return None
+ return info_record.get(field)
+
+ def _set_info_field(self, field, value):
+ info_table = self.get_table(':info')
+ info_record = info_table.get_or_insert('info')
+ info_record.set(field, value)
+
+ def get_record_count(self):
+ """Return the number of records in this datastore."""
+ return self._record_count
+
+ def get_size(self):
+ """Return the size in bytes of this datastore.
+
+ The overall size of a datastore is calculated by summing the
+ size of all records, plus the base size of an empty datastore itself.
+ """
+ return self._size
+
+ def get_pending_changes_size(self):
+ """Return the size in bytes of changes made since the last :meth:`commit()`.
+
+ If there are any pending changes, the total size is given by summing the size
+ of those changes and :const:`BASE_DELTA_SIZE`. If there are no pending
+ changes, the total size is zero.
+ """
+ if self._changes:
+ return Datastore.BASE_DELTA_SIZE + self._pending_changes_size
+ else:
+ return 0
+
+ def _add_pending_change(self, change):
+ self._changes.append(change)
+ self._pending_changes_size += change.size()
+
+ def get_effective_role(self):
+ """Return the effective role for the current user.
+
+ This can return :const:`OWNER`, :const:`EDITOR` or
+ :const:`VIEWER`.
+
+ For a private datastore this always returns :const:`OWNER`.
+ """
+ if self.is_shareable():
+ return self._role
+ else:
+ return Datastore.OWNER
+
+ def list_roles(self):
+ """Return the full ACL, as a dict mapping principals to roles.
+
+ This is only supported for shareable datastores.
+ """
+ self._check_shareable()
+ acl_table = self.get_table(':acl')
+ acl = {}
+ for rec in acl_table.query():
+ id = rec.get_id()
+ if id == 'team':
+ principal = Datastore.TEAM
+ elif id == 'public':
+ principal = Datastore.PUBLIC
+ elif id.startswith('u') and id[1:].isdigit():
+ principal = User(id[1:])
+ else:
+ continue # pragma: nocover.
+ acl[principal] = _make_role(rec.get('role'))
+ return acl
+
+ def get_role(self, principal):
+ """Return the role for a principal.
+
+ This can return :const:`OWNER`, :const:`EDITOR`,
+ :const:`VIEWER`, or ``None``.
+
+ The principal must be :const:`TEAM` or :const:`PUBLIC`.
+
+ This is only supported for shareable datastores.
+
+ This method only returns the role explicitly set for the given
+ principal in the ACL; it is equivalent to
+ ``ds.list_roles().get(principal)``. The effective role for a
+ principal may be different; it is affected by the full ACL as
+ well as by team membership and ownership.
+
+ To get the effective role for the current user, use
+ :meth:`get_effective_role()`.
+ """
+ self._check_shareable()
+ self._check_principal(principal)
+ acl_table = self.get_table(':acl')
+ rec = acl_table.get(principal.key)
+ if rec is None:
+ return Datastore.NONE
+ else:
+ return _make_role(rec.get('role'))
+
+ def set_role(self, principal, role):
+ """Set a principal's role.
+
+ The principal must be :const:`TEAM` or :const:`PUBLIC`.
+ The role must be :const:`EDITOR` or :const:`VIEWER`.
+
+ If the principal already has a role it is updated.
+
+ This is only supported for writable, shareable datastores.
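+
+        For example, to make a shareable datastore readable by anyone
+        (a minimal sketch, assuming ``ds`` is an open shareable datastore)::
+
+            ds.set_role(Datastore.PUBLIC, Datastore.VIEWER)
+            ds.commit()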
+ """
+ if role == Datastore.NONE:
+ return self.delete_role(principal)
+ self._check_shareable()
+ self._check_principal(principal)
+ irole = _parse_role(role, owner_ok=False)
+ acl_table = self.get_table(':acl')
+ rec = acl_table.get(principal.key)
+ if rec is None:
+ acl_table.get_or_insert(principal.key, role=irole)
+ else:
+ rec.update(role=irole)
+
+ def delete_role(self, principal):
+ """Delete a principal's role.
+
+ The principal must be :const:`TEAM` or :const:`PUBLIC`.
+
+ The principal may but need not have a role.
+
+ This is only supported for writable, shareable datastores.
+ """
+ self._check_shareable()
+ self._check_principal(principal)
+ acl_table = self.get_table(':acl')
+ rec = acl_table.get(principal.key)
+ if rec is not None:
+ rec.delete_record()
+
+ def load_snapshot(self):
+ """Load the datastore with a snapshot retrieved from the server.
+
+ All previously loaded content of the datastore is discarded,
+ including pending changes.
+
+ This is automatically called by most of the ``open_*()``
+ methods, so there is normally no reason to call this.
+ """
+ resp = self._manager._dsops.get_snapshot(self._handle)
+ rev = resp['rev']
+ snapshot = resp['rows']
+ self.apply_snapshot(rev, snapshot)
+
+ def apply_snapshot(self, rev, snapshot):
+ """Restore the datastore from a revision and a snapshot.
+
+ All previously loaded content of the ``Datastore`` object is
+ discarded, including pending changes.
+
+ Normally this method is called internally by
+ :meth:`load_snapshot()`. It may also be called with a
+ revision and snapshot obtained previously from
+ :meth:`get_rev()` and :meth:`get_snapshot()`.
+ """
+ self._rev = 0
+ self._tables = {}
+ self._changes = []
+ for row in snapshot:
+ tid = row['tid']
+ recordid = row['rowid']
+ data = dict((field, _value_from_json(v)) for field, v in row['data'].items())
+ table = self.get_table(tid)
+ table._update_record_fields(recordid, data, _compute_record_size_for_fields(data))
+ self._rev = rev
+
+ def get_snapshot(self):
+ """Return a snapshot of the datastore.
+
+ A snapshot is a list of dicts with keys ``'tid'``,
+ ``'rowid'``, and ``'data'``, where ``'tid'`` maps to the table
+ ID, ``'rowid'`` maps to a record ID, and ``'data'`` maps to a
+ JSON-encoded record, i.e. a dict mapping field names to
+ JSON-encoded values.
+
+ Together with the revision (which you can obtain from
+ :meth:`get_rev()`) this comprises the mutable state of a
+ datastore. You may restore a ``Datastore`` object to a given
+ state using :meth:`apply_snapshot()`.
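+
+        A minimal sketch of caching and later restoring that state (how the
+        snapshot is persisted is up to the caller)::
+
+            saved_rev = ds.get_rev()
+            saved_rows = ds.get_snapshot()
+            # ... later, on a freshly opened Datastore for the same datastore ...
+            ds.apply_snapshot(saved_rev, saved_rows)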
+ """
+ snapshot = []
+ for table_id, table in self._tables.items():
+ for record_id, fields in table._records.items():
+ data = {}
+ for field, value in fields.items():
+ data[field] = _value_to_json(value)
+ snapshot.append({'tid': table_id, 'rowid': record_id, 'data': data})
+ return snapshot
+
+ def await_deltas(self):
+ """Wait for and incorporate changes to this datastore.
+
+ It is an error to call this method if the datastore has
+ pending changes.
+
+ Returns
+ A dict mapping table IDs to sets of records,
+ see :meth:`apply_deltas()`.
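+
+        A minimal sketch of reacting to remote changes, assuming ``ds`` has no
+        pending local changes::
+
+            changed = ds.await_deltas()
+            for tid, records in changed.items():
+                print '%d record(s) changed in table %s' % (len(records), tid)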
+ """
+ if self._changes:
+ raise DatastoreError('Cannot call await_deltas() with pending changes')
+ resp = self._manager._dsops.await(cursors={self._handle: self._rev})
+ if 'get_deltas' not in resp:
+ return {}
+ subresp = resp['get_deltas']
+ if self._handle not in subresp['deltas']:
+ return {}
+ myresp = subresp['deltas'][self._handle]
+ myresp = self._manager._dsops._check_access_errors(myresp)
+ deltas = myresp.get('deltas')
+ return self.apply_deltas(deltas)
+
+ def load_deltas(self):
+ """Load new changes retrieved from the server into the datastore.
+
+ All previously loaded content is preserved, unless explicitly
+ deleted or modified by one of the loaded changes.
+
+ It is an error to call this method if the datastore has
+ pending changes.
+
+ Calling ``ds.load_deltas()`` is equivalent to::
+
+ deltas = ds.fetch_deltas()
+ ds.apply_deltas(deltas)
+
+ Returns
+ A dict mapping table IDs to sets of records,
+ see :meth:`apply_deltas()`.
+ """
+ if self._changes:
+ raise DatastoreError('Cannot call load_deltas() with pending changes')
+ deltas = self.fetch_deltas()
+ return self.apply_deltas(deltas)
+
+ def fetch_deltas(self):
+ """Retrieve new changes from the server without applying them.
+
+ This is one of the building blocks of :meth:`load_deltas()`;
+ you probably want to use that instead.
+
+ Returns
+ A list of deltas suitable to be passed directly to
+ :meth:`apply_deltas()`.
+ """
+ resp = self._manager._dsops.get_deltas(self._handle, self._rev)
+ return resp.get('deltas')
+
+ def apply_deltas(self, deltas):
+ """Apply deltas retrieved by some other means.
+
+ It is an error to call this method if the datastore has
+ pending changes.
+
+ Normally this method is called internally by
+ :meth:`await_deltas()` or :meth:`load_deltas()`.
+
+ The deltas should be received from the server. Under certain
+ conditions (e.g. when :meth:`DatastoreManager.await()` is
+ called in a background thread) it is possible that the server
+ sends a delta that has already been applied locally. Such
+ deltas are silently ignored.
+
+ Returns
+ A dict mapping table IDs to sets of records,
+ indicating the records that were inserted, updated or deleted
+ by the loaded deltas.
+ """
+ if self._changes:
+ raise DatastoreError('Cannot call apply_deltas() with pending changes')
+ if deltas is None:
+ return {}
+ raw_changed_records = set() # Set of (tid, recordid) tuples.
+ for delta in deltas:
+ rev = delta['rev']
+ changes = delta['changes']
+ if rev < self._rev:
+ continue # We've already seen this revision, or it is ours.
+ if rev != self._rev:
+ # Either the server sent us bad data or our state is mixed up.
+ raise DatastoreError('Revision out of sequence (expected %d, actual %d)' %
+ (self._rev, rev))
+ for c in changes:
+ ch = _Change.from_json(c)
+ tid, recordid = self._apply_change(ch)
+ raw_changed_records.add((tid, recordid))
+ self._rev = rev + 1
+ changed_records = {} # Map of tid to set of Record objects.
+ for tid, recordid in raw_changed_records:
+ record = Record(self._tables[tid], recordid)
+ if tid in changed_records:
+ changed_records[tid].add(record)
+ else:
+ changed_records[tid] = set([record])
+ return changed_records
+
+ def get_table(self, tid):
+ """Get a :class:`Table` object with the given table ID."""
+ t = self._tables.get(tid)
+ if t is None:
+ if not Table.is_valid_id(tid):
+ raise ValueError('Invalid table ID %r' % (tid,))
+ t = Table(self, tid)
+ self._tables[tid] = t
+ return t
+
+ def list_table_ids(self):
+ """List the non-empty tables for this datastore.
+
+ Returns
+ A set of table IDs (strings).
+ """
+ tids = set()
+ for tid, table in self._tables.items():
+ if table._records:
+ tids.add(tid)
+ return tids
+
+ def rollback(self):
+ """Discard all pending changes since the last successful :meth:`commit()`."""
+ while self._changes:
+ ch = self._changes.pop()
+ inv = ch.invert()
+ self._apply_change(inv)
+
+ def commit(self):
+ """Attempt to commit all pending changes.
+
+ Pending changes are all mutations to a datastore made through
+ :meth:`Table.insert()`, :meth:`Record.set()` and similar
+ methods (including mutating :class:`List` methods).
+
+ To upload pending changes to the server you must use
+ :meth:`commit()`, or :meth:`transaction()`, which calls it.
+
+ This method raises :class:`DatastoreConflictError` when the
+ server detects a conflict and refuses to accept the changes.
+ The proper response to this exception is to call
+ :meth:`rollback()`, then :meth:`load_deltas()`, and then retry
+ the transaction from the top, or give up and report an error
+ to the user. (The :meth:`transaction()` method implements
+ this higher-level control flow.)
+
+ If there are any changes, this method adds a change that
+ updates the datastore's mtime. If there are no changes, this
+ method is a no-op (and no empty delta will be sent to the
+ server).
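+
+ For illustration, a hand-rolled sketch of the recovery flow (``ds`` and
+ ``tasks`` are assumed to exist; :meth:`transaction()` packages the same
+ pattern with retries)::
+
+ record = tasks.get_or_insert('todo', done=False)
+ record.update(done=True)
+ try:
+     ds.commit()
+ except DatastoreConflictError:
+     ds.rollback()     # discard our changes
+     ds.load_deltas()  # catch up with the server, then retry or give up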
+ """
+ self._check_edit_permission()
+ if not self._changes:
+ return
+ self._set_mtime()
+ changes = [ch.to_json() for ch in self._changes]
+ nonce = _new_uuid()
+ resp = self._manager._dsops.put_delta(self._handle, self._rev, changes, nonce)
+ self._rev = resp['rev']
+ self._changes = []
+
+ def transaction(self, callback, *args, **kwds):
+ """transaction(callback, *args, max_tries=1)
+
+ Call a callback function and commit changes, with retries.
+
+ When multiple clients try to update a datastore concurrently,
+ it is possible for :meth:`commit()` to raise
+ :class:`DatastoreConflictError`, indicating a conflict. This
+ function handles the details of handling such failures and
+ retrying the updates. You pass it a callback function which
+ will be called repeatedly until :meth:`commit()` succeeds, or
+ the maximum number of tries is reached.
+
+ The keyword-only parameter ``max_tries`` specifies how many
+ times the callback is called before giving up. The default is
+ 1, i.e. call it only once; the recommended value is 4.
+
+ Generally, if you plan to modify a datastore, you should do
+ all your reads and writes in a transaction. On entry, there
+ should be no pending changes.
+
+ Example::
+
+ def do_stuff(record_id):
+ record = tasks_table.get(record_id)
+ user_count = record.get('user_count')
+ record.update(user_count=user_count+1)
+
+ datastore.transaction(do_stuff, some_record_id, max_tries=4)
+
+ Extra positional arguments are passed on to the callback
+ function. On success, the return value of the callback is
+ returned.
+
+ When a commit attempt fails, uncommitted changes are rolled
+ back using :meth:`rollback()`, and new changes are retrieved
+ from the server and loaded into the datastore using
+ :meth:`load_deltas()`. This is done before checking whether
+ we are out of tries.
+
+ When giving up, :class:`DatastoreError` is raised.
+
+ When any other exception occurs (either in the callback or in
+ the commit), uncommitted changes are rolled back and the last
+ exception is re-raised.
+ """
+ # Hack: max_tries is a keyword-only parameter.
+ max_tries = kwds.pop('max_tries', 1)
+ if kwds:
+ raise TypeError('Unexpected kwargs %r' % (kwds,))
+ if max_tries < 1:
+ raise ValueError('max_tries must be >= 1')
+ # We catch Exception (not BaseException) below so that KeyboardInterrupt and SystemExit propagate.
+ if self._changes:
+ raise DatastoreError('There should be no pending changes')
+ for _ in range(max_tries):
+ try:
+ rv = callback(*args)
+ except Exception:
+ # The callback failed; give up completely.
+ self.rollback()
+ raise
+ try:
+ self.commit()
+ except DatastoreConflictError:
+ # It's a conflict; update content and maybe try again.
+ self.rollback()
+ # If loading deltas fails, that's too bad.
+ self.load_deltas()
+ except Exception:
+ # Some other error; give up completely.
+ self.rollback()
+ raise
+ else:
+ # Success!
+ return rv
+ # We ran out of tries. But we've loaded new deltas.
+ if max_tries == 1:
+ raise DatastoreError('Failed to commit; set max_tries to a value > 1 to retry')
+ else:
+ raise DatastoreError('Failed to commit %d times in a row' % (max_tries,))
+
+ # NOTE: The asserts below can only fire if the server sends bogus data.
+
+ def _apply_change(self, change):
+ op = change.op
+ tid = change.tid
+ recordid = change.recordid
+ data = change.data
+ table = self.get_table(tid)
+ if op == INSERT:
+ assert recordid not in table._records, repr((tid, recordid))
+ table._update_record_fields(recordid, data, _compute_record_size_for_fields(data))
+ elif op == DELETE:
+ old_fields = table._records.get(recordid)
+ table._update_record_fields(recordid, None,
+ -_compute_record_size_for_fields(old_fields))
+ change.undo = dict(old_fields)
+ elif op == UPDATE:
+ fields = dict(table._records[recordid])
+ undo = {}
+ old_size, new_size = 0, 0
+ for field, val in data.items():
+ old_value = fields.get(field)
+ undo[field] = old_value
+ if old_value is not None:
+ old_size += _compute_field_size(old_value)
+ assert _is_op(val), repr(val)
+ op = val[0]
+ if op == ValuePut:
+ fields[field] = val[1]
+ new_size += _compute_field_size(val[1])
+ elif op == ValueDelete:
+ # Silently ignore deletions for non-existing fields.
+ if field in fields:
+ del fields[field]
+ elif _is_listop(val):
+ new_list = self._apply_listop(fields.get(field), val)
+ fields[field] = new_list
+ new_size += _compute_field_size(new_list)
+ else:
+ assert False, repr((field, val)) # pragma: no cover
+ table._update_record_fields(recordid, fields, new_size - old_size)
+ change.undo = undo
+ else:
+ assert False, repr(change) # pragma: no cover
+ return tid, recordid
+
+ def _apply_listop(self, oldval, val):
+ op = val[0]
+ if op == ListCreate:
+ assert oldval is None or oldval == (), repr(oldval)
+ return ()
+ assert isinstance(oldval, tuple), repr(oldval)
+ if op == ListPut:
+ index, newval = val[1:]
+ return oldval[:index] + (newval,) + oldval[index+1:]
+ if op == ListInsert:
+ index, newval = val[1:]
+ return oldval[:index] + (newval,) + oldval[index:]
+ if op == ListDelete:
+ index = val[1]
+ return oldval[:index] + oldval[index+1:]
+ if op == ListMove:
+ return _list_move(oldval, *val[1:])
+ assert False, repr(val) # pragma: no cover
+
+ def close(self):
+ """Close the datastore.
+
+ The datastore should not be used after this call.
+
+ All pending changes are lost.
+ """
+ # Make essential stuff fail.
+ self._manager = None
+ self._changes = None
+
+
+_VALID_ID_RE = r'([a-zA-Z0-9_\-/.+=]{1,64}|:[a-zA-Z0-9_\-/.+=]{1,63})\Z'
+
+
+class Table(object):
+ """An object representing a table in a datastore.
+
+ You need a ``Table`` in order to query or modify the content of the datastore.
+
+ **Do not instantiate this class directly**. Use
+ :meth:`Datastore.get_table()` instead. Calls with the same ID will return
+ the same object.
+ """
+
+ def __init__(self, datastore, tid):
+ self._datastore = datastore
+ self._tid = tid
+ self._records = {} # Map {recordid: fields}
+ self._record_sizes = {} # Map {recordid: int size}
+
+ def __repr__(self):
+ return 'Table(<%s>, %r)' % (self._datastore._id, self._tid)
+
+ @staticmethod
+ def is_valid_id(id):
+ """A helper method to check for a valid table ID.
+
+ Valid table IDs are 1-64 characters long and may contain the
+ following characters: ``a-z A-Z 0-9 _ - / . + =`` . Reserved
+ IDs start with a colon followed by 1-63 characters from that set.
+ """
+ return bool(re.match(_VALID_ID_RE, id))
+
+ def get_id(self):
+ """Return the ID of this table (a string)."""
+ return self._tid
+
+ def get_datastore(self):
+ """Return the :class:`Datastore` to which this table belongs."""
+ return self._datastore
+
+ def get(self, recordid):
+ """Return the record with the given record ID.
+
+ If no such record exists, return None.
+ """
+ if recordid in self._records:
+ return Record(self, recordid)
+ if not Record.is_valid_id(recordid):
+ raise ValueError('Invalid record ID %r' % (recordid,))
+ return None
+
+ def get_or_insert(self, recordid, **fields):
+ """Return the record with the given record ID, or create it.
+
+ If a record with the given record ID already exists, it is
+ returned, and the keyword arguments are ignored. If no such
+ record exists, this inserts a record with the given record ID,
+ setting its fields from the keyword arguments.
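+
+ For illustration (the table, record and field names are only an example)::
+
+ settings = datastore.get_table('settings')
+ prefs = settings.get_or_insert('prefs', sort_order='name')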
+ """
+ rec = self.get(recordid)
+ if rec is not None:
+ return rec
+ return self._insert_with_id(recordid, fields)
+
+ def insert(self, **fields):
+ """Insert a new record into the table and return it.
+
+ The new record's fields are set from the keyword arguments.
+ A unique record ID is assigned automatically.
+ """
+ return self._insert_with_id(_new_uuid(), fields)
+
+ def _insert_with_id(self, recordid, fields):
+ self._datastore._check_edit_permission()
+ value_size = 0
+ for field, value in fields.items():
+ if not Record.is_valid_field(field):
+ raise ValueError('Invalid field name %r' % (field,))
+ if value is None:
+ raise TypeError('Cannot set field %r to None in insert' % (field,))
+ value = _typecheck_value(value, field)
+ value_size += _compute_field_size(value)
+ fields[field] = value
+ self._datastore._add_pending_change(_Change(INSERT, self._tid, recordid, dict(fields)))
+ self._update_record_fields(recordid, fields, Record.BASE_RECORD_SIZE + value_size)
+ return Record(self, recordid)
+
+ def query(self, **kwds):
+ """Query the records in the table.
+
+ If called without arguments, this returns a set of all
+ records in the table.
+
+ If called with keyword arguments, each keyword argument
+ specifies a required value for the corresponding field;
+ only records that have the required field values for all
+ keyword arguments are returned.
+
+ The following example retrieves all records in the 'tasks'
+ table that have a 'done' field whose type is ``bool`` and
+ whose value is ``False``::
+
+ to_do = tasks.query(done=False)
+
+ For the purpose of queries, integers and floats are compared
+ using the standard Python equality comparisons.
+
+ Tip: specifying multiple keyword arguments implements a
+ logical 'AND' operation; to implement a logical 'OR'
+ operation, use the union of multiple queries. For example::
+
+ # Assume priority can be 1 (low), 2 (normal), 3 (high)
+ urgent = tasks.query(done=False, priority=3)
+ normal = tasks.query(done=False, priority=2)
+ to_do = urgent | normal
+ """
+ filter = []
+ for field, value in kwds.items():
+ if not Record.is_valid_field(field):
+ raise ValueError('Invalid field name %r' % (field,))
+ value = _typecheck_value(value, field)
+ filter.append((field, value))
+ results = set()
+ for recordid, fields in self._records.items():
+ for field, value in filter:
+ if field not in fields:
+ break
+ rfv = fields[field]
+ if rfv != value:
+ break
+ # If the values match but the types don't, the filter
+ # fails unless both types are numeric.
+ trfv = type(rfv)
+ tv = type(value)
+ if trfv is not tv and not set((trfv, tv)) <= set((int, long, float)):
+ break
+ else:
+ results.add(Record(self, recordid))
+ return results
+
+ def _update_record_fields(self, recordid, fields, change_in_size):
+ """Update the fields of the record, or delete the record if fields is None.
+
+ This method updates the fields for the recordid and also updates its cached size in bytes
+ and the cached size of the datastore.
+ """
+ curr_size = self._get_record_size(recordid)
+ is_new_record = (curr_size == 0)
+ curr_size += change_in_size
+ assert curr_size >= 0, 'Invalid size %d for table %s, record %s' % (curr_size, self._tid,
+ recordid)
+ assert (self._datastore._size + change_in_size >=
+ Datastore.BASE_DATASTORE_SIZE), 'Invalid datastore size %d' % (self._datastore._size + change_in_size,)
+ if curr_size:
+ self._record_sizes[recordid] = curr_size
+ self._records[recordid] = fields
+ if is_new_record:
+ self._datastore._record_count += 1
+ else:
+ del self._record_sizes[recordid]
+ del self._records[recordid]
+ self._datastore._record_count -= 1
+ self._datastore._size += change_in_size
+
+ def _get_record_size(self, recordid):
+ record_size = self._record_sizes.get(recordid)
+ if not record_size:
+ fields = self._records.get(recordid)
+ # The values in this cache are maintained through _update_record_fields. There is no
+ # case in which a record with fields exists without having its size set properly in
+ # the cache.
+ assert fields is None, 'Record %r exists with fields %r but has no cached size' % (recordid,
+ fields)
+ record_size = 0
+ return record_size
+
+
+class Record(object):
+ """An object representing a record in a table in a datastore.
+
+ A record has a record ID and zero or more fields. A record
+ belongs to a specific table. Two records are considered equal
+ when they belong to the same table and have the same record ID;
+ equal records by definition have the same fields. Records are
+ hashable.
+
+ A field value can be an atomic type or a list of atomic types.
+
+ Atomic types are ``bool``, integer (``int`` or ``long``), ``float``, string
+ (``unicode`` or 8-bit ``str``; the latter must be a valid UTF-8 string), or an
+ instance of the special classes :class:`Date` or :class:`Bytes`. Note that ``None`` is
+ not a valid field value.
+
+ **Do not instantiate this class directly**. Use
+ :meth:`Table.get()`, :meth:`Table.insert()`,
+ :meth:`Table.get_or_insert()` or :meth:`Table.query()` instead.
+ """
+
+ RECORD_SIZE_LIMIT = 100 * 1024 #: Record size limit placeholder for sphinx.
+ _RECORD_SIZE_LIMIT__doc__ = """
+ The maximum size in bytes of a record.
+ """
+
+ BASE_RECORD_SIZE = 100 #: Base record size placeholder for sphinx.
+ _BASE_RECORD_SIZE__doc__ = """
+ The size in bytes of a record before accounting for the sizes of its fields.
+
+ The overall size of a record is this value plus the sum of the sizes of its fields.
+ """
+
+ BASE_FIELD_SIZE = 100 #: Base field size placeholder for sphinx.
+ _BASE_FIELD_SIZE__doc__ = """
+ The size in bytes of a field before accounting for the sizes of its values.
+
+ The overall size of a field is this value plus:
+
+ - For string and :class:`Bytes`: the length in bytes of the value.
+ - For :class:`List`: the sum of the size of each list item, where each item's size
+ is computed as the size of the item value plus :const:`List.BASE_ITEM_SIZE`.
+ - For other atomic types: no additional contribution to the size of the field.
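+
+ For illustration, a record with a single 10-byte string field is counted as
+ 100 (base record size) + 100 (base field size) + 10 = 210 bytes.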
+ """
+
+ def __init__(self, table, recordid):
+ self._table = table
+ self._datastore = table._datastore
+ self._recordid = recordid
+
+ def __repr__(self):
+ fields = self._table._records.get(self._recordid)
+ if fields is None:
+ return 'Record(<%s>, %r, )' % (self._table._tid, self._recordid)
+ else:
+ return 'Record(<%s>, %r, %r)' % (self._table._tid, self._recordid, fields)
+
+ def __eq__(self, other):
+ if not isinstance(other, Record):
+ return NotImplemented
+ return self._table is other._table and self._recordid == other._recordid
+
+ def __ne__(self, other):
+ r = self.__eq__(other)
+ if r is not NotImplemented:
+ r = not r
+ return r
+
+ def __hash__(self):
+ return hash((self._table._tid, self._recordid))
+
+ @staticmethod
+ def is_valid_id(id):
+ """A helper method to check for a valid record ID.
+
+ Valid record IDs are 1-64 characters long and may contain the
+ following characters: ``a-z A-Z 0-9 _ - / . + =`` . Reserved
+ IDs start with a colon followed by 1-63 characters from that set.
+ """
+ return bool(re.match(_VALID_ID_RE, id))
+
+ @staticmethod
+ def is_valid_field(field):
+ """A helper method to check for a valid field name.
+
+ Valid field names are 1-64 characters long and may contain the
+ following characters: ``a-z A-Z 0-9 _ - / . + =`` . Reserved
+ field names start with a colon followed by 1-63 characters
+ from that set.
+ """
+ return bool(re.match(_VALID_ID_RE, field))
+
+ def get_id(self):
+ """Return the ID of this record (a string)."""
+ return self._recordid
+
+ def get_table(self):
+ """Return the :class:`Table` to which this record belongs."""
+ return self._table
+
+ def get_size(self):
+ """Return the size in bytes of this record.
+
+ The overall size of a record is calculated by summing the
+ size of all values in all fields, plus the base size of an empty
+ record itself. A deleted record has a size of zero.
+ """
+ return self._table._get_record_size(self._recordid)
+
+ def get(self, field):
+ """Return the value of a field in the record.
+
+ If the record does not have a field by that name, return ``None``.
+
+ If the field value is a list, this returns a :class:`List` object;
+ mutating that object will modify the field's value in the record.
+ """
+ fields = self._table._records.get(self._recordid)
+ if fields is None:
+ v = None
+ else:
+ v = fields.get(field)
+ if isinstance(v, tuple):
+ v = List(self, field)
+ # Skip field validation if we actually have a value.
+ if v is None and not Record.is_valid_field(field):
+ raise ValueError('Invalid field name %r' % (field,))
+ return v
+
+ def set(self, field, value):
+ """Set the value of a field in the record.
+
+ Setting the value to ``None`` deletes the field.
+ """
+ self.update(**{field: value})
+
+ def delete(self, field):
+ """Delete the value of a field in the record.
+
+ If the field does not exist this is a no-op.
+ """
+ self.update(**{field: None})
+
+ def get_fields(self):
+ """Return a dict mapping all the fields in the record to their values.
+
+ Modifying the dict will not affect the record in the datastore.
+
+ To enforce this, list values are returned as tuples.
+ """
+ fields = self._table._records.get(self._recordid)
+ if fields is None:
+ return {}
+ return dict(fields)
+
+ def update(self, **kwds):
+ """Set the value of multiple fields in the record.
+
+ For each keyword argument, the field by that name is set to
+ the corresponding value, except that if the value is ``None``, the
+ field is deleted.
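+
+ For illustration (the field names are only an example)::
+
+ record.update(done=True, priority=None)  # sets 'done', deletes 'priority'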
+ """
+ self._datastore._check_edit_permission()
+ fields = self._table._records.get(self._recordid)
+ if fields is None:
+ raise DatastoreError('Cannot update a deleted record')
+ fields = dict(fields)
+ data = {}
+ undo = {}
+ old_size, new_size = 0, 0
+ for field, value in kwds.items():
+ if not Record.is_valid_field(field):
+ raise ValueError('Invalid field name %r' % (field,))
+ if value is None:
+ old_value = fields.get(field)
+ if old_value:
+ undo[field] = old_value
+ old_size += _compute_field_size(old_value)
+ del fields[field]
+ data[field] = [ValueDelete]
+ else:
+ old_value = fields.get(field)
+ undo[field] = old_value
+ old_size += _compute_field_size(old_value)
+ value = _typecheck_value(value, field)
+ fields[field] = value
+ new_size += _compute_field_size(value)
+ data[field] = [ValuePut, value]
+ if data:
+ change = _Change(UPDATE, self._table._tid, self._recordid, data=data, undo=undo)
+ self._table._datastore._add_pending_change(change)
+ self._table._update_record_fields(self._recordid, fields, new_size - old_size)
+
+ def delete_record(self):
+ """Delete the record from the table.
+
+ If the record is already marked as deleted, this is a no-op.
+
+ A record marked as deleted cannot be re-inserted, cannot be
+ modified, and no longer has any fields. To check for a
+ deleted record, use :meth:`is_deleted()`.
+ """
+ self._datastore._check_edit_permission()
+ fields = self._table._records.get(self._recordid)
+ if fields is None:
+ return
+ change = _Change(DELETE, self._table._tid, self._recordid, data=None, undo=fields)
+ self._table._datastore._add_pending_change(change)
+ self._table._update_record_fields(self._recordid, None, -self.get_size())
+
+ def get_or_create_list(self, field):
+ """Get a list field, possibly setting it to an empty list.
+
+ If the field exists, it must be a list. If it does not exist,
+ it is set to an empty list. In either case, a :class:`List`
+ object representing the field is returned.
+ """
+ fields = self._table._records.get(self._recordid)
+ if fields is None:
+ raise DatastoreError('Cannot update a deleted record')
+ v = fields.get(field)
+ if isinstance(v, tuple):
+ return List(self, field)
+ if v is not None:
+ raise TypeError('Field %r already exists but is a %s instead of a list' %
+ (field, type(v).__name__))
+ if not Record.is_valid_field(field):
+ raise ValueError('Invalid field name %r' % (field,))
+ self._datastore._check_edit_permission()
+ # Produce a ListCreate op.
+ data = {field: _make_list_create()}
+ change = _Change(UPDATE, self._table._tid, self._recordid, data=data, undo={field: None})
+ self._table._datastore._add_pending_change(change)
+ fields = dict(fields)
+ fields[field] = ()
+ self._table._update_record_fields(self._recordid, fields, self.BASE_FIELD_SIZE)
+ return List(self, field)
+
+ def has(self, field):
+ """Inquire whether the record has a given field.
+
+ Return ``True`` if the field exists, ``False`` if not.
+ """
+ fields = self._table._records.get(self._recordid)
+ found = fields is not None and field in fields
+ if not found and not Record.is_valid_field(field):
+ raise ValueError('Invalid field name %r' % (field,))
+ return found
+
+ def is_deleted(self):
+ """Inquire whether the record is marked as deleted.
+
+ Return ``True`` if the record has been deleted, ``False`` if not.
+ """
+ return self._recordid not in self._table._records
+
+
+class Date(object):
+ """A simple immutable object representing a timestamp.
+
+ Datastores store timestamps as milliseconds since the Epoch
+ (1/1/1970) in UTC.
+
+ To store a timestamp, you must set a field to a ``Date``
+ object; if a field value is a timestamp, getting the value will
+ return a ``Date``.
+
+ To construct a ``Date``, pass the constructor a POSIX
+ timestamp as returned by ``time.time()`` (and many other standard
+ Python APIs).
+
+ You can convert a ``Date`` back to a POSIX timestamp by
+ calling ``float()`` or ``int()`` on it. These conversions take
+ care of the conversion between seconds and milliseconds;
+ milliseconds map to fractions when converting to/from ``float``,
+ and are truncated when converting to ``int``.
+
+ You can also convert between ``Date`` and naive (``tzinfo``-less) ``datetime``
+ objects, in either UTC or local time, using
+ :meth:`to_datetime_utc()`, :meth:`from_datetime_utc()`,
+ :meth:`to_datetime_local()`, and :meth:`from_datetime_local()`.
+ Note that ``datetime`` objects using an explicit ``tzinfo`` field are not
+ supported; if you need to work with those you must convert to/from
+ naive ``datetime`` objects yourself.
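+
+ For illustration, a minimal sketch (``tasks`` is assumed to be a
+ :class:`Table` obtained elsewhere)::
+
+ import time
+ rec = tasks.insert(title='write report', due=Date(time.time() + 3600))
+ seconds_since_epoch = float(rec.get('due'))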
+ """
+
+ def __init__(self, timestamp=None):
+ """Construct a ``Date`` from a timestamp.
+
+ The timestamp is an integer or float specifying seconds since
+ the epoch. It defaults to the current time.
+ """
+ if timestamp is None:
+ timestamp = time.time()
+ else:
+ if not isinstance(timestamp, (float, int, long)):
+ raise TypeError('Timestamp must be a float or integer, not %s' %
+ type(timestamp).__name__)
+ self._timestamp = int(timestamp*1000.0) / 1000.0
+
+ def __repr__(self):
+ dt = datetime.datetime.utcfromtimestamp(int(self._timestamp))
+ ms = (self._timestamp * 1000) % 1000
+ return 'Date<%s.%03d UTC>' % (str(dt), ms)
+
+ def __float__(self):
+ return self._timestamp
+
+ def __int__(self):
+ return int(self._timestamp)
+
+ def __long__(self):
+ return long(self._timestamp)
+
+ def __eq__(self, other):
+ if not isinstance(other, Date):
+ return NotImplemented
+ return self._timestamp == other._timestamp
+
+ def __ne__(self, other):
+ if not isinstance(other, Date):
+ return NotImplemented
+ return self._timestamp != other._timestamp
+
+ def __lt__(self, other):
+ if not isinstance(other, Date):
+ return NotImplemented
+ return self._timestamp < other._timestamp
+
+ def __le__(self, other):
+ if not isinstance(other, Date):
+ return NotImplemented
+ return self._timestamp <= other._timestamp
+
+ def __gt__(self, other):
+ if not isinstance(other, Date):
+ return NotImplemented
+ return self._timestamp > other._timestamp
+
+ def __ge__(self, other):
+ if not isinstance(other, Date):
+ return NotImplemented
+ return self._timestamp >= other._timestamp
+
+ def to_datetime_utc(self):
+ """Convert a ``Date`` to a ``datetime.datetime`` object in UTC.
+
+ This sets the ``tzinfo`` field to ``None``.
+ """
+ return datetime.datetime.utcfromtimestamp(self._timestamp)
+
+ @classmethod
+ def from_datetime_utc(cls, dt):
+ """Convert a ``datetime.datetime`` object in UTC to a ``Date``.
+
+ The ``tzinfo`` field must be ``None``.
+ """
+ if dt.tzinfo is not None:
+ raise TypeError('The argument datetime must not have a timezone')
+ delta = dt - datetime.datetime.utcfromtimestamp(0)
+ return cls(delta.days * 24*3600 + delta.seconds + delta.microseconds * 0.000001)
+
+ def to_datetime_local(self):
+ """Convert a ``Date`` to a ``datetime.datetime`` object in local time.
+
+ This sets the ``tzinfo`` field to ``None``.
+ """
+ return datetime.datetime.fromtimestamp(self._timestamp)
+
+ @classmethod
+ def from_datetime_local(cls, dt):
+ """Convert a ``datetime.datetime`` object in local time to a ``Date``.
+
+ The ``tzinfo`` field must be ``None``.
+ """
+ if dt.tzinfo is not None:
+ raise TypeError('The argument datetime must not have a timezone')
+ # Keep the fraction separate because timetuple() doesn't store it.
+ fraction = dt.microsecond * 0.000001
+ return cls(time.mktime(dt.timetuple()) + fraction)
+
+ # JSON encoding used by protocol.
+
+ def to_json(self):
+ return {TIMESTAMP: str(int(self._timestamp * 1000))}
+
+ @classmethod
+ def from_json(cls, j):
+ # If this assert fires the server sent us bad data.
+ assert (isinstance(j, dict) and
+ list(j) == [TIMESTAMP] and
+ isinstance(j[TIMESTAMP], basestring)), repr(j)
+ timestamp = int(j[TIMESTAMP]) / 1000.0
+ return cls(timestamp)
+
+
+class Bytes(object):
+ """A simple immutable object representing a binary string.
+
+ Datastores transmit binary strings using a base64 encoding.
+
+ Because Python 2 uses ambiguous representations of binary strings,
+ you must wrap binary strings in this class in order to store them
+ in a datastore. 8-bit strings not wrapped this way are assumed to
+ represent text and must use the UTF-8 encoding.
+
+ To construct a :class:`Bytes`, pass the constructor a ``str``
+ instance, a ``buffer`` instance, or an ``array.array`` instance
+ whose typecode indicates a one-byte-wide data type (i.e. ``'c'``, ``'b'``
+ or ``'B'``).
+
+ To convert a :class:`Bytes` to a raw byte string, call ``bytes()``
+ on it.
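+
+ For illustration (``rec`` is assumed to be a :class:`Record`; the field
+ name is only an example)::
+
+ rec.set('thumbnail', Bytes(b'raw image bytes'))
+ raw = bytes(rec.get('thumbnail'))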
+ """
+
+ def __init__(self, blob):
+ """Construct a Bytes from an 8-bit string."""
+ if not (isinstance(blob, (bytes, bytearray, buffer)) or
+ isinstance(blob, array.array) and blob.typecode in ('c', 'b', 'B')):
+ raise TypeError('Bytes must be a bytes-compatible type, not %s' %
+ type(blob).__name__)
+ self._bytes = bytes(blob) # Make a copy in case the argument is mutable.
+
+ def __repr__(self):
+ return 'Bytes(%r)' % self._bytes
+
+ if PY3: # pragma: no cover
+
+ def __bytes__(self):
+ return self._bytes
+
+ def __str__(self):
+ return repr(self)
+
+ else:
+
+ def __str__(self):
+ return self._bytes
+
+ def __unicode__(self):
+ return repr(self)
+
+ def __eq__(self, other):
+ if isinstance(other, bytes):
+ return self._bytes == other
+ if isinstance(other, Bytes):
+ return self._bytes == other._bytes
+ return NotImplemented
+
+ def __ne__(self, other):
+ if isinstance(other, bytes):
+ return self._bytes != other
+ if isinstance(other, Bytes):
+ return self._bytes != other._bytes
+ return NotImplemented
+
+ def __lt__(self, other):
+ if isinstance(other, bytes):
+ return self._bytes < other
+ if isinstance(other, Bytes):
+ return self._bytes < other._bytes
+ return NotImplemented
+
+ def __le__(self, other):
+ if isinstance(other, bytes):
+ return self._bytes <= other
+ if isinstance(other, Bytes):
+ return self._bytes <= other._bytes
+ return NotImplemented
+
+ def __gt__(self, other):
+ if isinstance(other, bytes):
+ return self._bytes > other
+ if isinstance(other, Bytes):
+ return self._bytes > other._bytes
+ return NotImplemented
+
+ def __ge__(self, other):
+ if isinstance(other, bytes):
+ return self._bytes >= other
+ if isinstance(other, Bytes):
+ return self._bytes >= other._bytes
+ return NotImplemented
+
+ def __len__(self):
+ return len(self._bytes)
+
+ # JSON encoding used by protocol.
+
+ def to_json(self):
+ s = _dbase64_encode(self._bytes)
+ return {BLOB: s}
+
+ @classmethod
+ def from_json(cls, j):
+ # If this assert fires the server sent us bad data.
+ assert (isinstance(j, dict) and
+ list(j) == [BLOB] and
+ isinstance(j[BLOB], basestring)), repr(j)
+ b = _dbase64_decode(j[BLOB])
+ return cls(b)
+
+
+class List(collections.MutableSequence):
+ """A wrapper for a list value.
+
+ When a field contains a list value, retrieving the field using
+ :meth:`Record.get()` returns a ``List`` object. This object
+ behaves like a mutable sequence, but mutating it (e.g., replacing
+ an item with a new value) will mutate the list value in the
+ record.
+
+ A ``List`` object knows the record and field to which it
+ refers. Multiple ``List`` objects may refer to the same record and
+ field.
+
+ ``List`` objects are compared by value (i.e., the sequence of
+ items they contain, not the record and field to which they refer).
+ They can also be compared to regular tuples and lists.
+
+ Many of the methods available for regular lists are also available for
+ ``List`` objects; when in doubt, consult the documentation
+ below. Some methods unique to ``List`` objects also exist.
+
+ Negative indices are supported in the usual fashion.
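+
+ For illustration (``rec`` is assumed to be a :class:`Record`; the field
+ name is only an example)::
+
+ tags = rec.get_or_create_list('tags')
+ tags.append('urgent')
+ tags[0] = 'normal'
+ del tags[0]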
+
+ **Do not instantiate this class directly**. Use
+ :meth:`Record.get()` or :meth:`Record.get_or_create_list()` instead.
+ """
+
+ BASE_ITEM_SIZE = 20 #: Base list item size placeholder for sphinx.
+ _BASE_ITEM_SIZE__doc__ = """
+ The size in bytes of a list item.
+
+ The overall size of a list item is this value plus the size of the item value.
+ """
+
+ def __init__(self, record, field):
+ self._table = record._table
+ self._recordid = record._recordid
+ self._field = field
+ self._check()
+
+ def __repr__(self):
+ return 'List(<%s>, %r)' % (self._recordid, self._field)
+
+ def __eq__(self, other):
+ if not isinstance(other, (List, list, tuple)):
+ return NotImplemented
+ return tuple(self) == _typecheck_list(other, self._field)
+
+ def __ne__(self, other):
+ if not isinstance(other, (List, list, tuple)):
+ return NotImplemented
+ return tuple(self) != _typecheck_list(other, self._field)
+
+ def __lt__(self, other):
+ if not isinstance(other, (List, list, tuple)):
+ return NotImplemented
+ return tuple(self) < _typecheck_list(other, self._field)
+
+ def __le__(self, other):
+ if not isinstance(other, (List, list, tuple)):
+ return NotImplemented
+ return tuple(self) <= _typecheck_list(other, self._field)
+
+ def __gt__(self, other):
+ if not isinstance(other, (List, list, tuple)):
+ return NotImplemented
+ return tuple(self) > _typecheck_list(other, self._field)
+
+ def __ge__(self, other):
+ if not isinstance(other, (List, list, tuple)):
+ return NotImplemented
+ return tuple(self) >= _typecheck_list(other, self._field)
+
+ def get_record(self):
+ """Return the :class:`Record` to which this ``List`` refers."""
+ return self._table.get(self._recordid)
+
+ def get_field(self):
+ """Return the field name (a string) to which this ``List`` refers."""
+ return self._field
+
+ def _check(self):
+ fields = self._table._records.get(self._recordid)
+ if fields is None:
+ raise TypeError('Cannot use a List referring to a deleted record')
+ v = fields.get(self._field)
+ if not isinstance(v, tuple):
+ raise TypeError('Cannot use a List referring to a non-list field')
+ return v
+
+ def __len__(self):
+ v = self._check()
+ return len(v)
+
+ def __iter__(self):
+ v = self._check()
+ return iter(v)
+
+ def __contains__(self, value):
+ v = self._check()
+ return value in v
+
+ def __getitem__(self, index):
+ v = self._check()
+ return v[index]
+
+ def __setitem__(self, index, value):
+ if isinstance(index, slice):
+ raise TypeError('Cannot set List slices')
+ value = _typecheck_atom(value, self.get_field(), True)
+ v = self._check()
+ if index < 0:
+ index += len(v)
+ if not 0 <= index < len(v):
+ raise IndexError
+ v = v[:index] + (value,) + v[index+1:]
+ self._update(v, _make_list_put(index, value))
+
+ def __delitem__(self, index):
+ if isinstance(index, slice):
+ raise TypeError('Cannot delete List slices')
+ v = self._check()
+ if index < 0:
+ index += len(v)
+ if not 0 <= index < len(v):
+ raise IndexError
+ v = v[:index] + v[index+1:]
+ self._update(v, _make_list_delete(index))
+
+ def insert(self, index, value):
+ """Insert a value into the list at a given index."""
+ value = _typecheck_atom(value, self.get_field(), True)
+ v = self._check()
+ n = len(v)
+ if index < 0:
+ index += n
+ if index < 0:
+ index = 0
+ elif index > n:
+ index = n
+ v = v[:index] + (value,) + v[index:]
+ self._update(v, _make_list_insert(index, value))
+
+ def append(self, value):
+ """Append a value to the end of the list."""
+ value = _typecheck_atom(value, self.get_field(), True)
+ v = self._check()
+ index = len(v)
+ v = v + (value,)
+ self._update(v, _make_list_insert(index, value))
+
+ def move(self, index, newindex):
+ """Move the list item at ``index`` to position ``newindex``.
+
+ This is most easily explained as follows: first delete the
+ item at position ``index``; then re-insert it at position
+ ``newindex``.
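+
+ For example, if the list holds ``['a', 'b', 'c']``, then ``move(0, 2)``
+ leaves it as ``['b', 'c', 'a']``.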
+ """
+ v = self._check()
+ n = len(v)
+ if index < 0:
+ index += n
+ if not 0 <= index < len(v):
+ raise IndexError
+ if newindex < 0:
+ newindex += n
+ if not 0 <= newindex < len(v):
+ raise IndexError
+ v = _list_move(v, index, newindex)
+ self._update(v, _make_list_move(index, newindex))
+
+ def _update(self, v, op):
+ self._table._datastore._check_edit_permission()
+ table = self._table
+ recordid = self._recordid
+ field = self._field
+ fields = table._records[recordid]
+ old_v = fields.get(field)
+ change = _Change(UPDATE, table._tid, recordid,
+ data={field: op}, undo={field: old_v})
+ table._datastore._add_pending_change(change)
+ fields = dict(fields)
+ fields[field] = v
+ table._update_record_fields(recordid, fields,
+ _compute_value_size(v) - _compute_value_size(old_v))
+
+
+VALID_ATOM_TYPES = frozenset([
+ int,
+ bool,
+ float,
+ str,
+ Date,
+ Bytes,
+ List,
+ ] + ([bytes] if PY3 else [long, unicode]))
+
+
+def _typecheck_value(value, field):
+ if isinstance(value, (List, list, tuple)):
+ return _typecheck_list(value, field)
+ else:
+ return _typecheck_atom(value, field)
+
+
+def _typecheck_list(value, field):
+ return tuple(_typecheck_atom(item, field, is_list=True)
+ for item in value)
+
+
+def _typecheck_atom(value, field, is_list=False):
+ if type(value) not in VALID_ATOM_TYPES:
+ if is_list:
+ format = 'Type %s is not an acceptable list item type (field %r)'
+ else:
+ format = 'Type %s is not an acceptable value type (field %r)'
+ raise TypeError(format % (type(value).__name__, field))
+ if isinstance(value, str) and not PY3:
+ # Convert 8-bit strings to Unicode using UTF-8.
+ # If this raises UnicodeDecodeError your data is not in UTF-8 format.
+ value = value.decode('utf-8')
+ return value
+
+
+def _compute_record_size_for_fields(fields):
+ """Compute the size in bytes of a record containing the given fields."""
+ return Record.BASE_RECORD_SIZE + sum(map(_compute_field_size, fields.itervalues()))
+
+
+def _compute_field_size(value):
+ """Compute the size in bytes of a field with the given value.
+
+ Returns 0 when field is None.
+ """
+ if value is None:
+ return 0
+ return Record.BASE_FIELD_SIZE + _compute_value_size(value)
+
+
+def _compute_value_size(value):
+ """Compute the size in bytes of the value.
+
+ Sizes are computed as follows:
+ String: length in bytes of the UTF-8 encoding.
+ Bytes: length in bytes.
+ List: sum of (:const:`List.BASE_ITEM_SIZE` + the item's atom size) for each item in the list.
+ Other atomic types: 0.
+ """
+ if isinstance(value, (List, list, tuple)):
+ return _compute_list_size(value)
+ else:
+ return _compute_atom_size(value)
+
+
+def _compute_list_size(value):
+ return (len(value) * List.BASE_ITEM_SIZE) + sum(map(_compute_atom_size, value))
+
+
+def _compute_atom_size(value):
+ if value is None:
+ return 0
+ if isinstance(value, (int, long, bool, float, Date)):
+ return 0
+ if PY3: # pragma: no cover
+ if isinstance(value, str):
+ value = value.encode('utf-8')
+ if isinstance(value, bytes):
+ return len(value)
+ else:
+ if isinstance(value, unicode):
+ value = value.encode('utf-8')
+ if isinstance(value, str):
+ return len(value)
+ if isinstance(value, Bytes):
+ return len(value)
+ assert False, 'Type %r is not a valid atom (value: %r)' % (type(value), value)
+
+
+# Change ops.
+INSERT, UPDATE, DELETE = 'I', 'U', 'D'
+
+class _Change(object):
+
+ REVERSED_OPS = {INSERT: DELETE, UPDATE: UPDATE, DELETE: INSERT}
+
+ def __init__(self, op, tid, recordid, data=None, undo=None):
+ assert op in (INSERT, UPDATE, DELETE), repr(op)
+ assert isinstance(tid, basestring), repr(tid)
+ assert isinstance(recordid, basestring), repr(recordid)
+ if data is None:
+ assert op == DELETE, repr(op)
+ else:
+ assert op != DELETE, repr(op)
+ assert isinstance(data, dict), repr(data)
+ if undo is not None:
+ assert op != INSERT, repr(op)
+ assert isinstance(undo, dict), repr(undo)
+ self.op = op
+ self.tid = tid
+ self.recordid = recordid
+ self.data = data
+ self.undo = undo
+
+ def __repr__(self):
+ args = [self.op, self.tid, self.recordid]
+ if self.data is not None or self.undo is not None:
+ args.append(self.data)
+ if self.undo is not None:
+ args.append(self.undo)
+ return '_Change(%s)' % (', '.join(map(repr, args)))
+
+ def __eq__(self, other):
+ if not isinstance(other, _Change):
+ return NotImplemented
+ return (self.op == other.op and
+ self.tid == other.tid and
+ self.recordid == other.recordid and
+ self.data == other.data and
+ self.undo == other.undo)
+
+ def __ne__(self, other):
+ eq = self.__eq__(other)
+ if eq is not NotImplemented:
+ eq = not eq
+ return eq
+
+ def without_undo(self):
+ return _Change(self.op, self.tid, self.recordid, self.data)
+
+ def size(self):
+ change_size = Datastore.BASE_CHANGE_SIZE
+ if self.op == INSERT:
+ change_size += sum((Record.BASE_FIELD_SIZE + _compute_value_size(val))
+ for val in self.data.itervalues())
+ elif self.op == UPDATE:
+ for field_op in self.data.itervalues():
+ change_size += Record.BASE_FIELD_SIZE
+ op_value = _get_op_value(field_op)
+ if op_value is not None:
+ change_size += _compute_value_size(op_value)
+ return change_size
+
+ def invert(self):
+ if self.op == UPDATE:
+ newdata = {}
+ newundo = {}
+ for name, op in self.data.items():
+ assert _is_op(op), repr((name, op))
+ if _is_listop(op):
+ newdata[name], newundo[name] = self._invert_listop(name, op)
+ else:
+ # Before and after are from op's POV.
+ before = self.undo.get(name)
+ opid = op[0]
+ if opid == ValuePut:
+ after = op[1]
+ if before is None:
+ newdata[name] = [ValueDelete]
+ newundo[name] = after
+ else:
+ newdata[name] = [ValuePut, before]
+ newundo[name] = after
+ elif opid == ValueDelete:
+ newdata[name] = [ValuePut, before]
+ newundo[name] = None
+ else:
+ assert False, repr((name, op)) # pragma: no cover
+ return _Change(UPDATE, self.tid, self.recordid, newdata, newundo)
+ else:
+ return _Change(self.REVERSED_OPS[self.op], self.tid, self.recordid,
+ data=self.undo, undo=self.data)
+
+ def _invert_listop(self, name, op):
+ assert _is_listop(op), repr(op)
+ # Before and after are from op's POV.
+ before = self.undo[name]
+ opid = op[0]
+ if opid == ListCreate:
+ after = ()
+ invop = [ValueDelete]
+ return invop, after
+ index = op[1]
+ assert isinstance(before, tuple), repr((name, before))
+ if opid == ListPut:
+ assert 0 <= index < len(before), repr((name, index, len(before)))
+ opvalue = op[2]
+ after = before[:index] + (opvalue,) + before[index+1:]
+ invop = _make_list_put(index, before[index])
+ elif opid == ListInsert:
+ assert 0 <= index <= len(before), repr((name, index, len(before)))
+ opvalue = op[2]
+ after = before[:index] + (opvalue,) + before[index:]
+ invop = _make_list_delete(index)
+ elif opid == ListDelete:
+ assert 0 <= index < len(before), repr((name, index, len(before)))
+ after = before[:index] + before[index+1:]
+ invop = _make_list_insert(index, before[index])
+ elif opid == ListMove:
+ assert 0 <= index < len(before), repr((name, index, len(before)))
+ newindex = op[2]
+ assert 0 <= newindex < len(before), repr((name, index, len(before)))
+ after = _list_move(before, index, newindex)
+ invop = _make_list_move(newindex, index)
+ else:
+ assert False, repr((name, op)) # pragma: no cover
+ return invop, after
+
+ @classmethod
+ def from_json(cls, val):
+ assert isinstance(val, list) and len(val) >= 3, repr(val)
+ op, tid, recordid = val[:3]
+ if op == INSERT:
+ assert len(val) == 4, repr(val)
+ data = dict((field, _value_from_json(v)) for field, v in val[3].items())
+ elif op == UPDATE:
+ assert len(val) == 4, repr(val)
+ data = dict((field, _op_from_json(v)) for field, v in val[3].items())
+ elif op == DELETE:
+ assert len(val) == 3, repr(val)
+ data = None
+ else:
+ assert False, repr(val) # pragma: no cover
+ return cls(op, tid, recordid, data)
+
+ def to_json(self):
+ # We never serialize the undo info.
+ if self.op == INSERT:
+ data = dict(self.data)
+ for k, v in data.items():
+ data[k] = _value_to_json(v)
+ return [self.op, self.tid, self.recordid, data]
+ if self.op == UPDATE:
+ data = {}
+ for k, v in self.data.items():
+ assert _is_op(v), repr(v)
+ data[k] = _op_to_json(v)
+ return [self.op, self.tid, self.recordid, data]
+ if self.op == DELETE:
+ return [DELETE, self.tid, self.recordid]
+ assert False, repr(self) # pragma: no cover
+
+
+# Field ops.
+ValuePut, ValueDelete = VALUE_OPS = 'P', 'D'
+ListCreate, ListPut, ListInsert, ListDelete, ListMove = LIST_OPS = 'LC', 'LP', 'LI', 'LD', 'LM'
+
+# Sets of field ops.
+VALUE_OPS = frozenset(VALUE_OPS)
+LIST_OPS = frozenset(LIST_OPS)
+ALL_OPS = VALUE_OPS | LIST_OPS
+
+# Codes for encoding special values.
+INTEGER = 'I'
+NUMBER = 'N'
+TIMESTAMP = 'T'
+BLOB = 'B'
+
+# Special floating point representations.
+PLUS_INFINITY = {NUMBER: '+inf'}
+MINUS_INFINITY = {NUMBER: '-inf'}
+NOT_A_NUMBER = {NUMBER: 'nan'}
+
+# Special floating point values.
+INF_VALUE = 1e1000
+NINF_VALUE = -INF_VALUE
+NAN_VALUE = INF_VALUE / INF_VALUE
+
+
+def _new_uuid():
+ return base64.urlsafe_b64encode(uuid.uuid4().bytes).decode('ascii').rstrip('=')
+
+
+def _value_from_json(v):
+ if isinstance(v, (int, long)) and not isinstance(v, bool):
+ return float(v) # Plain JSON "numbers" are only used to encode floats.
+ if isinstance(v, dict):
+ assert len(v) == 1, repr(v)
+ # This slightly awkward spelling is needed to support Python 2 and 3.
+ key = next(iter(v))
+ val = v[key]
+ if key == INTEGER:
+ return int(val)
+ if key == NUMBER:
+ if v == NOT_A_NUMBER:
+ return NAN_VALUE
+ if v == PLUS_INFINITY:
+ return INF_VALUE
+ if v == MINUS_INFINITY:
+ return NINF_VALUE
+ assert False, repr(v) # pragma: no cover
+ if key == TIMESTAMP:
+ return Date.from_json(v)
+ if key == BLOB:
+ return Bytes.from_json(v)
+ assert False, repr(v) # pragma: no cover
+ return v
+
+
+def _value_to_json(v):
+ if isinstance(v, (int, long)) and not isinstance(v, bool):
+ return {INTEGER: str(v)}
+ if isinstance(v, float):
+ if math.isinf(v):
+ if v > 0:
+ return PLUS_INFINITY
+ else:
+ return MINUS_INFINITY
+ if math.isnan(v):
+ return NOT_A_NUMBER
+ if isinstance(v, (Bytes, Date)):
+ return v.to_json()
+ return v
+
+
+def _op_from_json(val):
+ assert _is_op(val), repr(val)
+ opid = val[0]
+ if opid == ValuePut:
+ return [opid, _value_from_json(val[1])]
+ if opid in (ListPut, ListInsert):
+ return [opid, val[1], _value_from_json(val[2])]
+ return list(val)
+
+
+def _op_to_json(val):
+ assert _is_op(val), repr(val)
+ opid = val[0]
+ if opid == ValuePut:
+ return [opid, _value_to_json(val[1])]
+ if opid in (ListPut, ListInsert):
+ return [opid, val[1], _value_to_json(val[2])]
+ return list(val)
+
+
+def _get_op_value(op):
+ assert _is_op(op), repr(op)
+ opid = op[0]
+ if opid == ValuePut:
+ return op[1]
+ if opid in (ListPut, ListInsert):
+ return op[2]
+ return None
+
+
+def _is_op(val):
+ return isinstance(val, list) and val and val[0] in ALL_OPS
+
+
+def _is_listop(val):
+ return isinstance(val, list) and val and val[0] in LIST_OPS
+
+
+def _list_move(old, index, newindex):
+ if index <= newindex:
+ return (old[:index] + old[index+1:newindex+1] +
+ old[index:index+1] + old[newindex+1:])
+ else:
+ return (old[:newindex] + old[index:index+1] +
+ old[newindex:index] + old[index+1:])
+
+
+def _make_list_create():
+ return [ListCreate]
+
+
+def _make_list_put(index, value):
+ return [ListPut, index, value]
+
+
+def _make_list_insert(index, value):
+ return [ListInsert, index, value]
+
+
+def _make_list_delete(index):
+ return [ListDelete, index]
+
+
+def _make_list_move(index, newindex):
+ return [ListMove, index, newindex]
diff --git a/resources/lib/dropbox/rest.py b/resources/lib/dropbox/rest.py
index b410572..61b8cc6 100644
--- a/resources/lib/dropbox/rest.py
+++ b/resources/lib/dropbox/rest.py
@@ -1,100 +1,128 @@
"""
A simple JSON REST request abstraction layer that is used by the
-dropbox.client and dropbox.session modules. You shouldn't need to use this.
+``dropbox.client`` and ``dropbox.session`` modules. You shouldn't need to use this.
"""
-import httplib
-import os
-import pkg_resources
-import re
+import io
import socket
import ssl
import sys
import urllib
-import urlparse
-from . import util
+import pkg_resources
try:
import json
except ImportError:
import simplejson as json
-SDK_VERSION = "1.5.1"
+try:
+ import requests.packages.urllib3 as urllib3
+except ImportError:
+ raise ImportError('Dropbox python client requires urllib3.')
+
+
+SDK_VERSION = "2.2.0"
TRUSTED_CERT_FILE = pkg_resources.resource_filename(__name__, 'trusted-certs.crt')
-class ProperHTTPSConnection(httplib.HTTPConnection):
+
+class RESTResponse(io.IOBase):
"""
- httplib.HTTPSConnection is broken because it doesn't do server certificate
- validation. This class does certificate validation by ensuring:
- 1. The certificate sent down by the server has a signature chain to one of
- the certs in our 'trusted-certs.crt' (this is mostly handled by the 'ssl'
- module).
- 2. The hostname in the certificate matches the hostname we're connecting to.
+ Responses to requests can come in the form of ``RESTResponse``. These are
+ thin wrappers around the socket file descriptor.
+ :meth:`read()` and :meth:`close()` are implemented.
+ It is important to call :meth:`close()` to return the connection
+ back to the connection pool to be reused. If a connection
+ is not closed by the caller it may leak memory. The object makes a
+ best-effort attempt upon destruction to call :meth:`close()`,
+ but it's still best to explicitly call :meth:`close()`.
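+
+ For illustration, a minimal sketch (``client`` is assumed to be a
+ ``DropboxClient``; its ``get_file()`` returns a ``RESTResponse``)::
+
+ resp = client.get_file('/photo.jpg')
+ try:
+     data = resp.read()
+ finally:
+     resp.close()  # returns the connection to the pool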
"""
- def __init__(self, host, port, trusted_cert_file=TRUSTED_CERT_FILE):
- httplib.HTTPConnection.__init__(self, host, port)
- self.ca_certs = trusted_cert_file
- self.cert_reqs = ssl.CERT_REQUIRED
+ def __init__(self, resp):
+ # arg: A urllib3.HTTPResponse object
+ self.urllib3_response = resp
+ self.status = resp.status
+ self.version = resp.version
+ self.reason = resp.reason
+ self.strict = resp.strict
+ self.is_closed = False
- def connect(self):
- sock = create_connection((self.host, self.port))
- self.sock = ssl.wrap_socket(sock, cert_reqs=self.cert_reqs, ca_certs=self.ca_certs)
- cert = self.sock.getpeercert()
- hostname = self.host.split(':', 0)[0]
- match_hostname(cert, hostname)
+ def __del__(self):
+ # Attempt to close when ref-count goes to zero.
+ self.close()
-class CertificateError(ValueError):
- pass
+ def __exit__(self, typ, value, traceback):
+ # Allow this to be used in "with" blocks.
+ self.close()
-def _dnsname_to_pat(dn):
- pats = []
- for frag in dn.split(r'.'):
- if frag == '*':
- # When '*' is a fragment by itself, it matches a non-empty dotless
- # fragment.
- pats.append('[^.]+')
- else:
- # Otherwise, '*' matches any dotless fragment.
- frag = re.escape(frag)
- pats.append(frag.replace(r'\*', '[^.]*'))
- return re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
+ # -----------------
+ # Important methods
+ # -----------------
+ def read(self, amt=None):
+ """
+ Read data off the underlying socket.
-# This was ripped from Python 3.2 so it's not tested
-def match_hostname(cert, hostname):
- """Verify that *cert* (in decoded format as returned by
- SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 rules
- are mostly followed, but IP addresses are not accepted for *hostname*.
+ Parameters
+ amt
+ Amount of data to read. Defaults to ``None``, indicating to read
+ everything.
- CertificateError is raised on failure. On success, the function
- returns nothing.
- """
- if not cert:
- raise ValueError("empty or no certificate")
- dnsnames = []
- san = cert.get('subjectAltName', ())
- for key, value in san:
- if key == 'DNS':
- if _dnsname_to_pat(value).match(hostname):
- return
- dnsnames.append(value)
- if not san:
- # The subject is only checked when subjectAltName is empty
- for sub in cert.get('subject', ()):
- for key, value in sub:
- # XXX according to RFC 2818, the most specific Common Name
- # must be used.
- if key == 'commonName':
- if _dnsname_to_pat(value).match(hostname):
- return
- dnsnames.append(value)
- if len(dnsnames) > 1:
- raise CertificateError("hostname %r doesn't match either of %s" % (hostname, ', '.join(map(repr, dnsnames))))
- elif len(dnsnames) == 1:
- raise CertificateError("hostname %r doesn't match %r" % (hostname, dnsnames[0]))
- else:
- raise CertificateError("no appropriate commonName or subjectAltName fields were found")
+ Returns
+ Data off the socket. If ``amt`` is not ``None``, at most ``amt`` bytes are returned.
+ An empty string when the socket has no data.
+
+ Raises
+ ``ValueError``
+ If the ``RESTResponse`` has already been closed.
+ """
+ if self.is_closed:
+ raise ValueError('Response already closed')
+ return self.urllib3_response.read(amt)
+
+ BLOCKSIZE = 4 * 1024 * 1024 # 4MB at a time just because
+
+ def close(self):
+ """Closes the underlying socket."""
+
+ # Double closing is harmless
+ if self.is_closed:
+ return
+
+ # Read any remaining data off the socket before releasing the
+ # connection, one block at a time in case the leftover body is huge.
+ while self.read(RESTResponse.BLOCKSIZE):
+ pass
+
+ # Mark as closed and release the connection (exactly once)
+ self.is_closed = True
+ self.urllib3_response.release_conn()
+
+ @property
+ def closed(self):
+ return self.is_closed
+
+
+ # ---------------------------------
+ # Backwards compat for HTTPResponse
+ # ---------------------------------
+ def getheaders(self):
+ """Returns a dictionary of the response headers."""
+ return self.urllib3_response.getheaders()
+
+ def getheader(self, name, default=None):
+ """Returns a given response header."""
+ return self.urllib3_response.getheader(name, default)
+
+ # Some compat functions showed up recently in urllib3
+ try:
+ urllib3.HTTPResponse.flush
+ urllib3.HTTPResponse.fileno
+ def fileno(self):
+ return self.urllib3_response.fileno()
+ def flush(self):
+ return self.urllib3_response.flush()
+ except AttributeError:
+ pass
def create_connection(address):
host, port = address
@@ -107,8 +135,8 @@ def create_connection(address):
sock.connect(sa)
return sock
- except socket.error, _:
- err = _
+ except socket.error as e:
+ err = e
if sock is not None:
sock.close()
@@ -122,11 +150,41 @@ def json_loadb(data):
data = data.decode('utf8')
return json.loads(data)
+
class RESTClientObject(object):
- def __init__(self, http_connect=None):
- self.http_connect = http_connect
+ def __init__(self, max_reusable_connections=8, mock_urlopen=None):
+ """
+ Parameters
+ max_reusable_connections
+ max connections to keep alive in the pool
+ mock_urlopen
+ an optional alternate urlopen function for testing
+
+ This class uses ``urllib3`` to maintain a pool of connections. We attempt
+ to grab an existing idle connection from the pool, otherwise we spin
+ up a new connection. Once a connection is closed, it is reinserted
+ into the pool (unless the pool is full).
+
+ SSL settings:
+ - Certificates validated using Dropbox-approved trusted root certs
+ - TLS v1.0 (newer TLS versions are not supported by urllib3)
+ - Default ciphersuites. Choosing ciphersuites is not supported by urllib3
+ - Hostname verification is provided by urllib3
+ """
+ self.mock_urlopen = mock_urlopen
+ self.pool_manager = urllib3.PoolManager(
+ num_pools=4, # only a handful of hosts. api.dropbox.com, api-content.dropbox.com
+ maxsize=max_reusable_connections,
+ block=False,
+ timeout=60.0, # long enough so datastores await doesn't get interrupted
+ cert_reqs=ssl.CERT_REQUIRED,
+ ca_certs=TRUSTED_CERT_FILE,
+ ssl_version=ssl.PROTOCOL_TLSv1,
+ )
def request(self, method, url, post_params=None, body=None, headers=None, raw_response=False):
+ """Performs a REST request. See :meth:`RESTClient.request()` for detailed description."""
+
post_params = post_params or {}
headers = headers or {}
headers['User-Agent'] = 'OfficialDropboxPythonSDK/' + SDK_VERSION
@@ -134,63 +192,52 @@ class RESTClientObject(object):
if post_params:
if body:
raise ValueError("body parameter cannot be used with post_params parameter")
- body = urllib.urlencode(post_params)
+ body = params_to_urlencoded(post_params)
headers["Content-type"] = "application/x-www-form-urlencoded"
- # maintain dynamic lookup of ProperHTTPConnection
- http_connect = self.http_connect
- if http_connect is None:
- http_connect = ProperHTTPSConnection
+ # Handle StringIO instances, because urllib3 doesn't.
+ if hasattr(body, 'getvalue'):
+ body = str(body.getvalue())
+ headers["Content-Length"] = len(body)
- host = urlparse.urlparse(url).hostname
- conn = http_connect(host, 443)
+ # Reject any headers containing newlines; the error from the server isn't pretty.
+ for key, value in headers.items():
+ if isinstance(value, basestring) and '\n' in value:
+ raise ValueError("headers should not contain newlines (%s: %s)" %
+ (key, value))
try:
- # This code is here because httplib in pre-2.6 Pythons
- # doesn't handle file-like objects as HTTP bodies and
- # thus requires manual buffering
- if not hasattr(body, 'read'):
- conn.request(method, url, body, headers)
- else:
- # Content-Length should be set to prevent upload truncation errors.
- clen, raw_data = util.analyze_file_obj(body)
- headers["Content-Length"] = str(clen)
- conn.request(method, url, "", headers)
- if raw_data is not None:
- conn.send(raw_data)
- else:
- BLOCKSIZE = 4 * 1024 * 1024 # 4MB buffering just because
- bytes_read = 0
- while True:
- data = body.read(BLOCKSIZE)
- if not data:
- break
- # Catch Content-Length overflow before the HTTP server does
- bytes_read += len(data)
- if bytes_read > clen:
- raise util.AnalyzeFileObjBug(clen, bytes_read)
- conn.send(data)
- if bytes_read != clen:
- raise util.AnalyzeFileObjBug(clen, bytes_read)
+ # Grab a connection from the pool to make the request.
+ # We return it to the pool when the caller calls close() on the response
+ urlopen = self.mock_urlopen if self.mock_urlopen else self.pool_manager.urlopen
+ r = urlopen(
+ method=method,
+ url=url,
+ body=body,
+ headers=headers,
+ preload_content=False
+ )
+ r = RESTResponse(r) # wrap up the urllib3 response before proceeding
+ except socket.error as e:
+ raise RESTSocketError(url, e)
+ except urllib3.exceptions.SSLError as e:
+ raise RESTSocketError(url, "SSL certificate error: %s" % e)
- except socket.error, e:
- raise RESTSocketError(host, e)
- except CertificateError, e:
- raise RESTSocketError(host, "SSL certificate error: " + e)
+ if r.status not in (200, 206):
+ raise ErrorResponse(r, r.read())
- r = conn.getresponse()
- if r.status != 200:
- raise ErrorResponse(r)
+ return self.process_response(r, raw_response)
+ def process_response(self, r, raw_response):
if raw_response:
return r
else:
+ s = r.read()
try:
- resp = json_loadb(r.read())
+ resp = json_loadb(s)
except ValueError:
- raise ErrorResponse(r)
- finally:
- conn.close()
+ raise ErrorResponse(r, s)
+ r.close()
return resp
@@ -210,88 +257,125 @@ class RESTClientObject(object):
assert type(raw_response) == bool
return self.request("PUT", url, body=body, headers=headers, raw_response=raw_response)
-class RESTClient(object):
- IMPL = RESTClientObject()
+class RESTClient(object):
"""
- An class with all static methods to perform JSON REST requests that is used internally
+ A class with all static methods to perform JSON REST requests that is used internally
by the Dropbox Client API. It provides just enough gear to make requests
and get responses as JSON data (when applicable). All requests happen over SSL.
"""
+ IMPL = RESTClientObject()
+
@classmethod
def request(cls, *n, **kw):
"""Perform a REST request and parse the response.
- Args:
- - ``method``: An HTTP method (e.g. 'GET' or 'POST').
- - ``url``: The URL to make a request to.
- - ``post_params``: A dictionary of parameters to put in the body of the request.
+ Parameters
+ method
+ An HTTP method (e.g. ``'GET'`` or ``'POST'``).
+ url
+ The URL to make a request to.
+ post_params
+ A dictionary of parameters to put in the body of the request.
This option may not be used if the body parameter is given.
- - ``body``: The body of the request. Typically, this value will be a string.
- It may also be a file-like object in Python 2.6 and above. The body
+ body
+ The body of the request. Typically, this value will be a string.
+ It may also be a file-like object. The body
parameter may not be used with the post_params parameter.
- - ``headers``: A dictionary of headers to send with the request.
- - ``raw_response``: Whether to return the raw httplib.HTTPReponse object. [default False]
+ headers
+ A dictionary of headers to send with the request.
+ raw_response
+ Whether to return a :class:`RESTResponse` object. Default ``False``.
It's best enabled for requests that return large amounts of data that you
- would want to .read() incrementally rather than loading into memory. Also
+ would want to ``.read()`` incrementally rather than loading into memory. Also
use this for calls where you need to read metadata like status or headers,
or if the body is not JSON.
- Returns:
- - The JSON-decoded data from the server, unless raw_response is
- specified, in which case an httplib.HTTPReponse object is returned instead.
+ Returns
+ The JSON-decoded data from the server, unless ``raw_response`` is
+ set, in which case a :class:`RESTResponse` object is returned instead.
- Raises:
- - dropbox.rest.ErrorResponse: The returned HTTP status is not 200, or the body was
+ Raises
+ :class:`ErrorResponse`
+ The returned HTTP status is not 200, or the body was
not parsed from JSON successfully.
- - dropbox.rest.RESTSocketError: A socket.error was raised while contacting Dropbox.
+ :class:`RESTSocketError`
+ A ``socket.error`` was raised while contacting Dropbox.
"""
return cls.IMPL.request(*n, **kw)
@classmethod
def GET(cls, *n, **kw):
- """Perform a GET request using RESTClient.request"""
+ """Perform a GET request using :meth:`RESTClient.request()`."""
return cls.IMPL.GET(*n, **kw)
@classmethod
def POST(cls, *n, **kw):
- """Perform a POST request using RESTClient.request"""
+ """Perform a POST request using :meth:`RESTClient.request()`."""
return cls.IMPL.POST(*n, **kw)
@classmethod
def PUT(cls, *n, **kw):
- """Perform a PUT request using RESTClient.request"""
+ """Perform a PUT request using :meth:`RESTClient.request()`."""
return cls.IMPL.PUT(*n, **kw)
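For context, a minimal sketch of calling the class-level helpers documented above, assuming the SDK is importable as ``dropbox`` (the token placeholder is illustrative only, not part of the diff):

# Hypothetical call; the exception handling mirrors the errors documented above.
from dropbox import rest

try:
    info = rest.RESTClient.GET("https://api.dropbox.com/1/account/info",
                               headers={"Authorization": "Bearer <access-token>"})
except rest.ErrorResponse as e:
    print "server returned %d: %s" % (e.status, e)
except rest.RESTSocketError as e:
    print "network problem: %s" % e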
+
class RESTSocketError(socket.error):
- """
- A light wrapper for socket.errors raised by dropbox.rest.RESTClient.request
- that adds more information to the socket.error.
- """
+ """A light wrapper for ``socket.error`` that adds some more information."""
def __init__(self, host, e):
msg = "Error connecting to \"%s\": %s" % (host, str(e))
socket.error.__init__(self, msg)
+
+# Dummy class for docstrings, see doco.py.
+class _ErrorResponse__doc__(Exception):
+ """Exception raised when :class:`DropboxClient` exeriences a problem.
+
+ For example, this is raised when the server returns an unexpected
+ non-200 HTTP response.
+ """
+
+ _status__doc__ = "HTTP response status (an int)."
+ _reason__doc__ = "HTTP response reason (a string)."
+ _headers__doc__ = "HTTP response headers (a list of (header, value) tuples)."
+ _body__doc__ = "HTTP response body (string or JSON dict)."
+ _error_msg__doc__ = "Error message for developer (optional)."
+ _user_error_msg__doc__ = "Error message for end user (optional)."
+
+
class ErrorResponse(Exception):
"""
- Raised by dropbox.rest.RESTClient.request for requests that:
- - Return a non-200 HTTP response, or
- - Have a non-JSON response body, or
- - Have a malformed/missing header in the response.
+ Raised by :meth:`RESTClient.request()` for requests that:
- Most errors that Dropbox returns will have a error field that is unpacked and
+ - Return a non-200 HTTP response, or
+ - Have a non-JSON response body, or
+ - Have a malformed/missing header in the response.
+
+ Most errors that Dropbox returns will have an error field that is unpacked and
placed on the ErrorResponse exception. In some situations, a user_error field
will also come back. Messages under user_error are worth showing to an end-user
of your app, while other errors are likely only useful for you as the developer.
"""
- def __init__(self, http_resp):
+ def __init__(self, http_resp, body):
+ """
+ Parameters
+ http_resp
+ The :class:`RESTResponse` which errored
+ body
+ Body of the :class:`RESTResponse`.
+ The reason we can't simply call ``http_resp.read()`` to
+ get the body is that ``read()`` is not idempotent.
+ Since it can't be called more than once,
+ we have to pass the string body in separately.
+ """
self.status = http_resp.status
self.reason = http_resp.reason
- self.body = http_resp.read()
+ self.body = body
self.headers = http_resp.getheaders()
+ http_resp.close() # won't need this connection anymore
try:
self.body = json_loadb(self.body)
@@ -304,14 +388,29 @@ class ErrorResponse(Exception):
def __str__(self):
if self.user_error_msg and self.user_error_msg != self.error_msg:
# one is translated and the other is English
- msg = "%s (%s)" % (self.user_error_msg, self.error_msg)
+ msg = "%r (%r)" % (self.user_error_msg, self.error_msg)
elif self.error_msg:
- msg = self.error_msg
+ msg = repr(self.error_msg)
elif not self.body:
- msg = self.reason
+ msg = repr(self.reason)
else:
msg = "Error parsing response body or headers: " +\
- "Body - %s Headers - %s" % (self.body, self.headers)
+ "Body - %.100r Headers - %r" % (self.body, self.headers)
- return "[%d] %s" % (self.status, repr(msg))
+ return "[%d] %s" % (self.status, msg)
+
+def params_to_urlencoded(params):
+ """
+ Returns an application/x-www-form-urlencoded 'str' representing the key/value pairs in 'params'.
+
+ Keys and values are str()'d before calling urllib.urlencode, with the exception of unicode
+ objects, which are utf8-encoded.
+ """
+ def encode(o):
+ if isinstance(o, unicode):
+ return o.encode('utf8')
+ else:
+ return str(o)
+ utf8_params = {encode(k): encode(v) for k, v in params.iteritems()}
+ return urllib.urlencode(utf8_params)
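For reference, a stand-alone sketch of the utf8-then-urlencode behaviour described in the docstring above (assuming Python 2, where ``unicode`` and ``dict.iteritems()`` exist; the helper mirrors ``params_to_urlencoded`` rather than importing the SDK):

# Hypothetical stand-alone sketch, not part of the SDK diff.
import urllib

def encode_params(params):
    def encode(o):
        # unicode values are utf8-encoded; everything else is str()'d
        return o.encode('utf8') if isinstance(o, unicode) else str(o)
    return urllib.urlencode(dict((encode(k), encode(v)) for k, v in params.iteritems()))

print encode_params({u'path': u'/Caf\u00e9', 'overwrite': True})
# e.g. path=%2FCaf%C3%A9&overwrite=True (key order may vary)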
diff --git a/resources/lib/dropbox/session.py b/resources/lib/dropbox/session.py
index a087a7f..d090b9b 100644
--- a/resources/lib/dropbox/session.py
+++ b/resources/lib/dropbox/session.py
@@ -1,11 +1,11 @@
"""
-dropbox.session.DropboxSession is responsible for holding OAuth authentication info
-(app key/secret, request key/secret, access key/secret) as well as configuration information for your app
-('app_folder' or 'dropbox' access type, optional locale preference). It knows how to
+dropbox.session.DropboxSession is responsible for holding OAuth authentication
+info (app key/secret, request key/secret, access key/secret). It knows how to
use all of this information to craft properly constructed requests to Dropbox.
A DropboxSession object must be passed to a dropbox.client.DropboxClient object upon
initialization.
+
"""
from __future__ import absolute_import
@@ -23,28 +23,33 @@ except ImportError:
from . import rest
class OAuthToken(object):
- __slots__ = ('key', 'secret')
+ """
+ A class representing an OAuth token. Contains two fields: ``key`` and
+ ``secret``.
+ """
def __init__(self, key, secret):
self.key = key
self.secret = secret
-class DropboxSession(object):
+class BaseSession(object):
API_VERSION = 1
API_HOST = "api.dropbox.com"
WEB_HOST = "www.dropbox.com"
API_CONTENT_HOST = "api-content.dropbox.com"
+ API_NOTIFICATION_HOST = "api-notify.dropbox.com"
- def __init__(self, consumer_key, consumer_secret, access_type, locale=None, rest_client=rest.RESTClient):
+ def __init__(self, consumer_key, consumer_secret, access_type="auto", locale=None, rest_client=rest.RESTClient):
"""Initialize a DropboxSession object.
Your consumer key and secret are available
at https://www.dropbox.com/developers/apps
Args:
- - ``access_type``: Either 'dropbox' or 'app_folder'. All path-based operations
- will occur relative to either the user's Dropbox root directory
- or your application's app folder.
+
+ - ``access_type``: Either 'auto' (the default), 'dropbox', or
+ 'app_folder'. You probably don't need to specify this and should
+ just use the default.
- ``locale``: A locale string ('en', 'pt_PT', etc.) [optional]
The locale setting will be used to translate any user-facing error
messages that the server generates. At this time Dropbox supports
@@ -52,12 +57,13 @@ class DropboxSession(object):
languages in the future. If you send a language the server doesn't
support, messages will remain in English. Look for these translated
messages in rest.ErrorResponse exceptions as e.user_error_msg.
+
"""
- assert access_type in ['dropbox', 'app_folder'], "expected access_type of 'dropbox' or 'app_folder'"
+ assert access_type in ['dropbox', 'app_folder', 'auto'], "expected access_type of 'dropbox', 'app_folder', or 'auto'"
self.consumer_creds = OAuthToken(consumer_key, consumer_secret)
self.token = None
self.request_token = None
- self.root = 'sandbox' if access_type == 'app_folder' else 'dropbox'
+ self.root = 'sandbox' if access_type == 'app_folder' else access_type
self.locale = locale
self.rest_client = rest_client
@@ -69,22 +75,6 @@ class DropboxSession(object):
"""Remove any attached access token from the DropboxSession."""
self.token = None
- def set_token(self, access_token, access_token_secret):
- """Attach an access token to the DropboxSession.
-
- Note that the access 'token' is made up of both a token string
- and a secret string.
- """
- self.token = OAuthToken(access_token, access_token_secret)
-
- def set_request_token(self, request_token, request_token_secret):
- """Attach an request token to the DropboxSession.
-
- Note that the request 'token' is made up of both a token string
- and a secret string.
- """
- self.request_token = OAuthToken(request_token, request_token_secret)
-
def build_path(self, target, params=None):
"""Build the path component for an API URL.
@@ -111,9 +101,9 @@ class DropboxSession(object):
params['locale'] = self.locale
if params:
- return "/%d%s?%s" % (self.API_VERSION, target_path, urllib.urlencode(params))
+ return "/%s%s?%s" % (self.API_VERSION, target_path, urllib.urlencode(params))
else:
- return "/%d%s" % (self.API_VERSION, target_path)
+ return "/%s%s" % (self.API_VERSION, target_path)
def build_url(self, host, target, params=None):
"""Build an API URL.
@@ -130,6 +120,24 @@ class DropboxSession(object):
"""
return "https://%s%s" % (host, self.build_path(target, params))
+class DropboxSession(BaseSession):
+
+ def set_token(self, access_token, access_token_secret):
+ """Attach an access token to the DropboxSession.
+
+ Note that the access 'token' is made up of both a token string
+ and a secret string.
+ """
+ self.token = OAuthToken(access_token, access_token_secret)
+
+ def set_request_token(self, request_token, request_token_secret):
+ """Attach an request token to the DropboxSession.
+
+ Note that the request 'token' is made up of both a token string
+ and a secret string.
+ """
+ self.request_token = OAuthToken(request_token, request_token_secret)
+
def build_authorize_url(self, request_token, oauth_callback=None):
"""Build a request token authorization URL.
@@ -166,8 +174,9 @@ class DropboxSession(object):
can store the access token for that user for later operations.
Returns:
- - An dropbox.session.OAuthToken representing the request token Dropbox assigned
- to this app. Also attaches the request token as self.request_token.
+ - An :py:class:`OAuthToken` object representing the
+ request token Dropbox assigned to this app. Also attaches the
+ request token as self.request_token.
"""
self.token = None # clear any token currently on the request
url = self.build_url(self.API_HOST, '/oauth/request_token')
@@ -194,8 +203,9 @@ class DropboxSession(object):
DropboxSession instance.
Returns:
- - An tuple of (key, secret) representing the access token Dropbox assigned
- to this app and user. Also attaches the access token as self.token.
+ - An :py:class:`OAuthToken` object with fields ``key`` and ``secret``
+ representing the access token Dropbox assigned to this app and
+ user. Also attaches the access token as self.token.
"""
request_token = request_token or self.request_token
assert request_token, "No request_token available on the session. Please pass one."
@@ -284,3 +294,15 @@ class DropboxSession(object):
"OAuth request.")
return OAuthToken(key, secret)
+
+# Don't use this class directly.
+class DropboxOAuth2Session(BaseSession):
+
+ def __init__(self, oauth2_access_token, locale, rest_client=rest.RESTClient):
+ super(DropboxOAuth2Session, self).__init__("", "", "auto", locale=locale, rest_client=rest_client)
+ self.access_token = oauth2_access_token
+
+ def build_access_headers(self, method, resource_url, params=None, token=None):
+ assert token is None
+ headers = {"Authorization": "Bearer " + self.access_token}
+ return headers, params
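A minimal sketch of the OAuth 2 signing difference introduced by DropboxOAuth2Session above: no request signing, just a Bearer header (the token value below is made up):

# Hypothetical illustration of the Bearer-header construction for OAuth 2.
def build_access_headers(oauth2_access_token, params=None):
    headers = {"Authorization": "Bearer " + oauth2_access_token}
    return headers, params

headers, params = build_access_headers("ab12cd34ef56", {"locale": "en"})
print headers   # {'Authorization': 'Bearer ab12cd34ef56'}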
diff --git a/resources/lib/dropbox/trusted-certs.crt b/resources/lib/dropbox/trusted-certs.crt
index 7d36ac7..00c2f2e 100644
--- a/resources/lib/dropbox/trusted-certs.crt
+++ b/resources/lib/dropbox/trusted-certs.crt
@@ -1,166 +1,918 @@
-# Subject: C=ZA, ST=Western Cape, L=Cape Town, O=Thawte Consulting cc, OU=Certification Services Division, CN=Thawte Server CA/emailAddress=server-certs@thawte.com
-# Issuer: C=ZA, ST=Western Cape, L=Cape Town, O=Thawte Consulting cc, OU=Certification Services Division, CN=Thawte Server CA/emailAddress= server-certs@thawte.com
+# DigiCert Assured ID Root CA.pem
+# Certificate:
+# Data:
+# Version: 3 (0x2)
+# Serial Number:
+# 0c:e7:e0:e5:17:d8:46:fe:8f:e5:60:fc:1b:f0:30:39
+# Signature Algorithm: sha1WithRSAEncryption
+# Issuer: C=US, O=DigiCert Inc, OU=www.digicert.com, CN=DigiCert Assured ID Root CA
+# Validity
+# Not Before: Nov 10 00:00:00 2006 GMT
+# Not After : Nov 10 00:00:00 2031 GMT
+# Subject: C=US, O=DigiCert Inc, OU=www.digicert.com, CN=DigiCert Assured ID Root CA
+# Subject Public Key Info:
+# Public Key Algorithm: rsaEncryption
+# Public-Key: (2048 bit)
+# Modulus:
+# 00:ad:0e:15:ce:e4:43:80:5c:b1:87:f3:b7:60:f9:
+# 71:12:a5:ae:dc:26:94:88:aa:f4:ce:f5:20:39:28:
+# 58:60:0c:f8:80:da:a9:15:95:32:61:3c:b5:b1:28:
+# 84:8a:8a:dc:9f:0a:0c:83:17:7a:8f:90:ac:8a:e7:
+# 79:53:5c:31:84:2a:f6:0f:98:32:36:76:cc:de:dd:
+# 3c:a8:a2:ef:6a:fb:21:f2:52:61:df:9f:20:d7:1f:
+# e2:b1:d9:fe:18:64:d2:12:5b:5f:f9:58:18:35:bc:
+# 47:cd:a1:36:f9:6b:7f:d4:b0:38:3e:c1:1b:c3:8c:
+# 33:d9:d8:2f:18:fe:28:0f:b3:a7:83:d6:c3:6e:44:
+# c0:61:35:96:16:fe:59:9c:8b:76:6d:d7:f1:a2:4b:
+# 0d:2b:ff:0b:72:da:9e:60:d0:8e:90:35:c6:78:55:
+# 87:20:a1:cf:e5:6d:0a:c8:49:7c:31:98:33:6c:22:
+# e9:87:d0:32:5a:a2:ba:13:82:11:ed:39:17:9d:99:
+# 3a:72:a1:e6:fa:a4:d9:d5:17:31:75:ae:85:7d:22:
+# ae:3f:01:46:86:f6:28:79:c8:b1:da:e4:57:17:c4:
+# 7e:1c:0e:b0:b4:92:a6:56:b3:bd:b2:97:ed:aa:a7:
+# f0:b7:c5:a8:3f:95:16:d0:ff:a1:96:eb:08:5f:18:
+# 77:4f
+# Exponent: 65537 (0x10001)
+# X509v3 extensions:
+# X509v3 Key Usage: critical
+# Digital Signature, Certificate Sign, CRL Sign
+# X509v3 Basic Constraints: critical
+# CA:TRUE
+# X509v3 Subject Key Identifier:
+# 45:EB:A2:AF:F4:92:CB:82:31:2D:51:8B:A7:A7:21:9D:F3:6D:C8:0F
+# X509v3 Authority Key Identifier:
+# keyid:45:EB:A2:AF:F4:92:CB:82:31:2D:51:8B:A7:A7:21:9D:F3:6D:C8:0F
+#
+# Signature Algorithm: sha1WithRSAEncryption
+# a2:0e:bc:df:e2:ed:f0:e3:72:73:7a:64:94:bf:f7:72:66:d8:
+# 32:e4:42:75:62:ae:87:eb:f2:d5:d9:de:56:b3:9f:cc:ce:14:
+# 28:b9:0d:97:60:5c:12:4c:58:e4:d3:3d:83:49:45:58:97:35:
+# 69:1a:a8:47:ea:56:c6:79:ab:12:d8:67:81:84:df:7f:09:3c:
+# 94:e6:b8:26:2c:20:bd:3d:b3:28:89:f7:5f:ff:22:e2:97:84:
+# 1f:e9:65:ef:87:e0:df:c1:67:49:b3:5d:eb:b2:09:2a:eb:26:
+# ed:78:be:7d:3f:2b:f3:b7:26:35:6d:5f:89:01:b6:49:5b:9f:
+# 01:05:9b:ab:3d:25:c1:cc:b6:7f:c2:f1:6f:86:c6:fa:64:68:
+# eb:81:2d:94:eb:42:b7:fa:8c:1e:dd:62:f1:be:50:67:b7:6c:
+# bd:f3:f1:1f:6b:0c:36:07:16:7f:37:7c:a9:5b:6d:7a:f1:12:
+# 46:60:83:d7:27:04:be:4b:ce:97:be:c3:67:2a:68:11:df:80:
+# e7:0c:33:66:bf:13:0d:14:6e:f3:7f:1f:63:10:1e:fa:8d:1b:
+# 25:6d:6c:8f:a5:b7:61:01:b1:d2:a3:26:a1:10:71:9d:ad:e2:
+# c3:f9:c3:99:51:b7:2b:07:08:ce:2e:e6:50:b2:a7:fa:0a:45:
+# 2f:a2:f0:f2
-----BEGIN CERTIFICATE-----
-MIIDEzCCAnygAwIBAgIBATANBgkqhkiG9w0BAQQFADCBxDELMAkGA1UEBhMCWkEx
-FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD
-VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv
-biBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEm
-MCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wHhcNOTYwODAx
-MDAwMDAwWhcNMjAxMjMxMjM1OTU5WjCBxDELMAkGA1UEBhMCWkExFTATBgNVBAgT
-DFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3
-dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNl
-cyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEmMCQGCSqGSIb3
-DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQAD
-gY0AMIGJAoGBANOkUG7I/1Zr5s9dtuoMaHVHoqrC2oQl/Kj0R1HahbUgdJSGHg91
-yekIYfUGbTBuFRkC6VLAYttNmZ7iagxEOM3+vuNkCXDF/rFrKbYvScg71CcEJRCX
-L+eQbcAoQpnXTEPew/UhbVSfXcNY4cDk2VuwuNy0e982OsK1ZiIS1ocNAgMBAAGj
-EzARMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEEBQADgYEAB/pMaVz7lcxG
-7oWDTSEwjsrZqG9JGubaUeNgcGyEYRGhGshIPllDfU+VPaGLtwtimHp1it2ITk6e
-QNuozDJ0uW8NxuOzRAvZim+aKZuZGCg70eNAKJpaPNW15yAbi8qkq43pUdniTCxZ
-qdq5snUb9kLy78fyGPmJvKP/iiMucEc=
+MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
+b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG
+EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
+cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c
+JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP
+mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+
+wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4
+VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/
+AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB
+AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW
+BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun
+pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC
+dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf
+fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm
+NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx
+H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe
++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g==
-----END CERTIFICATE-----
-# Subject: C=ZA, ST=Western Cape, L=Cape Town, O=Thawte Consulting cc, OU=Certification Services Division, CN=Thawte Premium Server CA/emailAddress=premium-server@thawte.com
-# Issuer: C=ZA, ST=Western Cape, L=Cape Town, O=Thawte Consulting cc, OU=Certification Services Division, CN=Thawte Premium Server CA/emailAddress=premium-server@thawte.com
+# DigiCert Global Root CA.pem
+# Certificate:
+# Data:
+# Version: 3 (0x2)
+# Serial Number:
+# 08:3b:e0:56:90:42:46:b1:a1:75:6a:c9:59:91:c7:4a
+# Signature Algorithm: sha1WithRSAEncryption
+# Issuer: C=US, O=DigiCert Inc, OU=www.digicert.com, CN=DigiCert Global Root CA
+# Validity
+# Not Before: Nov 10 00:00:00 2006 GMT
+# Not After : Nov 10 00:00:00 2031 GMT
+# Subject: C=US, O=DigiCert Inc, OU=www.digicert.com, CN=DigiCert Global Root CA
+# Subject Public Key Info:
+# Public Key Algorithm: rsaEncryption
+# Public-Key: (2048 bit)
+# Modulus:
+# 00:e2:3b:e1:11:72:de:a8:a4:d3:a3:57:aa:50:a2:
+# 8f:0b:77:90:c9:a2:a5:ee:12:ce:96:5b:01:09:20:
+# cc:01:93:a7:4e:30:b7:53:f7:43:c4:69:00:57:9d:
+# e2:8d:22:dd:87:06:40:00:81:09:ce:ce:1b:83:bf:
+# df:cd:3b:71:46:e2:d6:66:c7:05:b3:76:27:16:8f:
+# 7b:9e:1e:95:7d:ee:b7:48:a3:08:da:d6:af:7a:0c:
+# 39:06:65:7f:4a:5d:1f:bc:17:f8:ab:be:ee:28:d7:
+# 74:7f:7a:78:99:59:85:68:6e:5c:23:32:4b:bf:4e:
+# c0:e8:5a:6d:e3:70:bf:77:10:bf:fc:01:f6:85:d9:
+# a8:44:10:58:32:a9:75:18:d5:d1:a2:be:47:e2:27:
+# 6a:f4:9a:33:f8:49:08:60:8b:d4:5f:b4:3a:84:bf:
+# a1:aa:4a:4c:7d:3e:cf:4f:5f:6c:76:5e:a0:4b:37:
+# 91:9e:dc:22:e6:6d:ce:14:1a:8e:6a:cb:fe:cd:b3:
+# 14:64:17:c7:5b:29:9e:32:bf:f2:ee:fa:d3:0b:42:
+# d4:ab:b7:41:32:da:0c:d4:ef:f8:81:d5:bb:8d:58:
+# 3f:b5:1b:e8:49:28:a2:70:da:31:04:dd:f7:b2:16:
+# f2:4c:0a:4e:07:a8:ed:4a:3d:5e:b5:7f:a3:90:c3:
+# af:27
+# Exponent: 65537 (0x10001)
+# X509v3 extensions:
+# X509v3 Key Usage: critical
+# Digital Signature, Certificate Sign, CRL Sign
+# X509v3 Basic Constraints: critical
+# CA:TRUE
+# X509v3 Subject Key Identifier:
+# 03:DE:50:35:56:D1:4C:BB:66:F0:A3:E2:1B:1B:C3:97:B2:3D:D1:55
+# X509v3 Authority Key Identifier:
+# keyid:03:DE:50:35:56:D1:4C:BB:66:F0:A3:E2:1B:1B:C3:97:B2:3D:D1:55
+#
+# Signature Algorithm: sha1WithRSAEncryption
+# cb:9c:37:aa:48:13:12:0a:fa:dd:44:9c:4f:52:b0:f4:df:ae:
+# 04:f5:79:79:08:a3:24:18:fc:4b:2b:84:c0:2d:b9:d5:c7:fe:
+# f4:c1:1f:58:cb:b8:6d:9c:7a:74:e7:98:29:ab:11:b5:e3:70:
+# a0:a1:cd:4c:88:99:93:8c:91:70:e2:ab:0f:1c:be:93:a9:ff:
+# 63:d5:e4:07:60:d3:a3:bf:9d:5b:09:f1:d5:8e:e3:53:f4:8e:
+# 63:fa:3f:a7:db:b4:66:df:62:66:d6:d1:6e:41:8d:f2:2d:b5:
+# ea:77:4a:9f:9d:58:e2:2b:59:c0:40:23:ed:2d:28:82:45:3e:
+# 79:54:92:26:98:e0:80:48:a8:37:ef:f0:d6:79:60:16:de:ac:
+# e8:0e:cd:6e:ac:44:17:38:2f:49:da:e1:45:3e:2a:b9:36:53:
+# cf:3a:50:06:f7:2e:e8:c4:57:49:6c:61:21:18:d5:04:ad:78:
+# 3c:2c:3a:80:6b:a7:eb:af:15:14:e9:d8:89:c1:b9:38:6c:e2:
+# 91:6c:8a:ff:64:b9:77:25:57:30:c0:1b:24:a3:e1:dc:e9:df:
+# 47:7c:b5:b4:24:08:05:30:ec:2d:bd:0b:bf:45:bf:50:b9:a9:
+# f3:eb:98:01:12:ad:c8:88:c6:98:34:5f:8d:0a:3c:c6:e9:d5:
+# 95:95:6d:de
-----BEGIN CERTIFICATE-----
-MIIDJzCCApCgAwIBAgIBATANBgkqhkiG9w0BAQQFADCBzjELMAkGA1UEBhMCWkEx
-FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD
-VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv
-biBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFByZW1pdW0gU2Vy
-dmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNlcnZlckB0aGF3dGUuY29t
-MB4XDTk2MDgwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgc4xCzAJBgNVBAYTAlpB
-MRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEdMBsG
-A1UEChMUVGhhd3RlIENvbnN1bHRpbmcgY2MxKDAmBgNVBAsTH0NlcnRpZmljYXRp
-b24gU2VydmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQcmVtaXVtIFNl
-cnZlciBDQTEoMCYGCSqGSIb3DQEJARYZcHJlbWl1bS1zZXJ2ZXJAdGhhd3RlLmNv
-bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA0jY2aovXwlue2oFBYo847kkE
-VdbQ7xwblRZH7xhINTpS9CtqBo87L+pW46+GjZ4X9560ZXUCTe/LCaIhUdib0GfQ
-ug2SBhRz1JPLlyoAnFxODLz6FVL88kRu2hFKbgifLy3j+ao6hnO2RlNYyIkFvYMR
-uHM/qgeN9EJN50CdHDcCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG
-9w0BAQQFAAOBgQAmSCwWwlj66BZ0DKqqX1Q/8tfJeGBeXm43YyJ3Nn6yF8Q0ufUI
-hfzJATj/Tb7yFkJD57taRvvBxhEf8UqwKEbJw8RCfbz6q1lu1bdRiBHjpIUZa4JM
-pAwSremkrj/xw0llmozFyD4lt5SZu5IycQfwhl7tUCemDaYj+bvLpgcUQg==
+MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD
+QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT
+MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB
+CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97
+nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt
+43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P
+T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4
+gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO
+BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR
+TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw
+DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr
+hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg
+06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF
+PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls
+YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk
+CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4=
-----END CERTIFICATE-----
-# Subject: C=US, O=VeriSign, Inc., OU=Class 1 Public Primary Certification Authority
-# Issuer: C=US, O=VeriSign, Inc., OU=Class 1 Public Primary Certification Authority
+# DigiCert High Assurance EV Root CA.pem
+# Certificate:
+# Data:
+# Version: 3 (0x2)
+# Serial Number:
+# 02:ac:5c:26:6a:0b:40:9b:8f:0b:79:f2:ae:46:25:77
+# Signature Algorithm: sha1WithRSAEncryption
+# Issuer: C=US, O=DigiCert Inc, OU=www.digicert.com, CN=DigiCert High Assurance EV Root CA
+# Validity
+# Not Before: Nov 10 00:00:00 2006 GMT
+# Not After : Nov 10 00:00:00 2031 GMT
+# Subject: C=US, O=DigiCert Inc, OU=www.digicert.com, CN=DigiCert High Assurance EV Root CA
+# Subject Public Key Info:
+# Public Key Algorithm: rsaEncryption
+# Public-Key: (2048 bit)
+# Modulus:
+# 00:c6:cc:e5:73:e6:fb:d4:bb:e5:2d:2d:32:a6:df:
+# e5:81:3f:c9:cd:25:49:b6:71:2a:c3:d5:94:34:67:
+# a2:0a:1c:b0:5f:69:a6:40:b1:c4:b7:b2:8f:d0:98:
+# a4:a9:41:59:3a:d3:dc:94:d6:3c:db:74:38:a4:4a:
+# cc:4d:25:82:f7:4a:a5:53:12:38:ee:f3:49:6d:71:
+# 91:7e:63:b6:ab:a6:5f:c3:a4:84:f8:4f:62:51:be:
+# f8:c5:ec:db:38:92:e3:06:e5:08:91:0c:c4:28:41:
+# 55:fb:cb:5a:89:15:7e:71:e8:35:bf:4d:72:09:3d:
+# be:3a:38:50:5b:77:31:1b:8d:b3:c7:24:45:9a:a7:
+# ac:6d:00:14:5a:04:b7:ba:13:eb:51:0a:98:41:41:
+# 22:4e:65:61:87:81:41:50:a6:79:5c:89:de:19:4a:
+# 57:d5:2e:e6:5d:1c:53:2c:7e:98:cd:1a:06:16:a4:
+# 68:73:d0:34:04:13:5c:a1:71:d3:5a:7c:55:db:5e:
+# 64:e1:37:87:30:56:04:e5:11:b4:29:80:12:f1:79:
+# 39:88:a2:02:11:7c:27:66:b7:88:b7:78:f2:ca:0a:
+# a8:38:ab:0a:64:c2:bf:66:5d:95:84:c1:a1:25:1e:
+# 87:5d:1a:50:0b:20:12:cc:41:bb:6e:0b:51:38:b8:
+# 4b:cb
+# Exponent: 65537 (0x10001)
+# X509v3 extensions:
+# X509v3 Key Usage: critical
+# Digital Signature, Certificate Sign, CRL Sign
+# X509v3 Basic Constraints: critical
+# CA:TRUE
+# X509v3 Subject Key Identifier:
+# B1:3E:C3:69:03:F8:BF:47:01:D4:98:26:1A:08:02:EF:63:64:2B:C3
+# X509v3 Authority Key Identifier:
+# keyid:B1:3E:C3:69:03:F8:BF:47:01:D4:98:26:1A:08:02:EF:63:64:2B:C3
+#
+# Signature Algorithm: sha1WithRSAEncryption
+# 1c:1a:06:97:dc:d7:9c:9f:3c:88:66:06:08:57:21:db:21:47:
+# f8:2a:67:aa:bf:18:32:76:40:10:57:c1:8a:f3:7a:d9:11:65:
+# 8e:35:fa:9e:fc:45:b5:9e:d9:4c:31:4b:b8:91:e8:43:2c:8e:
+# b3:78:ce:db:e3:53:79:71:d6:e5:21:94:01:da:55:87:9a:24:
+# 64:f6:8a:66:cc:de:9c:37:cd:a8:34:b1:69:9b:23:c8:9e:78:
+# 22:2b:70:43:e3:55:47:31:61:19:ef:58:c5:85:2f:4e:30:f6:
+# a0:31:16:23:c8:e7:e2:65:16:33:cb:bf:1a:1b:a0:3d:f8:ca:
+# 5e:8b:31:8b:60:08:89:2d:0c:06:5c:52:b7:c4:f9:0a:98:d1:
+# 15:5f:9f:12:be:7c:36:63:38:bd:44:a4:7f:e4:26:2b:0a:c4:
+# 97:69:0d:e9:8c:e2:c0:10:57:b8:c8:76:12:91:55:f2:48:69:
+# d8:bc:2a:02:5b:0f:44:d4:20:31:db:f4:ba:70:26:5d:90:60:
+# 9e:bc:4b:17:09:2f:b4:cb:1e:43:68:c9:07:27:c1:d2:5c:f7:
+# ea:21:b9:68:12:9c:3c:9c:bf:9e:fc:80:5c:9b:63:cd:ec:47:
+# aa:25:27:67:a0:37:f3:00:82:7d:54:d7:a9:f8:e9:2e:13:a3:
+# 77:e8:1f:4a
-----BEGIN CERTIFICATE-----
-MIICPDCCAaUCEDJQM89Q0VbzXIGtZVxPyCUwDQYJKoZIhvcNAQECBQAwXzELMAkG
-A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz
-cyAxIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2
-MDEyOTAwMDAwMFoXDTIwMDEwNzIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV
-BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAxIFB1YmxpYyBQcmlt
-YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN
-ADCBiQKBgQDlGb9to1ZhLZlIcfZn3rmN67eehoAKkQ76OCWvRoiC5XOooJskXQ0f
-zGVuDLDQVoQYh5oGmxChc9+0WDlrbsH2FdWoqD+qEgaNMax/sDTXjzRniAnNFBHi
-TkVWaR94AoDa3EeRKbs2yWNcxeDXLYd7obcysHswuiovMaruo2fa2wIDAQABMA0G
-CSqGSIb3DQEBAgUAA4GBAEtEZmBoZOSYG/OwcuaViXzde7OVwB0u2NgZ0C00PcZQ
-mhCGjKo/O6gE/DdSlcPZydvN8oYGxLEb8IKIMEKOF1AcZHq4PplJdJf8rAJD+5YM
-VgQlDHx8h50kp9jwMim1pN9dokzFFjKoQvZFprY2ueC/ZTaTwtLXa9zeWdaiNfhF
+MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j
+ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL
+MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3
+LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug
+RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm
++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW
+PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM
+xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB
+Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3
+hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg
+EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA
+FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec
+nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z
+eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF
+hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2
+Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe
+vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep
++OkuE6N36B9K
-----END CERTIFICATE-----
-# Subject: C=US, O=VeriSign, Inc., OU=Class 3 Public Primary Certification Authority
-# Issuer: C=US, O=VeriSign, Inc., OU=Class 3 Public Primary Certification Authority
+# Entrust Root Certification Authority - EC1.pem
+# Certificate:
+# Data:
+# Version: 3 (0x2)
+# Serial Number:
+# a6:8b:79:29:00:00:00:00:50:d0:91:f9
+# Signature Algorithm: ecdsa-with-SHA384
+# Issuer: C=US, O=Entrust, Inc., OU=See www.entrust.net/legal-terms, OU=(c) 2012 Entrust, Inc. - for authorized use only, CN=Entrust Root Certification Authority - EC1
+# Validity
+# Not Before: Dec 18 15:25:36 2012 GMT
+# Not After : Dec 18 15:55:36 2037 GMT
+# Subject: C=US, O=Entrust, Inc., OU=See www.entrust.net/legal-terms, OU=(c) 2012 Entrust, Inc. - for authorized use only, CN=Entrust Root Certification Authority - EC1
+# Subject Public Key Info:
+# Public Key Algorithm: id-ecPublicKey
+# Public-Key: (384 bit)
+# pub:
+# 04:84:13:c9:d0:ba:6d:41:7b:e2:6c:d0:eb:55:5f:
+# 66:02:1a:24:f4:5b:89:69:47:e3:b8:c2:7d:f1:f2:
+# 02:c5:9f:a0:f6:5b:d5:8b:06:19:86:4f:53:10:6d:
+# 07:24:27:a1:a0:f8:d5:47:19:61:4c:7d:ca:93:27:
+# ea:74:0c:ef:6f:96:09:fe:63:ec:70:5d:36:ad:67:
+# 77:ae:c9:9d:7c:55:44:3a:a2:63:51:1f:f5:e3:62:
+# d4:a9:47:07:3e:cc:20
+# ASN1 OID: secp384r1
+# X509v3 extensions:
+# X509v3 Key Usage: critical
+# Certificate Sign, CRL Sign
+# X509v3 Basic Constraints: critical
+# CA:TRUE
+# X509v3 Subject Key Identifier:
+# B7:63:E7:1A:DD:8D:E9:08:A6:55:83:A4:E0:6A:50:41:65:11:42:49
+# Signature Algorithm: ecdsa-with-SHA384
+# 30:64:02:30:61:79:d8:e5:42:47:df:1c:ae:53:99:17:b6:6f:
+# 1c:7d:e1:bf:11:94:d1:03:88:75:e4:8d:89:a4:8a:77:46:de:
+# 6d:61:ef:02:f5:fb:b5:df:cc:fe:4e:ff:fe:a9:e6:a7:02:30:
+# 5b:99:d7:85:37:06:b5:7b:08:fd:eb:27:8b:4a:94:f9:e1:fa:
+# a7:8e:26:08:e8:7c:92:68:6d:73:d8:6f:26:ac:21:02:b8:99:
+# b7:26:41:5b:25:60:ae:d0:48:1a:ee:06
-----BEGIN CERTIFICATE-----
-MIICPDCCAaUCEHC65B0Q2Sk0tjjKewPMur8wDQYJKoZIhvcNAQECBQAwXzELMAkG
-A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz
-cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2
-MDEyOTAwMDAwMFoXDTI4MDgwMTIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV
-BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt
-YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN
-ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE
-BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is
-I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G
-CSqGSIb3DQEBAgUAA4GBALtMEivPLCYATxQT3ab7/AoRhIzzKBxnki98tsX63/Do
-lbwdj2wsqFHMc9ikwFPwTtYmwHYBV4GSXiHx0bH/59AhWM1pF+NEHJwZRDmJXNyc
-AA9WjQKZ7aKQRUzkuxCkPfAyAw7xzvjoyVGM5mKf5p/AfbdynMk2OmufTqj/ZA1k
+MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG
+A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3
+d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu
+dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq
+RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy
+MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD
+VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0
+L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g
+Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD
+ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi
+A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt
+ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH
+Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O
+BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC
+R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX
+hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G
-----END CERTIFICATE-----
-# Subject: C=US, O=RSA Data Security, Inc., OU=Secure Server Certification Authority
-# Issuer: C=US, O=RSA Data Security, Inc., OU=Secure Server Certification Authority
+# Entrust Root Certification Authority - G2.pem
+# Certificate:
+# Data:
+# Version: 3 (0x2)
+# Serial Number: 1246989352 (0x4a538c28)
+# Signature Algorithm: sha256WithRSAEncryption
+# Issuer: C=US, O=Entrust, Inc., OU=See www.entrust.net/legal-terms, OU=(c) 2009 Entrust, Inc. - for authorized use only, CN=Entrust Root Certification Authority - G2
+# Validity
+# Not Before: Jul 7 17:25:54 2009 GMT
+# Not After : Dec 7 17:55:54 2030 GMT
+# Subject: C=US, O=Entrust, Inc., OU=See www.entrust.net/legal-terms, OU=(c) 2009 Entrust, Inc. - for authorized use only, CN=Entrust Root Certification Authority - G2
+# Subject Public Key Info:
+# Public Key Algorithm: rsaEncryption
+# Public-Key: (2048 bit)
+# Modulus:
+# 00:ba:84:b6:72:db:9e:0c:6b:e2:99:e9:30:01:a7:
+# 76:ea:32:b8:95:41:1a:c9:da:61:4e:58:72:cf:fe:
+# f6:82:79:bf:73:61:06:0a:a5:27:d8:b3:5f:d3:45:
+# 4e:1c:72:d6:4e:32:f2:72:8a:0f:f7:83:19:d0:6a:
+# 80:80:00:45:1e:b0:c7:e7:9a:bf:12:57:27:1c:a3:
+# 68:2f:0a:87:bd:6a:6b:0e:5e:65:f3:1c:77:d5:d4:
+# 85:8d:70:21:b4:b3:32:e7:8b:a2:d5:86:39:02:b1:
+# b8:d2:47:ce:e4:c9:49:c4:3b:a7:de:fb:54:7d:57:
+# be:f0:e8:6e:c2:79:b2:3a:0b:55:e2:50:98:16:32:
+# 13:5c:2f:78:56:c1:c2:94:b3:f2:5a:e4:27:9a:9f:
+# 24:d7:c6:ec:d0:9b:25:82:e3:cc:c2:c4:45:c5:8c:
+# 97:7a:06:6b:2a:11:9f:a9:0a:6e:48:3b:6f:db:d4:
+# 11:19:42:f7:8f:07:bf:f5:53:5f:9c:3e:f4:17:2c:
+# e6:69:ac:4e:32:4c:62:77:ea:b7:e8:e5:bb:34:bc:
+# 19:8b:ae:9c:51:e7:b7:7e:b5:53:b1:33:22:e5:6d:
+# cf:70:3c:1a:fa:e2:9b:67:b6:83:f4:8d:a5:af:62:
+# 4c:4d:e0:58:ac:64:34:12:03:f8:b6:8d:94:63:24:
+# a4:71
+# Exponent: 65537 (0x10001)
+# X509v3 extensions:
+# X509v3 Key Usage: critical
+# Certificate Sign, CRL Sign
+# X509v3 Basic Constraints: critical
+# CA:TRUE
+# X509v3 Subject Key Identifier:
+# 6A:72:26:7A:D0:1E:EF:7D:E7:3B:69:51:D4:6C:8D:9F:90:12:66:AB
+# Signature Algorithm: sha256WithRSAEncryption
+# 79:9f:1d:96:c6:b6:79:3f:22:8d:87:d3:87:03:04:60:6a:6b:
+# 9a:2e:59:89:73:11:ac:43:d1:f5:13:ff:8d:39:2b:c0:f2:bd:
+# 4f:70:8c:a9:2f:ea:17:c4:0b:54:9e:d4:1b:96:98:33:3c:a8:
+# ad:62:a2:00:76:ab:59:69:6e:06:1d:7e:c4:b9:44:8d:98:af:
+# 12:d4:61:db:0a:19:46:47:f3:eb:f7:63:c1:40:05:40:a5:d2:
+# b7:f4:b5:9a:36:bf:a9:88:76:88:04:55:04:2b:9c:87:7f:1a:
+# 37:3c:7e:2d:a5:1a:d8:d4:89:5e:ca:bd:ac:3d:6c:d8:6d:af:
+# d5:f3:76:0f:cd:3b:88:38:22:9d:6c:93:9a:c4:3d:bf:82:1b:
+# 65:3f:a6:0f:5d:aa:fc:e5:b2:15:ca:b5:ad:c6:bc:3d:d0:84:
+# e8:ea:06:72:b0:4d:39:32:78:bf:3e:11:9c:0b:a4:9d:9a:21:
+# f3:f0:9b:0b:30:78:db:c1:dc:87:43:fe:bc:63:9a:ca:c5:c2:
+# 1c:c9:c7:8d:ff:3b:12:58:08:e6:b6:3d:ec:7a:2c:4e:fb:83:
+# 96:ce:0c:3c:69:87:54:73:a4:73:c2:93:ff:51:10:ac:15:54:
+# 01:d8:fc:05:b1:89:a1:7f:74:83:9a:49:d7:dc:4e:7b:8a:48:
+# 6f:8b:45:f6
-----BEGIN CERTIFICATE-----
-MIICNDCCAaECEAKtZn5ORf5eV288mBle3cAwDQYJKoZIhvcNAQECBQAwXzELMAkG
-A1UEBhMCVVMxIDAeBgNVBAoTF1JTQSBEYXRhIFNlY3VyaXR5LCBJbmMuMS4wLAYD
-VQQLEyVTZWN1cmUgU2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk0
-MTEwOTAwMDAwMFoXDTEwMDEwNzIzNTk1OVowXzELMAkGA1UEBhMCVVMxIDAeBgNV
-BAoTF1JTQSBEYXRhIFNlY3VyaXR5LCBJbmMuMS4wLAYDVQQLEyVTZWN1cmUgU2Vy
-dmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGbMA0GCSqGSIb3DQEBAQUAA4GJ
-ADCBhQJ+AJLOesGugz5aqomDV6wlAXYMra6OLDfO6zV4ZFQD5YRAUcm/jwjiioII
-0haGN1XpsSECrXZogZoFokvJSyVmIlZsiAeP94FZbYQHZXATcXY+m3dM41CJVphI
-uR2nKRoTLkoRWZweFdVJVCxzOmmCsZc5nG1wZ0jl3S3WyB57AgMBAAEwDQYJKoZI
-hvcNAQECBQADfgBl3X7hsuyw4jrg7HFGmhkRuNPHoLQDQCYCPgmc4RKz0Vr2N6W3
-YQO2WxZpO8ZECAyIUwxrl0nHPjXcbLm7qt9cuzovk2C2qUtN8iD3zV9/ZHuO3ABc
-1/p3yjkWWW8O6tO1g39NTUJWdrTJXwT4OPjr0l91X817/OWOgHz8UA==
+MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC
+VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50
+cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs
+IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz
+dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy
+NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu
+dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt
+dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0
+aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
+AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T
+RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN
+cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW
+wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1
+U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0
+jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP
+BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN
+BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/
+jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ
+Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v
+1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R
+nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH
+VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g==
-----END CERTIFICATE-----
-# Subject: C=US, O=Equifax Secure Inc., CN=Equifax Secure Global eBusiness CA-1
-# Issuer: C=US, O=Equifax Secure Inc., CN=Equifax Secure Global eBusiness CA-1
+# Entrust Root Certification Authority.pem
+# Certificate:
+# Data:
+# Version: 3 (0x2)
+# Serial Number: 1164660820 (0x456b5054)
+# Signature Algorithm: sha1WithRSAEncryption
+# Issuer: C=US, O=Entrust, Inc., OU=www.entrust.net/CPS is incorporated by reference, OU=(c) 2006 Entrust, Inc., CN=Entrust Root Certification Authority
+# Validity
+# Not Before: Nov 27 20:23:42 2006 GMT
+# Not After : Nov 27 20:53:42 2026 GMT
+# Subject: C=US, O=Entrust, Inc., OU=www.entrust.net/CPS is incorporated by reference, OU=(c) 2006 Entrust, Inc., CN=Entrust Root Certification Authority
+# Subject Public Key Info:
+# Public Key Algorithm: rsaEncryption
+# Public-Key: (2048 bit)
+# Modulus:
+# 00:b6:95:b6:43:42:fa:c6:6d:2a:6f:48:df:94:4c:
+# 39:57:05:ee:c3:79:11:41:68:36:ed:ec:fe:9a:01:
+# 8f:a1:38:28:fc:f7:10:46:66:2e:4d:1e:1a:b1:1a:
+# 4e:c6:d1:c0:95:88:b0:c9:ff:31:8b:33:03:db:b7:
+# 83:7b:3e:20:84:5e:ed:b2:56:28:a7:f8:e0:b9:40:
+# 71:37:c5:cb:47:0e:97:2a:68:c0:22:95:62:15:db:
+# 47:d9:f5:d0:2b:ff:82:4b:c9:ad:3e:de:4c:db:90:
+# 80:50:3f:09:8a:84:00:ec:30:0a:3d:18:cd:fb:fd:
+# 2a:59:9a:23:95:17:2c:45:9e:1f:6e:43:79:6d:0c:
+# 5c:98:fe:48:a7:c5:23:47:5c:5e:fd:6e:e7:1e:b4:
+# f6:68:45:d1:86:83:5b:a2:8a:8d:b1:e3:29:80:fe:
+# 25:71:88:ad:be:bc:8f:ac:52:96:4b:aa:51:8d:e4:
+# 13:31:19:e8:4e:4d:9f:db:ac:b3:6a:d5:bc:39:54:
+# 71:ca:7a:7a:7f:90:dd:7d:1d:80:d9:81:bb:59:26:
+# c2:11:fe:e6:93:e2:f7:80:e4:65:fb:34:37:0e:29:
+# 80:70:4d:af:38:86:2e:9e:7f:57:af:9e:17:ae:eb:
+# 1c:cb:28:21:5f:b6:1c:d8:e7:a2:04:22:f9:d3:da:
+# d8:cb
+# Exponent: 65537 (0x10001)
+# X509v3 extensions:
+# X509v3 Key Usage: critical
+# Certificate Sign, CRL Sign
+# X509v3 Basic Constraints: critical
+# CA:TRUE
+# X509v3 Private Key Usage Period:
+# Not Before: Nov 27 20:23:42 2006 GMT, Not After: Nov 27 20:53:42 2026 GMT
+# X509v3 Authority Key Identifier:
+# keyid:68:90:E4:67:A4:A6:53:80:C7:86:66:A4:F1:F7:4B:43:FB:84:BD:6D
+#
+# X509v3 Subject Key Identifier:
+# 68:90:E4:67:A4:A6:53:80:C7:86:66:A4:F1:F7:4B:43:FB:84:BD:6D
+# 1.2.840.113533.7.65.0:
+# 0...V7.1:4.0....
+# Signature Algorithm: sha1WithRSAEncryption
+# 93:d4:30:b0:d7:03:20:2a:d0:f9:63:e8:91:0c:05:20:a9:5f:
+# 19:ca:7b:72:4e:d4:b1:db:d0:96:fb:54:5a:19:2c:0c:08:f7:
+# b2:bc:85:a8:9d:7f:6d:3b:52:b3:2a:db:e7:d4:84:8c:63:f6:
+# 0f:cb:26:01:91:50:6c:f4:5f:14:e2:93:74:c0:13:9e:30:3a:
+# 50:e3:b4:60:c5:1c:f0:22:44:8d:71:47:ac:c8:1a:c9:e9:9b:
+# 9a:00:60:13:ff:70:7e:5f:11:4d:49:1b:b3:15:52:7b:c9:54:
+# da:bf:9d:95:af:6b:9a:d8:9e:e9:f1:e4:43:8d:e2:11:44:3a:
+# bf:af:bd:83:42:73:52:8b:aa:bb:a7:29:cf:f5:64:1c:0a:4d:
+# d1:bc:aa:ac:9f:2a:d0:ff:7f:7f:da:7d:ea:b1:ed:30:25:c1:
+# 84:da:34:d2:5b:78:83:56:ec:9c:36:c3:26:e2:11:f6:67:49:
+# 1d:92:ab:8c:fb:eb:ff:7a:ee:85:4a:a7:50:80:f0:a7:5c:4a:
+# 94:2e:5f:05:99:3c:52:41:e0:cd:b4:63:cf:01:43:ba:9c:83:
+# dc:8f:60:3b:f3:5a:b4:b4:7b:ae:da:0b:90:38:75:ef:81:1d:
+# 66:d2:f7:57:70:36:b3:bf:fc:28:af:71:25:85:5b:13:fe:1e:
+# 7f:5a:b4:3c
-----BEGIN CERTIFICATE-----
-MIICkDCCAfmgAwIBAgIBATANBgkqhkiG9w0BAQQFADBaMQswCQYDVQQGEwJVUzEc
-MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEtMCsGA1UEAxMkRXF1aWZheCBT
-ZWN1cmUgR2xvYmFsIGVCdXNpbmVzcyBDQS0xMB4XDTk5MDYyMTA0MDAwMFoXDTIw
-MDYyMTA0MDAwMFowWjELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0VxdWlmYXggU2Vj
-dXJlIEluYy4xLTArBgNVBAMTJEVxdWlmYXggU2VjdXJlIEdsb2JhbCBlQnVzaW5l
-c3MgQ0EtMTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAuucXkAJlsTRVPEnC
-UdXfp9E3j9HngXNBUmCbnaEXJnitx7HoJpQytd4zjTov2/KaelpzmKNc6fuKcxtc
-58O/gGzNqfTWK8D3+ZmqY6KxRwIP1ORROhI8bIpaVIRw28HFkM9yRcuoWcDNM50/
-o5brhTMhHD4ePmBudpxnhcXIw2ECAwEAAaNmMGQwEQYJYIZIAYb4QgEBBAQDAgAH
-MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUvqigdHJQa0S3ySPY+6j/s1dr
-aGwwHQYDVR0OBBYEFL6ooHRyUGtEt8kj2Puo/7NXa2hsMA0GCSqGSIb3DQEBBAUA
-A4GBADDiAVGqx+pf2rnQZQ8w1j7aDRRJbpGTJxQx78T3LUX47Me/okENI7SS+RkA
-Z70Br83gcfxaz2TE4JaY0KNA4gGK7ycH8WUBikQtBmV1UsCGECAhX2xrD2yuCRyv
-8qIYNMR1pHMc8Y3c7635s3a0kr/clRAevsvIO1qEYBlWlKlV
+MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC
+VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0
+Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW
+KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl
+cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw
+NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw
+NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy
+ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV
+BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ
+KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo
+Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4
+4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9
+KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI
+rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi
+94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB
+sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi
+gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo
+kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE
+vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA
+A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t
+O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua
+AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP
+9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/
+eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m
+0vdXcDazv/wor3ElhVsT/h5/WrQ8
-----END CERTIFICATE-----
-# Subject: C=US, ST=UT, L=Salt Lake City, O=The USERTRUST Network, OU=http://www.usertrust.com, CN=UTN-USERFirst-Hardware
-# Issuer: C=US, ST=UT, L=Salt Lake City, O=The USERTRUST Network, OU=http://www.usertrust.com, CN=UTN-USERFirst-Hardware
+# Entrust.net Certification Authority (2048).pem
+# Certificate:
+# Data:
+# Version: 3 (0x2)
+# Serial Number: 946069240 (0x3863def8)
+# Signature Algorithm: sha1WithRSAEncryption
+# Issuer: O=Entrust.net, OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.), OU=(c) 1999 Entrust.net Limited, CN=Entrust.net Certification Authority (2048)
+# Validity
+# Not Before: Dec 24 17:50:51 1999 GMT
+# Not After : Jul 24 14:15:12 2029 GMT
+# Subject: O=Entrust.net, OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.), OU=(c) 1999 Entrust.net Limited, CN=Entrust.net Certification Authority (2048)
+# Subject Public Key Info:
+# Public Key Algorithm: rsaEncryption
+# Public-Key: (2048 bit)
+# Modulus:
+# 00:ad:4d:4b:a9:12:86:b2:ea:a3:20:07:15:16:64:
+# 2a:2b:4b:d1:bf:0b:4a:4d:8e:ed:80:76:a5:67:b7:
+# 78:40:c0:73:42:c8:68:c0:db:53:2b:dd:5e:b8:76:
+# 98:35:93:8b:1a:9d:7c:13:3a:0e:1f:5b:b7:1e:cf:
+# e5:24:14:1e:b1:81:a9:8d:7d:b8:cc:6b:4b:03:f1:
+# 02:0c:dc:ab:a5:40:24:00:7f:74:94:a1:9d:08:29:
+# b3:88:0b:f5:87:77:9d:55:cd:e4:c3:7e:d7:6a:64:
+# ab:85:14:86:95:5b:97:32:50:6f:3d:c8:ba:66:0c:
+# e3:fc:bd:b8:49:c1:76:89:49:19:fd:c0:a8:bd:89:
+# a3:67:2f:c6:9f:bc:71:19:60:b8:2d:e9:2c:c9:90:
+# 76:66:7b:94:e2:af:78:d6:65:53:5d:3c:d6:9c:b2:
+# cf:29:03:f9:2f:a4:50:b2:d4:48:ce:05:32:55:8a:
+# fd:b2:64:4c:0e:e4:98:07:75:db:7f:df:b9:08:55:
+# 60:85:30:29:f9:7b:48:a4:69:86:e3:35:3f:1e:86:
+# 5d:7a:7a:15:bd:ef:00:8e:15:22:54:17:00:90:26:
+# 93:bc:0e:49:68:91:bf:f8:47:d3:9d:95:42:c1:0e:
+# 4d:df:6f:26:cf:c3:18:21:62:66:43:70:d6:d5:c0:
+# 07:e1
+# Exponent: 65537 (0x10001)
+# X509v3 extensions:
+# X509v3 Key Usage: critical
+# Certificate Sign, CRL Sign
+# X509v3 Basic Constraints: critical
+# CA:TRUE
+# X509v3 Subject Key Identifier:
+# 55:E4:81:D1:11:80:BE:D8:89:B9:08:A3:31:F9:A1:24:09:16:B9:70
+# Signature Algorithm: sha1WithRSAEncryption
+# 3b:9b:8f:56:9b:30:e7:53:99:7c:7a:79:a7:4d:97:d7:19:95:
+# 90:fb:06:1f:ca:33:7c:46:63:8f:96:66:24:fa:40:1b:21:27:
+# ca:e6:72:73:f2:4f:fe:31:99:fd:c8:0c:4c:68:53:c6:80:82:
+# 13:98:fa:b6:ad:da:5d:3d:f1:ce:6e:f6:15:11:94:82:0c:ee:
+# 3f:95:af:11:ab:0f:d7:2f:de:1f:03:8f:57:2c:1e:c9:bb:9a:
+# 1a:44:95:eb:18:4f:a6:1f:cd:7d:57:10:2f:9b:04:09:5a:84:
+# b5:6e:d8:1d:3a:e1:d6:9e:d1:6c:79:5e:79:1c:14:c5:e3:d0:
+# 4c:93:3b:65:3c:ed:df:3d:be:a6:e5:95:1a:c3:b5:19:c3:bd:
+# 5e:5b:bb:ff:23:ef:68:19:cb:12:93:27:5c:03:2d:6f:30:d0:
+# 1e:b6:1a:ac:de:5a:f7:d1:aa:a8:27:a6:fe:79:81:c4:79:99:
+# 33:57:ba:12:b0:a9:e0:42:6c:93:ca:56:de:fe:6d:84:0b:08:
+# 8b:7e:8d:ea:d7:98:21:c6:f3:e7:3c:79:2f:5e:9c:d1:4c:15:
+# 8d:e1:ec:22:37:cc:9a:43:0b:97:dc:80:90:8d:b3:67:9b:6f:
+# 48:08:15:56:cf:bf:f1:2b:7c:5e:9a:76:e9:59:90:c5:7c:83:
+# 35:11:65:51
-----BEGIN CERTIFICATE-----
-MIIEdDCCA1ygAwIBAgIQRL4Mi1AAJLQR0zYq/mUK/TANBgkqhkiG9w0BAQUFADCB
-lzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug
-Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho
-dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3Qt
-SGFyZHdhcmUwHhcNOTkwNzA5MTgxMDQyWhcNMTkwNzA5MTgxOTIyWjCBlzELMAkG
-A1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2UgQ2l0eTEe
-MBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExhodHRwOi8v
-d3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3QtSGFyZHdh
-cmUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCx98M4P7Sof885glFn
-0G2f0v9Y8+efK+wNiVSZuTiZFvfgIXlIwrthdBKWHTxqctU8EGc6Oe0rE81m65UJ
-M6Rsl7HoxuzBdXmcRl6Nq9Bq/bkqVRcQVLMZ8Jr28bFdtqdt++BxF2uiiPsA3/4a
-MXcMmgF6sTLjKwEHOG7DpV4jvEWbe1DByTCP2+UretNb+zNAHqDVmBe8i4fDidNd
-oI6yqqr2jmmIBsX6iSHzCJ1pLgkzmykNRg+MzEk0sGlRvfkGzWitZky8PqxhvQqI
-DsjfPe58BEydCl5rkdbux+0ojatNh4lz0G6k0B4WixThdkQDf2Os5M1JnMWS9Ksy
-oUhbAgMBAAGjgbkwgbYwCwYDVR0PBAQDAgHGMA8GA1UdEwEB/wQFMAMBAf8wHQYD
-VR0OBBYEFKFyXyYbKJhDlV0HN9WFlp1L0sNFMEQGA1UdHwQ9MDswOaA3oDWGM2h0
-dHA6Ly9jcmwudXNlcnRydXN0LmNvbS9VVE4tVVNFUkZpcnN0LUhhcmR3YXJlLmNy
-bDAxBgNVHSUEKjAoBggrBgEFBQcDAQYIKwYBBQUHAwUGCCsGAQUFBwMGBggrBgEF
-BQcDBzANBgkqhkiG9w0BAQUFAAOCAQEARxkP3nTGmZev/K0oXnWO6y1n7k57K9cM
-//bey1WiCuFMVGWTYGufEpytXoMs61quwOQt9ABjHbjAbPLPSbtNk28Gpgoiskli
-CE7/yMgUsogWXecB5BKV5UU0s4tpvc+0hY91UZ59Ojg6FEgSxvunOxqNDYJAB+gE
-CJChicsZUN/KHAG8HQQZexB2lzvukJDKxA4fFm517zP4029bHpbj4HR3dHuKom4t
-3XbWOTCC8KucUvIqx69JXn7HaOWCgchqJ/kniCrVWFCVH/A7HFe7fRQ5YiuayZSS
-KqMiDP+JJn1fIytH1xUdqWqeUQ0qUZ6B+dQ7XnASfxAynB67nfhmqA==
+MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML
+RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp
+bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5
+IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3
+MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3
+LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp
+YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG
+A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq
+K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe
+sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX
+MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT
+XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/
+HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH
+4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
+HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub
+j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo
+U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf
+zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b
+u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+
+bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er
+fF6adulZkMV8gzURZVE=
-----END CERTIFICATE-----
-# Subject: C=US, O=Network Solutions L.L.C., CN=Network Solutions Certificate Authority
-# Issuer: C=US, O=Network Solutions L.L.C., CN=Network Solutions Certificate Authority
+# GeoTrust Global CA.pem
+# Certificate:
+# Data:
+# Version: 3 (0x2)
+# Serial Number: 144470 (0x23456)
+# Signature Algorithm: sha1WithRSAEncryption
+# Issuer: C=US, O=GeoTrust Inc., CN=GeoTrust Global CA
+# Validity
+# Not Before: May 21 04:00:00 2002 GMT
+# Not After : May 21 04:00:00 2022 GMT
+# Subject: C=US, O=GeoTrust Inc., CN=GeoTrust Global CA
+# Subject Public Key Info:
+# Public Key Algorithm: rsaEncryption
+# Public-Key: (2048 bit)
+# Modulus:
+# 00:da:cc:18:63:30:fd:f4:17:23:1a:56:7e:5b:df:
+# 3c:6c:38:e4:71:b7:78:91:d4:bc:a1:d8:4c:f8:a8:
+# 43:b6:03:e9:4d:21:07:08:88:da:58:2f:66:39:29:
+# bd:05:78:8b:9d:38:e8:05:b7:6a:7e:71:a4:e6:c4:
+# 60:a6:b0:ef:80:e4:89:28:0f:9e:25:d6:ed:83:f3:
+# ad:a6:91:c7:98:c9:42:18:35:14:9d:ad:98:46:92:
+# 2e:4f:ca:f1:87:43:c1:16:95:57:2d:50:ef:89:2d:
+# 80:7a:57:ad:f2:ee:5f:6b:d2:00:8d:b9:14:f8:14:
+# 15:35:d9:c0:46:a3:7b:72:c8:91:bf:c9:55:2b:cd:
+# d0:97:3e:9c:26:64:cc:df:ce:83:19:71:ca:4e:e6:
+# d4:d5:7b:a9:19:cd:55:de:c8:ec:d2:5e:38:53:e5:
+# 5c:4f:8c:2d:fe:50:23:36:fc:66:e6:cb:8e:a4:39:
+# 19:00:b7:95:02:39:91:0b:0e:fe:38:2e:d1:1d:05:
+# 9a:f6:4d:3e:6f:0f:07:1d:af:2c:1e:8f:60:39:e2:
+# fa:36:53:13:39:d4:5e:26:2b:db:3d:a8:14:bd:32:
+# eb:18:03:28:52:04:71:e5:ab:33:3d:e1:38:bb:07:
+# 36:84:62:9c:79:ea:16:30:f4:5f:c0:2b:e8:71:6b:
+# e4:f9
+# Exponent: 65537 (0x10001)
+# X509v3 extensions:
+# X509v3 Basic Constraints: critical
+# CA:TRUE
+# X509v3 Subject Key Identifier:
+# C0:7A:98:68:8D:89:FB:AB:05:64:0C:11:7D:AA:7D:65:B8:CA:CC:4E
+# X509v3 Authority Key Identifier:
+# keyid:C0:7A:98:68:8D:89:FB:AB:05:64:0C:11:7D:AA:7D:65:B8:CA:CC:4E
+#
+# Signature Algorithm: sha1WithRSAEncryption
+# 35:e3:29:6a:e5:2f:5d:54:8e:29:50:94:9f:99:1a:14:e4:8f:
+# 78:2a:62:94:a2:27:67:9e:d0:cf:1a:5e:47:e9:c1:b2:a4:cf:
+# dd:41:1a:05:4e:9b:4b:ee:4a:6f:55:52:b3:24:a1:37:0a:eb:
+# 64:76:2a:2e:2c:f3:fd:3b:75:90:bf:fa:71:d8:c7:3d:37:d2:
+# b5:05:95:62:b9:a6:de:89:3d:36:7b:38:77:48:97:ac:a6:20:
+# 8f:2e:a6:c9:0c:c2:b2:99:45:00:c7:ce:11:51:22:22:e0:a5:
+# ea:b6:15:48:09:64:ea:5e:4f:74:f7:05:3e:c7:8a:52:0c:db:
+# 15:b4:bd:6d:9b:e5:c6:b1:54:68:a9:e3:69:90:b6:9a:a5:0f:
+# b8:b9:3f:20:7d:ae:4a:b5:b8:9c:e4:1d:b6:ab:e6:94:a5:c1:
+# c7:83:ad:db:f5:27:87:0e:04:6c:d5:ff:dd:a0:5d:ed:87:52:
+# b7:2b:15:02:ae:39:a6:6a:74:e9:da:c4:e7:bc:4d:34:1e:a9:
+# 5c:4d:33:5f:92:09:2f:88:66:5d:77:97:c7:1d:76:13:a9:d5:
+# e5:f1:16:09:11:35:d5:ac:db:24:71:70:2c:98:56:0b:d9:17:
+# b4:d1:e3:51:2b:5e:75:e8:d5:d0:dc:4f:34:ed:c2:05:66:80:
+# a1:cb:e6:33
-----BEGIN CERTIFICATE-----
-MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi
-MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu
-MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp
-dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV
-UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO
-ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG
-SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz
-c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP
-OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl
-mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF
-BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4
-qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw
-gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB
-BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu
-bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp
-dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8
-6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/
-h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH
-/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv
-wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN
-pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey
+MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT
+MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i
+YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG
+EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg
+R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9
+9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq
+fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv
+iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU
+1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+
+bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW
+MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA
+ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l
+uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn
+Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS
+tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF
+PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un
+hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV
+5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw==
-----END CERTIFICATE-----
-# Subject: C=US, O=The Go Daddy Group, Inc., OU=Go Daddy Class 2 Certification Authority
-# Issuer: C=US, O=The Go Daddy Group, Inc., OU=Go Daddy Class 2 Certification Authority
+# GeoTrust Primary Certification Authority - G2.pem
+# Certificate:
+# Data:
+# Version: 3 (0x2)
+# Serial Number:
+# 3c:b2:f4:48:0a:00:e2:fe:eb:24:3b:5e:60:3e:c3:6b
+# Signature Algorithm: ecdsa-with-SHA384
+# Issuer: C=US, O=GeoTrust Inc., OU=(c) 2007 GeoTrust Inc. - For authorized use only, CN=GeoTrust Primary Certification Authority - G2
+# Validity
+# Not Before: Nov 5 00:00:00 2007 GMT
+# Not After : Jan 18 23:59:59 2038 GMT
+# Subject: C=US, O=GeoTrust Inc., OU=(c) 2007 GeoTrust Inc. - For authorized use only, CN=GeoTrust Primary Certification Authority - G2
+# Subject Public Key Info:
+# Public Key Algorithm: id-ecPublicKey
+# Public-Key: (384 bit)
+# pub:
+# 04:15:b1:e8:fd:03:15:43:e5:ac:eb:87:37:11:62:
+# ef:d2:83:36:52:7d:45:57:0b:4a:8d:7b:54:3b:3a:
+# 6e:5f:15:02:c0:50:a6:cf:25:2f:7d:ca:48:b8:c7:
+# 50:63:1c:2a:21:08:7c:9a:36:d8:0b:fe:d1:26:c5:
+# 58:31:30:28:25:f3:5d:5d:a3:b8:b6:a5:b4:92:ed:
+# 6c:2c:9f:eb:dd:43:89:a2:3c:4b:48:91:1d:50:ec:
+# 26:df:d6:60:2e:bd:21
+# ASN1 OID: secp384r1
+# X509v3 extensions:
+# X509v3 Basic Constraints: critical
+# CA:TRUE
+# X509v3 Key Usage: critical
+# Certificate Sign, CRL Sign
+# X509v3 Subject Key Identifier:
+# 15:5F:35:57:51:55:FB:25:B2:AD:03:69:FC:01:A3:FA:BE:11:55:D5
+# Signature Algorithm: ecdsa-with-SHA384
+# 30:64:02:30:64:96:59:a6:e8:09:de:8b:ba:fa:5a:88:88:f0:
+# 1f:91:d3:46:a8:f2:4a:4c:02:63:fb:6c:5f:38:db:2e:41:93:
+# a9:0e:e6:9d:dc:31:1c:b2:a0:a7:18:1c:79:e1:c7:36:02:30:
+# 3a:56:af:9a:74:6c:f6:fb:83:e0:33:d3:08:5f:a1:9c:c2:5b:
+# 9f:46:d6:b6:cb:91:06:63:a2:06:e7:33:ac:3e:a8:81:12:d0:
+# cb:ba:d0:92:0b:b6:9e:96:aa:04:0f:8a
+-----BEGIN CERTIFICATE-----
+MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL
+MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj
+KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2
+MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0
+eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV
+BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw
+NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV
+BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH
+MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL
+So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal
+tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
+BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG
+CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT
+qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz
+rD6ogRLQy7rQkgu2npaqBA+K
+-----END CERTIFICATE-----
+# GeoTrust Primary Certification Authority - G3.pem
+# Certificate:
+# Data:
+# Version: 3 (0x2)
+# Serial Number:
+# 15:ac:6e:94:19:b2:79:4b:41:f6:27:a9:c3:18:0f:1f
+# Signature Algorithm: sha256WithRSAEncryption
+# Issuer: C=US, O=GeoTrust Inc., OU=(c) 2008 GeoTrust Inc. - For authorized use only, CN=GeoTrust Primary Certification Authority - G3
+# Validity
+# Not Before: Apr 2 00:00:00 2008 GMT
+# Not After : Dec 1 23:59:59 2037 GMT
+# Subject: C=US, O=GeoTrust Inc., OU=(c) 2008 GeoTrust Inc. - For authorized use only, CN=GeoTrust Primary Certification Authority - G3
+# Subject Public Key Info:
+# Public Key Algorithm: rsaEncryption
+# Public-Key: (2048 bit)
+# Modulus:
+# 00:dc:e2:5e:62:58:1d:33:57:39:32:33:fa:eb:cb:
+# 87:8c:a7:d4:4a:dd:06:88:ea:64:8e:31:98:a5:38:
+# 90:1e:98:cf:2e:63:2b:f0:46:bc:44:b2:89:a1:c0:
+# 28:0c:49:70:21:95:9f:64:c0:a6:93:12:02:65:26:
+# 86:c6:a5:89:f0:fa:d7:84:a0:70:af:4f:1a:97:3f:
+# 06:44:d5:c9:eb:72:10:7d:e4:31:28:fb:1c:61:e6:
+# 28:07:44:73:92:22:69:a7:03:88:6c:9d:63:c8:52:
+# da:98:27:e7:08:4c:70:3e:b4:c9:12:c1:c5:67:83:
+# 5d:33:f3:03:11:ec:6a:d0:53:e2:d1:ba:36:60:94:
+# 80:bb:61:63:6c:5b:17:7e:df:40:94:1e:ab:0d:c2:
+# 21:28:70:88:ff:d6:26:6c:6c:60:04:25:4e:55:7e:
+# 7d:ef:bf:94:48:de:b7:1d:dd:70:8d:05:5f:88:a5:
+# 9b:f2:c2:ee:ea:d1:40:41:6d:62:38:1d:56:06:c5:
+# 03:47:51:20:19:fc:7b:10:0b:0e:62:ae:76:55:bf:
+# 5f:77:be:3e:49:01:53:3d:98:25:03:76:24:5a:1d:
+# b4:db:89:ea:79:e5:b6:b3:3b:3f:ba:4c:28:41:7f:
+# 06:ac:6a:8e:c1:d0:f6:05:1d:7d:e6:42:86:e3:a5:
+# d5:47
+# Exponent: 65537 (0x10001)
+# X509v3 extensions:
+# X509v3 Basic Constraints: critical
+# CA:TRUE
+# X509v3 Key Usage: critical
+# Certificate Sign, CRL Sign
+# X509v3 Subject Key Identifier:
+# C4:79:CA:8E:A1:4E:03:1D:1C:DC:6B:DB:31:5B:94:3E:3F:30:7F:2D
+# Signature Algorithm: sha256WithRSAEncryption
+# 2d:c5:13:cf:56:80:7b:7a:78:bd:9f:ae:2c:99:e7:ef:da:df:
+# 94:5e:09:69:a7:e7:6e:68:8c:bd:72:be:47:a9:0e:97:12:b8:
+# 4a:f1:64:d3:39:df:25:34:d4:c1:cd:4e:81:f0:0f:04:c4:24:
+# b3:34:96:c6:a6:aa:30:df:68:61:73:d7:f9:8e:85:89:ef:0e:
+# 5e:95:28:4a:2a:27:8f:10:8e:2e:7c:86:c4:02:9e:da:0c:77:
+# 65:0e:44:0d:92:fd:fd:b3:16:36:fa:11:0d:1d:8c:0e:07:89:
+# 6a:29:56:f7:72:f4:dd:15:9c:77:35:66:57:ab:13:53:d8:8e:
+# c1:40:c5:d7:13:16:5a:72:c7:b7:69:01:c4:7a:b1:83:01:68:
+# 7d:8d:41:a1:94:18:c1:25:5c:fc:f0:fe:83:02:87:7c:0d:0d:
+# cf:2e:08:5c:4a:40:0d:3e:ec:81:61:e6:24:db:ca:e0:0e:2d:
+# 07:b2:3e:56:dc:8d:f5:41:85:07:48:9b:0c:0b:cb:49:3f:7d:
+# ec:b7:fd:cb:8d:67:89:1a:ab:ed:bb:1e:a3:00:08:08:17:2a:
+# 82:5c:31:5d:46:8a:2d:0f:86:9b:74:d9:45:fb:d4:40:b1:7a:
+# aa:68:2d:86:b2:99:22:e1:c1:2b:c7:9c:f8:f3:5f:a8:82:12:
+# eb:19:11:2d
+-----BEGIN CERTIFICATE-----
+MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB
+mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT
+MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s
+eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv
+cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ
+BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg
+MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0
+BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
+LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz
++uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm
+hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn
+5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W
+JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL
+DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC
+huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw
+HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB
+AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB
+zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN
+kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD
+AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH
+SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G
+spki4cErx5z481+oghLrGREt
+-----END CERTIFICATE-----
+# GeoTrust Primary Certification Authority.pem
+# Certificate:
+# Data:
+# Version: 3 (0x2)
+# Serial Number:
+# 18:ac:b5:6a:fd:69:b6:15:3a:63:6c:af:da:fa:c4:a1
+# Signature Algorithm: sha1WithRSAEncryption
+# Issuer: C=US, O=GeoTrust Inc., CN=GeoTrust Primary Certification Authority
+# Validity
+# Not Before: Nov 27 00:00:00 2006 GMT
+# Not After : Jul 16 23:59:59 2036 GMT
+# Subject: C=US, O=GeoTrust Inc., CN=GeoTrust Primary Certification Authority
+# Subject Public Key Info:
+# Public Key Algorithm: rsaEncryption
+# Public-Key: (2048 bit)
+# Modulus:
+# 00:be:b8:15:7b:ff:d4:7c:7d:67:ad:83:64:7b:c8:
+# 42:53:2d:df:f6:84:08:20:61:d6:01:59:6a:9c:44:
+# 11:af:ef:76:fd:95:7e:ce:61:30:bb:7a:83:5f:02:
+# bd:01:66:ca:ee:15:8d:6f:a1:30:9c:bd:a1:85:9e:
+# 94:3a:f3:56:88:00:31:cf:d8:ee:6a:96:02:d9:ed:
+# 03:8c:fb:75:6d:e7:ea:b8:55:16:05:16:9a:f4:e0:
+# 5e:b1:88:c0:64:85:5c:15:4d:88:c7:b7:ba:e0:75:
+# e9:ad:05:3d:9d:c7:89:48:e0:bb:28:c8:03:e1:30:
+# 93:64:5e:52:c0:59:70:22:35:57:88:8a:f1:95:0a:
+# 83:d7:bc:31:73:01:34:ed:ef:46:71:e0:6b:02:a8:
+# 35:72:6b:97:9b:66:e0:cb:1c:79:5f:d8:1a:04:68:
+# 1e:47:02:e6:9d:60:e2:36:97:01:df:ce:35:92:df:
+# be:67:c7:6d:77:59:3b:8f:9d:d6:90:15:94:bc:42:
+# 34:10:c1:39:f9:b1:27:3e:7e:d6:8a:75:c5:b2:af:
+# 96:d3:a2:de:9b:e4:98:be:7d:e1:e9:81:ad:b6:6f:
+# fc:d7:0e:da:e0:34:b0:0d:1a:77:e7:e3:08:98:ef:
+# 58:fa:9c:84:b7:36:af:c2:df:ac:d2:f4:10:06:70:
+# 71:35
+# Exponent: 65537 (0x10001)
+# X509v3 extensions:
+# X509v3 Basic Constraints: critical
+# CA:TRUE
+# X509v3 Key Usage: critical
+# Certificate Sign, CRL Sign
+# X509v3 Subject Key Identifier:
+# 2C:D5:50:41:97:15:8B:F0:8F:36:61:5B:4A:FB:6B:D9:99:C9:33:92
+# Signature Algorithm: sha1WithRSAEncryption
+# 5a:70:7f:2c:dd:b7:34:4f:f5:86:51:a9:26:be:4b:b8:aa:f1:
+# 71:0d:dc:61:c7:a0:ea:34:1e:7a:77:0f:04:35:e8:27:8f:6c:
+# 90:bf:91:16:24:46:3e:4a:4e:ce:2b:16:d5:0b:52:1d:fc:1f:
+# 67:a2:02:45:31:4f:ce:f3:fa:03:a7:79:9d:53:6a:d9:da:63:
+# 3a:f8:80:d7:d3:99:e1:a5:e1:be:d4:55:71:98:35:3a:be:93:
+# ea:ae:ad:42:b2:90:6f:e0:fc:21:4d:35:63:33:89:49:d6:9b:
+# 4e:ca:c7:e7:4e:09:00:f7:da:c7:ef:99:62:99:77:b6:95:22:
+# 5e:8a:a0:ab:f4:b8:78:98:ca:38:19:99:c9:72:9e:78:cd:4b:
+# ac:af:19:a0:73:12:2d:fc:c2:41:ba:81:91:da:16:5a:31:b7:
+# f9:b4:71:80:12:48:99:72:73:5a:59:53:c1:63:52:33:ed:a7:
+# c9:d2:39:02:70:fa:e0:b1:42:66:29:aa:9b:51:ed:30:54:22:
+# 14:5f:d9:ab:1d:c1:e4:94:f0:f8:f5:2b:f7:ea:ca:78:46:d6:
+# b8:91:fd:a6:0d:2b:1a:14:01:3e:80:f0:42:a0:95:07:5e:6d:
+# cd:cc:4b:a4:45:8d:ab:12:e8:b3:de:5a:e5:a0:7c:e8:0f:22:
+# 1d:5a:e9:59
+-----BEGIN CERTIFICATE-----
+MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY
+MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo
+R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx
+MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK
+Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9
+AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA
+ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0
+7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W
+kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI
+mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ
+KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1
+6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl
+4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K
+oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj
+UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU
+AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk=
+-----END CERTIFICATE-----
+# Go Daddy Class 2 Certification Authority.pem
+# Certificate:
+# Data:
+# Version: 3 (0x2)
+# Serial Number: 0 (0x0)
+# Signature Algorithm: sha1WithRSAEncryption
+# Issuer: C=US, O=The Go Daddy Group, Inc., OU=Go Daddy Class 2 Certification Authority
+# Validity
+# Not Before: Jun 29 17:06:20 2004 GMT
+# Not After : Jun 29 17:06:20 2034 GMT
+# Subject: C=US, O=The Go Daddy Group, Inc., OU=Go Daddy Class 2 Certification Authority
+# Subject Public Key Info:
+# Public Key Algorithm: rsaEncryption
+# Public-Key: (2048 bit)
+# Modulus:
+# 00:de:9d:d7:ea:57:18:49:a1:5b:eb:d7:5f:48:86:
+# ea:be:dd:ff:e4:ef:67:1c:f4:65:68:b3:57:71:a0:
+# 5e:77:bb:ed:9b:49:e9:70:80:3d:56:18:63:08:6f:
+# da:f2:cc:d0:3f:7f:02:54:22:54:10:d8:b2:81:d4:
+# c0:75:3d:4b:7f:c7:77:c3:3e:78:ab:1a:03:b5:20:
+# 6b:2f:6a:2b:b1:c5:88:7e:c4:bb:1e:b0:c1:d8:45:
+# 27:6f:aa:37:58:f7:87:26:d7:d8:2d:f6:a9:17:b7:
+# 1f:72:36:4e:a6:17:3f:65:98:92:db:2a:6e:5d:a2:
+# fe:88:e0:0b:de:7f:e5:8d:15:e1:eb:cb:3a:d5:e2:
+# 12:a2:13:2d:d8:8e:af:5f:12:3d:a0:08:05:08:b6:
+# 5c:a5:65:38:04:45:99:1e:a3:60:60:74:c5:41:a5:
+# 72:62:1b:62:c5:1f:6f:5f:1a:42:be:02:51:65:a8:
+# ae:23:18:6a:fc:78:03:a9:4d:7f:80:c3:fa:ab:5a:
+# fc:a1:40:a4:ca:19:16:fe:b2:c8:ef:5e:73:0d:ee:
+# 77:bd:9a:f6:79:98:bc:b1:07:67:a2:15:0d:dd:a0:
+# 58:c6:44:7b:0a:3e:62:28:5f:ba:41:07:53:58:cf:
+# 11:7e:38:74:c5:f8:ff:b5:69:90:8f:84:74:ea:97:
+# 1b:af
+# Exponent: 3 (0x3)
+# X509v3 extensions:
+# X509v3 Subject Key Identifier:
+# D2:C4:B0:D2:91:D4:4C:11:71:B3:61:CB:3D:A1:FE:DD:A8:6A:D4:E3
+# X509v3 Authority Key Identifier:
+# keyid:D2:C4:B0:D2:91:D4:4C:11:71:B3:61:CB:3D:A1:FE:DD:A8:6A:D4:E3
+# DirName:/C=US/O=The Go Daddy Group, Inc./OU=Go Daddy Class 2 Certification Authority
+# serial:00
+#
+# X509v3 Basic Constraints:
+# CA:TRUE
+# Signature Algorithm: sha1WithRSAEncryption
+# 32:4b:f3:b2:ca:3e:91:fc:12:c6:a1:07:8c:8e:77:a0:33:06:
+# 14:5c:90:1e:18:f7:08:a6:3d:0a:19:f9:87:80:11:6e:69:e4:
+# 96:17:30:ff:34:91:63:72:38:ee:cc:1c:01:a3:1d:94:28:a4:
+# 31:f6:7a:c4:54:d7:f6:e5:31:58:03:a2:cc:ce:62:db:94:45:
+# 73:b5:bf:45:c9:24:b5:d5:82:02:ad:23:79:69:8d:b8:b6:4d:
+# ce:cf:4c:ca:33:23:e8:1c:88:aa:9d:8b:41:6e:16:c9:20:e5:
+# 89:9e:cd:3b:da:70:f7:7e:99:26:20:14:54:25:ab:6e:73:85:
+# e6:9b:21:9d:0a:6c:82:0e:a8:f8:c2:0c:fa:10:1e:6c:96:ef:
+# 87:0d:c4:0f:61:8b:ad:ee:83:2b:95:f8:8e:92:84:72:39:eb:
+# 20:ea:83:ed:83:cd:97:6e:08:bc:eb:4e:26:b6:73:2b:e4:d3:
+# f6:4c:fe:26:71:e2:61:11:74:4a:ff:57:1a:87:0f:75:48:2e:
+# cf:51:69:17:a0:02:12:61:95:d5:d1:40:b2:10:4c:ee:c4:ac:
+# 10:43:a6:a5:9e:0a:d5:95:62:9a:0d:cf:88:82:c5:32:0c:e4:
+# 2b:9f:45:e6:0d:9f:28:9c:b1:b9:2a:5a:57:ad:37:0f:af:1d:
+# 7f:db:bd:9f
-----BEGIN CERTIFICATE-----
MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh
MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE
@@ -185,8 +937,158 @@ HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER
dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf
ReYNnyicsbkqWletNw+vHX/bvZ8=
-----END CERTIFICATE-----
-# Subject: C=US, ST=Arizona, L=Scottsdale, O=GoDaddy.com, Inc., OU=http://certificates.godaddy.com/repository, CN=Go Daddy Secure Certification Authority/serialNumber=07969287
-# Issuer: C=US, O=The Go Daddy Group, Inc., OU=Go Daddy Class 2 Certification Authority
+# Go Daddy Root Certificate Authority - G2.pem
+# Certificate:
+# Data:
+# Version: 3 (0x2)
+# Serial Number: 0 (0x0)
+# Signature Algorithm: sha256WithRSAEncryption
+# Issuer: C=US, ST=Arizona, L=Scottsdale, O=GoDaddy.com, Inc., CN=Go Daddy Root Certificate Authority - G2
+# Validity
+# Not Before: Sep 1 00:00:00 2009 GMT
+# Not After : Dec 31 23:59:59 2037 GMT
+# Subject: C=US, ST=Arizona, L=Scottsdale, O=GoDaddy.com, Inc., CN=Go Daddy Root Certificate Authority - G2
+# Subject Public Key Info:
+# Public Key Algorithm: rsaEncryption
+# Public-Key: (2048 bit)
+# Modulus:
+# 00:bf:71:62:08:f1:fa:59:34:f7:1b:c9:18:a3:f7:
+# 80:49:58:e9:22:83:13:a6:c5:20:43:01:3b:84:f1:
+# e6:85:49:9f:27:ea:f6:84:1b:4e:a0:b4:db:70:98:
+# c7:32:01:b1:05:3e:07:4e:ee:f4:fa:4f:2f:59:30:
+# 22:e7:ab:19:56:6b:e2:80:07:fc:f3:16:75:80:39:
+# 51:7b:e5:f9:35:b6:74:4e:a9:8d:82:13:e4:b6:3f:
+# a9:03:83:fa:a2:be:8a:15:6a:7f:de:0b:c3:b6:19:
+# 14:05:ca:ea:c3:a8:04:94:3b:46:7c:32:0d:f3:00:
+# 66:22:c8:8d:69:6d:36:8c:11:18:b7:d3:b2:1c:60:
+# b4:38:fa:02:8c:ce:d3:dd:46:07:de:0a:3e:eb:5d:
+# 7c:c8:7c:fb:b0:2b:53:a4:92:62:69:51:25:05:61:
+# 1a:44:81:8c:2c:a9:43:96:23:df:ac:3a:81:9a:0e:
+# 29:c5:1c:a9:e9:5d:1e:b6:9e:9e:30:0a:39:ce:f1:
+# 88:80:fb:4b:5d:cc:32:ec:85:62:43:25:34:02:56:
+# 27:01:91:b4:3b:70:2a:3f:6e:b1:e8:9c:88:01:7d:
+# 9f:d4:f9:db:53:6d:60:9d:bf:2c:e7:58:ab:b8:5f:
+# 46:fc:ce:c4:1b:03:3c:09:eb:49:31:5c:69:46:b3:
+# e0:47
+# Exponent: 65537 (0x10001)
+# X509v3 extensions:
+# X509v3 Basic Constraints: critical
+# CA:TRUE
+# X509v3 Key Usage: critical
+# Certificate Sign, CRL Sign
+# X509v3 Subject Key Identifier:
+# 3A:9A:85:07:10:67:28:B6:EF:F6:BD:05:41:6E:20:C1:94:DA:0F:DE
+# Signature Algorithm: sha256WithRSAEncryption
+# 99:db:5d:79:d5:f9:97:59:67:03:61:f1:7e:3b:06:31:75:2d:
+# a1:20:8e:4f:65:87:b4:f7:a6:9c:bc:d8:e9:2f:d0:db:5a:ee:
+# cf:74:8c:73:b4:38:42:da:05:7b:f8:02:75:b8:fd:a5:b1:d7:
+# ae:f6:d7:de:13:cb:53:10:7e:8a:46:d1:97:fa:b7:2e:2b:11:
+# ab:90:b0:27:80:f9:e8:9f:5a:e9:37:9f:ab:e4:df:6c:b3:85:
+# 17:9d:3d:d9:24:4f:79:91:35:d6:5f:04:eb:80:83:ab:9a:02:
+# 2d:b5:10:f4:d8:90:c7:04:73:40:ed:72:25:a0:a9:9f:ec:9e:
+# ab:68:12:99:57:c6:8f:12:3a:09:a4:bd:44:fd:06:15:37:c1:
+# 9b:e4:32:a3:ed:38:e8:d8:64:f3:2c:7e:14:fc:02:ea:9f:cd:
+# ff:07:68:17:db:22:90:38:2d:7a:8d:d1:54:f1:69:e3:5f:33:
+# ca:7a:3d:7b:0a:e3:ca:7f:5f:39:e5:e2:75:ba:c5:76:18:33:
+# ce:2c:f0:2f:4c:ad:f7:b1:e7:ce:4f:a8:c4:9b:4a:54:06:c5:
+# 7f:7d:d5:08:0f:e2:1c:fe:7e:17:b8:ac:5e:f6:d4:16:b2:43:
+# 09:0c:4d:f6:a7:6b:b4:99:84:65:ca:7a:88:e2:e2:44:be:5c:
+# f7:ea:1c:f5
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT
+EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp
+ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz
+NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH
+EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE
+AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw
+DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD
+E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH
+/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy
+DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh
+GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR
+tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA
+AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE
+FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX
+WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu
+9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr
+gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo
+2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO
+LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI
+4uJEvlz36hz1
+-----END CERTIFICATE-----
+# Go Daddy Secure Certification Authority serialNumber=07969287.pem
+# Certificate:
+# Data:
+# Version: 3 (0x2)
+# Serial Number: 769 (0x301)
+# Signature Algorithm: sha1WithRSAEncryption
+# Issuer: C=US, O=The Go Daddy Group, Inc., OU=Go Daddy Class 2 Certification Authority
+# Validity
+# Not Before: Nov 16 01:54:37 2006 GMT
+# Not After : Nov 16 01:54:37 2026 GMT
+# Subject: C=US, ST=Arizona, L=Scottsdale, O=GoDaddy.com, Inc., OU=http://certificates.godaddy.com/repository, CN=Go Daddy Secure Certification Authority/serialNumber=07969287
+# Subject Public Key Info:
+# Public Key Algorithm: rsaEncryption
+# Public-Key: (2048 bit)
+# Modulus:
+# 00:c4:2d:d5:15:8c:9c:26:4c:ec:32:35:eb:5f:b8:
+# 59:01:5a:a6:61:81:59:3b:70:63:ab:e3:dc:3d:c7:
+# 2a:b8:c9:33:d3:79:e4:3a:ed:3c:30:23:84:8e:b3:
+# 30:14:b6:b2:87:c3:3d:95:54:04:9e:df:99:dd:0b:
+# 25:1e:21:de:65:29:7e:35:a8:a9:54:eb:f6:f7:32:
+# 39:d4:26:55:95:ad:ef:fb:fe:58:86:d7:9e:f4:00:
+# 8d:8c:2a:0c:bd:42:04:ce:a7:3f:04:f6:ee:80:f2:
+# aa:ef:52:a1:69:66:da:be:1a:ad:5d:da:2c:66:ea:
+# 1a:6b:bb:e5:1a:51:4a:00:2f:48:c7:98:75:d8:b9:
+# 29:c8:ee:f8:66:6d:0a:9c:b3:f3:fc:78:7c:a2:f8:
+# a3:f2:b5:c3:f3:b9:7a:91:c1:a7:e6:25:2e:9c:a8:
+# ed:12:65:6e:6a:f6:12:44:53:70:30:95:c3:9c:2b:
+# 58:2b:3d:08:74:4a:f2:be:51:b0:bf:87:d0:4c:27:
+# 58:6b:b5:35:c5:9d:af:17:31:f8:0b:8f:ee:ad:81:
+# 36:05:89:08:98:cf:3a:af:25:87:c0:49:ea:a7:fd:
+# 67:f7:45:8e:97:cc:14:39:e2:36:85:b5:7e:1a:37:
+# fd:16:f6:71:11:9a:74:30:16:fe:13:94:a3:3f:84:
+# 0d:4f
+# Exponent: 65537 (0x10001)
+# X509v3 extensions:
+# X509v3 Subject Key Identifier:
+# FD:AC:61:32:93:6C:45:D6:E2:EE:85:5F:9A:BA:E7:76:99:68:CC:E7
+# X509v3 Authority Key Identifier:
+# keyid:D2:C4:B0:D2:91:D4:4C:11:71:B3:61:CB:3D:A1:FE:DD:A8:6A:D4:E3
+#
+# X509v3 Basic Constraints: critical
+# CA:TRUE, pathlen:0
+# Authority Information Access:
+# OCSP - URI:http://ocsp.godaddy.com
+#
+# X509v3 CRL Distribution Points:
+#
+# Full Name:
+# URI:http://certificates.godaddy.com/repository/gdroot.crl
+#
+# X509v3 Certificate Policies:
+# Policy: X509v3 Any Policy
+# CPS: http://certificates.godaddy.com/repository
+#
+# X509v3 Key Usage: critical
+# Certificate Sign, CRL Sign
+# Signature Algorithm: sha1WithRSAEncryption
+# d2:86:c0:ec:bd:f9:a1:b6:67:ee:66:0b:a2:06:3a:04:50:8e:
+# 15:72:ac:4a:74:95:53:cb:37:cb:44:49:ef:07:90:6b:33:d9:
+# 96:f0:94:56:a5:13:30:05:3c:85:32:21:7b:c9:c7:0a:a8:24:
+# a4:90:de:46:d3:25:23:14:03:67:c2:10:d6:6f:0f:5d:7b:7a:
+# cc:9f:c5:58:2a:c1:c4:9e:21:a8:5a:f3:ac:a4:46:f3:9e:e4:
+# 63:cb:2f:90:a4:29:29:01:d9:72:2c:29:df:37:01:27:bc:4f:
+# ee:68:d3:21:8f:c0:b3:e4:f5:09:ed:d2:10:aa:53:b4:be:f0:
+# cc:59:0b:d6:3b:96:1c:95:24:49:df:ce:ec:fd:a7:48:91:14:
+# 45:0e:3a:36:6f:da:45:b3:45:a2:41:c9:d4:d7:44:4e:3e:b9:
+# 74:76:d5:a2:13:55:2c:c6:87:a3:b5:99:ac:06:84:87:7f:75:
+# 06:fc:bf:14:4c:0e:cc:6e:c4:df:3d:b7:12:71:f4:e8:f1:51:
+# 40:22:28:49:e0:1d:4b:87:a8:34:cc:06:a2:dd:12:5a:d1:86:
+# 36:64:03:35:6f:6f:77:6e:eb:f2:85:50:98:5e:ab:03:53:ad:
+# 91:23:63:1f:16:9c:cd:b9:b2:05:63:3a:e1:f4:68:1b:17:05:
+# 35:95:53:ee
-----BEGIN CERTIFICATE-----
MIIE3jCCA8agAwIBAgICAwEwDQYJKoZIhvcNAQEFBQAwYzELMAkGA1UEBhMCVVMx
ITAfBgNVBAoTGFRoZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28g
@@ -216,126 +1118,279 @@ RE4+uXR21aITVSzGh6O1mawGhId/dQb8vxRMDsxuxN89txJx9OjxUUAiKEngHUuH
qDTMBqLdElrRhjZkAzVvb3du6/KFUJheqwNTrZEjYx8WnM25sgVjOuH0aBsXBTWV
U+4=
-----END CERTIFICATE-----
-# Subject: C=US, ST=Arizona, L=Scottsdale, O=GoDaddy.com, Inc., CN=Go Daddy Root Certificate Authority - G2
-# Issuer: C=US, ST=Arizona, L=Scottsdale, O=GoDaddy.com, Inc., CN=Go Daddy Root Certificate Authority - G2
+# Thawte Premium Server CA.pem
+# Certificate:
+# Data:
+# Version: 3 (0x2)
+# Serial Number: 1 (0x1)
+# Signature Algorithm: md5WithRSAEncryption
+# Issuer: C=ZA, ST=Western Cape, L=Cape Town, O=Thawte Consulting cc, OU=Certification Services Division, CN=Thawte Premium Server CA/emailAddress=premium-server@thawte.com
+# Validity
+# Not Before: Aug 1 00:00:00 1996 GMT
+# Not After : Dec 31 23:59:59 2020 GMT
+# Subject: C=ZA, ST=Western Cape, L=Cape Town, O=Thawte Consulting cc, OU=Certification Services Division, CN=Thawte Premium Server CA/emailAddress=premium-server@thawte.com
+# Subject Public Key Info:
+# Public Key Algorithm: rsaEncryption
+# Public-Key: (1024 bit)
+# Modulus:
+# 00:d2:36:36:6a:8b:d7:c2:5b:9e:da:81:41:62:8f:
+# 38:ee:49:04:55:d6:d0:ef:1c:1b:95:16:47:ef:18:
+# 48:35:3a:52:f4:2b:6a:06:8f:3b:2f:ea:56:e3:af:
+# 86:8d:9e:17:f7:9e:b4:65:75:02:4d:ef:cb:09:a2:
+# 21:51:d8:9b:d0:67:d0:ba:0d:92:06:14:73:d4:93:
+# cb:97:2a:00:9c:5c:4e:0c:bc:fa:15:52:fc:f2:44:
+# 6e:da:11:4a:6e:08:9f:2f:2d:e3:f9:aa:3a:86:73:
+# b6:46:53:58:c8:89:05:bd:83:11:b8:73:3f:aa:07:
+# 8d:f4:42:4d:e7:40:9d:1c:37
+# Exponent: 65537 (0x10001)
+# X509v3 extensions:
+# X509v3 Basic Constraints: critical
+# CA:TRUE
+# Signature Algorithm: md5WithRSAEncryption
+# 26:48:2c:16:c2:58:fa:e8:16:74:0c:aa:aa:5f:54:3f:f2:d7:
+# c9:78:60:5e:5e:6e:37:63:22:77:36:7e:b2:17:c4:34:b9:f5:
+# 08:85:fc:c9:01:38:ff:4d:be:f2:16:42:43:e7:bb:5a:46:fb:
+# c1:c6:11:1f:f1:4a:b0:28:46:c9:c3:c4:42:7d:bc:fa:ab:59:
+# 6e:d5:b7:51:88:11:e3:a4:85:19:6b:82:4c:a4:0c:12:ad:e9:
+# a4:ae:3f:f1:c3:49:65:9a:8c:c5:c8:3e:25:b7:94:99:bb:92:
+# 32:71:07:f0:86:5e:ed:50:27:a6:0d:a6:23:f9:bb:cb:a6:07:
+# 14:42
-----BEGIN CERTIFICATE-----
-MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx
-EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT
-EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp
-ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz
-NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH
-EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE
-AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw
-DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD
-E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH
-/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy
-DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh
-GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR
-tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA
-AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE
-FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX
-WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu
-9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr
-gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo
-2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO
-LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI
-4uJEvlz36hz1
+MIIDJzCCApCgAwIBAgIBATANBgkqhkiG9w0BAQQFADCBzjELMAkGA1UEBhMCWkEx
+FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD
+VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv
+biBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFByZW1pdW0gU2Vy
+dmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNlcnZlckB0aGF3dGUuY29t
+MB4XDTk2MDgwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgc4xCzAJBgNVBAYTAlpB
+MRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEdMBsG
+A1UEChMUVGhhd3RlIENvbnN1bHRpbmcgY2MxKDAmBgNVBAsTH0NlcnRpZmljYXRp
+b24gU2VydmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQcmVtaXVtIFNl
+cnZlciBDQTEoMCYGCSqGSIb3DQEJARYZcHJlbWl1bS1zZXJ2ZXJAdGhhd3RlLmNv
+bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA0jY2aovXwlue2oFBYo847kkE
+VdbQ7xwblRZH7xhINTpS9CtqBo87L+pW46+GjZ4X9560ZXUCTe/LCaIhUdib0GfQ
+ug2SBhRz1JPLlyoAnFxODLz6FVL88kRu2hFKbgifLy3j+ao6hnO2RlNYyIkFvYMR
+uHM/qgeN9EJN50CdHDcCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG
+9w0BAQQFAAOBgQAmSCwWwlj66BZ0DKqqX1Q/8tfJeGBeXm43YyJ3Nn6yF8Q0ufUI
+hfzJATj/Tb7yFkJD57taRvvBxhEf8UqwKEbJw8RCfbz6q1lu1bdRiBHjpIUZa4JM
+pAwSremkrj/xw0llmozFyD4lt5SZu5IycQfwhl7tUCemDaYj+bvLpgcUQg==
-----END CERTIFICATE-----
-# Subject: C=US, O=GeoTrust Inc., CN=GeoTrust Global CA
-# Issuer: C=US, O=GeoTrust Inc., CN=GeoTrust Global CA
+# Thawte Primary Root CA - G2.pem
+# Certificate:
+# Data:
+# Version: 3 (0x2)
+# Serial Number:
+# 35:fc:26:5c:d9:84:4f:c9:3d:26:3d:57:9b:ae:d7:56
+# Signature Algorithm: ecdsa-with-SHA384
+# Issuer: C=US, O=thawte, Inc., OU=(c) 2007 thawte, Inc. - For authorized use only, CN=thawte Primary Root CA - G2
+# Validity
+# Not Before: Nov 5 00:00:00 2007 GMT
+# Not After : Jan 18 23:59:59 2038 GMT
+# Subject: C=US, O=thawte, Inc., OU=(c) 2007 thawte, Inc. - For authorized use only, CN=thawte Primary Root CA - G2
+# Subject Public Key Info:
+# Public Key Algorithm: id-ecPublicKey
+# Public-Key: (384 bit)
+# pub:
+# 04:a2:d5:9c:82:7b:95:9d:f1:52:78:87:fe:8a:16:
+# bf:05:e6:df:a3:02:4f:0d:07:c6:00:51:ba:0c:02:
+# 52:2d:22:a4:42:39:c4:fe:8f:ea:c9:c1:be:d4:4d:
+# ff:9f:7a:9e:e2:b1:7c:9a:ad:a7:86:09:73:87:d1:
+# e7:9a:e3:7a:a5:aa:6e:fb:ba:b3:70:c0:67:88:a2:
+# 35:d4:a3:9a:b1:fd:ad:c2:ef:31:fa:a8:b9:f3:fb:
+# 08:c6:91:d1:fb:29:95
+# ASN1 OID: secp384r1
+# X509v3 extensions:
+# X509v3 Basic Constraints: critical
+# CA:TRUE
+# X509v3 Key Usage: critical
+# Certificate Sign, CRL Sign
+# X509v3 Subject Key Identifier:
+# 9A:D8:00:30:00:E7:6B:7F:85:18:EE:8B:B6:CE:8A:0C:F8:11:E1:BB
+# Signature Algorithm: ecdsa-with-SHA384
+# 30:66:02:31:00:dd:f8:e0:57:47:5b:a7:e6:0a:c3:bd:f5:80:
+# 8a:97:35:0d:1b:89:3c:54:86:77:28:ca:a1:f4:79:de:b5:e6:
+# 38:b0:f0:65:70:8c:7f:02:54:c2:bf:ff:d8:a1:3e:d9:cf:02:
+# 31:00:c4:8d:94:fc:dc:53:d2:dc:9d:78:16:1f:15:33:23:53:
+# 52:e3:5a:31:5d:9d:ca:ae:bd:13:29:44:0d:27:5b:a8:e7:68:
+# 9c:12:f7:58:3f:2e:72:02:57:a3:8f:a1:14:2e
-----BEGIN CERTIFICATE-----
-MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT
-MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i
-YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG
-EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg
-R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9
-9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq
-fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv
-iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU
-1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+
-bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW
-MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA
-ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l
-uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn
-Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS
-tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF
-PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un
-hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV
-5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw==
+MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL
+MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp
+IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi
+BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw
+MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh
+d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig
+YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v
+dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/
+BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6
+papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E
+BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K
+DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3
+KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox
+XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg==
-----END CERTIFICATE-----
-# Subject: C=US, O=GeoTrust Inc., CN=GeoTrust Primary Certification Authority
-# Issuer: C=US, O=GeoTrust Inc., CN=GeoTrust Primary Certification Authority
+# Thawte Primary Root CA - G3.pem
+# Certificate:
+# Data:
+# Version: 3 (0x2)
+# Serial Number:
+# 60:01:97:b7:46:a7:ea:b4:b4:9a:d6:4b:2f:f7:90:fb
+# Signature Algorithm: sha256WithRSAEncryption
+# Issuer: C=US, O=thawte, Inc., OU=Certification Services Division, OU=(c) 2008 thawte, Inc. - For authorized use only, CN=thawte Primary Root CA - G3
+# Validity
+# Not Before: Apr 2 00:00:00 2008 GMT
+# Not After : Dec 1 23:59:59 2037 GMT
+# Subject: C=US, O=thawte, Inc., OU=Certification Services Division, OU=(c) 2008 thawte, Inc. - For authorized use only, CN=thawte Primary Root CA - G3
+# Subject Public Key Info:
+# Public Key Algorithm: rsaEncryption
+# Public-Key: (2048 bit)
+# Modulus:
+# 00:b2:bf:27:2c:fb:db:d8:5b:dd:78:7b:1b:9e:77:
+# 66:81:cb:3e:bc:7c:ae:f3:a6:27:9a:34:a3:68:31:
+# 71:38:33:62:e4:f3:71:66:79:b1:a9:65:a3:a5:8b:
+# d5:8f:60:2d:3f:42:cc:aa:6b:32:c0:23:cb:2c:41:
+# dd:e4:df:fc:61:9c:e2:73:b2:22:95:11:43:18:5f:
+# c4:b6:1f:57:6c:0a:05:58:22:c8:36:4c:3a:7c:a5:
+# d1:cf:86:af:88:a7:44:02:13:74:71:73:0a:42:59:
+# 02:f8:1b:14:6b:42:df:6f:5f:ba:6b:82:a2:9d:5b:
+# e7:4a:bd:1e:01:72:db:4b:74:e8:3b:7f:7f:7d:1f:
+# 04:b4:26:9b:e0:b4:5a:ac:47:3d:55:b8:d7:b0:26:
+# 52:28:01:31:40:66:d8:d9:24:bd:f6:2a:d8:ec:21:
+# 49:5c:9b:f6:7a:e9:7f:55:35:7e:96:6b:8d:93:93:
+# 27:cb:92:bb:ea:ac:40:c0:9f:c2:f8:80:cf:5d:f4:
+# 5a:dc:ce:74:86:a6:3e:6c:0b:53:ca:bd:92:ce:19:
+# 06:72:e6:0c:5c:38:69:c7:04:d6:bc:6c:ce:5b:f6:
+# f7:68:9c:dc:25:15:48:88:a1:e9:a9:f8:98:9c:e0:
+# f3:d5:31:28:61:11:6c:67:96:8d:39:99:cb:c2:45:
+# 24:39
+# Exponent: 65537 (0x10001)
+# X509v3 extensions:
+# X509v3 Basic Constraints: critical
+# CA:TRUE
+# X509v3 Key Usage: critical
+# Certificate Sign, CRL Sign
+# X509v3 Subject Key Identifier:
+# AD:6C:AA:94:60:9C:ED:E4:FF:FA:3E:0A:74:2B:63:03:F7:B6:59:BF
+# Signature Algorithm: sha256WithRSAEncryption
+# 1a:40:d8:95:65:ac:09:92:89:c6:39:f4:10:e5:a9:0e:66:53:
+# 5d:78:de:fa:24:91:bb:e7:44:51:df:c6:16:34:0a:ef:6a:44:
+# 51:ea:2b:07:8a:03:7a:c3:eb:3f:0a:2c:52:16:a0:2b:43:b9:
+# 25:90:3f:70:a9:33:25:6d:45:1a:28:3b:27:cf:aa:c3:29:42:
+# 1b:df:3b:4c:c0:33:34:5b:41:88:bf:6b:2b:65:af:28:ef:b2:
+# f5:c3:aa:66:ce:7b:56:ee:b7:c8:cb:67:c1:c9:9c:1a:18:b8:
+# c4:c3:49:03:f1:60:0e:50:cd:46:c5:f3:77:79:f7:b6:15:e0:
+# 38:db:c7:2f:28:a0:0c:3f:77:26:74:d9:25:12:da:31:da:1a:
+# 1e:dc:29:41:91:22:3c:69:a7:bb:02:f2:b6:5c:27:03:89:f4:
+# 06:ea:9b:e4:72:82:e3:a1:09:c1:e9:00:19:d3:3e:d4:70:6b:
+# ba:71:a6:aa:58:ae:f4:bb:e9:6c:b6:ef:87:cc:9b:bb:ff:39:
+# e6:56:61:d3:0a:a7:c4:5c:4c:60:7b:05:77:26:7a:bf:d8:07:
+# 52:2c:62:f7:70:63:d9:39:bc:6f:1c:c2:79:dc:76:29:af:ce:
+# c5:2c:64:04:5e:88:36:6e:31:d4:40:1a:62:34:36:3f:35:01:
+# ae:ac:63:a0
-----BEGIN CERTIFICATE-----
-MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY
-MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo
-R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx
-MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK
-Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp
-ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
-AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9
-AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA
-ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0
-7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W
-kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI
-mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G
-A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ
-KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1
-6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl
-4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K
-oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj
-UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU
-AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk=
+MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB
+rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
+Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
+MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV
+BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa
+Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl
+LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u
+MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl
+ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm
+gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8
+YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf
+b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9
+9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S
+zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk
+OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV
+HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA
+2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW
+oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu
+t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c
+KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM
+m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu
+MdRAGmI0Nj81Aa6sY6A=
-----END CERTIFICATE-----
-# Subject: C=US, O=The Go Daddy Group, Inc., OU=Go Daddy Class 2 Certification Authority
-# Issuer: L=ValiCert Validation Network, O=ValiCert, Inc., OU=ValiCert Class 2 Policy Validation Authority, CN=http://www.valicert.com//emailAddress=info@valicert.com
+# Thawte Primary Root CA.pem
+# Certificate:
+# Data:
+# Version: 3 (0x2)
+# Serial Number:
+# 34:4e:d5:57:20:d5:ed:ec:49:f4:2f:ce:37:db:2b:6d
+# Signature Algorithm: sha1WithRSAEncryption
+# Issuer: C=US, O=thawte, Inc., OU=Certification Services Division, OU=(c) 2006 thawte, Inc. - For authorized use only, CN=thawte Primary Root CA
+# Validity
+# Not Before: Nov 17 00:00:00 2006 GMT
+# Not After : Jul 16 23:59:59 2036 GMT
+# Subject: C=US, O=thawte, Inc., OU=Certification Services Division, OU=(c) 2006 thawte, Inc. - For authorized use only, CN=thawte Primary Root CA
+# Subject Public Key Info:
+# Public Key Algorithm: rsaEncryption
+# Public-Key: (2048 bit)
+# Modulus:
+# 00:ac:a0:f0:fb:80:59:d4:9c:c7:a4:cf:9d:a1:59:
+# 73:09:10:45:0c:0d:2c:6e:68:f1:6c:5b:48:68:49:
+# 59:37:fc:0b:33:19:c2:77:7f:cc:10:2d:95:34:1c:
+# e6:eb:4d:09:a7:1c:d2:b8:c9:97:36:02:b7:89:d4:
+# 24:5f:06:c0:cc:44:94:94:8d:02:62:6f:eb:5a:dd:
+# 11:8d:28:9a:5c:84:90:10:7a:0d:bd:74:66:2f:6a:
+# 38:a0:e2:d5:54:44:eb:1d:07:9f:07:ba:6f:ee:e9:
+# fd:4e:0b:29:f5:3e:84:a0:01:f1:9c:ab:f8:1c:7e:
+# 89:a4:e8:a1:d8:71:65:0d:a3:51:7b:ee:bc:d2:22:
+# 60:0d:b9:5b:9d:df:ba:fc:51:5b:0b:af:98:b2:e9:
+# 2e:e9:04:e8:62:87:de:2b:c8:d7:4e:c1:4c:64:1e:
+# dd:cf:87:58:ba:4a:4f:ca:68:07:1d:1c:9d:4a:c6:
+# d5:2f:91:cc:7c:71:72:1c:c5:c0:67:eb:32:fd:c9:
+# 92:5c:94:da:85:c0:9b:bf:53:7d:2b:09:f4:8c:9d:
+# 91:1f:97:6a:52:cb:de:09:36:a4:77:d8:7b:87:50:
+# 44:d5:3e:6e:29:69:fb:39:49:26:1e:09:a5:80:7b:
+# 40:2d:eb:e8:27:85:c9:fe:61:fd:7e:e6:7c:97:1d:
+# d5:9d
+# Exponent: 65537 (0x10001)
+# X509v3 extensions:
+# X509v3 Basic Constraints: critical
+# CA:TRUE
+# X509v3 Key Usage: critical
+# Certificate Sign, CRL Sign
+# X509v3 Subject Key Identifier:
+# 7B:5B:45:CF:AF:CE:CB:7A:FD:31:92:1A:6A:B6:F3:46:EB:57:48:50
+# Signature Algorithm: sha1WithRSAEncryption
+# 79:11:c0:4b:b3:91:b6:fc:f0:e9:67:d4:0d:6e:45:be:55:e8:
+# 93:d2:ce:03:3f:ed:da:25:b0:1d:57:cb:1e:3a:76:a0:4c:ec:
+# 50:76:e8:64:72:0c:a4:a9:f1:b8:8b:d6:d6:87:84:bb:32:e5:
+# 41:11:c0:77:d9:b3:60:9d:eb:1b:d5:d1:6e:44:44:a9:a6:01:
+# ec:55:62:1d:77:b8:5c:8e:48:49:7c:9c:3b:57:11:ac:ad:73:
+# 37:8e:2f:78:5c:90:68:47:d9:60:60:e6:fc:07:3d:22:20:17:
+# c4:f7:16:e9:c4:d8:72:f9:c8:73:7c:df:16:2f:15:a9:3e:fd:
+# 6a:27:b6:a1:eb:5a:ba:98:1f:d5:e3:4d:64:0a:9d:13:c8:61:
+# ba:f5:39:1c:87:ba:b8:bd:7b:22:7f:f6:fe:ac:40:79:e5:ac:
+# 10:6f:3d:8f:1b:79:76:8b:c4:37:b3:21:18:84:e5:36:00:eb:
+# 63:20:99:b9:e9:fe:33:04:bb:41:c8:c1:02:f9:44:63:20:9e:
+# 81:ce:42:d3:d6:3f:2c:76:d3:63:9c:59:dd:8f:a6:e1:0e:a0:
+# 2e:41:f7:2e:95:47:cf:bc:fd:33:f3:f6:0b:61:7e:7e:91:2b:
+# 81:47:c2:27:30:ee:a7:10:5d:37:8f:5c:39:2b:e4:04:f0:7b:
+# 8d:56:8c:68
-----BEGIN CERTIFICATE-----
-MIIE+zCCBGSgAwIBAgICAQ0wDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1Zh
-bGlDZXJ0IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIElu
-Yy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24g
-QXV0aG9yaXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAe
-BgkqhkiG9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTA0MDYyOTE3MDYyMFoX
-DTI0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRoZSBHbyBE
-YWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3MgMiBDZXJ0
-aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggENADCCAQgC
-ggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCAPVYYYwhv
-2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6wwdhFJ2+q
-N1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXiEqITLdiO
-r18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMYavx4A6lN
-f4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+YihfukEH
-U1jPEX44dMX4/7VpkI+EdOqXG68CAQOjggHhMIIB3TAdBgNVHQ4EFgQU0sSw0pHU
-TBFxs2HLPaH+3ahq1OMwgdIGA1UdIwSByjCBx6GBwaSBvjCBuzEkMCIGA1UEBxMb
-VmFsaUNlcnQgVmFsaWRhdGlvbiBOZXR3b3JrMRcwFQYDVQQKEw5WYWxpQ2VydCwg
-SW5jLjE1MDMGA1UECxMsVmFsaUNlcnQgQ2xhc3MgMiBQb2xpY3kgVmFsaWRhdGlv
-biBBdXRob3JpdHkxITAfBgNVBAMTGGh0dHA6Ly93d3cudmFsaWNlcnQuY29tLzEg
-MB4GCSqGSIb3DQEJARYRaW5mb0B2YWxpY2VydC5jb22CAQEwDwYDVR0TAQH/BAUw
-AwEB/zAzBggrBgEFBQcBAQQnMCUwIwYIKwYBBQUHMAGGF2h0dHA6Ly9vY3NwLmdv
-ZGFkZHkuY29tMEQGA1UdHwQ9MDswOaA3oDWGM2h0dHA6Ly9jZXJ0aWZpY2F0ZXMu
-Z29kYWRkeS5jb20vcmVwb3NpdG9yeS9yb290LmNybDBLBgNVHSAERDBCMEAGBFUd
-IAAwODA2BggrBgEFBQcCARYqaHR0cDovL2NlcnRpZmljYXRlcy5nb2RhZGR5LmNv
-bS9yZXBvc2l0b3J5MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOBgQC1
-QPmnHfbq/qQaQlpE9xXUhUaJwL6e4+PrxeNYiY+Sn1eocSxI0YGyeR+sBjUZsE4O
-WBsUs5iB0QQeyAfJg594RAoYC5jcdnplDQ1tgMQLARzLrUc+cb53S8wGd9D0Vmsf
-SxOaFIqII6hR8INMqzW/Rn453HWkrugp++85j09VZw==
+MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB
+qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
+Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
+MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV
+BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw
+NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j
+LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG
+A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl
+IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs
+W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta
+3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk
+6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6
+Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J
+NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA
+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP
+r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU
+DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz
+YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX
+xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2
+/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/
+LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7
+jVaMaA==
-----END CERTIFICATE-----
-# Subject: L=ValiCert Validation Network, O=ValiCert, Inc., OU=ValiCert Class 2 Policy Validation Authority, CN=http://www.valicert.com//emailAddress=info@valicert.com
-# Issuer: L=ValiCert Validation Network, O=ValiCert, Inc., OU=ValiCert Class 2 Policy Validation Authority, CN=http://www.valicert.com//emailAddress=info@valicert.com
------BEGIN CERTIFICATE-----
-MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
-IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
-BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
-aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
-9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMTk1NFoXDTE5MDYy
-NjAwMTk1NFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
-azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
-YXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
-Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
-cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDOOnHK5avIWZJV16vY
-dA757tn2VUdZZUcOBVXc65g2PFxTXdMwzzjsvUGJ7SVCCSRrCl6zfN1SLUzm1NZ9
-WlmpZdRJEy0kTRxQb7XBhVQ7/nHk01xC+YDgkRoKWzk2Z/M/VXwbP7RfZHM047QS
-v4dk+NoS/zcnwbNDu+97bi5p9wIDAQABMA0GCSqGSIb3DQEBBQUAA4GBADt/UG9v
-UJSZSWI4OB9L+KXIPqeCgfYrx+jFzug6EILLGACOTb2oWH+heQC1u+mNr0HZDzTu
-IYEZoDJJKPTEjlbVUjP9UNV+mWwD5MlM/Mtsq2azSiGM5bUMMj4QssxsodyamEwC
-W/POuZ6lcg5Ktz885hZo+L7tdEy8W9ViH0Pd
------END CERTIFICATE-----
-
-
diff --git a/resources/lib/dropbox/util.py b/resources/lib/dropbox/util.py
deleted file mode 100644
index e3babf8..0000000
--- a/resources/lib/dropbox/util.py
+++ /dev/null
@@ -1,53 +0,0 @@
-import os
-
-class AnalyzeFileObjBug(Exception):
- msg = ("\n"
- "Expected file object to have %d bytes, instead we read %d bytes.\n"
- "File size detection may have failed (see dropbox.util.AnalyzeFileObj)\n")
- def __init__(self, expected, actual):
- self.expected = expected
- self.actual = actual
-
- def __str__(self):
- return self.msg % (self.expected, self.actual)
-
-def analyze_file_obj(obj):
- """ Get the size and contents of a file-like object.
- Returns: (size, raw_data)
- size: The amount of data waiting to be read
- raw_data: If not None, the entire contents of the stream (as a string).
- None if the stream should be read() in chunks.
- """
- pos = 0
- if hasattr(obj, 'tell'):
- pos = obj.tell()
-
- # Handle cStringIO and StringIO
- if hasattr(obj, 'getvalue'):
- # Why using getvalue() makes sense:
- # For StringIO, this string is pre-computed anyway by read().
- # For cStringIO, getvalue() is the only way
- # to determine the length without read()'ing the whole thing.
- raw_data = obj.getvalue()
- if pos == 0:
- return (len(raw_data), raw_data)
- else:
- # We could return raw_data[pos:], but that could drastically
- # increase memory usage. Better to read it block at a time.
- size = max(0, len(raw_data) - pos)
- return (size, None)
-
- # Handle real files
- if hasattr(obj, 'fileno'):
- size = max(0, os.fstat(obj.fileno()).st_size - pos)
- return (size, None)
-
- # User-defined object with len()
- if hasattr(obj, '__len__'):
- size = max(0, len(obj) - pos)
- return (size, None)
-
- # We don't know what kind of stream this is.
- # To determine the size, we must read the whole thing.
- raw_data = obj.read()
- return (len(raw_data), raw_data)
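# Editor's note (not part of the patch): the removed analyze_file_obj helper reports how many bytes
# are left to read from a file-like object and, when it is cheap to do so, the full contents as well.
# A minimal, hypothetical sketch of how calling code could exercise it, assuming the pre-removal
# resources/lib/dropbox/util.py module is still importable as dropbox.util:
#
#     import io
#     import tempfile
#     from dropbox.util import analyze_file_obj  # helper deleted by this patch
#
#     # In-memory stream: the getvalue() branch returns the raw bytes directly.
#     buf = io.BytesIO(b"hello world")
#     size, raw = analyze_file_obj(buf)
#     assert size == 11 and raw == b"hello world"
#
#     # Real file: the fileno()/os.fstat() branch returns only the size; raw is None,
#     # signalling that the caller should read() the stream in chunks instead.
#     with tempfile.TemporaryFile() as f:
#         f.write(b"0123456789")
#         f.seek(0)
#         size, raw = analyze_file_obj(f)
#         assert size == 10 and raw is None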
diff --git a/resources/lib/vfs.py b/resources/lib/vfs.py
index 36ab2ba..f5511cf 100644
--- a/resources/lib/vfs.py
+++ b/resources/lib/vfs.py
@@ -135,7 +135,7 @@ class DropboxFileSystem(Vfs):
user_token_key,user_token_secret = self.getToken()
sess = session.DropboxSession(self.APP_KEY,self.APP_SECRET,"app_folder")
-
+ utils.log("token:" + user_token_key + ":" + user_token_secret)
if(user_token_key == '' and user_token_secret == ''):
token = sess.obtain_request_token()
url = sess.build_authorize_url(token)