diff --git a/DBHelper.py b/DBHelper.py
index f781e0473..dcaed833d 100644
--- a/DBHelper.py
+++ b/DBHelper.py
@@ -1,4 +1,5 @@
import pyrebase
+import firebase_admin
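+# firebase_admin exposes the Firebase Admin SDK; pyrebase itself consumes the service-account JSON referenced below.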
firebaseConfig = {
'apiKey': "AIzaSyAdL0W5HscjEDFPK4BDi6Cnc7FLa30GPYY",
@@ -8,7 +9,8 @@ firebaseConfig = {
'storageBucket': "vehicleantitheftrecognition.appspot.com",
'messagingSenderId': "163692530359",
'appId': "1:163692530359:web:b6dc7ccfc56a79afb11b32",
- 'measurementId': "G-EPWP2LK89Q"
+ 'measurementId': "G-EPWP2LK89Q",
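+    # The service-account key grants admin access, which pyrebase needs for privileged Storage calls such as delete().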
+ 'serviceAccount': 'vehicleantitheftrecognition-firebase-adminsdk-krrgw-05da515de5.json'
}
firebase = pyrebase.initialize_app(firebaseConfig)
@@ -19,7 +21,7 @@ storage = firebase.storage()
class DBHelper:
-    # Create account function which creates a new authentication info.
-    def createaccount(username, password, confirmpassword):
+    # Creates a new authentication entry when the password and confirmation match.
+    def createAccount(username, password, confirmpassword):
email = username + "@hotmail.com"
if password == confirmpassword:
auth.create_user_with_email_and_password(email,password)
@@ -46,7 +48,7 @@ class DBHelper:
db.child("Users").child(userID).remove()
# Returns the first name or else an empty string.
- def getfirstname(userID):
+ def getFirstName(userID):
firstname = ""
users = db.child("Users").get()
for user in users.each():
@@ -55,7 +57,7 @@ class DBHelper:
return firstname
# Returns the last name or else an empty string.
- def getlastname(userID):
+ def getLastName(userID):
lastname = ""
users = db.child("Users").get()
for user in users.each():
@@ -64,7 +66,7 @@ class DBHelper:
return lastname
# Returns the e-mail or else an empty string.
- def getemail(userID):
+ def getEmail(userID):
email = ""
users = db.child("Users").get()
for user in users.each():
@@ -73,7 +75,7 @@ class DBHelper:
return email
# Returns the phone or else an empty string.
- def getphone(userID):
+ def getPhone(userID):
phone = ""
users = db.child("Users").get()
for user in users.each():
@@ -82,7 +84,7 @@ class DBHelper:
return phone
# Returns the address or else an empty string.
- def getaddress(userID):
+ def getAddress(userID):
address = ""
users = db.child("Users").get()
for user in users.each():
@@ -91,19 +93,24 @@ class DBHelper:
return address
-    # Uploads the photo of user, input should be something like "example.png"
-    def uploaduserphoto(userphoto):
-        userphoto_str = str(userphoto)
-        storage.child("Photos_of_Users/" + str(userphoto)).put("Photos_of_Users/" + str(userphoto))
+    # Uploads a user's photo; input should be a file name like "example.png".
+    def uploadUserPhoto(userphoto):
+        userphoto_str = str(userphoto)
+        storage.child("Photos_of_Users/" + userphoto_str).put("Photos_of_Users/" + userphoto_str)
-    # Uploads the photo of thief, input should be something like "example.png"
-    def uploadthiefphoto(userphoto):
-        userphoto_str = str(userphoto)
-        storage.child("Photos_of_Thieves/" + str(userphoto)).put("Photos_of_Thieves/" + str(userphoto))
+    # Uploads a thief's photo; input should be a file name like "example.png".
+    def uploadThiefPhoto(userphoto):
+        userphoto_str = str(userphoto)
+        storage.child("Photos_of_Thieves/" + userphoto_str).put("Photos_of_Thieves/" + userphoto_str)
# Downloads all the user photos.
- def downloadalluserphotos(self):
+    def downloadAllUserPhotos(self):
storage.child("Photos_of_Users").download("Storage_from_Database")
# Downloads all the thief photos.
- def downloadallthiefphotos(self):
+    def downloadAllThiefPhotos(self):
storage.child("Photos_of_Thieves").download("Storage_from_Thieves")
+
+ # Deletes photo of the specified user.
+ def deleteUserPhoto(userphoto):
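+        # Deletion needs the 'serviceAccount' credentials from firebaseConfig; without them pyrebase cannot remove storage blobs.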
+        storage.delete("Photos_of_Users/" + userphoto)
+
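+    # Example usage (hypothetical file names):
+    #   DBHelper.createAccount("john", "secret", "secret")
+    #   DBHelper.uploadUserPhoto("john.png")
+    #   DBHelper.deleteUserPhoto("john.png")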
diff --git a/vehicleantitheftrecognition-firebase-adminsdk-krrgw-05da515de5.json b/vehicleantitheftrecognition-firebase-adminsdk-krrgw-05da515de5.json
new file mode 100644
index 000000000..4319726a6
--- /dev/null
+++ b/vehicleantitheftrecognition-firebase-adminsdk-krrgw-05da515de5.json
@@ -0,0 +1,12 @@
+{
+ "type": "service_account",
+ "project_id": "vehicleantitheftrecognition",
+ "private_key_id": "05da515de56c8ac1f4882cca95e59b528327689d",
+ "private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDEuXY99rdd3tUr\n478TMukS5x7nWXDHTFHZcxFXIyjkgjcm7a4JieJCuf7VtteAoJQ+8q4qLxypwkOA\nmVpGFXtP9xQCeoiL/lsZGUQ2GAZVmOOumlRMbDJ6bJANe4V7QnmdfLInjTks/ahr\nw6xBNBmbOXxk7adF3Qj3rp/1iUG54AHOkBvQkz53zCRVi2yX7PiwvARDErgj3LNn\nEoc/95P7HXgzuwGfwfziGqN1EtxcKzDsVi5Sg5L2cyjm5V17sEkFRKA2zXqjdOrM\nOQINL4UW4mcU7zyj8Iyu9IFTzWgEVS64+O2+mA9T2Rf325LNa46yqdtoQA8A40oZ\nYrV089bLAgMBAAECggEAAZ94vEDMI81yJhng5QBR2pgRhzMTsnE3aPssCh3+I7R3\nTq499fzZ4wG0eoA2/Zaq4my6HHBQZON3ZG40x8iRld1MoEtGfnErazpBd84cRJnY\nTFkgQj56RCGf0UdS7etkeXTNO3T3eHQ3RCb/Gs1FO3numXPkd2JqxTBUSCrztzGA\nMmk5ikE3giL+ZVB3TaKwkuDLNzxaQSKR+tHGrBPndb6exVfYEROk8HBeTmE+UfBY\nnKXM1uAk7dGXkjq3dFekv0CmEBydebx862/pWgo/a4o7eGDwmPPlxgu3mMJ7fVjB\nji70iusIDc7QCPWYCVCFW8BkZTNy/lGIGNMAEK5w0QKBgQDsl5FPJCOdERQCylcF\nxh6ECw6LqxioFRovXgtsfn/eXoLVCh3Vulqu6zL78qn0f7JQNdUMt9x++wIpNMWb\nV0GHJZVUnqzrvpTZdHWYVDj4tD9WC8vOjbCf5pDfrN4bTh0q4SJasm8tFcHUNcpW\nechUCQMdtxVoi2jgwHBJWlwXnQKBgQDU3KzASyuPZip2oELx+jbXBZ840P/8aekh\nNQUit2jG0twM7Qgk1M929fhY3xyAFKjptk8vLFZcmfi0p0FeNUYnF4bmORuz+bng\nxEW4+dzrf9eeekQcspUf8kwgUkzuuePwJJ4LDuL6d3I/wzClU7B74BAaDqzs9gTK\nFiOPSvb/hwKBgQDqcMKXppr3sA2hOjmDSi/A7mfqdI+JNufsmgc96hfxFLwWOOEZ\ngYEMpZmu2WYaFlNucfl8kdCXr0kT5ewOIyeWsOJJqLZ3IDHFTUadvI97urisHiJF\nuleUC1fxnQ22BvCWJeLx9rB9/3pDO04V5LViuE9zKZG4N7SkSWy68yQgbQKBgQCo\n0PsQ5oz9hYFX43I0hsTHc2X7oYXjofuKoooYJm2qgcCTX8l9rGl9Z0Y29Xuc+MWd\n1UCnoPo9Jr/gRmXJWWbxye7q14/pBL0uTXseYMuc2h8fSMiMGfW7CGbnm134VuU3\np1LQYlYRXnn1p9AEzoLBO8qJX+o1ZEgYHcbF9iY+MQKBgFWBolVPSddIGB/btoOA\nVncnhDSziXdbyEprCO0omsRLG+ciJwWXv4Rd+ftGD7fvFLFFBH/JR9L2lU9eKcF3\nBZA5KMb66zmQ2nVzRiJEUZmE6vxp8E7nXlfxTy76euZSkFDKvhhbYU1XLcPj2ES3\nMMy1jE3LrpNrAT6iHvsz/pt0\n-----END PRIVATE KEY-----\n",
+ "client_email": "firebase-adminsdk-krrgw@vehicleantitheftrecognition.iam.gserviceaccount.com",
+ "client_id": "105455250176735842254",
+ "auth_uri": "https://accounts.google.com/o/oauth2/auth",
+ "token_uri": "https://oauth2.googleapis.com/token",
+ "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
+ "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/firebase-adminsdk-krrgw%40vehicleantitheftrecognition.iam.gserviceaccount.com"
+}
diff --git a/venv/Lib/site-packages/CacheControl-0.12.6.dist-info/DESCRIPTION.rst b/venv/Lib/site-packages/CacheControl-0.12.6.dist-info/DESCRIPTION.rst
new file mode 100644
index 000000000..6e4ca7647
--- /dev/null
+++ b/venv/Lib/site-packages/CacheControl-0.12.6.dist-info/DESCRIPTION.rst
@@ -0,0 +1,44 @@
+==============
+ CacheControl
+==============
+
+.. image:: https://img.shields.io/pypi/v/cachecontrol.svg
+ :target: https://pypi.python.org/pypi/cachecontrol
+ :alt: Latest Version
+
+.. image:: https://travis-ci.org/ionrock/cachecontrol.png?branch=master
+ :target: https://travis-ci.org/ionrock/cachecontrol
+
+CacheControl is a port of the caching algorithms in httplib2_ for use with
+the requests_ session object.
+
+It was written because httplib2's better support for caching is often
+offset by its lack of thread safety. The same is true of requests in
+terms of caching.
+
+
+Quickstart
+==========
+
+.. code-block:: python
+
+ import requests
+
+ from cachecontrol import CacheControl
+
+
+ sess = requests.session()
+ cached_sess = CacheControl(sess)
+
+ response = cached_sess.get('http://google.com')
+
+If the URL contains any caching based headers, it will cache the
+result in a simple dictionary.
+
+For more info, check out the docs_
+
+.. _docs: http://cachecontrol.readthedocs.org/en/latest/
+.. _httplib2: https://github.com/jcgregorio/httplib2
+.. _requests: http://docs.python-requests.org/
+
+
diff --git a/venv/Lib/site-packages/requests-2.11.1.dist-info/INSTALLER b/venv/Lib/site-packages/CacheControl-0.12.6.dist-info/INSTALLER
similarity index 100%
rename from venv/Lib/site-packages/requests-2.11.1.dist-info/INSTALLER
rename to venv/Lib/site-packages/CacheControl-0.12.6.dist-info/INSTALLER
diff --git a/venv/Lib/site-packages/CacheControl-0.12.6.dist-info/METADATA b/venv/Lib/site-packages/CacheControl-0.12.6.dist-info/METADATA
new file mode 100644
index 000000000..2f614f940
--- /dev/null
+++ b/venv/Lib/site-packages/CacheControl-0.12.6.dist-info/METADATA
@@ -0,0 +1,74 @@
+Metadata-Version: 2.0
+Name: CacheControl
+Version: 0.12.6
+Summary: httplib2 caching for requests
+Home-page: https://github.com/ionrock/cachecontrol
+Author: Eric Larson
+Author-email: eric@ionrock.org
+License: UNKNOWN
+Description-Content-Type: UNKNOWN
+Keywords: requests http caching web
+Platform: UNKNOWN
+Classifier: Development Status :: 4 - Beta
+Classifier: Environment :: Web Environment
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Topic :: Internet :: WWW/HTTP
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
+Requires-Dist: requests
+Requires-Dist: msgpack (>=0.5.2)
+Provides-Extra: filecache
+Requires-Dist: lockfile (>=0.9); extra == 'filecache'
+Provides-Extra: redis
+Requires-Dist: redis (>=2.10.5); extra == 'redis'
+
+==============
+ CacheControl
+==============
+
+.. image:: https://img.shields.io/pypi/v/cachecontrol.svg
+ :target: https://pypi.python.org/pypi/cachecontrol
+ :alt: Latest Version
+
+.. image:: https://travis-ci.org/ionrock/cachecontrol.png?branch=master
+ :target: https://travis-ci.org/ionrock/cachecontrol
+
+CacheControl is a port of the caching algorithms in httplib2_ for use with
+the requests_ session object.
+
+It was written because httplib2's better support for caching is often
+offset by its lack of thread safety. The same is true of requests in
+terms of caching.
+
+
+Quickstart
+==========
+
+.. code-block:: python
+
+ import requests
+
+ from cachecontrol import CacheControl
+
+
+ sess = requests.session()
+ cached_sess = CacheControl(sess)
+
+ response = cached_sess.get('http://google.com')
+
+If the URL contains any caching based headers, it will cache the
+result in a simple dictionary.
+
+For more info, check out the docs_
+
+.. _docs: http://cachecontrol.readthedocs.org/en/latest/
+.. _httplib2: https://github.com/jcgregorio/httplib2
+.. _requests: http://docs.python-requests.org/
+
+
diff --git a/venv/Lib/site-packages/CacheControl-0.12.6.dist-info/RECORD b/venv/Lib/site-packages/CacheControl-0.12.6.dist-info/RECORD
new file mode 100644
index 000000000..f310b5ce3
--- /dev/null
+++ b/venv/Lib/site-packages/CacheControl-0.12.6.dist-info/RECORD
@@ -0,0 +1,35 @@
+../../Scripts/doesitcache.exe,sha256=oUrPldHAhd6L96tleSYm9TnI_wQAlAXhZcgsF_m648I,97232
+CacheControl-0.12.6.dist-info/DESCRIPTION.rst,sha256=AVvOiHd6xGEt-8qj3nBO0wevsy94ATbiolgWP-hAdOw,1090
+CacheControl-0.12.6.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+CacheControl-0.12.6.dist-info/METADATA,sha256=KdpEZki2tVLM6jOflJwcwdg_7YOT-HA08g1lF3BSU8A,2196
+CacheControl-0.12.6.dist-info/RECORD,,
+CacheControl-0.12.6.dist-info/WHEEL,sha256=kdsN-5OJAZIiHN-iO4Rhl82KyS0bDWf4uBwMbkNafr8,110
+CacheControl-0.12.6.dist-info/entry_points.txt,sha256=HjCekaRCv8kfNqP5WehMR29IWxIA5VrhoOeKrCykCLc,56
+CacheControl-0.12.6.dist-info/metadata.json,sha256=fj7gcSEiRyF5g5Nt5ShwF7GP7QAPJFTqZ0YwhYlxMZE,1380
+CacheControl-0.12.6.dist-info/top_level.txt,sha256=vGYWzpbe3h6gkakV4f7iCK2x3KyK3oMkV5pe5v25-d4,13
+cachecontrol/__init__.py,sha256=pJtAaUxOsMPnytI1A3juAJkXYDr8krdSnsg4Yg3OBEg,302
+cachecontrol/__pycache__/__init__.cpython-36.pyc,,
+cachecontrol/__pycache__/_cmd.cpython-36.pyc,,
+cachecontrol/__pycache__/adapter.cpython-36.pyc,,
+cachecontrol/__pycache__/cache.cpython-36.pyc,,
+cachecontrol/__pycache__/compat.cpython-36.pyc,,
+cachecontrol/__pycache__/controller.cpython-36.pyc,,
+cachecontrol/__pycache__/filewrapper.cpython-36.pyc,,
+cachecontrol/__pycache__/heuristics.cpython-36.pyc,,
+cachecontrol/__pycache__/serialize.cpython-36.pyc,,
+cachecontrol/__pycache__/wrapper.cpython-36.pyc,,
+cachecontrol/_cmd.py,sha256=88j4P3JlJGqg6xAXR4btN9fYruXUH4CE-M93Sie5IB8,1242
+cachecontrol/adapter.py,sha256=ctnbSXDOj0V0NaxJP2jFauOYRDHaNYMP9QCE8kB4kfk,4870
+cachecontrol/cache.py,sha256=1fc4wJP8HYt1ycnJXeEw5pCpeBL2Cqxx6g9Fb0AYDWQ,805
+cachecontrol/caches/__init__.py,sha256=-gHNKYvaeD0kOk5M74eOrsSgIKUtC6i6GfbmugGweEo,86
+cachecontrol/caches/__pycache__/__init__.cpython-36.pyc,,
+cachecontrol/caches/__pycache__/file_cache.cpython-36.pyc,,
+cachecontrol/caches/__pycache__/redis_cache.cpython-36.pyc,,
+cachecontrol/caches/file_cache.py,sha256=nYVKsJtXh6gJXvdn1iWyrhxvkwpQrK-eKoMRzuiwkKk,4153
+cachecontrol/caches/redis_cache.py,sha256=yZP1PoUgAvxEZZrCVwImZ-5pFKU41v5HYJf1rfbXYmM,844
+cachecontrol/compat.py,sha256=Fn_aYzqNbN0bK9gUn8SQUzMLxQ_ruGnsEMvryYDFh3o,647
+cachecontrol/controller.py,sha256=fpLmIvxce2mKVFmtDFiiyydqU_pPbCucYLC9qP-LqvY,14137
+cachecontrol/filewrapper.py,sha256=vACKO8Llzu_ZWyjV1Fxn1MA4TGU60N5N3GSrAFdAY2Q,2533
+cachecontrol/heuristics.py,sha256=BFGHJ3yQcxvZizfo90LLZ04T_Z5XSCXvFotrp7Us0sc,4070
+cachecontrol/serialize.py,sha256=Jms7OS4GB2JFUzuMPlmQtuCDzcjjE-2ijrHpUXC2BV0,7062
+cachecontrol/wrapper.py,sha256=5LX0uJwkNQUtYSEw3aGmGu9WY8wGipd81mJ8lG0d0M4,690
diff --git a/venv/Lib/site-packages/requests-2.11.1.dist-info/WHEEL b/venv/Lib/site-packages/CacheControl-0.12.6.dist-info/WHEEL
similarity index 70%
rename from venv/Lib/site-packages/requests-2.11.1.dist-info/WHEEL
rename to venv/Lib/site-packages/CacheControl-0.12.6.dist-info/WHEEL
index 0de529b1e..7332a419c 100644
--- a/venv/Lib/site-packages/requests-2.11.1.dist-info/WHEEL
+++ b/venv/Lib/site-packages/CacheControl-0.12.6.dist-info/WHEEL
@@ -1,5 +1,5 @@
Wheel-Version: 1.0
-Generator: bdist_wheel (0.26.0)
+Generator: bdist_wheel (0.30.0)
Root-Is-Purelib: true
Tag: py2-none-any
Tag: py3-none-any
diff --git a/venv/Lib/site-packages/CacheControl-0.12.6.dist-info/entry_points.txt b/venv/Lib/site-packages/CacheControl-0.12.6.dist-info/entry_points.txt
new file mode 100644
index 000000000..7c31574e9
--- /dev/null
+++ b/venv/Lib/site-packages/CacheControl-0.12.6.dist-info/entry_points.txt
@@ -0,0 +1,3 @@
+[console_scripts]
+doesitcache = cachecontrol._cmd:main
+
diff --git a/venv/Lib/site-packages/CacheControl-0.12.6.dist-info/metadata.json b/venv/Lib/site-packages/CacheControl-0.12.6.dist-info/metadata.json
new file mode 100644
index 000000000..59890528a
--- /dev/null
+++ b/venv/Lib/site-packages/CacheControl-0.12.6.dist-info/metadata.json
@@ -0,0 +1 @@
+{"classifiers": ["Development Status :: 4 - Beta", "Environment :: Web Environment", "License :: OSI Approved :: Apache Software License", "Operating System :: OS Independent", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Topic :: Internet :: WWW/HTTP"], "description_content_type": "UNKNOWN", "extensions": {"python.commands": {"wrap_console": {"doesitcache": "cachecontrol._cmd:main"}}, "python.details": {"contacts": [{"email": "eric@ionrock.org", "name": "Eric Larson", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://github.com/ionrock/cachecontrol"}}, "python.exports": {"console_scripts": {"doesitcache": "cachecontrol._cmd:main"}}}, "extras": ["filecache", "redis"], "generator": "bdist_wheel (0.30.0)", "keywords": ["requests", "http", "caching", "web"], "metadata_version": "2.0", "name": "CacheControl", "requires_python": ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*", "run_requires": [{"extra": "filecache", "requires": ["lockfile (>=0.9)"]}, {"requires": ["msgpack (>=0.5.2)", "requests"]}, {"extra": "redis", "requires": ["redis (>=2.10.5)"]}], "summary": "httplib2 caching for requests", "version": "0.12.6"}
\ No newline at end of file
diff --git a/venv/Lib/site-packages/CacheControl-0.12.6.dist-info/top_level.txt b/venv/Lib/site-packages/CacheControl-0.12.6.dist-info/top_level.txt
new file mode 100644
index 000000000..af37ac627
--- /dev/null
+++ b/venv/Lib/site-packages/CacheControl-0.12.6.dist-info/top_level.txt
@@ -0,0 +1 @@
+cachecontrol
diff --git a/venv/Lib/site-packages/__pycache__/google_auth_httplib2.cpython-36.pyc b/venv/Lib/site-packages/__pycache__/google_auth_httplib2.cpython-36.pyc
new file mode 100644
index 000000000..8354ae1c5
Binary files /dev/null and b/venv/Lib/site-packages/__pycache__/google_auth_httplib2.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/_cffi_backend.cp36-win32.pyd b/venv/Lib/site-packages/_cffi_backend.cp36-win32.pyd
new file mode 100644
index 000000000..ed8f4e5b8
Binary files /dev/null and b/venv/Lib/site-packages/_cffi_backend.cp36-win32.pyd differ
diff --git a/venv/Lib/site-packages/apiclient/__init__.py b/venv/Lib/site-packages/apiclient/__init__.py
new file mode 100644
index 000000000..8d9c4ecb8
--- /dev/null
+++ b/venv/Lib/site-packages/apiclient/__init__.py
@@ -0,0 +1,36 @@
+"""Retain apiclient as an alias for googleapiclient."""
+
+from six import iteritems
+
+import googleapiclient
+
+from googleapiclient import channel
+from googleapiclient import discovery
+from googleapiclient import errors
+from googleapiclient import http
+from googleapiclient import mimeparse
+from googleapiclient import model
+
+try:
+ from googleapiclient import sample_tools
+except ImportError:
+ # Silently ignore, because the vast majority of consumers won't use it and
+ # it has deep dependence on oauth2client, an optional dependency.
+ sample_tools = None
+from googleapiclient import schema
+
+_SUBMODULES = {
+ "channel": channel,
+ "discovery": discovery,
+ "errors": errors,
+ "http": http,
+ "mimeparse": mimeparse,
+ "model": model,
+ "sample_tools": sample_tools,
+ "schema": schema,
+}
+
+import sys
+
+for module_name, module in iteritems(_SUBMODULES):
+ sys.modules["apiclient.%s" % module_name] = module
diff --git a/venv/Lib/site-packages/apiclient/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/apiclient/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..300a76a88
Binary files /dev/null and b/venv/Lib/site-packages/apiclient/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cachecontrol/__init__.py b/venv/Lib/site-packages/cachecontrol/__init__.py
new file mode 100644
index 000000000..a1bbbbe3b
--- /dev/null
+++ b/venv/Lib/site-packages/cachecontrol/__init__.py
@@ -0,0 +1,11 @@
+"""CacheControl import Interface.
+
+Make it easy to import from cachecontrol without long namespaces.
+"""
+__author__ = "Eric Larson"
+__email__ = "eric@ionrock.org"
+__version__ = "0.12.6"
+
+from .wrapper import CacheControl
+from .adapter import CacheControlAdapter
+from .controller import CacheController
diff --git a/venv/Lib/site-packages/cachecontrol/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/cachecontrol/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..8b3838207
Binary files /dev/null and b/venv/Lib/site-packages/cachecontrol/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cachecontrol/__pycache__/_cmd.cpython-36.pyc b/venv/Lib/site-packages/cachecontrol/__pycache__/_cmd.cpython-36.pyc
new file mode 100644
index 000000000..43baceee2
Binary files /dev/null and b/venv/Lib/site-packages/cachecontrol/__pycache__/_cmd.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cachecontrol/__pycache__/adapter.cpython-36.pyc b/venv/Lib/site-packages/cachecontrol/__pycache__/adapter.cpython-36.pyc
new file mode 100644
index 000000000..d1c40f357
Binary files /dev/null and b/venv/Lib/site-packages/cachecontrol/__pycache__/adapter.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cachecontrol/__pycache__/cache.cpython-36.pyc b/venv/Lib/site-packages/cachecontrol/__pycache__/cache.cpython-36.pyc
new file mode 100644
index 000000000..612faa1f8
Binary files /dev/null and b/venv/Lib/site-packages/cachecontrol/__pycache__/cache.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cachecontrol/__pycache__/compat.cpython-36.pyc b/venv/Lib/site-packages/cachecontrol/__pycache__/compat.cpython-36.pyc
new file mode 100644
index 000000000..a03ef683b
Binary files /dev/null and b/venv/Lib/site-packages/cachecontrol/__pycache__/compat.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cachecontrol/__pycache__/controller.cpython-36.pyc b/venv/Lib/site-packages/cachecontrol/__pycache__/controller.cpython-36.pyc
new file mode 100644
index 000000000..0a7427b75
Binary files /dev/null and b/venv/Lib/site-packages/cachecontrol/__pycache__/controller.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cachecontrol/__pycache__/filewrapper.cpython-36.pyc b/venv/Lib/site-packages/cachecontrol/__pycache__/filewrapper.cpython-36.pyc
new file mode 100644
index 000000000..4fe73924a
Binary files /dev/null and b/venv/Lib/site-packages/cachecontrol/__pycache__/filewrapper.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cachecontrol/__pycache__/heuristics.cpython-36.pyc b/venv/Lib/site-packages/cachecontrol/__pycache__/heuristics.cpython-36.pyc
new file mode 100644
index 000000000..c895f6249
Binary files /dev/null and b/venv/Lib/site-packages/cachecontrol/__pycache__/heuristics.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cachecontrol/__pycache__/serialize.cpython-36.pyc b/venv/Lib/site-packages/cachecontrol/__pycache__/serialize.cpython-36.pyc
new file mode 100644
index 000000000..cf61bfb4f
Binary files /dev/null and b/venv/Lib/site-packages/cachecontrol/__pycache__/serialize.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cachecontrol/__pycache__/wrapper.cpython-36.pyc b/venv/Lib/site-packages/cachecontrol/__pycache__/wrapper.cpython-36.pyc
new file mode 100644
index 000000000..5e5842c8f
Binary files /dev/null and b/venv/Lib/site-packages/cachecontrol/__pycache__/wrapper.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cachecontrol/_cmd.py b/venv/Lib/site-packages/cachecontrol/_cmd.py
new file mode 100644
index 000000000..ee8d60d10
--- /dev/null
+++ b/venv/Lib/site-packages/cachecontrol/_cmd.py
@@ -0,0 +1,57 @@
+import logging
+
+import requests
+
+from cachecontrol.adapter import CacheControlAdapter
+from cachecontrol.cache import DictCache
+from cachecontrol.controller import logger
+
+from argparse import ArgumentParser
+
+
+def setup_logging():
+ logger.setLevel(logging.DEBUG)
+ handler = logging.StreamHandler()
+ logger.addHandler(handler)
+
+
+def get_session():
+ adapter = CacheControlAdapter(
+ DictCache(), cache_etags=True, serializer=None, heuristic=None
+ )
+ sess = requests.Session()
+ sess.mount("http://", adapter)
+ sess.mount("https://", adapter)
+
+ sess.cache_controller = adapter.controller
+ return sess
+
+
+def get_args():
+ parser = ArgumentParser()
+ parser.add_argument("url", help="The URL to try and cache")
+ return parser.parse_args()
+
+
+def main(args=None):
+ args = get_args()
+ sess = get_session()
+
+ # Make a request to get a response
+ resp = sess.get(args.url)
+
+ # Turn on logging
+ setup_logging()
+
+ # try setting the cache
+ sess.cache_controller.cache_response(resp.request, resp.raw)
+
+ # Now try to get it
+ if sess.cache_controller.cached_request(resp.request):
+ print("Cached!")
+ else:
+ print("Not cached :(")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/venv/Lib/site-packages/cachecontrol/adapter.py b/venv/Lib/site-packages/cachecontrol/adapter.py
new file mode 100644
index 000000000..de50006af
--- /dev/null
+++ b/venv/Lib/site-packages/cachecontrol/adapter.py
@@ -0,0 +1,133 @@
+import types
+import functools
+import zlib
+
+from requests.adapters import HTTPAdapter
+
+from .controller import CacheController
+from .cache import DictCache
+from .filewrapper import CallbackFileWrapper
+
+
+class CacheControlAdapter(HTTPAdapter):
+ invalidating_methods = {"PUT", "DELETE"}
+
+ def __init__(
+ self,
+ cache=None,
+ cache_etags=True,
+ controller_class=None,
+ serializer=None,
+ heuristic=None,
+ cacheable_methods=None,
+ *args,
+ **kw
+ ):
+ super(CacheControlAdapter, self).__init__(*args, **kw)
+ self.cache = DictCache() if cache is None else cache
+ self.heuristic = heuristic
+ self.cacheable_methods = cacheable_methods or ("GET",)
+
+ controller_factory = controller_class or CacheController
+ self.controller = controller_factory(
+ self.cache, cache_etags=cache_etags, serializer=serializer
+ )
+
+ def send(self, request, cacheable_methods=None, **kw):
+ """
+ Send a request. Use the request information to see if it
+ exists in the cache and cache the response if we need to and can.
+ """
+ cacheable = cacheable_methods or self.cacheable_methods
+ if request.method in cacheable:
+ try:
+ cached_response = self.controller.cached_request(request)
+ except zlib.error:
+ cached_response = None
+ if cached_response:
+ return self.build_response(request, cached_response, from_cache=True)
+
+ # check for etags and add headers if appropriate
+ request.headers.update(self.controller.conditional_headers(request))
+
+ resp = super(CacheControlAdapter, self).send(request, **kw)
+
+ return resp
+
+ def build_response(
+ self, request, response, from_cache=False, cacheable_methods=None
+ ):
+ """
+ Build a response by making a request or using the cache.
+
+ This will end up calling send and returning a potentially
+ cached response
+ """
+ cacheable = cacheable_methods or self.cacheable_methods
+ if not from_cache and request.method in cacheable:
+ # Check for any heuristics that might update headers
+ # before trying to cache.
+ if self.heuristic:
+ response = self.heuristic.apply(response)
+
+ # apply any expiration heuristics
+ if response.status == 304:
+                # We must have sent an ETag request. This could mean
+                # that we've been expired already or that we simply
+                # have an etag. In either case, we want to try to
+                # update the cache.
+ cached_response = self.controller.update_cached_response(
+ request, response
+ )
+
+ if cached_response is not response:
+ from_cache = True
+
+ # We are done with the server response, read a
+ # possible response body (compliant servers will
+ # not return one, but we cannot be 100% sure) and
+ # release the connection back to the pool.
+ response.read(decode_content=False)
+ response.release_conn()
+
+ response = cached_response
+
+ # We always cache the 301 responses
+ elif response.status == 301:
+ self.controller.cache_response(request, response)
+ else:
+ # Wrap the response file with a wrapper that will cache the
+ # response when the stream has been consumed.
+ response._fp = CallbackFileWrapper(
+ response._fp,
+ functools.partial(
+ self.controller.cache_response, request, response
+ ),
+ )
+ if response.chunked:
+ super_update_chunk_length = response._update_chunk_length
+
+ def _update_chunk_length(self):
+ super_update_chunk_length()
+ if self.chunk_left == 0:
+ self._fp._close()
+
+ response._update_chunk_length = types.MethodType(
+ _update_chunk_length, response
+ )
+
+ resp = super(CacheControlAdapter, self).build_response(request, response)
+
+ # See if we should invalidate the cache.
+ if request.method in self.invalidating_methods and resp.ok:
+ cache_url = self.controller.cache_url(request.url)
+ self.cache.delete(cache_url)
+
+ # Give the request a from_cache attr to let people use it
+ resp.from_cache = from_cache
+
+ return resp
+
+ def close(self):
+ self.cache.close()
+ super(CacheControlAdapter, self).close()
diff --git a/venv/Lib/site-packages/cachecontrol/cache.py b/venv/Lib/site-packages/cachecontrol/cache.py
new file mode 100644
index 000000000..94e07732d
--- /dev/null
+++ b/venv/Lib/site-packages/cachecontrol/cache.py
@@ -0,0 +1,39 @@
+"""
+The cache object API for implementing caches. The default is a thread
+safe in-memory dictionary.
+"""
+from threading import Lock
+
+
+class BaseCache(object):
+
+ def get(self, key):
+ raise NotImplementedError()
+
+ def set(self, key, value):
+ raise NotImplementedError()
+
+ def delete(self, key):
+ raise NotImplementedError()
+
+ def close(self):
+ pass
+
+
+class DictCache(BaseCache):
+
+ def __init__(self, init_dict=None):
+ self.lock = Lock()
+ self.data = init_dict or {}
+
+ def get(self, key):
+ return self.data.get(key, None)
+
+ def set(self, key, value):
+ with self.lock:
+ self.data.update({key: value})
+
+ def delete(self, key):
+ with self.lock:
+ if key in self.data:
+ self.data.pop(key)
diff --git a/venv/Lib/site-packages/cachecontrol/caches/__init__.py b/venv/Lib/site-packages/cachecontrol/caches/__init__.py
new file mode 100644
index 000000000..0e1658fa5
--- /dev/null
+++ b/venv/Lib/site-packages/cachecontrol/caches/__init__.py
@@ -0,0 +1,2 @@
+from .file_cache import FileCache # noqa
+from .redis_cache import RedisCache # noqa
diff --git a/venv/Lib/site-packages/cachecontrol/caches/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/cachecontrol/caches/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..705c01dcd
Binary files /dev/null and b/venv/Lib/site-packages/cachecontrol/caches/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cachecontrol/caches/__pycache__/file_cache.cpython-36.pyc b/venv/Lib/site-packages/cachecontrol/caches/__pycache__/file_cache.cpython-36.pyc
new file mode 100644
index 000000000..1dc588792
Binary files /dev/null and b/venv/Lib/site-packages/cachecontrol/caches/__pycache__/file_cache.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cachecontrol/caches/__pycache__/redis_cache.cpython-36.pyc b/venv/Lib/site-packages/cachecontrol/caches/__pycache__/redis_cache.cpython-36.pyc
new file mode 100644
index 000000000..f3a4b9386
Binary files /dev/null and b/venv/Lib/site-packages/cachecontrol/caches/__pycache__/redis_cache.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cachecontrol/caches/file_cache.py b/venv/Lib/site-packages/cachecontrol/caches/file_cache.py
new file mode 100644
index 000000000..607b94524
--- /dev/null
+++ b/venv/Lib/site-packages/cachecontrol/caches/file_cache.py
@@ -0,0 +1,146 @@
+import hashlib
+import os
+from textwrap import dedent
+
+from ..cache import BaseCache
+from ..controller import CacheController
+
+try:
+ FileNotFoundError
+except NameError:
+ # py2.X
+ FileNotFoundError = (IOError, OSError)
+
+
+def _secure_open_write(filename, fmode):
+ # We only want to write to this file, so open it in write only mode
+ flags = os.O_WRONLY
+
+    # os.O_CREAT | os.O_EXCL will fail if the file already exists, so we
+    # will only open *new* files.
+ # We specify this because we want to ensure that the mode we pass is the
+ # mode of the file.
+ flags |= os.O_CREAT | os.O_EXCL
+
+ # Do not follow symlinks to prevent someone from making a symlink that
+ # we follow and insecurely open a cache file.
+ if hasattr(os, "O_NOFOLLOW"):
+ flags |= os.O_NOFOLLOW
+
+ # On Windows we'll mark this file as binary
+ if hasattr(os, "O_BINARY"):
+ flags |= os.O_BINARY
+
+ # Before we open our file, we want to delete any existing file that is
+ # there
+ try:
+ os.remove(filename)
+ except (IOError, OSError):
+ # The file must not exist already, so we can just skip ahead to opening
+ pass
+
+    # Open our file; the use of os.O_CREAT | os.O_EXCL ensures that if a
+    # race condition happens between the os.remove and this line, an
+    # error will be raised. Because we utilize a lockfile this should only
+    # happen if someone is attempting to attack us.
+ fd = os.open(filename, flags, fmode)
+ try:
+ return os.fdopen(fd, "wb")
+
+ except:
+ # An error occurred wrapping our FD in a file object
+ os.close(fd)
+ raise
+
+
+class FileCache(BaseCache):
+
+ def __init__(
+ self,
+ directory,
+ forever=False,
+ filemode=0o0600,
+ dirmode=0o0700,
+ use_dir_lock=None,
+ lock_class=None,
+ ):
+
+ if use_dir_lock is not None and lock_class is not None:
+ raise ValueError("Cannot use use_dir_lock and lock_class together")
+
+ try:
+ from lockfile import LockFile
+ from lockfile.mkdirlockfile import MkdirLockFile
+ except ImportError:
+ notice = dedent(
+ """
+ NOTE: In order to use the FileCache you must have
+ lockfile installed. You can install it via pip:
+ pip install lockfile
+ """
+ )
+ raise ImportError(notice)
+
+ else:
+ if use_dir_lock:
+ lock_class = MkdirLockFile
+
+ elif lock_class is None:
+ lock_class = LockFile
+
+ self.directory = directory
+ self.forever = forever
+ self.filemode = filemode
+ self.dirmode = dirmode
+ self.lock_class = lock_class
+
+ @staticmethod
+ def encode(x):
+ return hashlib.sha224(x.encode()).hexdigest()
+
+ def _fn(self, name):
+ # NOTE: This method should not change as some may depend on it.
+ # See: https://github.com/ionrock/cachecontrol/issues/63
+ hashed = self.encode(name)
+ parts = list(hashed[:5]) + [hashed]
+ return os.path.join(self.directory, *parts)
+
+ def get(self, key):
+ name = self._fn(key)
+ try:
+ with open(name, "rb") as fh:
+ return fh.read()
+
+ except FileNotFoundError:
+ return None
+
+ def set(self, key, value):
+ name = self._fn(key)
+
+ # Make sure the directory exists
+ try:
+ os.makedirs(os.path.dirname(name), self.dirmode)
+ except (IOError, OSError):
+ pass
+
+ with self.lock_class(name) as lock:
+ # Write our actual file
+ with _secure_open_write(lock.path, self.filemode) as fh:
+ fh.write(value)
+
+ def delete(self, key):
+ name = self._fn(key)
+ if not self.forever:
+ try:
+ os.remove(name)
+ except FileNotFoundError:
+ pass
+
+
+def url_to_file_path(url, filecache):
+ """Return the file cache path based on the URL.
+
+ This does not ensure the file exists!
+ """
+ key = CacheController.cache_url(url)
+ return filecache._fn(key)
diff --git a/venv/Lib/site-packages/cachecontrol/caches/redis_cache.py b/venv/Lib/site-packages/cachecontrol/caches/redis_cache.py
new file mode 100644
index 000000000..16da0aed9
--- /dev/null
+++ b/venv/Lib/site-packages/cachecontrol/caches/redis_cache.py
@@ -0,0 +1,33 @@
+from __future__ import division
+
+from datetime import datetime
+from cachecontrol.cache import BaseCache
+
+
+class RedisCache(BaseCache):
+
+ def __init__(self, conn):
+ self.conn = conn
+
+ def get(self, key):
+ return self.conn.get(key)
+
+ def set(self, key, value, expires=None):
+ if not expires:
+ self.conn.set(key, value)
+ else:
+ expires = expires - datetime.utcnow()
+ self.conn.setex(key, int(expires.total_seconds()), value)
+
+ def delete(self, key):
+ self.conn.delete(key)
+
+ def clear(self):
+ """Helper for clearing all the keys in a database. Use with
+ caution!"""
+ for key in self.conn.keys():
+ self.conn.delete(key)
+
+ def close(self):
+ """Redis uses connection pooling, no need to close the connection."""
+ pass
diff --git a/venv/Lib/site-packages/cachecontrol/compat.py b/venv/Lib/site-packages/cachecontrol/compat.py
new file mode 100644
index 000000000..143c8ab08
--- /dev/null
+++ b/venv/Lib/site-packages/cachecontrol/compat.py
@@ -0,0 +1,29 @@
+try:
+ from urllib.parse import urljoin
+except ImportError:
+ from urlparse import urljoin
+
+
+try:
+ import cPickle as pickle
+except ImportError:
+ import pickle
+
+
+# Handle the case where the requests module has been patched to not have
+# urllib3 bundled as part of its source.
+try:
+ from requests.packages.urllib3.response import HTTPResponse
+except ImportError:
+ from urllib3.response import HTTPResponse
+
+try:
+ from requests.packages.urllib3.util import is_fp_closed
+except ImportError:
+ from urllib3.util import is_fp_closed
+
+# Replicate some six behaviour
+try:
+ text_type = unicode
+except NameError:
+ text_type = str
diff --git a/venv/Lib/site-packages/cachecontrol/controller.py b/venv/Lib/site-packages/cachecontrol/controller.py
new file mode 100644
index 000000000..c5c4a5080
--- /dev/null
+++ b/venv/Lib/site-packages/cachecontrol/controller.py
@@ -0,0 +1,376 @@
+"""
+The httplib2 algorithms ported for use with requests.
+"""
+import logging
+import re
+import calendar
+import time
+from email.utils import parsedate_tz
+
+from requests.structures import CaseInsensitiveDict
+
+from .cache import DictCache
+from .serialize import Serializer
+
+
+logger = logging.getLogger(__name__)
+
+URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")
+
+
+def parse_uri(uri):
+ """Parses a URI using the regex given in Appendix B of RFC 3986.
+
+ (scheme, authority, path, query, fragment) = parse_uri(uri)
+ """
+ groups = URI.match(uri).groups()
+ return (groups[1], groups[3], groups[4], groups[6], groups[8])
+
+
+class CacheController(object):
+    """An interface to see if a request should be cached or not.
+    """
+
+ def __init__(
+ self, cache=None, cache_etags=True, serializer=None, status_codes=None
+ ):
+ self.cache = DictCache() if cache is None else cache
+ self.cache_etags = cache_etags
+ self.serializer = serializer or Serializer()
+ self.cacheable_status_codes = status_codes or (200, 203, 300, 301)
+
+ @classmethod
+ def _urlnorm(cls, uri):
+ """Normalize the URL to create a safe key for the cache"""
+ (scheme, authority, path, query, fragment) = parse_uri(uri)
+ if not scheme or not authority:
+ raise Exception("Only absolute URIs are allowed. uri = %s" % uri)
+
+ scheme = scheme.lower()
+ authority = authority.lower()
+
+ if not path:
+ path = "/"
+
+ # Could do syntax based normalization of the URI before
+ # computing the digest. See Section 6.2.2 of Std 66.
+ request_uri = query and "?".join([path, query]) or path
+ defrag_uri = scheme + "://" + authority + request_uri
+
+ return defrag_uri
+
+ @classmethod
+ def cache_url(cls, uri):
+ return cls._urlnorm(uri)
+
+ def parse_cache_control(self, headers):
+ known_directives = {
+ # https://tools.ietf.org/html/rfc7234#section-5.2
+ "max-age": (int, True),
+ "max-stale": (int, False),
+ "min-fresh": (int, True),
+ "no-cache": (None, False),
+ "no-store": (None, False),
+ "no-transform": (None, False),
+ "only-if-cached": (None, False),
+ "must-revalidate": (None, False),
+ "public": (None, False),
+ "private": (None, False),
+ "proxy-revalidate": (None, False),
+ "s-maxage": (int, True),
+ }
+
+ cc_headers = headers.get("cache-control", headers.get("Cache-Control", ""))
+
+ retval = {}
+
+ for cc_directive in cc_headers.split(","):
+ if not cc_directive.strip():
+ continue
+
+ parts = cc_directive.split("=", 1)
+ directive = parts[0].strip()
+
+ try:
+ typ, required = known_directives[directive]
+ except KeyError:
+ logger.debug("Ignoring unknown cache-control directive: %s", directive)
+ continue
+
+ if not typ or not required:
+ retval[directive] = None
+ if typ:
+ try:
+ retval[directive] = typ(parts[1].strip())
+ except IndexError:
+ if required:
+ logger.debug(
+ "Missing value for cache-control " "directive: %s",
+ directive,
+ )
+ except ValueError:
+ logger.debug(
+ "Invalid value for cache-control directive " "%s, must be %s",
+ directive,
+ typ.__name__,
+ )
+
+ return retval
+
+ def cached_request(self, request):
+ """
+ Return a cached response if it exists in the cache, otherwise
+ return False.
+ """
+ cache_url = self.cache_url(request.url)
+ logger.debug('Looking up "%s" in the cache', cache_url)
+ cc = self.parse_cache_control(request.headers)
+
+ # Bail out if the request insists on fresh data
+ if "no-cache" in cc:
+ logger.debug('Request header has "no-cache", cache bypassed')
+ return False
+
+ if "max-age" in cc and cc["max-age"] == 0:
+            logger.debug('Request header has "max-age" as 0, cache bypassed')
+ return False
+
+ # Request allows serving from the cache, let's see if we find something
+ cache_data = self.cache.get(cache_url)
+ if cache_data is None:
+ logger.debug("No cache entry available")
+ return False
+
+ # Check whether it can be deserialized
+ resp = self.serializer.loads(request, cache_data)
+ if not resp:
+ logger.warning("Cache entry deserialization failed, entry ignored")
+ return False
+
+ # If we have a cached 301, return it immediately. We don't
+ # need to test our response for other headers b/c it is
+ # intrinsically "cacheable" as it is Permanent.
+ # See:
+ # https://tools.ietf.org/html/rfc7231#section-6.4.2
+ #
+ # Client can try to refresh the value by repeating the request
+ # with cache busting headers as usual (ie no-cache).
+ if resp.status == 301:
+ msg = (
+ 'Returning cached "301 Moved Permanently" response '
+ "(ignoring date and etag information)"
+ )
+ logger.debug(msg)
+ return resp
+
+ headers = CaseInsensitiveDict(resp.headers)
+ if not headers or "date" not in headers:
+ if "etag" not in headers:
+ # Without date or etag, the cached response can never be used
+ # and should be deleted.
+ logger.debug("Purging cached response: no date or etag")
+ self.cache.delete(cache_url)
+ logger.debug("Ignoring cached response: no date")
+ return False
+
+ now = time.time()
+ date = calendar.timegm(parsedate_tz(headers["date"]))
+ current_age = max(0, now - date)
+ logger.debug("Current age based on date: %i", current_age)
+
+ # TODO: There is an assumption that the result will be a
+ # urllib3 response object. This may not be best since we
+ # could probably avoid instantiating or constructing the
+ # response until we know we need it.
+ resp_cc = self.parse_cache_control(headers)
+
+ # determine freshness
+ freshness_lifetime = 0
+
+ # Check the max-age pragma in the cache control header
+ if "max-age" in resp_cc:
+ freshness_lifetime = resp_cc["max-age"]
+ logger.debug("Freshness lifetime from max-age: %i", freshness_lifetime)
+
+ # If there isn't a max-age, check for an expires header
+ elif "expires" in headers:
+ expires = parsedate_tz(headers["expires"])
+ if expires is not None:
+ expire_time = calendar.timegm(expires) - date
+ freshness_lifetime = max(0, expire_time)
+ logger.debug("Freshness lifetime from expires: %i", freshness_lifetime)
+
+ # Determine if we are setting freshness limit in the
+ # request. Note, this overrides what was in the response.
+ if "max-age" in cc:
+ freshness_lifetime = cc["max-age"]
+ logger.debug(
+ "Freshness lifetime from request max-age: %i", freshness_lifetime
+ )
+
+ if "min-fresh" in cc:
+ min_fresh = cc["min-fresh"]
+ # adjust our current age by our min fresh
+ current_age += min_fresh
+ logger.debug("Adjusted current age from min-fresh: %i", current_age)
+
+ # Return entry if it is fresh enough
+ if freshness_lifetime > current_age:
+ logger.debug('The response is "fresh", returning cached response')
+ logger.debug("%i > %i", freshness_lifetime, current_age)
+ return resp
+
+ # we're not fresh. If we don't have an Etag, clear it out
+ if "etag" not in headers:
+ logger.debug('The cached response is "stale" with no etag, purging')
+ self.cache.delete(cache_url)
+
+ # return the original handler
+ return False
+
+ def conditional_headers(self, request):
+ cache_url = self.cache_url(request.url)
+ resp = self.serializer.loads(request, self.cache.get(cache_url))
+ new_headers = {}
+
+ if resp:
+ headers = CaseInsensitiveDict(resp.headers)
+
+ if "etag" in headers:
+ new_headers["If-None-Match"] = headers["ETag"]
+
+ if "last-modified" in headers:
+ new_headers["If-Modified-Since"] = headers["Last-Modified"]
+
+ return new_headers
+
+ def cache_response(self, request, response, body=None, status_codes=None):
+ """
+ Algorithm for caching requests.
+
+ This assumes a requests Response object.
+ """
+ # From httplib2: Don't cache 206's since we aren't going to
+ # handle byte range requests
+ cacheable_status_codes = status_codes or self.cacheable_status_codes
+ if response.status not in cacheable_status_codes:
+ logger.debug(
+ "Status code %s not in %s", response.status, cacheable_status_codes
+ )
+ return
+
+ response_headers = CaseInsensitiveDict(response.headers)
+
+ # If we've been given a body, our response has a Content-Length, that
+ # Content-Length is valid then we can check to see if the body we've
+ # been given matches the expected size, and if it doesn't we'll just
+ # skip trying to cache it.
+ if (
+ body is not None
+ and "content-length" in response_headers
+ and response_headers["content-length"].isdigit()
+ and int(response_headers["content-length"]) != len(body)
+ ):
+ return
+
+ cc_req = self.parse_cache_control(request.headers)
+ cc = self.parse_cache_control(response_headers)
+
+ cache_url = self.cache_url(request.url)
+ logger.debug('Updating cache with response from "%s"', cache_url)
+
+ # Delete it from the cache if we happen to have it stored there
+ no_store = False
+ if "no-store" in cc:
+ no_store = True
+ logger.debug('Response header has "no-store"')
+ if "no-store" in cc_req:
+ no_store = True
+ logger.debug('Request header has "no-store"')
+ if no_store and self.cache.get(cache_url):
+ logger.debug('Purging existing cache entry to honor "no-store"')
+ self.cache.delete(cache_url)
+ if no_store:
+ return
+
+ # https://tools.ietf.org/html/rfc7234#section-4.1:
+ # A Vary header field-value of "*" always fails to match.
+ # Storing such a response leads to a deserialization warning
+ # during cache lookup and is not allowed to ever be served,
+ # so storing it can be avoided.
+ if "*" in response_headers.get("vary", ""):
+ logger.debug('Response header has "Vary: *"')
+ return
+
+ # If we've been given an etag, then keep the response
+ if self.cache_etags and "etag" in response_headers:
+ logger.debug("Caching due to etag")
+ self.cache.set(
+ cache_url, self.serializer.dumps(request, response, body=body)
+ )
+
+        # Add to the cache any 301s. We do this before looking at
+        # the Date headers.
+        elif response.status == 301:
+            logger.debug("Caching permanent redirect")
+ self.cache.set(cache_url, self.serializer.dumps(request, response))
+
+ # Add to the cache if the response headers demand it. If there
+ # is no date header then we can't do anything about expiring
+ # the cache.
+ elif "date" in response_headers:
+ # cache when there is a max-age > 0
+ if "max-age" in cc and cc["max-age"] > 0:
+ logger.debug("Caching b/c date exists and max-age > 0")
+ self.cache.set(
+ cache_url, self.serializer.dumps(request, response, body=body)
+ )
+
+ # If the request can expire, it means we should cache it
+ # in the meantime.
+ elif "expires" in response_headers:
+ if response_headers["expires"]:
+ logger.debug("Caching b/c of expires header")
+ self.cache.set(
+ cache_url, self.serializer.dumps(request, response, body=body)
+ )
+
+ def update_cached_response(self, request, response):
+ """On a 304 we will get a new set of headers that we want to
+ update our cached value with, assuming we have one.
+
+ This should only ever be called when we've sent an ETag and
+ gotten a 304 as the response.
+ """
+ cache_url = self.cache_url(request.url)
+
+ cached_response = self.serializer.loads(request, self.cache.get(cache_url))
+
+ if not cached_response:
+ # we didn't have a cached response
+ return response
+
+        # Let's update our headers with the headers from the new response:
+ # http://tools.ietf.org/html/draft-ietf-httpbis-p4-conditional-26#section-4.1
+ #
+ # The server isn't supposed to send headers that would make
+ # the cached body invalid. But... just in case, we'll be sure
+        # to strip out ones we know might be problematic due to
+ # typical assumptions.
+ excluded_headers = ["content-length"]
+
+ cached_response.headers.update(
+ dict(
+ (k, v)
+ for k, v in response.headers.items()
+ if k.lower() not in excluded_headers
+ )
+ )
+
+ # we want a 200 b/c we have content via the cache
+ cached_response.status = 200
+
+ # update our cache
+ self.cache.set(cache_url, self.serializer.dumps(request, cached_response))
+
+ return cached_response
diff --git a/venv/Lib/site-packages/cachecontrol/filewrapper.py b/venv/Lib/site-packages/cachecontrol/filewrapper.py
new file mode 100644
index 000000000..30ed4c5a6
--- /dev/null
+++ b/venv/Lib/site-packages/cachecontrol/filewrapper.py
@@ -0,0 +1,80 @@
+from io import BytesIO
+
+
+class CallbackFileWrapper(object):
+ """
+ Small wrapper around a fp object which will tee everything read into a
+ buffer, and when that file is closed it will execute a callback with the
+ contents of that buffer.
+
+ All attributes are proxied to the underlying file object.
+
+ This class uses members with a double underscore (__) leading prefix so as
+ not to accidentally shadow an attribute.
+ """
+
+ def __init__(self, fp, callback):
+ self.__buf = BytesIO()
+ self.__fp = fp
+ self.__callback = callback
+
+ def __getattr__(self, name):
+        # The vagaries of garbage collection mean that self.__fp is
+        # not always set. Using __getattribute__ with the private
+        # name[0] allows looking up the attribute value and raising an
+        # AttributeError when it doesn't exist. This stops things from
+        # infinitely recursing calls to getattr in the case where
+        # self.__fp hasn't been set.
+ #
+ # [0] https://docs.python.org/2/reference/expressions.html#atom-identifiers
+ fp = self.__getattribute__("_CallbackFileWrapper__fp")
+ return getattr(fp, name)
+
+ def __is_fp_closed(self):
+ try:
+ return self.__fp.fp is None
+
+ except AttributeError:
+ pass
+
+ try:
+ return self.__fp.closed
+
+ except AttributeError:
+ pass
+
+ # We just don't cache it then.
+ # TODO: Add some logging here...
+ return False
+
+ def _close(self):
+ if self.__callback:
+ self.__callback(self.__buf.getvalue())
+
+        # We assign this to None here, because otherwise we can get into
+        # really tricky problems where the CPython interpreter deadlocks
+        # because the callback is holding a reference to something which
+        # has a __del__ method. Setting this to None breaks the cycle
+        # and allows the garbage collector to do its thing normally.
+ self.__callback = None
+
+ def read(self, amt=None):
+ data = self.__fp.read(amt)
+ self.__buf.write(data)
+ if self.__is_fp_closed():
+ self._close()
+
+ return data
+
+ def _safe_read(self, amt):
+ data = self.__fp._safe_read(amt)
+ if amt == 2 and data == b"\r\n":
+ # urllib executes this read to toss the CRLF at the end
+ # of the chunk.
+ return data
+
+ self.__buf.write(data)
+ if self.__is_fp_closed():
+ self._close()
+
+ return data
diff --git a/venv/Lib/site-packages/cachecontrol/heuristics.py b/venv/Lib/site-packages/cachecontrol/heuristics.py
new file mode 100644
index 000000000..6c0e9790d
--- /dev/null
+++ b/venv/Lib/site-packages/cachecontrol/heuristics.py
@@ -0,0 +1,135 @@
+import calendar
+import time
+
+from email.utils import formatdate, parsedate, parsedate_tz
+
+from datetime import datetime, timedelta
+
+TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT"
+
+
+def expire_after(delta, date=None):
+ date = date or datetime.utcnow()
+ return date + delta
+
+
+def datetime_to_header(dt):
+ return formatdate(calendar.timegm(dt.timetuple()))
+
+
+class BaseHeuristic(object):
+
+ def warning(self, response):
+ """
+ Return a valid 1xx warning header value describing the cache
+ adjustments.
+
+        The response is provided to allow warnings like 113
+        http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need
+        to explicitly say the response is over 24 hours old.
+ """
+ return '110 - "Response is Stale"'
+
+ def update_headers(self, response):
+ """Update the response headers with any new headers.
+
+ NOTE: This SHOULD always include some Warning header to
+ signify that the response was cached by the client, not
+ by way of the provided headers.
+ """
+ return {}
+
+ def apply(self, response):
+ updated_headers = self.update_headers(response)
+
+ if updated_headers:
+ response.headers.update(updated_headers)
+ warning_header_value = self.warning(response)
+ if warning_header_value is not None:
+ response.headers.update({"Warning": warning_header_value})
+
+ return response
+
+
+class OneDayCache(BaseHeuristic):
+ """
+    Cache the response by providing an Expires header 1 day in the
+    future.
+ """
+
+ def update_headers(self, response):
+ headers = {}
+
+ if "expires" not in response.headers:
+ date = parsedate(response.headers["date"])
+ expires = expire_after(timedelta(days=1), date=datetime(*date[:6]))
+ headers["expires"] = datetime_to_header(expires)
+ headers["cache-control"] = "public"
+ return headers
+
+
+class ExpiresAfter(BaseHeuristic):
+ """
+ Cache **all** requests for a defined time period.
+ """
+
+ def __init__(self, **kw):
+ self.delta = timedelta(**kw)
+
+ def update_headers(self, response):
+ expires = expire_after(self.delta)
+ return {"expires": datetime_to_header(expires), "cache-control": "public"}
+
+ def warning(self, response):
+ tmpl = "110 - Automatically cached for %s. Response might be stale"
+ return tmpl % self.delta
+
+
+class LastModified(BaseHeuristic):
+ """
+ If there is no Expires header already, fall back on Last-Modified
+ using the heuristic from
+ http://tools.ietf.org/html/rfc7234#section-4.2.2
+ to calculate a reasonable value.
+
+ Firefox also does something like this per
+ https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ
+ http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397
+    Unlike Mozilla, we limit this to 24 hours.
+ """
+ cacheable_by_default_statuses = {
+ 200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501
+ }
+
+ def update_headers(self, resp):
+ headers = resp.headers
+
+ if "expires" in headers:
+ return {}
+
+ if "cache-control" in headers and headers["cache-control"] != "public":
+ return {}
+
+ if resp.status not in self.cacheable_by_default_statuses:
+ return {}
+
+ if "date" not in headers or "last-modified" not in headers:
+ return {}
+
+ date = calendar.timegm(parsedate_tz(headers["date"]))
+ last_modified = parsedate(headers["last-modified"])
+ if date is None or last_modified is None:
+ return {}
+
+ now = time.time()
+ current_age = max(0, now - date)
+ delta = date - calendar.timegm(last_modified)
+ freshness_lifetime = max(0, min(delta / 10, 24 * 3600))
+ if freshness_lifetime <= current_age:
+ return {}
+
+ expires = date + freshness_lifetime
+ return {"expires": time.strftime(TIME_FMT, time.gmtime(expires))}
+
+ def warning(self, resp):
+ return None
diff --git a/venv/Lib/site-packages/cachecontrol/serialize.py b/venv/Lib/site-packages/cachecontrol/serialize.py
new file mode 100644
index 000000000..572cf0e6c
--- /dev/null
+++ b/venv/Lib/site-packages/cachecontrol/serialize.py
@@ -0,0 +1,188 @@
+import base64
+import io
+import json
+import zlib
+
+import msgpack
+from requests.structures import CaseInsensitiveDict
+
+from .compat import HTTPResponse, pickle, text_type
+
+
+def _b64_decode_bytes(b):
+ return base64.b64decode(b.encode("ascii"))
+
+
+def _b64_decode_str(s):
+ return _b64_decode_bytes(s).decode("utf8")
+
+
+class Serializer(object):
+
+ def dumps(self, request, response, body=None):
+ response_headers = CaseInsensitiveDict(response.headers)
+
+ if body is None:
+ body = response.read(decode_content=False)
+
+ # NOTE: 99% sure this is dead code. I'm only leaving it
+ # here b/c I don't have a test yet to prove
+ # it. Basically, before using
+ # `cachecontrol.filewrapper.CallbackFileWrapper`,
+ # this made an effort to reset the file handle. The
+ # `CallbackFileWrapper` short circuits this code by
+ # setting the body as the content is consumed, the
+ # result being a `body` argument is *always* passed
+ # into cache_response, and in turn,
+        #       `Serializer.dumps`.
+ response._fp = io.BytesIO(body)
+
+ # NOTE: This is all a bit weird, but it's really important that on
+ # Python 2.x these objects are unicode and not str, even when
+ # they contain only ascii. The problem here is that msgpack
+ # understands the difference between unicode and bytes and we
+ # have it set to differentiate between them, however Python 2
+ # doesn't know the difference. Forcing these to unicode will be
+ # enough to have msgpack know the difference.
+ data = {
+ u"response": {
+ u"body": body,
+ u"headers": dict(
+ (text_type(k), text_type(v)) for k, v in response.headers.items()
+ ),
+ u"status": response.status,
+ u"version": response.version,
+ u"reason": text_type(response.reason),
+ u"strict": response.strict,
+ u"decode_content": response.decode_content,
+ }
+ }
+
+ # Construct our vary headers
+ data[u"vary"] = {}
+ if u"vary" in response_headers:
+ varied_headers = response_headers[u"vary"].split(",")
+ for header in varied_headers:
+ header = text_type(header).strip()
+ header_value = request.headers.get(header, None)
+ if header_value is not None:
+ header_value = text_type(header_value)
+ data[u"vary"][header] = header_value
+
+ return b",".join([b"cc=4", msgpack.dumps(data, use_bin_type=True)])
+
+ def loads(self, request, data):
+ # Short circuit if we've been given an empty set of data
+ if not data:
+ return
+
+ # Determine what version of the serializer the data was serialized
+ # with
+ try:
+ ver, data = data.split(b",", 1)
+ except ValueError:
+ ver = b"cc=0"
+
+ # Make sure that our "ver" is actually a version and isn't a false
+ # positive from a , being in the data stream.
+ if ver[:3] != b"cc=":
+ data = ver + data
+ ver = b"cc=0"
+
+ # Get the version number out of the cc=N
+ ver = ver.split(b"=", 1)[-1].decode("ascii")
+
+ # Dispatch to the actual load method for the given version
+ try:
+ return getattr(self, "_loads_v{}".format(ver))(request, data)
+
+ except AttributeError:
+ # This is a version we don't have a loads function for, so we'll
+ # just treat it as a miss and return None
+ return
+
+ def prepare_response(self, request, cached):
+ """Verify our vary headers match and construct a real urllib3
+ HTTPResponse object.
+ """
+ # Special case the '*' Vary value as it means we cannot actually
+ # determine if the cached response is suitable for this request.
+ # This case is also handled in the controller code when creating
+ # a cache entry, but is left here for backwards compatibility.
+ if "*" in cached.get("vary", {}):
+ return
+
+ # Ensure that the Vary headers for the cached response match our
+ # request
+ for header, value in cached.get("vary", {}).items():
+ if request.headers.get(header, None) != value:
+ return
+
+ body_raw = cached["response"].pop("body")
+
+ headers = CaseInsensitiveDict(data=cached["response"]["headers"])
+ if headers.get("transfer-encoding", "") == "chunked":
+ headers.pop("transfer-encoding")
+
+ cached["response"]["headers"] = headers
+
+ try:
+ body = io.BytesIO(body_raw)
+ except TypeError:
+ # This can happen if cachecontrol serialized to v1 format (pickle)
+ # using Python 2. A Python 2 str(byte string) will be unpickled as
+ # a Python 3 str (unicode string), which will cause the above to
+ # fail with:
+ #
+ # TypeError: 'str' does not support the buffer interface
+ body = io.BytesIO(body_raw.encode("utf8"))
+
+ return HTTPResponse(body=body, preload_content=False, **cached["response"])
+
+ def _loads_v0(self, request, data):
+ # The original legacy cache data. This doesn't contain enough
+ # information to construct everything we need, so we'll treat this as
+ # a miss.
+ return
+
+ def _loads_v1(self, request, data):
+ try:
+ cached = pickle.loads(data)
+ except ValueError:
+ return
+
+ return self.prepare_response(request, cached)
+
+ def _loads_v2(self, request, data):
+ try:
+ cached = json.loads(zlib.decompress(data).decode("utf8"))
+ except (ValueError, zlib.error):
+ return
+
+ # We need to decode the items that we've base64 encoded
+ cached["response"]["body"] = _b64_decode_bytes(cached["response"]["body"])
+ cached["response"]["headers"] = dict(
+ (_b64_decode_str(k), _b64_decode_str(v))
+ for k, v in cached["response"]["headers"].items()
+ )
+ cached["response"]["reason"] = _b64_decode_str(cached["response"]["reason"])
+ cached["vary"] = dict(
+ (_b64_decode_str(k), _b64_decode_str(v) if v is not None else v)
+ for k, v in cached["vary"].items()
+ )
+
+ return self.prepare_response(request, cached)
+
+ def _loads_v3(self, request, data):
+ # Due to Python 2 encoding issues, it's impossible to know for sure
+ # exactly how to load v3 entries, thus we'll treat these as a miss so
+ # that they get rewritten out as v4 entries.
+ return
+
+ def _loads_v4(self, request, data):
+ try:
+ cached = msgpack.loads(data, raw=False)
+ except ValueError:
+ return
+
+ return self.prepare_response(request, cached)
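Every cache entry written by the serializer above is framed as a b"cc=N" version tag, a comma, then the msgpack payload; loads() peels the tag off and dispatches to the matching _loads_vN, so unknown versions degrade to cache misses rather than errors. A small sketch of just the framing:

    import msgpack

    payload = msgpack.dumps({u"status": 200}, use_bin_type=True)
    entry = b",".join([b"cc=4", payload])

    # The tag contains no comma, so one split recovers both parts even
    # if the payload itself happens to contain b",".
    ver, data = entry.split(b",", 1)
    assert ver == b"cc=4"
    print(msgpack.loads(data, raw=False))  # {'status': 200}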
diff --git a/venv/Lib/site-packages/cachecontrol/wrapper.py b/venv/Lib/site-packages/cachecontrol/wrapper.py
new file mode 100644
index 000000000..d8e6fc6a9
--- /dev/null
+++ b/venv/Lib/site-packages/cachecontrol/wrapper.py
@@ -0,0 +1,29 @@
+from .adapter import CacheControlAdapter
+from .cache import DictCache
+
+
+def CacheControl(
+ sess,
+ cache=None,
+ cache_etags=True,
+ serializer=None,
+ heuristic=None,
+ controller_class=None,
+ adapter_class=None,
+ cacheable_methods=None,
+):
+
+ cache = DictCache() if cache is None else cache
+ adapter_class = adapter_class or CacheControlAdapter
+ adapter = adapter_class(
+ cache,
+ cache_etags=cache_etags,
+ serializer=serializer,
+ heuristic=heuristic,
+ controller_class=controller_class,
+ cacheable_methods=cacheable_methods,
+ )
+ sess.mount("http://", adapter)
+ sess.mount("https://", adapter)
+
+ return sess
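CacheControl() is only a convenience: it builds a CacheControlAdapter (defaulting to an in-memory DictCache) and mounts it for both schemes. Typical usage, with an illustrative URL:

    import requests
    from cachecontrol import CacheControl

    sess = CacheControl(requests.Session())
    # Repeat requests for the same URL can now be answered from the
    # DictCache, subject to the response's caching headers.
    resp = sess.get("https://example.com/")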
diff --git a/venv/Lib/site-packages/cachetools-4.1.1.dist-info/INSTALLER b/venv/Lib/site-packages/cachetools-4.1.1.dist-info/INSTALLER
new file mode 100644
index 000000000..a1b589e38
--- /dev/null
+++ b/venv/Lib/site-packages/cachetools-4.1.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/Lib/site-packages/cachetools-4.1.1.dist-info/LICENSE b/venv/Lib/site-packages/cachetools-4.1.1.dist-info/LICENSE
new file mode 100644
index 000000000..0dc186434
--- /dev/null
+++ b/venv/Lib/site-packages/cachetools-4.1.1.dist-info/LICENSE
@@ -0,0 +1,20 @@
+The MIT License (MIT)
+
+Copyright (c) 2014-2020 Thomas Kemmer
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/venv/Lib/site-packages/cachetools-4.1.1.dist-info/METADATA b/venv/Lib/site-packages/cachetools-4.1.1.dist-info/METADATA
new file mode 100644
index 000000000..44223974d
--- /dev/null
+++ b/venv/Lib/site-packages/cachetools-4.1.1.dist-info/METADATA
@@ -0,0 +1,124 @@
+Metadata-Version: 2.1
+Name: cachetools
+Version: 4.1.1
+Summary: Extensible memoizing collections and decorators
+Home-page: https://github.com/tkem/cachetools/
+Author: Thomas Kemmer
+Author-email: tkemmer@computer.org
+License: MIT
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Other Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Requires-Python: ~=3.5
+
+cachetools
+========================================================================
+
+.. image:: http://img.shields.io/pypi/v/cachetools
+ :target: https://pypi.org/project/cachetools/
+ :alt: Latest PyPI version
+
+.. image:: https://img.shields.io/readthedocs/cachetools
+ :target: http://cachetools.readthedocs.io/
+ :alt: Documentation build status
+
+.. image:: http://img.shields.io/travis/tkem/cachetools
+ :target: https://travis-ci.org/tkem/cachetools/
+ :alt: Travis CI build status
+
+.. image:: http://img.shields.io/coveralls/tkem/cachetools
+ :target: https://coveralls.io/r/tkem/cachetools
+ :alt: Test coverage
+
+.. image:: https://img.shields.io/github/license/tkem/cachetools
+ :target: http://raw.github.com/tkem/cachetools/master/LICENSE
+ :alt: License
+
+This module provides various memoizing collections and decorators,
+including variants of the Python Standard Library's `@lru_cache`_
+function decorator.
+
+.. code-block:: python
+
+   import urllib.request

+   from cachetools import cached, LRUCache, TTLCache
+
+ # speed up calculating Fibonacci numbers with dynamic programming
+ @cached(cache={})
+ def fib(n):
+ return n if n < 2 else fib(n - 1) + fib(n - 2)
+
+ # cache least recently used Python Enhancement Proposals
+ @cached(cache=LRUCache(maxsize=32))
+ def get_pep(num):
+ url = 'http://www.python.org/dev/peps/pep-%04d/' % num
+ with urllib.request.urlopen(url) as s:
+ return s.read()
+
+ # cache weather data for no longer than ten minutes
+ @cached(cache=TTLCache(maxsize=1024, ttl=600))
+ def get_weather(place):
+ return owm.weather_at_place(place).get_weather()
+
+For the purpose of this module, a *cache* is a mutable_ mapping_ of a
+fixed maximum size. When the cache is full, i.e. when adding another
+item would cause it to exceed its maximum size, the cache must choose
+which item(s) to discard based on a suitable `cache algorithm`_. In
+general, a cache's size is the total size of its items, and an item's
+size is a property or function of its value, e.g. the result of
+``sys.getsizeof(value)``. For the trivial but common case that each
+item counts as ``1``, a cache's size is equal to the number of its
+items, or ``len(cache)``.
+
+Multiple cache classes based on different caching algorithms are
+implemented, and decorators for easily memoizing function and method
+calls are provided, too.
+
+
+Installation
+------------------------------------------------------------------------
+
+cachetools is available from PyPI_ and can be installed by running::
+
+ pip install cachetools
+
+
+Project Resources
+------------------------------------------------------------------------
+
+- `Documentation`_
+- `Issue tracker`_
+- `Source code`_
+- `Change log`_
+
+
+License
+------------------------------------------------------------------------
+
+Copyright (c) 2014-2020 Thomas Kemmer.
+
+Licensed under the `MIT License`_.
+
+
+.. _@lru_cache: http://docs.python.org/3/library/functools.html#functools.lru_cache
+.. _mutable: http://docs.python.org/dev/glossary.html#term-mutable
+.. _mapping: http://docs.python.org/dev/glossary.html#term-mapping
+.. _cache algorithm: http://en.wikipedia.org/wiki/Cache_algorithms
+
+.. _PyPI: https://pypi.org/project/cachetools/
+.. _Documentation: https://cachetools.readthedocs.io/
+.. _Issue tracker: https://github.com/tkem/cachetools/issues/
+.. _Source code: https://github.com/tkem/cachetools/
+.. _Change log: https://github.com/tkem/cachetools/blob/master/CHANGELOG.rst
+.. _MIT License: http://raw.github.com/tkem/cachetools/master/LICENSE
+
+
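As the METADATA above notes, an item's size is pluggable via getsizeof. A small sketch of sizing by value length rather than counting each item as 1:

    from cachetools import LRUCache

    # Each value costs its length; the cache holds at most 100
    # characters' worth of values in total.
    cache = LRUCache(maxsize=100, getsizeof=len)
    cache["greeting"] = "hello world"
    print(cache.currsize)  # 11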
diff --git a/venv/Lib/site-packages/cachetools-4.1.1.dist-info/RECORD b/venv/Lib/site-packages/cachetools-4.1.1.dist-info/RECORD
new file mode 100644
index 000000000..6597cfdf0
--- /dev/null
+++ b/venv/Lib/site-packages/cachetools-4.1.1.dist-info/RECORD
@@ -0,0 +1,26 @@
+cachetools-4.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+cachetools-4.1.1.dist-info/LICENSE,sha256=WjqbFSk9D0xU0ftRzw9RpxHwz1gvgKDnMwR4ZwwX9ns,1085
+cachetools-4.1.1.dist-info/METADATA,sha256=UCFBVawzngdeCUWD5P33LTAx5AShjKmQ29q3kcc696A,4383
+cachetools-4.1.1.dist-info/RECORD,,
+cachetools-4.1.1.dist-info/WHEEL,sha256=g4nMs7d-Xl9-xC9XovUrsDHGXt-FT0E17Yqo92DEfvY,92
+cachetools-4.1.1.dist-info/top_level.txt,sha256=ai2FH78TGwoBcCgVfoqbzk5IQCtnDukdSs4zKuVPvDs,11
+cachetools/__init__.py,sha256=65iD423Ll5taTrDqqSQH2oxmUBHfLP48oWTcOQGGS6M,375
+cachetools/__pycache__/__init__.cpython-36.pyc,,
+cachetools/__pycache__/abc.cpython-36.pyc,,
+cachetools/__pycache__/cache.cpython-36.pyc,,
+cachetools/__pycache__/decorators.cpython-36.pyc,,
+cachetools/__pycache__/func.cpython-36.pyc,,
+cachetools/__pycache__/keys.cpython-36.pyc,,
+cachetools/__pycache__/lfu.cpython-36.pyc,,
+cachetools/__pycache__/lru.cpython-36.pyc,,
+cachetools/__pycache__/rr.cpython-36.pyc,,
+cachetools/__pycache__/ttl.cpython-36.pyc,,
+cachetools/abc.py,sha256=KdAOSBVp5jb_MUYdaoiWqbfXsiO9epC-KWVEXXD2TXc,1076
+cachetools/cache.py,sha256=JQPstpjP-TgdpLdQbrGN3gU8F9yk1IQdkFtaK0_CJEo,2272
+cachetools/decorators.py,sha256=Z8XaWDAnlq50Qf3FVrKSPbwr15dDkGRITMcHsVdy2AQ,2829
+cachetools/func.py,sha256=XXIllKSnfzt_Z8NcALeT5gz-tc1uU2V91502Z2QFTYQ,4009
+cachetools/keys.py,sha256=bKwFwU15s-vKWM1lnNdcJWfyQxu7uqIcRRJNg9hUfFg,1466
+cachetools/lfu.py,sha256=xAkYTpx8-7Gg1IOw08UVxncQys8tn7sPg09lr9IvTyQ,1065
+cachetools/lru.py,sha256=0XNTY7VzYEdV9yCdOMwnhkBeQox_N6VscVzNFm-VwRo,1188
+cachetools/rr.py,sha256=uoIxqj9xFYcA2sfKwoOQYd8JE6wzMXPrHLlUsuscILA,974
+cachetools/ttl.py,sha256=VI1Dci_sozLA8m15-l5OfNFfJ1GUhuWm39ISjvxrMg4,5830
diff --git a/venv/Lib/site-packages/cachetools-4.1.1.dist-info/WHEEL b/venv/Lib/site-packages/cachetools-4.1.1.dist-info/WHEEL
new file mode 100644
index 000000000..b552003ff
--- /dev/null
+++ b/venv/Lib/site-packages/cachetools-4.1.1.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.34.2)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/venv/Lib/site-packages/cachetools-4.1.1.dist-info/top_level.txt b/venv/Lib/site-packages/cachetools-4.1.1.dist-info/top_level.txt
new file mode 100644
index 000000000..50d14084a
--- /dev/null
+++ b/venv/Lib/site-packages/cachetools-4.1.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+cachetools
diff --git a/venv/Lib/site-packages/cachetools/__init__.py b/venv/Lib/site-packages/cachetools/__init__.py
new file mode 100644
index 000000000..51d8f7c82
--- /dev/null
+++ b/venv/Lib/site-packages/cachetools/__init__.py
@@ -0,0 +1,20 @@
+"""Extensible memoizing collections and decorators."""
+
+from .cache import Cache
+from .decorators import cached, cachedmethod
+from .lfu import LFUCache
+from .lru import LRUCache
+from .rr import RRCache
+from .ttl import TTLCache
+
+__all__ = (
+ 'Cache',
+ 'LFUCache',
+ 'LRUCache',
+ 'RRCache',
+ 'TTLCache',
+ 'cached',
+ 'cachedmethod'
+)
+
+__version__ = '4.1.1'
diff --git a/venv/Lib/site-packages/cachetools/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/cachetools/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..057baf494
Binary files /dev/null and b/venv/Lib/site-packages/cachetools/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cachetools/__pycache__/abc.cpython-36.pyc b/venv/Lib/site-packages/cachetools/__pycache__/abc.cpython-36.pyc
new file mode 100644
index 000000000..187fc2b6a
Binary files /dev/null and b/venv/Lib/site-packages/cachetools/__pycache__/abc.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cachetools/__pycache__/cache.cpython-36.pyc b/venv/Lib/site-packages/cachetools/__pycache__/cache.cpython-36.pyc
new file mode 100644
index 000000000..363f0dd7b
Binary files /dev/null and b/venv/Lib/site-packages/cachetools/__pycache__/cache.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cachetools/__pycache__/decorators.cpython-36.pyc b/venv/Lib/site-packages/cachetools/__pycache__/decorators.cpython-36.pyc
new file mode 100644
index 000000000..663d110be
Binary files /dev/null and b/venv/Lib/site-packages/cachetools/__pycache__/decorators.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cachetools/__pycache__/func.cpython-36.pyc b/venv/Lib/site-packages/cachetools/__pycache__/func.cpython-36.pyc
new file mode 100644
index 000000000..8296f9943
Binary files /dev/null and b/venv/Lib/site-packages/cachetools/__pycache__/func.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cachetools/__pycache__/keys.cpython-36.pyc b/venv/Lib/site-packages/cachetools/__pycache__/keys.cpython-36.pyc
new file mode 100644
index 000000000..dbc955516
Binary files /dev/null and b/venv/Lib/site-packages/cachetools/__pycache__/keys.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cachetools/__pycache__/lfu.cpython-36.pyc b/venv/Lib/site-packages/cachetools/__pycache__/lfu.cpython-36.pyc
new file mode 100644
index 000000000..9456892ec
Binary files /dev/null and b/venv/Lib/site-packages/cachetools/__pycache__/lfu.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cachetools/__pycache__/lru.cpython-36.pyc b/venv/Lib/site-packages/cachetools/__pycache__/lru.cpython-36.pyc
new file mode 100644
index 000000000..3c3e7e6ee
Binary files /dev/null and b/venv/Lib/site-packages/cachetools/__pycache__/lru.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cachetools/__pycache__/rr.cpython-36.pyc b/venv/Lib/site-packages/cachetools/__pycache__/rr.cpython-36.pyc
new file mode 100644
index 000000000..078112adf
Binary files /dev/null and b/venv/Lib/site-packages/cachetools/__pycache__/rr.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cachetools/__pycache__/ttl.cpython-36.pyc b/venv/Lib/site-packages/cachetools/__pycache__/ttl.cpython-36.pyc
new file mode 100644
index 000000000..e377378ba
Binary files /dev/null and b/venv/Lib/site-packages/cachetools/__pycache__/ttl.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cachetools/abc.py b/venv/Lib/site-packages/cachetools/abc.py
new file mode 100644
index 000000000..b61e49bba
--- /dev/null
+++ b/venv/Lib/site-packages/cachetools/abc.py
@@ -0,0 +1,46 @@
+from abc import abstractmethod
+from collections.abc import MutableMapping
+
+
+class DefaultMapping(MutableMapping):
+
+ __slots__ = ()
+
+ @abstractmethod
+ def __contains__(self, key): # pragma: nocover
+ return False
+
+ @abstractmethod
+ def __getitem__(self, key): # pragma: nocover
+ if hasattr(self.__class__, '__missing__'):
+ return self.__class__.__missing__(self, key)
+ else:
+ raise KeyError(key)
+
+ def get(self, key, default=None):
+ if key in self:
+ return self[key]
+ else:
+ return default
+
+ __marker = object()
+
+ def pop(self, key, default=__marker):
+ if key in self:
+ value = self[key]
+ del self[key]
+ elif default is self.__marker:
+ raise KeyError(key)
+ else:
+ value = default
+ return value
+
+ def setdefault(self, key, default=None):
+ if key in self:
+ value = self[key]
+ else:
+ self[key] = value = default
+ return value
+
+
+DefaultMapping.register(dict)
diff --git a/venv/Lib/site-packages/cachetools/cache.py b/venv/Lib/site-packages/cachetools/cache.py
new file mode 100644
index 000000000..4354ca69b
--- /dev/null
+++ b/venv/Lib/site-packages/cachetools/cache.py
@@ -0,0 +1,89 @@
+from .abc import DefaultMapping
+
+
+class _DefaultSize(object):
+ def __getitem__(self, _):
+ return 1
+
+ def __setitem__(self, _, value):
+ assert value == 1
+
+ def pop(self, _):
+ return 1
+
+
+class Cache(DefaultMapping):
+ """Mutable mapping to serve as a simple cache or cache base class."""
+
+ __size = _DefaultSize()
+
+ def __init__(self, maxsize, getsizeof=None):
+ if getsizeof:
+ self.getsizeof = getsizeof
+ if self.getsizeof is not Cache.getsizeof:
+ self.__size = dict()
+ self.__data = dict()
+ self.__currsize = 0
+ self.__maxsize = maxsize
+
+ def __repr__(self):
+ return '%s(%r, maxsize=%r, currsize=%r)' % (
+ self.__class__.__name__,
+ list(self.__data.items()),
+ self.__maxsize,
+ self.__currsize,
+ )
+
+ def __getitem__(self, key):
+ try:
+ return self.__data[key]
+ except KeyError:
+ return self.__missing__(key)
+
+ def __setitem__(self, key, value):
+ maxsize = self.__maxsize
+ size = self.getsizeof(value)
+ if size > maxsize:
+ raise ValueError('value too large')
+ if key not in self.__data or self.__size[key] < size:
+ while self.__currsize + size > maxsize:
+ self.popitem()
+ if key in self.__data:
+ diffsize = size - self.__size[key]
+ else:
+ diffsize = size
+ self.__data[key] = value
+ self.__size[key] = size
+ self.__currsize += diffsize
+
+ def __delitem__(self, key):
+ size = self.__size.pop(key)
+ del self.__data[key]
+ self.__currsize -= size
+
+ def __contains__(self, key):
+ return key in self.__data
+
+ def __missing__(self, key):
+ raise KeyError(key)
+
+ def __iter__(self):
+ return iter(self.__data)
+
+ def __len__(self):
+ return len(self.__data)
+
+ @property
+ def maxsize(self):
+ """The maximum size of the cache."""
+ return self.__maxsize
+
+ @property
+ def currsize(self):
+ """The current size of the cache."""
+ return self.__currsize
+
+ @staticmethod
+ def getsizeof(value):
+ """Return the size of a cache element's value."""
+ return 1
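Two details of Cache.__setitem__ above are easy to miss: a single value larger than maxsize is rejected outright with ValueError, and otherwise popitem() is called repeatedly until the new value fits. A quick sketch of both:

    from cachetools import Cache

    c = Cache(maxsize=2)
    c["a"] = 1
    c["b"] = 2
    c["c"] = 3        # evicts an existing pair via popitem()
    print(len(c))     # 2

    big = Cache(maxsize=1, getsizeof=lambda v: 10)
    try:
        big["x"] = "anything"
    except ValueError as e:
        print(e)      # value too large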
diff --git a/venv/Lib/site-packages/cachetools/decorators.py b/venv/Lib/site-packages/cachetools/decorators.py
new file mode 100644
index 000000000..cbea9fcb3
--- /dev/null
+++ b/venv/Lib/site-packages/cachetools/decorators.py
@@ -0,0 +1,88 @@
+import functools
+
+from .keys import hashkey
+
+
+def cached(cache, key=hashkey, lock=None):
+ """Decorator to wrap a function with a memoizing callable that saves
+ results in a cache.
+
+ """
+ def decorator(func):
+ if cache is None:
+ def wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+ elif lock is None:
+ def wrapper(*args, **kwargs):
+ k = key(*args, **kwargs)
+ try:
+ return cache[k]
+ except KeyError:
+ pass # key not found
+ v = func(*args, **kwargs)
+ try:
+ cache[k] = v
+ except ValueError:
+ pass # value too large
+ return v
+ else:
+ def wrapper(*args, **kwargs):
+ k = key(*args, **kwargs)
+ try:
+ with lock:
+ return cache[k]
+ except KeyError:
+ pass # key not found
+ v = func(*args, **kwargs)
+ try:
+ with lock:
+ cache[k] = v
+ except ValueError:
+ pass # value too large
+ return v
+ return functools.update_wrapper(wrapper, func)
+ return decorator
+
+
+def cachedmethod(cache, key=hashkey, lock=None):
+ """Decorator to wrap a class or instance method with a memoizing
+ callable that saves results in a cache.
+
+ """
+ def decorator(method):
+ if lock is None:
+ def wrapper(self, *args, **kwargs):
+ c = cache(self)
+ if c is None:
+ return method(self, *args, **kwargs)
+ k = key(*args, **kwargs)
+ try:
+ return c[k]
+ except KeyError:
+ pass # key not found
+ v = method(self, *args, **kwargs)
+ try:
+ c[k] = v
+ except ValueError:
+ pass # value too large
+ return v
+ else:
+ def wrapper(self, *args, **kwargs):
+ c = cache(self)
+ if c is None:
+ return method(self, *args, **kwargs)
+ k = key(*args, **kwargs)
+ try:
+ with lock(self):
+ return c[k]
+ except KeyError:
+ pass # key not found
+ v = method(self, *args, **kwargs)
+ try:
+ with lock(self):
+ c[k] = v
+ except ValueError:
+ pass # value too large
+ return v
+ return functools.update_wrapper(wrapper, method)
+ return decorator
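Unlike cached(), cachedmethod() takes a callable that retrieves the cache from the instance, so every object can carry its own cache. The usual pattern is operator.attrgetter; a minimal sketch:

    import operator
    from cachetools import LRUCache, cachedmethod

    class Resolver:
        def __init__(self):
            self.cache = LRUCache(maxsize=32)

        @cachedmethod(operator.attrgetter('cache'))
        def lookup(self, name):
            print('computing', name)  # printed once per distinct name
            return name.upper()

    r = Resolver()
    r.lookup('alpha')
    r.lookup('alpha')  # served from r.cache, nothing printed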
diff --git a/venv/Lib/site-packages/cachetools/func.py b/venv/Lib/site-packages/cachetools/func.py
new file mode 100644
index 000000000..5baf6de7e
--- /dev/null
+++ b/venv/Lib/site-packages/cachetools/func.py
@@ -0,0 +1,147 @@
+"""`functools.lru_cache` compatible memoizing function decorators."""
+
+import collections
+import functools
+import math
+import random
+import time
+
+try:
+ from threading import RLock
+except ImportError: # pragma: no cover
+ from dummy_threading import RLock
+
+from . import keys
+from .lfu import LFUCache
+from .lru import LRUCache
+from .rr import RRCache
+from .ttl import TTLCache
+
+__all__ = ('lfu_cache', 'lru_cache', 'rr_cache', 'ttl_cache')
+
+
+_CacheInfo = collections.namedtuple('CacheInfo', [
+ 'hits', 'misses', 'maxsize', 'currsize'
+])
+
+
+class _UnboundCache(dict):
+
+ @property
+ def maxsize(self):
+ return None
+
+ @property
+ def currsize(self):
+ return len(self)
+
+
+class _UnboundTTLCache(TTLCache):
+ def __init__(self, ttl, timer):
+ TTLCache.__init__(self, math.inf, ttl, timer)
+
+ @property
+ def maxsize(self):
+ return None
+
+
+def _cache(cache, typed):
+ maxsize = cache.maxsize
+
+ def decorator(func):
+ key = keys.typedkey if typed else keys.hashkey
+ lock = RLock()
+ stats = [0, 0]
+
+ def wrapper(*args, **kwargs):
+ k = key(*args, **kwargs)
+ with lock:
+ try:
+ v = cache[k]
+ stats[0] += 1
+ return v
+ except KeyError:
+ stats[1] += 1
+ v = func(*args, **kwargs)
+ try:
+ with lock:
+ cache[k] = v
+ except ValueError:
+ pass # value too large
+ return v
+
+ def cache_info():
+ with lock:
+ hits, misses = stats
+ maxsize = cache.maxsize
+ currsize = cache.currsize
+ return _CacheInfo(hits, misses, maxsize, currsize)
+
+ def cache_clear():
+ with lock:
+ try:
+ cache.clear()
+ finally:
+ stats[:] = [0, 0]
+
+ wrapper.cache_info = cache_info
+ wrapper.cache_clear = cache_clear
+ wrapper.cache_parameters = lambda: {'maxsize': maxsize, 'typed': typed}
+ functools.update_wrapper(wrapper, func)
+ return wrapper
+ return decorator
+
+
+def lfu_cache(maxsize=128, typed=False):
+ """Decorator to wrap a function with a memoizing callable that saves
+ up to `maxsize` results based on a Least Frequently Used (LFU)
+ algorithm.
+
+ """
+ if maxsize is None:
+ return _cache(_UnboundCache(), typed)
+ elif callable(maxsize):
+ return _cache(LFUCache(128), typed)(maxsize)
+ else:
+ return _cache(LFUCache(maxsize), typed)
+
+
+def lru_cache(maxsize=128, typed=False):
+ """Decorator to wrap a function with a memoizing callable that saves
+ up to `maxsize` results based on a Least Recently Used (LRU)
+ algorithm.
+
+ """
+ if maxsize is None:
+ return _cache(_UnboundCache(), typed)
+ elif callable(maxsize):
+ return _cache(LRUCache(128), typed)(maxsize)
+ else:
+ return _cache(LRUCache(maxsize), typed)
+
+
+def rr_cache(maxsize=128, choice=random.choice, typed=False):
+ """Decorator to wrap a function with a memoizing callable that saves
+ up to `maxsize` results based on a Random Replacement (RR)
+ algorithm.
+
+ """
+ if maxsize is None:
+ return _cache(_UnboundCache(), typed)
+ elif callable(maxsize):
+ return _cache(RRCache(128, choice), typed)(maxsize)
+ else:
+ return _cache(RRCache(maxsize, choice), typed)
+
+
+def ttl_cache(maxsize=128, ttl=600, timer=time.monotonic, typed=False):
+ """Decorator to wrap a function with a memoizing callable that saves
+ up to `maxsize` results based on a Least Recently Used (LRU)
+ algorithm with a per-item time-to-live (TTL) value.
+ """
+ if maxsize is None:
+ return _cache(_UnboundTTLCache(ttl, timer), typed)
+ elif callable(maxsize):
+ return _cache(TTLCache(128, ttl, timer), typed)(maxsize)
+ else:
+ return _cache(TTLCache(maxsize, ttl, timer), typed)
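The decorators in func.py mirror functools.lru_cache, including the cache_info() and cache_clear() attributes maintained by _cache() above. A short sketch:

    from cachetools.func import lru_cache

    @lru_cache(maxsize=2)
    def square(n):
        return n * n

    square(2)
    square(2)  # hit
    square(3)  # miss
    print(square.cache_info())
    # CacheInfo(hits=1, misses=2, maxsize=2, currsize=2)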
diff --git a/venv/Lib/site-packages/cachetools/keys.py b/venv/Lib/site-packages/cachetools/keys.py
new file mode 100644
index 000000000..355d742df
--- /dev/null
+++ b/venv/Lib/site-packages/cachetools/keys.py
@@ -0,0 +1,52 @@
+"""Key functions for memoizing decorators."""
+
+__all__ = ('hashkey', 'typedkey')
+
+
+class _HashedTuple(tuple):
+ """A tuple that ensures that hash() will be called no more than once
+ per element, since cache decorators will hash the key multiple
+ times on a cache miss. See also _HashedSeq in the standard
+ library functools implementation.
+
+ """
+
+ __hashvalue = None
+
+ def __hash__(self, hash=tuple.__hash__):
+ hashvalue = self.__hashvalue
+ if hashvalue is None:
+ self.__hashvalue = hashvalue = hash(self)
+ return hashvalue
+
+ def __add__(self, other, add=tuple.__add__):
+ return _HashedTuple(add(self, other))
+
+ def __radd__(self, other, add=tuple.__add__):
+ return _HashedTuple(add(other, self))
+
+ def __getstate__(self):
+ return {}
+
+
+# used for separating keyword arguments; we do not use an object
+# instance here so identity is preserved when pickling/unpickling
+_kwmark = (_HashedTuple,)
+
+
+def hashkey(*args, **kwargs):
+ """Return a cache key for the specified hashable arguments."""
+
+ if kwargs:
+ return _HashedTuple(args + sum(sorted(kwargs.items()), _kwmark))
+ else:
+ return _HashedTuple(args)
+
+
+def typedkey(*args, **kwargs):
+ """Return a typed cache key for the specified hashable arguments."""
+
+ key = hashkey(*args, **kwargs)
+ key += tuple(type(v) for v in args)
+ key += tuple(type(v) for _, v in sorted(kwargs.items()))
+ return key
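The difference between the two key functions above: hashkey compares by value only, while typedkey appends the argument types, so numerically equal arguments of different types get distinct cache entries. For example:

    from cachetools.keys import hashkey, typedkey

    print(hashkey(1) == hashkey(1.0))    # True: 1 == 1.0
    print(typedkey(1) == typedkey(1.0))  # False: int vs. float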
diff --git a/venv/Lib/site-packages/cachetools/lfu.py b/venv/Lib/site-packages/cachetools/lfu.py
new file mode 100644
index 000000000..adb45ee27
--- /dev/null
+++ b/venv/Lib/site-packages/cachetools/lfu.py
@@ -0,0 +1,34 @@
+import collections
+
+from .cache import Cache
+
+
+class LFUCache(Cache):
+ """Least Frequently Used (LFU) cache implementation."""
+
+ def __init__(self, maxsize, getsizeof=None):
+ Cache.__init__(self, maxsize, getsizeof)
+ self.__counter = collections.Counter()
+
+ def __getitem__(self, key, cache_getitem=Cache.__getitem__):
+ value = cache_getitem(self, key)
+ self.__counter[key] -= 1
+ return value
+
+ def __setitem__(self, key, value, cache_setitem=Cache.__setitem__):
+ cache_setitem(self, key, value)
+ self.__counter[key] -= 1
+
+ def __delitem__(self, key, cache_delitem=Cache.__delitem__):
+ cache_delitem(self, key)
+ del self.__counter[key]
+
+ def popitem(self):
+ """Remove and return the `(key, value)` pair least frequently used."""
+ try:
+ (key, _), = self.__counter.most_common(1)
+ except ValueError:
+ msg = '%s is empty' % self.__class__.__name__
+ raise KeyError(msg) from None
+ else:
+ return (key, self.pop(key))
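LFUCache tracks usage with negated counts in a Counter, so most_common(1) yields the *least* frequently used key. A sketch of the resulting eviction order:

    from cachetools import LFUCache

    c = LFUCache(maxsize=2)
    c['a'] = 1
    c['b'] = 2
    c['a']             # 'a' is now used more often than 'b'
    c['c'] = 3         # evicts the least frequently used key: 'b'
    print(sorted(c))   # ['a', 'c']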
diff --git a/venv/Lib/site-packages/cachetools/lru.py b/venv/Lib/site-packages/cachetools/lru.py
new file mode 100644
index 000000000..7634f9cf4
--- /dev/null
+++ b/venv/Lib/site-packages/cachetools/lru.py
@@ -0,0 +1,40 @@
+import collections
+
+from .cache import Cache
+
+
+class LRUCache(Cache):
+ """Least Recently Used (LRU) cache implementation."""
+
+ def __init__(self, maxsize, getsizeof=None):
+ Cache.__init__(self, maxsize, getsizeof)
+ self.__order = collections.OrderedDict()
+
+ def __getitem__(self, key, cache_getitem=Cache.__getitem__):
+ value = cache_getitem(self, key)
+ self.__update(key)
+ return value
+
+ def __setitem__(self, key, value, cache_setitem=Cache.__setitem__):
+ cache_setitem(self, key, value)
+ self.__update(key)
+
+ def __delitem__(self, key, cache_delitem=Cache.__delitem__):
+ cache_delitem(self, key)
+ del self.__order[key]
+
+ def popitem(self):
+ """Remove and return the `(key, value)` pair least recently used."""
+ try:
+ key = next(iter(self.__order))
+ except StopIteration:
+ msg = '%s is empty' % self.__class__.__name__
+ raise KeyError(msg) from None
+ else:
+ return (key, self.pop(key))
+
+ def __update(self, key):
+ try:
+ self.__order.move_to_end(key)
+ except KeyError:
+ self.__order[key] = None
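LRUCache keeps recency in an OrderedDict: every access moves the key to the end, and popitem() discards whatever sits at the front. A sketch:

    from cachetools import LRUCache

    c = LRUCache(maxsize=2)
    c['a'] = 1
    c['b'] = 2
    c['a']             # touch 'a'; 'b' is now least recently used
    c['c'] = 3         # evicts 'b'
    print(sorted(c))   # ['a', 'c']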
diff --git a/venv/Lib/site-packages/cachetools/rr.py b/venv/Lib/site-packages/cachetools/rr.py
new file mode 100644
index 000000000..30f38226d
--- /dev/null
+++ b/venv/Lib/site-packages/cachetools/rr.py
@@ -0,0 +1,35 @@
+import random
+
+from .cache import Cache
+
+
+# random.choice cannot be pickled in Python 2.7
+def _choice(seq):
+ return random.choice(seq)
+
+
+class RRCache(Cache):
+ """Random Replacement (RR) cache implementation."""
+
+ def __init__(self, maxsize, choice=random.choice, getsizeof=None):
+ Cache.__init__(self, maxsize, getsizeof)
+        # TODO: use None as default, assign to self.choice directly?
+ if choice is random.choice:
+ self.__choice = _choice
+ else:
+ self.__choice = choice
+
+ @property
+ def choice(self):
+ """The `choice` function used by the cache."""
+ return self.__choice
+
+ def popitem(self):
+ """Remove and return a random `(key, value)` pair."""
+ try:
+ key = self.__choice(list(self))
+ except IndexError:
+ msg = '%s is empty' % self.__class__.__name__
+ raise KeyError(msg) from None
+ else:
+ return (key, self.pop(key))
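Because the victim is picked by the injected choice function, RRCache eviction is easy to make deterministic for testing. A sketch using min as a stand-in for random.choice:

    from cachetools import RRCache

    c = RRCache(maxsize=2, choice=min)  # always evict the smallest key
    c['a'] = 1
    c['b'] = 2
    c['c'] = 3         # popitem() calls choice(list(c)) -> 'a'
    print(sorted(c))   # ['b', 'c']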
diff --git a/venv/Lib/site-packages/cachetools/ttl.py b/venv/Lib/site-packages/cachetools/ttl.py
new file mode 100644
index 000000000..7822e8bea
--- /dev/null
+++ b/venv/Lib/site-packages/cachetools/ttl.py
@@ -0,0 +1,209 @@
+import collections
+import time
+
+from .cache import Cache
+
+
+class _Link(object):
+
+ __slots__ = ('key', 'expire', 'next', 'prev')
+
+ def __init__(self, key=None, expire=None):
+ self.key = key
+ self.expire = expire
+
+ def __reduce__(self):
+ return _Link, (self.key, self.expire)
+
+ def unlink(self):
+ next = self.next
+ prev = self.prev
+ prev.next = next
+ next.prev = prev
+
+
+class _Timer(object):
+
+ def __init__(self, timer):
+ self.__timer = timer
+ self.__nesting = 0
+
+ def __call__(self):
+ if self.__nesting == 0:
+ return self.__timer()
+ else:
+ return self.__time
+
+ def __enter__(self):
+ if self.__nesting == 0:
+ self.__time = time = self.__timer()
+ else:
+ time = self.__time
+ self.__nesting += 1
+ return time
+
+ def __exit__(self, *exc):
+ self.__nesting -= 1
+
+ def __reduce__(self):
+ return _Timer, (self.__timer,)
+
+ def __getattr__(self, name):
+ return getattr(self.__timer, name)
+
+
+class TTLCache(Cache):
+ """LRU Cache implementation with per-item time-to-live (TTL) value."""
+
+ def __init__(self, maxsize, ttl, timer=time.monotonic, getsizeof=None):
+ Cache.__init__(self, maxsize, getsizeof)
+ self.__root = root = _Link()
+ root.prev = root.next = root
+ self.__links = collections.OrderedDict()
+ self.__timer = _Timer(timer)
+ self.__ttl = ttl
+
+ def __contains__(self, key):
+ try:
+ link = self.__links[key] # no reordering
+ except KeyError:
+ return False
+ else:
+ return not (link.expire < self.__timer())
+
+ def __getitem__(self, key, cache_getitem=Cache.__getitem__):
+ try:
+ link = self.__getlink(key)
+ except KeyError:
+ expired = False
+ else:
+ expired = link.expire < self.__timer()
+ if expired:
+ return self.__missing__(key)
+ else:
+ return cache_getitem(self, key)
+
+ def __setitem__(self, key, value, cache_setitem=Cache.__setitem__):
+ with self.__timer as time:
+ self.expire(time)
+ cache_setitem(self, key, value)
+ try:
+ link = self.__getlink(key)
+ except KeyError:
+ self.__links[key] = link = _Link(key)
+ else:
+ link.unlink()
+ link.expire = time + self.__ttl
+ link.next = root = self.__root
+ link.prev = prev = root.prev
+ prev.next = root.prev = link
+
+ def __delitem__(self, key, cache_delitem=Cache.__delitem__):
+ cache_delitem(self, key)
+ link = self.__links.pop(key)
+ link.unlink()
+ if link.expire < self.__timer():
+ raise KeyError(key)
+
+ def __iter__(self):
+ root = self.__root
+ curr = root.next
+ while curr is not root:
+ # "freeze" time for iterator access
+ with self.__timer as time:
+ if not (curr.expire < time):
+ yield curr.key
+ curr = curr.next
+
+ def __len__(self):
+ root = self.__root
+ curr = root.next
+ time = self.__timer()
+ count = len(self.__links)
+ while curr is not root and curr.expire < time:
+ count -= 1
+ curr = curr.next
+ return count
+
+ def __setstate__(self, state):
+ self.__dict__.update(state)
+ root = self.__root
+ root.prev = root.next = root
+ for link in sorted(self.__links.values(), key=lambda obj: obj.expire):
+ link.next = root
+ link.prev = prev = root.prev
+ prev.next = root.prev = link
+ self.expire(self.__timer())
+
+ def __repr__(self, cache_repr=Cache.__repr__):
+ with self.__timer as time:
+ self.expire(time)
+ return cache_repr(self)
+
+ @property
+ def currsize(self):
+ with self.__timer as time:
+ self.expire(time)
+ return super(TTLCache, self).currsize
+
+ @property
+ def timer(self):
+ """The timer function used by the cache."""
+ return self.__timer
+
+ @property
+ def ttl(self):
+ """The time-to-live value of the cache's items."""
+ return self.__ttl
+
+ def expire(self, time=None):
+ """Remove expired items from the cache."""
+ if time is None:
+ time = self.__timer()
+ root = self.__root
+ curr = root.next
+ links = self.__links
+ cache_delitem = Cache.__delitem__
+ while curr is not root and curr.expire < time:
+ cache_delitem(self, curr.key)
+ del links[curr.key]
+ next = curr.next
+ curr.unlink()
+ curr = next
+
+ def clear(self):
+ with self.__timer as time:
+ self.expire(time)
+ Cache.clear(self)
+
+ def get(self, *args, **kwargs):
+ with self.__timer:
+ return Cache.get(self, *args, **kwargs)
+
+ def pop(self, *args, **kwargs):
+ with self.__timer:
+ return Cache.pop(self, *args, **kwargs)
+
+ def setdefault(self, *args, **kwargs):
+ with self.__timer:
+ return Cache.setdefault(self, *args, **kwargs)
+
+ def popitem(self):
+ """Remove and return the `(key, value)` pair least recently used that
+ has not already expired.
+
+ """
+ with self.__timer as time:
+ self.expire(time)
+ try:
+ key = next(iter(self.__links))
+ except StopIteration:
+ msg = '%s is empty' % self.__class__.__name__
+ raise KeyError(msg) from None
+ else:
+ return (key, self.pop(key))
+
+ def __getlink(self, key):
+ value = self.__links[key]
+ self.__links.move_to_end(key)
+ return value
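TTLCache stamps each link with expire = now + ttl and expires entries lazily on access. Passing a controllable timer makes that observable without sleeping; a sketch:

    from cachetools import TTLCache

    clock = [0]
    c = TTLCache(maxsize=10, ttl=5, timer=lambda: clock[0])
    c['token'] = 'abc'
    print('token' in c)   # True
    clock[0] = 6          # step past the 5-unit TTL
    print('token' in c)   # False: the entry has expired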
diff --git a/venv/Lib/site-packages/certifi-2020.6.20.dist-info/INSTALLER b/venv/Lib/site-packages/certifi-2020.6.20.dist-info/INSTALLER
new file mode 100644
index 000000000..a1b589e38
--- /dev/null
+++ b/venv/Lib/site-packages/certifi-2020.6.20.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/Lib/site-packages/certifi-2020.6.20.dist-info/LICENSE b/venv/Lib/site-packages/certifi-2020.6.20.dist-info/LICENSE
new file mode 100644
index 000000000..802b53ff1
--- /dev/null
+++ b/venv/Lib/site-packages/certifi-2020.6.20.dist-info/LICENSE
@@ -0,0 +1,21 @@
+This package contains a modified version of ca-bundle.crt:
+
+ca-bundle.crt -- Bundle of CA Root Certificates
+
+Certificate data from Mozilla as of: Thu Nov 3 19:04:19 2011
+This is a bundle of X.509 certificates of public Certificate Authorities
+(CA). These were automatically extracted from Mozilla's root certificates
+file (certdata.txt). This file can be found in the mozilla source tree:
+http://mxr.mozilla.org/mozilla/source/security/nss/lib/ckfw/builtins/certdata.txt?raw=1#
+It contains the certificates in PEM format and therefore
+can be directly used with curl / libcurl / php_curl, or with
+an Apache+mod_ssl webserver for SSL client authentication.
+Just configure this file as the SSLCACertificateFile.
+
+***** BEGIN LICENSE BLOCK *****
+This Source Code Form is subject to the terms of the Mozilla Public License,
+v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain
+one at http://mozilla.org/MPL/2.0/.
+
+***** END LICENSE BLOCK *****
+@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $
diff --git a/venv/Lib/site-packages/certifi-2020.6.20.dist-info/METADATA b/venv/Lib/site-packages/certifi-2020.6.20.dist-info/METADATA
new file mode 100644
index 000000000..0bcbcce26
--- /dev/null
+++ b/venv/Lib/site-packages/certifi-2020.6.20.dist-info/METADATA
@@ -0,0 +1,82 @@
+Metadata-Version: 2.1
+Name: certifi
+Version: 2020.6.20
+Summary: Python package for providing Mozilla's CA Bundle.
+Home-page: https://certifiio.readthedocs.io/en/latest/
+Author: Kenneth Reitz
+Author-email: me@kennethreitz.com
+License: MPL-2.0
+Project-URL: Documentation, https://certifiio.readthedocs.io/en/latest/
+Project-URL: Source, https://github.com/certifi/python-certifi
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)
+Classifier: Natural Language :: English
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+
+Certifi: Python SSL Certificates
+================================
+
+`Certifi`_ provides Mozilla's carefully curated collection of Root Certificates for
+validating the trustworthiness of SSL certificates while verifying the identity
+of TLS hosts. It has been extracted from the `Requests`_ project.
+
+Installation
+------------
+
+``certifi`` is available on PyPI. Simply install it with ``pip``::
+
+ $ pip install certifi
+
+Usage
+-----
+
+To reference the installed certificate authority (CA) bundle, you can use the
+built-in function::
+
+ >>> import certifi
+
+ >>> certifi.where()
+ '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem'
+
+Or from the command line::
+
+ $ python -m certifi
+ /usr/local/lib/python3.7/site-packages/certifi/cacert.pem
+
+Enjoy!
+
+1024-bit Root Certificates
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Browsers and certificate authorities have concluded that 1024-bit keys are
+unacceptably weak for certificates, particularly root certificates. For this
+reason, Mozilla has removed any weak (i.e. 1024-bit key) certificate from its
+bundle, replacing it with an equivalent strong (i.e. 2048-bit or greater key)
+certificate from the same CA. Because Mozilla removed these certificates from
+its bundle, ``certifi`` removed them as well.
+
+In previous versions, ``certifi`` provided the ``certifi.old_where()`` function
+to intentionally re-add the 1024-bit roots back into your bundle. This was not
+recommended in production and therefore was removed at the end of 2018.
+
+.. _`Certifi`: https://certifiio.readthedocs.io/en/latest/
+.. _`Requests`: https://requests.readthedocs.io/en/master/
+
+Addition/Removal of Certificates
+--------------------------------
+
+Certifi does not support any addition/removal or other modification of the
+CA trust store content. This project is intended to provide a reliable and
+highly portable root of trust to Python deployments. Look to upstream projects
+for methods to use alternate trust.
+
+
diff --git a/venv/Lib/site-packages/certifi-2020.6.20.dist-info/RECORD b/venv/Lib/site-packages/certifi-2020.6.20.dist-info/RECORD
new file mode 100644
index 000000000..b3b52478f
--- /dev/null
+++ b/venv/Lib/site-packages/certifi-2020.6.20.dist-info/RECORD
@@ -0,0 +1,13 @@
+certifi-2020.6.20.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+certifi-2020.6.20.dist-info/LICENSE,sha256=anCkv2sBABbVmmS4rkrY3H9e8W8ftFPMLs13HFo0ETE,1048
+certifi-2020.6.20.dist-info/METADATA,sha256=_0lH4pmUKzXqjJAq6fIlE4JB2g1CFLPpjpwwOsqNqwk,2944
+certifi-2020.6.20.dist-info/RECORD,,
+certifi-2020.6.20.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110
+certifi-2020.6.20.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8
+certifi/__init__.py,sha256=u1E_DrSGj_nnEkK5VglvEqP8D80KpghLVWL0A_pq41A,62
+certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243
+certifi/__pycache__/__init__.cpython-36.pyc,,
+certifi/__pycache__/__main__.cpython-36.pyc,,
+certifi/__pycache__/core.cpython-36.pyc,,
+certifi/cacert.pem,sha256=GhT24f0R7_9y4YY_hkXwkO7BthZhRGDCEMO348E9S14,282394
+certifi/core.py,sha256=V0uyxKOYdz6ulDSusclrLmjbPgOXsD0BnEf0SQ7OnoE,2303
diff --git a/venv/Lib/site-packages/certifi-2020.6.20.dist-info/WHEEL b/venv/Lib/site-packages/certifi-2020.6.20.dist-info/WHEEL
new file mode 100644
index 000000000..ef99c6cf3
--- /dev/null
+++ b/venv/Lib/site-packages/certifi-2020.6.20.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.34.2)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/venv/Lib/site-packages/certifi-2020.6.20.dist-info/top_level.txt b/venv/Lib/site-packages/certifi-2020.6.20.dist-info/top_level.txt
new file mode 100644
index 000000000..963eac530
--- /dev/null
+++ b/venv/Lib/site-packages/certifi-2020.6.20.dist-info/top_level.txt
@@ -0,0 +1 @@
+certifi
diff --git a/venv/Lib/site-packages/certifi/__init__.py b/venv/Lib/site-packages/certifi/__init__.py
new file mode 100644
index 000000000..5d52a62e7
--- /dev/null
+++ b/venv/Lib/site-packages/certifi/__init__.py
@@ -0,0 +1,3 @@
+from .core import contents, where
+
+__version__ = "2020.06.20"
diff --git a/venv/Lib/site-packages/certifi/__main__.py b/venv/Lib/site-packages/certifi/__main__.py
new file mode 100644
index 000000000..8945b5da8
--- /dev/null
+++ b/venv/Lib/site-packages/certifi/__main__.py
@@ -0,0 +1,12 @@
+import argparse
+
+from certifi import contents, where
+
+parser = argparse.ArgumentParser()
+parser.add_argument("-c", "--contents", action="store_true")
+args = parser.parse_args()
+
+if args.contents:
+ print(contents())
+else:
+ print(where())
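certifi.where() simply returns the filesystem path of the bundled cacert.pem, which can be handed to anything that accepts a CA-bundle file. A sketch with requests (the URL is illustrative):

    import certifi
    import requests

    # Verify TLS against certifi's bundle explicitly.
    resp = requests.get("https://example.com/", verify=certifi.where())
    print(resp.status_code)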
diff --git a/venv/Lib/site-packages/certifi/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/certifi/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..1814529c5
Binary files /dev/null and b/venv/Lib/site-packages/certifi/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/certifi/__pycache__/__main__.cpython-36.pyc b/venv/Lib/site-packages/certifi/__pycache__/__main__.cpython-36.pyc
new file mode 100644
index 000000000..766fd752f
Binary files /dev/null and b/venv/Lib/site-packages/certifi/__pycache__/__main__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/certifi/__pycache__/core.cpython-36.pyc b/venv/Lib/site-packages/certifi/__pycache__/core.cpython-36.pyc
new file mode 100644
index 000000000..64a95d382
Binary files /dev/null and b/venv/Lib/site-packages/certifi/__pycache__/core.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/certifi/cacert.pem b/venv/Lib/site-packages/certifi/cacert.pem
new file mode 100644
index 000000000..0fd855f46
--- /dev/null
+++ b/venv/Lib/site-packages/certifi/cacert.pem
@@ -0,0 +1,4620 @@
+
+# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
+# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
+# Label: "GlobalSign Root CA"
+# Serial: 4835703278459707669005204
+# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a
+# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c
+# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99
+-----BEGIN CERTIFICATE-----
+MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG
+A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv
+b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw
+MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i
+YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT
+aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ
+jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp
+xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp
+1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG
+snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ
+U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8
+9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E
+BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B
+AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz
+yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE
+38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP
+AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad
+DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME
+HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2
+# Label: "GlobalSign Root CA - R2"
+# Serial: 4835703278459682885658125
+# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30
+# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe
+# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e
+-----BEGIN CERTIFICATE-----
+MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G
+A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp
+Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1
+MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG
+A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL
+v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8
+eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq
+tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd
+C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa
+zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB
+mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH
+V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n
+bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG
+3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs
+J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO
+291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS
+ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd
+AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7
+TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Label: "Entrust.net Premium 2048 Secure Server CA"
+# Serial: 946069240
+# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90
+# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31
+# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77
+-----BEGIN CERTIFICATE-----
+MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML
+RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp
+bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5
+IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3
+MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3
+LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp
+YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG
+A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq
+K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe
+sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX
+MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT
+XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/
+HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH
+4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
+HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub
+j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo
+U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf
+zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b
+u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+
+bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er
+fF6adulZkMV8gzURZVE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
+# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
+# Label: "Baltimore CyberTrust Root"
+# Serial: 33554617
+# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4
+# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74
+# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ
+RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD
+VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX
+DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y
+ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy
+VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr
+mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr
+IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK
+mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu
+XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy
+dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye
+jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1
+BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3
+DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92
+9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx
+jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0
+Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz
+ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS
+R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
+# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
+# Label: "Entrust Root Certification Authority"
+# Serial: 1164660820
+# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4
+# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9
+# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c
+-----BEGIN CERTIFICATE-----
+MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC
+VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0
+Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW
+KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl
+cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw
+NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw
+NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy
+ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV
+BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ
+KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo
+Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4
+4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9
+KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI
+rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi
+94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB
+sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi
+gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo
+kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE
+vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA
+A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t
+O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua
+AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP
+9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/
+eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m
+0vdXcDazv/wor3ElhVsT/h5/WrQ8
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc.
+# Subject: CN=GeoTrust Global CA O=GeoTrust Inc.
+# Label: "GeoTrust Global CA"
+# Serial: 144470
+# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5
+# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12
+# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a
+-----BEGIN CERTIFICATE-----
+MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT
+MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i
+YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG
+EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg
+R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9
+9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq
+fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv
+iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU
+1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+
+bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW
+MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA
+ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l
+uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn
+Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS
+tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF
+PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un
+hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV
+5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc.
+# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc.
+# Label: "GeoTrust Universal CA"
+# Serial: 1
+# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48
+# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79
+# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12
+-----BEGIN CERTIFICATE-----
+MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW
+MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy
+c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE
+BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0
+IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV
+VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8
+cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT
+QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh
+F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v
+c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w
+mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd
+VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX
+teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ
+f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe
+Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+
+nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB
+/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY
+MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG
+9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc
+aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX
+IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn
+ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z
+uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN
+Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja
+QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW
+koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9
+ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt
+DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm
+bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.
+# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.
+# Label: "GeoTrust Universal CA 2"
+# Serial: 1
+# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7
+# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79
+# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b
+-----BEGIN CERTIFICATE-----
+MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW
+MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy
+c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD
+VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1
+c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC
+AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81
+WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG
+FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq
+XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL
+se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb
+KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd
+IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73
+y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt
+hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc
+QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4
+Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV
+HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ
+KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z
+dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ
+L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr
+Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo
+ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY
+T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz
+GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m
+1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV
+OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH
+6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX
+QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS
+-----END CERTIFICATE-----
+
+# Issuer: CN=AAA Certificate Services O=Comodo CA Limited
+# Subject: CN=AAA Certificate Services O=Comodo CA Limited
+# Label: "Comodo AAA Services root"
+# Serial: 1
+# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0
+# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49
+# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4
+-----BEGIN CERTIFICATE-----
+MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb
+MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
+GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj
+YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL
+MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
+BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM
+GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP
+ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua
+BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe
+3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4
+YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR
+rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm
+ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU
+oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF
+MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v
+QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t
+b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF
+AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q
+GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz
+Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2
+G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi
+l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3
+smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority
+# Subject: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority
+# Label: "QuoVadis Root CA"
+# Serial: 985026699
+# MD5 Fingerprint: 27:de:36:fe:72:b7:00:03:00:9d:f4:f0:1e:6c:04:24
+# SHA1 Fingerprint: de:3f:40:bd:50:93:d3:9b:6c:60:f6:da:bc:07:62:01:00:89:76:c9
+# SHA256 Fingerprint: a4:5e:de:3b:bb:f0:9c:8a:e1:5c:72:ef:c0:72:68:d6:93:a2:1c:99:6f:d5:1e:67:ca:07:94:60:fd:6d:88:73
+-----BEGIN CERTIFICATE-----
+MIIF0DCCBLigAwIBAgIEOrZQizANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJC
+TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDElMCMGA1UECxMcUm9vdCBDZXJ0
+aWZpY2F0aW9uIEF1dGhvcml0eTEuMCwGA1UEAxMlUXVvVmFkaXMgUm9vdCBDZXJ0
+aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wMTAzMTkxODMzMzNaFw0yMTAzMTcxODMz
+MzNaMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMSUw
+IwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYDVQQDEyVR
+dW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv2G1lVO6V/z68mcLOhrfEYBklbTRvM16z/Yp
+li4kVEAkOPcahdxYTMukJ0KX0J+DisPkBgNbAKVRHnAEdOLB1Dqr1607BxgFjv2D
+rOpm2RgbaIr1VxqYuvXtdj182d6UajtLF8HVj71lODqV0D1VNk7feVcxKh7YWWVJ
+WCCYfqtffp/p1k3sg3Spx2zY7ilKhSoGFPlU5tPaZQeLYzcS19Dsw3sgQUSj7cug
+F+FxZc4dZjH3dgEZyH0DWLaVSR2mEiboxgx24ONmy+pdpibu5cxfvWenAScOospU
+xbF6lR1xHkopigPcakXBpBlebzbNw6Kwt/5cOOJSvPhEQ+aQuwIDAQABo4ICUjCC
+Ak4wPQYIKwYBBQUHAQEEMTAvMC0GCCsGAQUFBzABhiFodHRwczovL29jc3AucXVv
+dmFkaXNvZmZzaG9yZS5jb20wDwYDVR0TAQH/BAUwAwEB/zCCARoGA1UdIASCAREw
+ggENMIIBCQYJKwYBBAG+WAABMIH7MIHUBggrBgEFBQcCAjCBxxqBxFJlbGlhbmNl
+IG9uIHRoZSBRdW9WYWRpcyBSb290IENlcnRpZmljYXRlIGJ5IGFueSBwYXJ0eSBh
+c3N1bWVzIGFjY2VwdGFuY2Ugb2YgdGhlIHRoZW4gYXBwbGljYWJsZSBzdGFuZGFy
+ZCB0ZXJtcyBhbmQgY29uZGl0aW9ucyBvZiB1c2UsIGNlcnRpZmljYXRpb24gcHJh
+Y3RpY2VzLCBhbmQgdGhlIFF1b1ZhZGlzIENlcnRpZmljYXRlIFBvbGljeS4wIgYI
+KwYBBQUHAgEWFmh0dHA6Ly93d3cucXVvdmFkaXMuYm0wHQYDVR0OBBYEFItLbe3T
+KbkGGew5Oanwl4Rqy+/fMIGuBgNVHSMEgaYwgaOAFItLbe3TKbkGGew5Oanwl4Rq
+y+/foYGEpIGBMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1p
+dGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYD
+VQQDEyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggQ6tlCL
+MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAitQUtf70mpKnGdSk
+fnIYj9lofFIk3WdvOXrEql494liwTXCYhGHoG+NpGA7O+0dQoE7/8CQfvbLO9Sf8
+7C9TqnN7Az10buYWnuulLsS/VidQK2K6vkscPFVcQR0kvoIgR13VRH56FmjffU1R
+cHhXHTMe/QKZnAzNCgVPx7uOpHX6Sm2xgI4JVrmcGmD+XcHXetwReNDWXcG31a0y
+mQM6isxUJTkxgXsTIlG6Rmyhu576BGxJJnSP0nPrzDCi5upZIof4l/UO/erMkqQW
+xFIY6iHOsfHmhIHluqmGKPJDWl0Snawe2ajlCmqnf6CHKc/yiU3U7MXi5nrQNiOK
+SnQ2+Q==
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 2"
+# Serial: 1289
+# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b
+# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7
+# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86
+-----BEGIN CERTIFICATE-----
+MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x
+GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv
+b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV
+BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W
+YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa
+GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg
+Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J
+WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB
+rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp
++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1
+ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i
+Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz
+PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og
+/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH
+oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI
+yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud
+EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2
+A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL
+MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT
+ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f
+BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn
+g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl
+fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K
+WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha
+B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc
+hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR
+TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD
+mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z
+ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y
+4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza
+8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 3"
+# Serial: 1478
+# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf
+# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85
+# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35
+-----BEGIN CERTIFICATE-----
+MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x
+GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv
+b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV
+BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W
+YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM
+V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB
+4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr
+H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd
+8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv
+vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT
+mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe
+btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc
+T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt
+WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ
+c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A
+4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD
+VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG
+CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0
+aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0
+aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu
+dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw
+czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G
+A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC
+TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg
+Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0
+7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem
+d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd
++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B
+4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN
+t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x
+DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57
+k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s
+zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j
+Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT
+mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK
+4SVhM7JZG+Ju1zdXtg2pEto=
+-----END CERTIFICATE-----
+
+# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1
+# Subject: O=SECOM Trust.net OU=Security Communication RootCA1
+# Label: "Security Communication Root CA"
+# Serial: 0
+# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a
+# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7
+# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c
+-----BEGIN CERTIFICATE-----
+MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY
+MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t
+dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5
+WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD
+VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3
+DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8
+9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ
+DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9
+Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N
+QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ
+xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G
+A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T
+AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG
+kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr
+Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5
+Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU
+JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot
+RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Sonera Class2 CA O=Sonera
+# Subject: CN=Sonera Class2 CA O=Sonera
+# Label: "Sonera Class 2 Root CA"
+# Serial: 29
+# MD5 Fingerprint: a3:ec:75:0f:2e:88:df:fa:48:01:4e:0b:5c:48:6f:fb
+# SHA1 Fingerprint: 37:f7:6d:e6:07:7c:90:c5:b1:3e:93:1a:b7:41:10:b4:f2:e4:9a:27
+# SHA256 Fingerprint: 79:08:b4:03:14:c1:38:10:0b:51:8d:07:35:80:7f:fb:fc:f8:51:8a:00:95:33:71:05:ba:38:6b:15:3d:d9:27
+-----BEGIN CERTIFICATE-----
+MIIDIDCCAgigAwIBAgIBHTANBgkqhkiG9w0BAQUFADA5MQswCQYDVQQGEwJGSTEP
+MA0GA1UEChMGU29uZXJhMRkwFwYDVQQDExBTb25lcmEgQ2xhc3MyIENBMB4XDTAx
+MDQwNjA3Mjk0MFoXDTIxMDQwNjA3Mjk0MFowOTELMAkGA1UEBhMCRkkxDzANBgNV
+BAoTBlNvbmVyYTEZMBcGA1UEAxMQU29uZXJhIENsYXNzMiBDQTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAJAXSjWdyvANlsdE+hY3/Ei9vX+ALTU74W+o
+Z6m/AxxNjG8yR9VBaKQTBME1DJqEQ/xcHf+Js+gXGM2RX/uJ4+q/Tl18GybTdXnt
+5oTjV+WtKcT0OijnpXuENmmz/V52vaMtmdOQTiMofRhj8VQ7Jp12W5dCsv+u8E7s
+3TmVToMGf+dJQMjFAbJUWmYdPfz56TwKnoG4cPABi+QjVHzIrviQHgCWctRUz2Ej
+vOr7nQKV0ba5cTppCD8PtOFCx4j1P5iop7oc4HFx71hXgVB6XGt0Rg6DA5jDjqhu
+8nYybieDwnPz3BjotJPqdURrBGAgcVeHnfO+oJAjPYok4doh28MCAwEAAaMzMDEw
+DwYDVR0TAQH/BAUwAwEB/zARBgNVHQ4ECgQISqCqWITTXjwwCwYDVR0PBAQDAgEG
+MA0GCSqGSIb3DQEBBQUAA4IBAQBazof5FnIVV0sd2ZvnoiYw7JNn39Yt0jSv9zil
+zqsWuasvfDXLrNAPtEwr/IDva4yRXzZ299uzGxnq9LIR/WFxRL8oszodv7ND6J+/
+3DEIcbCdjdY0RzKQxmUk96BKfARzjzlvF4xytb1LyHr4e4PDKE6cCepnP7JnBBvD
+FNr450kkkdAdavphOe9r5yF1BgfYErQhIHBCcYHaPJo2vqZbDWpsmh+Re/n570K6
+Tk6ezAyNlNzZRZxe7EJQY670XcSxEtzKO6gunRRaBXW37Ndj4ro1tgQIkejanZz2
+ZrUYrAqmVCY0M9IbwdR/GjqOC6oybtv8TyWf2TLHllpwrN9M
+-----END CERTIFICATE-----
+
+# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
+# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
+# Label: "XRamp Global CA Root"
+# Serial: 107108908803651509692980124233745014957
+# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1
+# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6
+# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2
+-----BEGIN CERTIFICATE-----
+MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB
+gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk
+MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY
+UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx
+NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3
+dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy
+dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB
+dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6
+38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP
+KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q
+DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4
+qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa
+JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi
+PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P
+BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs
+jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0
+eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD
+ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR
+vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt
+qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa
+IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy
+i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ
+O+7ETPTsJ3xCwnR8gooJybQDJbw=
+-----END CERTIFICATE-----
+
+# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
+# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
+# Label: "Go Daddy Class 2 CA"
+# Serial: 0
+# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67
+# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4
+# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4
+-----BEGIN CERTIFICATE-----
+MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh
+MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE
+YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3
+MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo
+ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg
+MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN
+ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA
+PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w
+wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi
+EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY
+avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+
+YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE
+sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h
+/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5
+IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD
+ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy
+OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P
+TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ
+HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER
+dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf
+ReYNnyicsbkqWletNw+vHX/bvZ8=
+-----END CERTIFICATE-----
+
+# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
+# Subject: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
+# Label: "Starfield Class 2 CA"
+# Serial: 0
+# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24
+# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a
+# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58
+-----BEGIN CERTIFICATE-----
+MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl
+MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp
+U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw
+NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE
+ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp
+ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3
+DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf
+8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN
++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0
+X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa
+K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA
+1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G
+A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR
+zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0
+YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD
+bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w
+DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3
+L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D
+eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl
+xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp
+VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY
+WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q=
+-----END CERTIFICATE-----
+
+# Issuer: O=Government Root Certification Authority
+# Subject: O=Government Root Certification Authority
+# Label: "Taiwan GRCA"
+# Serial: 42023070807708724159991140556527066870
+# MD5 Fingerprint: 37:85:44:53:32:45:1f:20:f0:f3:95:e1:25:c4:43:4e
+# SHA1 Fingerprint: f4:8b:11:bf:de:ab:be:94:54:20:71:e6:41:de:6b:be:88:2b:40:b9
+# SHA256 Fingerprint: 76:00:29:5e:ef:e8:5b:9e:1f:d6:24:db:76:06:2a:aa:ae:59:81:8a:54:d2:77:4c:d4:c0:b2:c0:11:31:e1:b3
+-----BEGIN CERTIFICATE-----
+MIIFcjCCA1qgAwIBAgIQH51ZWtcvwgZEpYAIaeNe9jANBgkqhkiG9w0BAQUFADA/
+MQswCQYDVQQGEwJUVzEwMC4GA1UECgwnR292ZXJubWVudCBSb290IENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5MB4XDTAyMTIwNTEzMjMzM1oXDTMyMTIwNTEzMjMzM1ow
+PzELMAkGA1UEBhMCVFcxMDAuBgNVBAoMJ0dvdmVybm1lbnQgUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB
+AJoluOzMonWoe/fOW1mKydGGEghU7Jzy50b2iPN86aXfTEc2pBsBHH8eV4qNw8XR
+IePaJD9IK/ufLqGU5ywck9G/GwGHU5nOp/UKIXZ3/6m3xnOUT0b3EEk3+qhZSV1q
+gQdW8or5BtD3cCJNtLdBuTK4sfCxw5w/cP1T3YGq2GN49thTbqGsaoQkclSGxtKy
+yhwOeYHWtXBiCAEuTk8O1RGvqa/lmr/czIdtJuTJV6L7lvnM4T9TjGxMfptTCAts
+F/tnyMKtsc2AtJfcdgEWFelq16TheEfOhtX7MfP6Mb40qij7cEwdScevLJ1tZqa2
+jWR+tSBqnTuBto9AAGdLiYa4zGX+FVPpBMHWXx1E1wovJ5pGfaENda1UhhXcSTvx
+ls4Pm6Dso3pdvtUqdULle96ltqqvKKyskKw4t9VoNSZ63Pc78/1Fm9G7Q3hub/FC
+VGqY8A2tl+lSXunVanLeavcbYBT0peS2cWeqH+riTcFCQP5nRhc4L0c/cZyu5SHK
+YS1tB6iEfC3uUSXxY5Ce/eFXiGvviiNtsea9P63RPZYLhY3Naye7twWb7LuRqQoH
+EgKXTiCQ8P8NHuJBO9NAOueNXdpm5AKwB1KYXA6OM5zCppX7VRluTI6uSw+9wThN
+Xo+EHWbNxWCWtFJaBYmOlXqYwZE8lSOyDvR5tMl8wUohAgMBAAGjajBoMB0GA1Ud
+DgQWBBTMzO/MKWCkO7GStjz6MmKPrCUVOzAMBgNVHRMEBTADAQH/MDkGBGcqBwAE
+MTAvMC0CAQAwCQYFKw4DAhoFADAHBgVnKgMAAAQUA5vwIhP/lSg209yewDL7MTqK
+UWUwDQYJKoZIhvcNAQEFBQADggIBAECASvomyc5eMN1PhnR2WPWus4MzeKR6dBcZ
+TulStbngCnRiqmjKeKBMmo4sIy7VahIkv9Ro04rQ2JyftB8M3jh+Vzj8jeJPXgyf
+qzvS/3WXy6TjZwj/5cAWtUgBfen5Cv8b5Wppv3ghqMKnI6mGq3ZW6A4M9hPdKmaK
+ZEk9GhiHkASfQlK3T8v+R0F2Ne//AHY2RTKbxkaFXeIksB7jSJaYV0eUVXoPQbFE
+JPPB/hprv4j9wabak2BegUqZIJxIZhm1AHlUD7gsL0u8qV1bYH+Mh6XgUmMqvtg7
+hUAV/h62ZT/FS9p+tXo1KaMuephgIqP0fSdOLeq0dDzpD6QzDxARvBMB1uUO07+1
+EqLhRSPAzAhuYbeJq4PjJB7mXQfnHyA+z2fI56wwbSdLaG5LKlwCCDTb+HbkZ6Mm
+nD+iMsJKxYEYMRBWqoTvLQr/uB930r+lWKBi5NdLkXWNiYCYfm3LU05er/ayl4WX
+udpVBrkk7tfGOB5jGxI7leFYrPLfhNVfmS8NVVvmONsuP3LpSIXLuykTjx44Vbnz
+ssQwmSNOXfJIoRIM3BKQCZBUkQM8R+XVyWXgt0t97EfTsws+rZ7QdAAO671RrcDe
+LMDDav7v3Aun+kbfYNucpllQdSNpc5Oy+fwC00fmcc4QAu4njIT/rEUNE1yDMuAl
+pYYsfPQS
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root CA"
+# Serial: 17154717934120587862167794914071425081
+# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72
+# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43
+# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c
+-----BEGIN CERTIFICATE-----
+MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
+b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG
+EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
+cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c
+JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP
+mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+
+wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4
+VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/
+AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB
+AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW
+BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun
+pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC
+dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf
+fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm
+NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx
+H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe
++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root CA"
+# Serial: 10944719598952040374951832963794454346
+# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e
+# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36
+# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61
+-----BEGIN CERTIFICATE-----
+MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD
+QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT
+MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB
+CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97
+nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt
+43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P
+T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4
+gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO
+BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR
+TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw
+DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr
+hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg
+06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF
+PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls
+YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk
+CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert High Assurance EV Root CA"
+# Serial: 3553400076410547919724730734378100087
+# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a
+# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25
+# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j
+ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL
+MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3
+LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug
+RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm
++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW
+PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM
+xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB
+Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3
+hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg
+EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA
+FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec
+nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z
+eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF
+hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2
+Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe
+vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep
++OkuE6N36B9K
+-----END CERTIFICATE-----
+
+# Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co.
+# Subject: CN=DST Root CA X3 O=Digital Signature Trust Co.
+# Label: "DST Root CA X3"
+# Serial: 91299735575339953335919266965803778155
+# MD5 Fingerprint: 41:03:52:dc:0f:f7:50:1b:16:f0:02:8e:ba:6f:45:c5
+# SHA1 Fingerprint: da:c9:02:4f:54:d8:f6:df:94:93:5f:b1:73:26:38:ca:6a:d7:7c:13
+# SHA256 Fingerprint: 06:87:26:03:31:a7:24:03:d9:09:f1:05:e6:9b:cf:0d:32:e1:bd:24:93:ff:c6:d9:20:6d:11:bc:d6:77:07:39
+-----BEGIN CERTIFICATE-----
+MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/
+MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT
+DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow
+PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD
+Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
+AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O
+rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq
+OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b
+xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw
+7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD
+aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV
+HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG
+SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69
+ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr
+AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz
+R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5
+JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo
+Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ
+-----END CERTIFICATE-----
+
+# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG
+# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG
+# Label: "SwissSign Gold CA - G2"
+# Serial: 13492815561806991280
+# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93
+# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61
+# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95
+-----BEGIN CERTIFICATE-----
+MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV
+BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln
+biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF
+MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT
+d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC
+CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8
+76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+
+bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c
+6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE
+emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd
+MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt
+MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y
+MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y
+FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi
+aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM
+gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB
+qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7
+lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn
+8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov
+L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6
+45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO
+UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5
+O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC
+bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv
+GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a
+77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC
+hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3
+92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp
+Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w
+ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt
+Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ
+-----END CERTIFICATE-----
+
+# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG
+# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG
+# Label: "SwissSign Silver CA - G2"
+# Serial: 5700383053117599563
+# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13
+# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb
+# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5
+-----BEGIN CERTIFICATE-----
+MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE
+BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu
+IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow
+RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY
+U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A
+MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv
+Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br
+YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF
+nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH
+6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt
+eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/
+c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ
+MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH
+HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf
+jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6
+5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB
+rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU
+F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c
+wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0
+cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB
+AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp
+WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9
+xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ
+2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ
+IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8
+aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X
+em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR
+dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/
+OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+
+hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy
+tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.
+# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.
+# Label: "GeoTrust Primary Certification Authority"
+# Serial: 32798226551256963324313806436981982369
+# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf
+# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96
+# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c
+-----BEGIN CERTIFICATE-----
+MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY
+MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo
+R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx
+MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK
+Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9
+AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA
+ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0
+7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W
+kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI
+mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ
+KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1
+6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl
+4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K
+oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj
+UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU
+AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA"
+# Serial: 69529181992039203566298953787712940909
+# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12
+# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81
+# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f
+-----BEGIN CERTIFICATE-----
+MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB
+qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
+Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
+MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV
+BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw
+NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j
+LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG
+A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl
+IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs
+W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta
+3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk
+6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6
+Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J
+NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA
+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP
+r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU
+DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz
+YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX
+xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2
+/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/
+LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7
+jVaMaA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Class 3 Public Primary Certification Authority - G5"
+# Serial: 33037644167568058970164719475676101450
+# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c
+# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5
+# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df
+-----BEGIN CERTIFICATE-----
+MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB
+yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
+ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp
+U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW
+ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0
+aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL
+MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
+ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln
+biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
+U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1
+nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex
+t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz
+SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG
+BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+
+rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/
+NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E
+BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH
+BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy
+aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv
+MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE
+p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y
+5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK
+WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ
+4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N
+hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureTrust CA O=SecureTrust Corporation
+# Subject: CN=SecureTrust CA O=SecureTrust Corporation
+# Label: "SecureTrust CA"
+# Serial: 17199774589125277788362757014266862032
+# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1
+# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11
+# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73
+-----BEGIN CERTIFICATE-----
+MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI
+MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x
+FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz
+MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv
+cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN
+AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz
+Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO
+0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao
+wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj
+7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS
+8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT
+BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB
+/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg
+JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC
+NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3
+6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/
+3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm
+D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS
+CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR
+3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Secure Global CA O=SecureTrust Corporation
+# Subject: CN=Secure Global CA O=SecureTrust Corporation
+# Label: "Secure Global CA"
+# Serial: 9751836167731051554232119481456978597
+# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de
+# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b
+# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69
+-----BEGIN CERTIFICATE-----
+MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK
+MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x
+GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx
+MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg
+Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ
+iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa
+/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ
+jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI
+HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7
+sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w
+gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw
+KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG
+AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L
+URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO
+H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm
+I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY
+iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc
+f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO Certification Authority O=COMODO CA Limited
+# Label: "COMODO Certification Authority"
+# Serial: 104350513648249232941998508985834464573
+# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75
+# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b
+# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66
+-----BEGIN CERTIFICATE-----
+MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB
+gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV
+BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw
+MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl
+YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P
+RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0
+aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3
+UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI
+2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8
+Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp
++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+
+DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O
+nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW
+/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g
+PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u
+QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY
+SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv
+IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/
+RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4
+zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd
+BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB
+ZQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
+# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
+# Label: "Network Solutions Certificate Authority"
+# Serial: 116697915152937497490437556386812487904
+# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e
+# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce
+# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c
+-----BEGIN CERTIFICATE-----
+MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi
+MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu
+MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp
+dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV
+UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO
+ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz
+c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP
+OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl
+mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF
+BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4
+qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw
+gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB
+BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu
+bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp
+dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8
+6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/
+h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH
+/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv
+wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN
+pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited
+# Label: "COMODO ECC Certification Authority"
+# Serial: 41578283867086692638256921589707938090
+# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23
+# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11
+# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7
+-----BEGIN CERTIFICATE-----
+MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL
+MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
+BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT
+IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw
+MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy
+ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N
+T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv
+biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR
+FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J
+cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW
+BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
+BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm
+fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv
+GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed
+# Subject: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed
+# Label: "OISTE WISeKey Global Root GA CA"
+# Serial: 86718877871133159090080555911823548314
+# MD5 Fingerprint: bc:6c:51:33:a7:e9:d3:66:63:54:15:72:1b:21:92:93
+# SHA1 Fingerprint: 59:22:a1:e1:5a:ea:16:35:21:f8:98:39:6a:46:46:b0:44:1b:0f:a9
+# SHA256 Fingerprint: 41:c9:23:86:6a:b4:ca:d6:b7:ad:57:80:81:58:2e:02:07:97:a6:cb:df:4f:ff:78:ce:83:96:b3:89:37:d7:f5
+-----BEGIN CERTIFICATE-----
+MIID8TCCAtmgAwIBAgIQQT1yx/RrH4FDffHSKFTfmjANBgkqhkiG9w0BAQUFADCB
+ijELMAkGA1UEBhMCQ0gxEDAOBgNVBAoTB1dJU2VLZXkxGzAZBgNVBAsTEkNvcHly
+aWdodCAoYykgMjAwNTEiMCAGA1UECxMZT0lTVEUgRm91bmRhdGlvbiBFbmRvcnNl
+ZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwgUm9vdCBHQSBDQTAeFw0w
+NTEyMTExNjAzNDRaFw0zNzEyMTExNjA5NTFaMIGKMQswCQYDVQQGEwJDSDEQMA4G
+A1UEChMHV0lTZUtleTEbMBkGA1UECxMSQ29weXJpZ2h0IChjKSAyMDA1MSIwIAYD
+VQQLExlPSVNURSBGb3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBX
+SVNlS2V5IEdsb2JhbCBSb290IEdBIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
+MIIBCgKCAQEAy0+zAJs9Nt350UlqaxBJH+zYK7LG+DKBKUOVTJoZIyEVRd7jyBxR
+VVuuk+g3/ytr6dTqvirdqFEr12bDYVxgAsj1znJ7O7jyTmUIms2kahnBAbtzptf2
+w93NvKSLtZlhuAGio9RN1AU9ka34tAhxZK9w8RxrfvbDd50kc3vkDIzh2TbhmYsF
+mQvtRTEJysIA2/dyoJaqlYfQjse2YXMNdmaM3Bu0Y6Kff5MTMPGhJ9vZ/yxViJGg
+4E8HsChWjBgbl0SOid3gF27nKu+POQoxhILYQBRJLnpB5Kf+42TMwVlxSywhp1t9
+4B3RLoGbw9ho972WG6xwsRYUC9tguSYBBQIDAQABo1EwTzALBgNVHQ8EBAMCAYYw
+DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUswN+rja8sHnR3JQmthG+IbJphpQw
+EAYJKwYBBAGCNxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBAEuh/wuHbrP5wUOx
+SPMowB0uyQlB+pQAHKSkq0lPjz0e701vvbyk9vImMMkQyh2I+3QZH4VFvbBsUfk2
+ftv1TDI6QU9bR8/oCy22xBmddMVHxjtqD6wU2zz0c5ypBd8A3HR4+vg1YFkCExh8
+vPtNsCBtQ7tgMHpnM1zFmdH4LTlSc/uMqpclXHLZCB6rTjzjgTGfA6b7wP4piFXa
+hNVQA7bihKOmNqoROgHhGEvWRGizPflTdISzRpFGlgC3gCy24eMQ4tui5yiPAZZi
+Fj4A4xylNoEYokxSdsARo27mHbrjWr42U8U+dY+GaSlYU7Wcu2+fXMUY7N0v4ZjJ
+/L7fCg0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certigna O=Dhimyotis
+# Subject: CN=Certigna O=Dhimyotis
+# Label: "Certigna"
+# Serial: 18364802974209362175
+# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff
+# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97
+# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d
+-----BEGIN CERTIFICATE-----
+MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV
+BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X
+DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ
+BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3
+DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4
+QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny
+gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw
+zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q
+130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2
+JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw
+DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw
+ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT
+AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj
+AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG
+9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h
+bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc
+fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu
+HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w
+t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw
+WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc
+# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc
+# Label: "Cybertrust Global Root"
+# Serial: 4835703278459682877484360
+# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1
+# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6
+# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3
+-----BEGIN CERTIFICATE-----
+MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG
+A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh
+bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE
+ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS
+b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5
+7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS
+J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y
+HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP
+t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz
+FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY
+XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/
+MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw
+hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js
+MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA
+A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj
+Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx
+XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o
+omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc
+A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW
+WL1WMRJOEcgh4LMRkWXbtKaIOM5V
+-----END CERTIFICATE-----
+
+# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority
+# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority
+# Label: "ePKI Root Certification Authority"
+# Serial: 28956088682735189655030529057352760477
+# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3
+# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0
+# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5
+-----BEGIN CERTIFICATE-----
+MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe
+MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0
+ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
+Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw
+IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL
+SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF
+AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH
+SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh
+ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X
+DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1
+TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ
+fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA
+sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU
+WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS
+nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH
+dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip
+NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC
+AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF
+MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH
+ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB
+uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl
+PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP
+JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/
+gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2
+j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6
+5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB
+o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS
+/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z
+Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE
+W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D
+hNQ+IIX3Sj0rnP0qCglN6oH4EZw=
+-----END CERTIFICATE-----
+
+# Issuer: O=certSIGN OU=certSIGN ROOT CA
+# Subject: O=certSIGN OU=certSIGN ROOT CA
+# Label: "certSIGN ROOT CA"
+# Serial: 35210227249154
+# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17
+# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b
+# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb
+-----BEGIN CERTIFICATE-----
+MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT
+AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD
+QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP
+MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC
+ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do
+0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ
+UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d
+RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ
+OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv
+JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C
+AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O
+BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ
+LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY
+MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ
+44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I
+Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw
+i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN
+9u6wWk5JRFRYX0KD
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
+# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
+# Label: "GeoTrust Primary Certification Authority - G3"
+# Serial: 28809105769928564313984085209975885599
+# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05
+# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd
+# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4
+-----BEGIN CERTIFICATE-----
+MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB
+mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT
+MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s
+eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv
+cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ
+BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg
+MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0
+BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
+LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz
++uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm
+hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn
+5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W
+JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL
+DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC
+huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw
+HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB
+AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB
+zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN
+kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD
+AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH
+SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G
+spki4cErx5z481+oghLrGREt
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA - G2"
+# Serial: 71758320672825410020661621085256472406
+# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f
+# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12
+# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57
+-----BEGIN CERTIFICATE-----
+MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL
+MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp
+IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi
+BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw
+MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh
+d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig
+YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v
+dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/
+BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6
+papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E
+BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K
+DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3
+KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox
+XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA - G3"
+# Serial: 127614157056681299805556476275995414779
+# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31
+# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2
+# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c
+-----BEGIN CERTIFICATE-----
+MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB
+rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
+Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
+MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV
+BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa
+Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl
+LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u
+MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl
+ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm
+gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8
+YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf
+b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9
+9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S
+zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk
+OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV
+HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA
+2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW
+oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu
+t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c
+KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM
+m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu
+MdRAGmI0Nj81Aa6sY6A=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
+# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
+# Label: "GeoTrust Primary Certification Authority - G2"
+# Serial: 80682863203381065782177908751794619243
+# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a
+# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0
+# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66
+-----BEGIN CERTIFICATE-----
+MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL
+MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj
+KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2
+MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0
+eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV
+BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw
+NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV
+BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH
+MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL
+So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal
+tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
+BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG
+CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT
+qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz
+rD6ogRLQy7rQkgu2npaqBA+K
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Universal Root Certification Authority"
+# Serial: 85209574734084581917763752644031726877
+# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19
+# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54
+# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c
+-----BEGIN CERTIFICATE-----
+MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB
+vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
+ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp
+U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W
+ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
+Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX
+MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0
+IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y
+IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh
+bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF
+AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF
+9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH
+H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H
+LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN
+/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT
+rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud
+EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw
+WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs
+exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud
+DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4
+sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+
+seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz
+4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+
+BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR
+lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3
+7M2CYfE45k+XmCpajQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Class 3 Public Primary Certification Authority - G4"
+# Serial: 63143484348153506665311985501458640051
+# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41
+# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a
+# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79
+-----BEGIN CERTIFICATE-----
+MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL
+MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
+ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln
+biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
+U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG
+A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp
+U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg
+SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln
+biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5
+IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm
+GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve
+fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw
+AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ
+aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj
+aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW
+kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC
+4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga
+FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services)
+# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services)
+# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny"
+# Serial: 80544274841616
+# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88
+# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91
+# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98
+-----BEGIN CERTIFICATE-----
+MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG
+EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3
+MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl
+cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR
+dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB
+pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM
+b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm
+aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz
+IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
+MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT
+lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz
+AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5
+VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG
+ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2
+BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG
+AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M
+U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh
+bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C
++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC
+bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F
+uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2
+XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post
+# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post
+# Label: "Hongkong Post Root CA 1"
+# Serial: 1000
+# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca
+# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58
+# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2
+-----BEGIN CERTIFICATE-----
+MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx
+FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg
+Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG
+A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr
+b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ
+jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn
+PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh
+ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9
+nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h
+q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED
+MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC
+mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3
+7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB
+oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs
+EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO
+fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi
+AmvZWg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
+# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
+# Label: "SecureSign RootCA11"
+# Serial: 1
+# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26
+# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3
+# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12
+-----BEGIN CERTIFICATE-----
+MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr
+MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG
+A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0
+MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp
+Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD
+QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz
+i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8
+h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV
+MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9
+UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni
+8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC
+h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD
+VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB
+AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm
+KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ
+X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr
+QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5
+pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN
+QSdJQO7e5iNEOdyhIta6A/I=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
+# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
+# Label: "Microsec e-Szigno Root CA 2009"
+# Serial: 14014712776195784473
+# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1
+# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e
+# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78
+-----BEGIN CERTIFICATE-----
+MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD
+VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0
+ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G
+CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y
+OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx
+FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp
+Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o
+dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP
+kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc
+cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U
+fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7
+N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC
+xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1
++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G
+A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM
+Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG
+SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h
+mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk
+ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775
+tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c
+2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t
+HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
+# Label: "GlobalSign Root CA - R3"
+# Serial: 4835703278459759426209954
+# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28
+# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad
+# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b
+-----BEGIN CERTIFICATE-----
+MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G
+A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp
+Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4
+MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG
+A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8
+RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT
+gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm
+KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd
+QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ
+XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw
+DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o
+LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU
+RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp
+jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK
+6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX
+mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs
+Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH
+WD9f
+-----END CERTIFICATE-----
+
+# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
+# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
+# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068"
+# Serial: 6047274297262753887
+# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3
+# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa
+# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef
+-----BEGIN CERTIFICATE-----
+MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE
+BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h
+cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy
+MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg
+Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9
+thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM
+cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG
+L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i
+NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h
+X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b
+m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy
+Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja
+EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T
+KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF
+6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh
+OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD
+VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD
+VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp
+cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv
+ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl
+AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF
+661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9
+am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1
+ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481
+PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS
+3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k
+SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF
+3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM
+ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g
+StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz
+Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB
+jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V
+-----END CERTIFICATE-----
+
+# Issuer: CN=Izenpe.com O=IZENPE S.A.
+# Subject: CN=Izenpe.com O=IZENPE S.A.
+# Label: "Izenpe.com"
+# Serial: 917563065490389241595536686991402621
+# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73
+# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19
+# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f
+-----BEGIN CERTIFICATE-----
+MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4
+MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6
+ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD
+VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j
+b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq
+scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO
+xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H
+LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX
+uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD
+yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+
+JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q
+rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN
+BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L
+hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB
+QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+
+HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu
+Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg
+QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB
+BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx
+MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
+AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA
+A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb
+laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56
+awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo
+JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw
+LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT
+VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk
+LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb
+UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/
+QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+
+naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls
+QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A.
+# Subject: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A.
+# Label: "Chambers of Commerce Root - 2008"
+# Serial: 11806822484801597146
+# MD5 Fingerprint: 5e:80:9e:84:5a:0e:65:0b:17:02:f3:55:18:2a:3e:d7
+# SHA1 Fingerprint: 78:6a:74:ac:76:ab:14:7f:9c:6a:30:50:ba:9e:a8:7e:fe:9a:ce:3c
+# SHA256 Fingerprint: 06:3e:4a:fa:c4:91:df:d3:32:f3:08:9b:85:42:e9:46:17:d8:93:d7:fe:94:4e:10:a7:93:7e:e2:9d:96:93:c0
+-----BEGIN CERTIFICATE-----
+MIIHTzCCBTegAwIBAgIJAKPaQn6ksa7aMA0GCSqGSIb3DQEBBQUAMIGuMQswCQYD
+VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0
+IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3
+MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xKTAnBgNVBAMTIENoYW1iZXJz
+IG9mIENvbW1lcmNlIFJvb3QgLSAyMDA4MB4XDTA4MDgwMTEyMjk1MFoXDTM4MDcz
+MTEyMjk1MFowga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpNYWRyaWQgKHNlZSBj
+dXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29tL2FkZHJlc3MpMRIw
+EAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVyZmlybWEgUy5BLjEp
+MCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAtIDIwMDgwggIiMA0G
+CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCvAMtwNyuAWko6bHiUfaN/Gh/2NdW9
+28sNRHI+JrKQUrpjOyhYb6WzbZSm891kDFX29ufyIiKAXuFixrYp4YFs8r/lfTJq
+VKAyGVn+H4vXPWCGhSRv4xGzdz4gljUha7MI2XAuZPeEklPWDrCQiorjh40G072Q
+DuKZoRuGDtqaCrsLYVAGUvGef3bsyw/QHg3PmTA9HMRFEFis1tPo1+XqxQEHd9ZR
+5gN/ikilTWh1uem8nk4ZcfUyS5xtYBkL+8ydddy/Js2Pk3g5eXNeJQ7KXOt3EgfL
+ZEFHcpOrUMPrCXZkNNI5t3YRCQ12RcSprj1qr7V9ZS+UWBDsXHyvfuK2GNnQm05a
+Sd+pZgvMPMZ4fKecHePOjlO+Bd5gD2vlGts/4+EhySnB8esHnFIbAURRPHsl18Tl
+UlRdJQfKFiC4reRB7noI/plvg6aRArBsNlVq5331lubKgdaX8ZSD6e2wsWsSaR6s
++12pxZjptFtYer49okQ6Y1nUCyXeG0+95QGezdIp1Z8XGQpvvwyQ0wlf2eOKNcx5
+Wk0ZN5K3xMGtr/R5JJqyAQuxr1yW84Ay+1w9mPGgP0revq+ULtlVmhduYJ1jbLhj
+ya6BXBg14JC7vjxPNyK5fuvPnnchpj04gftI2jE9K+OJ9dC1vX7gUMQSibMjmhAx
+hduub+84Mxh2EQIDAQABo4IBbDCCAWgwEgYDVR0TAQH/BAgwBgEB/wIBDDAdBgNV
+HQ4EFgQU+SSsD7K1+HnA+mCIG8TZTQKeFxkwgeMGA1UdIwSB2zCB2IAU+SSsD7K1
++HnA+mCIG8TZTQKeFxmhgbSkgbEwga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpN
+YWRyaWQgKHNlZSBjdXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29t
+L2FkZHJlc3MpMRIwEAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVy
+ZmlybWEgUy5BLjEpMCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAt
+IDIwMDiCCQCj2kJ+pLGu2jAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRV
+HSAAMCowKAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20w
+DQYJKoZIhvcNAQEFBQADggIBAJASryI1wqM58C7e6bXpeHxIvj99RZJe6dqxGfwW
+PJ+0W2aeaufDuV2I6A+tzyMP3iU6XsxPpcG1Lawk0lgH3qLPaYRgM+gQDROpI9CF
+5Y57pp49chNyM/WqfcZjHwj0/gF/JM8rLFQJ3uIrbZLGOU8W6jx+ekbURWpGqOt1
+glanq6B8aBMz9p0w8G8nOSQjKpD9kCk18pPfNKXG9/jvjA9iSnyu0/VU+I22mlaH
+FoI6M6taIgj3grrqLuBHmrS1RaMFO9ncLkVAO+rcf+g769HsJtg1pDDFOqxXnrN2
+pSB7+R5KBWIBpih1YJeSDW4+TTdDDZIVnBgizVGZoCkaPF+KMjNbMMeJL0eYD6MD
+xvbxrN8y8NmBGuScvfaAFPDRLLmF9dijscilIeUcE5fuDr3fKanvNFNb0+RqE4QG
+tjICxFKuItLcsiFCGtpA8CnJ7AoMXOLQusxI0zcKzBIKinmwPQN/aUv0NCB9szTq
+jktk9T79syNnFQ0EuPAtwQlRPLJsFfClI9eDdOTlLsn+mCdCxqvGnrDQWzilm1De
+fhiYtUU79nm06PcaewaD+9CL2rvHvRirCG88gGtAPxkZumWK5r7VXNM21+9AUiRg
+OGcEMeyP84LG3rlV8zsxkVrctQgVrXYlCg17LofiDKYGvCYQbTed7N14jHyAxfDZ
+d0jQ
+-----END CERTIFICATE-----
+
+# Issuer: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A.
+# Subject: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A.
+# Label: "Global Chambersign Root - 2008"
+# Serial: 14541511773111788494
+# MD5 Fingerprint: 9e:80:ff:78:01:0c:2e:c1:36:bd:fe:96:90:6e:08:f3
+# SHA1 Fingerprint: 4a:bd:ee:ec:95:0d:35:9c:89:ae:c7:52:a1:2c:5b:29:f6:d6:aa:0c
+# SHA256 Fingerprint: 13:63:35:43:93:34:a7:69:80:16:a0:d3:24:de:72:28:4e:07:9d:7b:52:20:bb:8f:bd:74:78:16:ee:be:ba:ca
+-----BEGIN CERTIFICATE-----
+MIIHSTCCBTGgAwIBAgIJAMnN0+nVfSPOMA0GCSqGSIb3DQEBBQUAMIGsMQswCQYD
+VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0
+IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3
+MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAlBgNVBAMTHkdsb2JhbCBD
+aGFtYmVyc2lnbiBSb290IC0gMjAwODAeFw0wODA4MDExMjMxNDBaFw0zODA3MzEx
+MjMxNDBaMIGsMQswCQYDVQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3Vy
+cmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAG
+A1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAl
+BgNVBAMTHkdsb2JhbCBDaGFtYmVyc2lnbiBSb290IC0gMjAwODCCAiIwDQYJKoZI
+hvcNAQEBBQADggIPADCCAgoCggIBAMDfVtPkOpt2RbQT2//BthmLN0EYlVJH6xed
+KYiONWwGMi5HYvNJBL99RDaxccy9Wglz1dmFRP+RVyXfXjaOcNFccUMd2drvXNL7
+G706tcuto8xEpw2uIRU/uXpbknXYpBI4iRmKt4DS4jJvVpyR1ogQC7N0ZJJ0YPP2
+zxhPYLIj0Mc7zmFLmY/CDNBAspjcDahOo7kKrmCgrUVSY7pmvWjg+b4aqIG7HkF4
+ddPB/gBVsIdU6CeQNR1MM62X/JcumIS/LMmjv9GYERTtY/jKmIhYF5ntRQOXfjyG
+HoiMvvKRhI9lNNgATH23MRdaKXoKGCQwoze1eqkBfSbW+Q6OWfH9GzO1KTsXO0G2
+Id3UwD2ln58fQ1DJu7xsepeY7s2MH/ucUa6LcL0nn3HAa6x9kGbo1106DbDVwo3V
+yJ2dwW3Q0L9R5OP4wzg2rtandeavhENdk5IMagfeOx2YItaswTXbo6Al/3K1dh3e
+beksZixShNBFks4c5eUzHdwHU1SjqoI7mjcv3N2gZOnm3b2u/GSFHTynyQbehP9r
+6GsaPMWis0L7iwk+XwhSx2LE1AVxv8Rk5Pihg+g+EpuoHtQ2TS9x9o0o9oOpE9Jh
+wZG7SMA0j0GMS0zbaRL/UJScIINZc+18ofLx/d33SdNDWKBWY8o9PeU1VlnpDsog
+zCtLkykPAgMBAAGjggFqMIIBZjASBgNVHRMBAf8ECDAGAQH/AgEMMB0GA1UdDgQW
+BBS5CcqcHtvTbDprru1U8VuTBjUuXjCB4QYDVR0jBIHZMIHWgBS5CcqcHtvTbDpr
+ru1U8VuTBjUuXqGBsqSBrzCBrDELMAkGA1UEBhMCRVUxQzBBBgNVBAcTOk1hZHJp
+ZCAoc2VlIGN1cnJlbnQgYWRkcmVzcyBhdCB3d3cuY2FtZXJmaXJtYS5jb20vYWRk
+cmVzcykxEjAQBgNVBAUTCUE4Mjc0MzI4NzEbMBkGA1UEChMSQUMgQ2FtZXJmaXJt
+YSBTLkEuMScwJQYDVQQDEx5HbG9iYWwgQ2hhbWJlcnNpZ24gUm9vdCAtIDIwMDiC
+CQDJzdPp1X0jzjAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRVHSAAMCow
+KAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20wDQYJKoZI
+hvcNAQEFBQADggIBAICIf3DekijZBZRG/5BXqfEv3xoNa/p8DhxJJHkn2EaqbylZ
+UohwEurdPfWbU1Rv4WCiqAm57OtZfMY18dwY6fFn5a+6ReAJ3spED8IXDneRRXoz
+X1+WLGiLwUePmJs9wOzL9dWCkoQ10b42OFZyMVtHLaoXpGNR6woBrX/sdZ7LoR/x
+fxKxueRkf2fWIyr0uDldmOghp+G9PUIadJpwr2hsUF1Jz//7Dl3mLEfXgTpZALVz
+a2Mg9jFFCDkO9HB+QHBaP9BrQql0PSgvAm11cpUJjUhjxsYjV5KTXjXBjfkK9yyd
+Yhz2rXzdpjEetrHHfoUm+qRqtdpjMNHvkzeyZi99Bffnt0uYlDXA2TopwZ2yUDMd
+SqlapskD7+3056huirRXhOukP9DuqqqHW2Pok+JrqNS4cnhrG+055F3Lm6qH1U9O
+AP7Zap88MQ8oAgF9mOinsKJknnn4SPIVqczmyETrP3iZ8ntxPjzxmKfFGBI/5rso
+M0LpRQp8bfKGeS/Fghl9CYl8slR2iK7ewfPM4W7bMdaTrpmg7yVqc5iJWzouE4ge
+v8CSlDQb4ye3ix5vQv/n6TebUB0tovkC7stYWDpxvGjjqsGvHCgfotwjZT+B6q6Z
+09gwzxMNTxXJhLynSC34MCN32EZLeW32jO06f2ARePTpm67VVMB0gNELQp/B
+-----END CERTIFICATE-----
+
+# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
+# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
+# Label: "Go Daddy Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01
+# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b
+# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT
+EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp
+ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz
+NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH
+EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE
+AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw
+DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD
+E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH
+/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy
+DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh
+GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR
+tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA
+AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE
+FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX
+WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu
+9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr
+gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo
+2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO
+LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI
+4uJEvlz36hz1
+-----END CERTIFICATE-----
+
+# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Label: "Starfield Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96
+# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e
+# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5
+-----BEGIN CERTIFICATE-----
+MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
+HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs
+ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw
+MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6
+b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj
+aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp
+Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
+ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg
+nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1
+HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N
+Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN
+dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0
+HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
+BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G
+CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU
+sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3
+4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg
+8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K
+pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1
+mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0
+-----END CERTIFICATE-----
+
+# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Label: "Starfield Services Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2
+# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f
+# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5
+-----BEGIN CERTIFICATE-----
+MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
+HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs
+ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5
+MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD
+VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy
+ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy
+dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p
+OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2
+8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K
+Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe
+hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk
+6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw
+DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q
+AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI
+bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB
+ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z
+qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd
+iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn
+0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN
+sSi6
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Commercial O=AffirmTrust
+# Subject: CN=AffirmTrust Commercial O=AffirmTrust
+# Label: "AffirmTrust Commercial"
+# Serial: 8608355977964138876
+# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7
+# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7
+# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7
+-----BEGIN CERTIFICATE-----
+MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
+dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL
+MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
+cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP
+Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr
+ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL
+MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1
+yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr
+VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/
+nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
+KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG
+XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj
+vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt
+Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g
+N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC
+nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Networking O=AffirmTrust
+# Subject: CN=AffirmTrust Networking O=AffirmTrust
+# Label: "AffirmTrust Networking"
+# Serial: 8957382827206547757
+# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f
+# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f
+# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b
+-----BEGIN CERTIFICATE-----
+MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
+dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL
+MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
+cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y
+YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua
+kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL
+QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp
+6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG
+yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i
+QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
+KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO
+tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu
+QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ
+Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u
+olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48
+x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Premium O=AffirmTrust
+# Subject: CN=AffirmTrust Premium O=AffirmTrust
+# Label: "AffirmTrust Premium"
+# Serial: 7893706540734352110
+# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57
+# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27
+# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a
+-----BEGIN CERTIFICATE-----
+MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz
+dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG
+A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U
+cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf
+qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ
+JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ
++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS
+s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5
+HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7
+70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG
+V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S
+qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S
+5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia
+C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX
+OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE
+FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
+BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2
+KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg
+Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B
+8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ
+MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc
+0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF
+u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH
+YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8
+GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO
+RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e
+KeC2uAloGRwYQw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust
+# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust
+# Label: "AffirmTrust Premium ECC"
+# Serial: 8401224907861490260
+# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d
+# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb
+# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23
+-----BEGIN CERTIFICATE-----
+MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC
+VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ
+cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ
+BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt
+VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D
+0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9
+ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G
+A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs
+aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I
+flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Label: "Certum Trusted Network CA"
+# Serial: 279744
+# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78
+# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e
+# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e
+-----BEGIN CERTIFICATE-----
+MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM
+MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D
+ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU
+cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3
+WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg
+Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw
+IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B
+AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH
+UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM
+TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU
+BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM
+kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x
+AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV
+HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y
+sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL
+I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8
+J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY
+VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI
+03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
+# Label: "TWCA Root Certification Authority"
+# Serial: 1
+# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79
+# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48
+# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44
+-----BEGIN CERTIFICATE-----
+MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES
+MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU
+V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz
+WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO
+LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm
+aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
+AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE
+AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH
+K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX
+RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z
+rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx
+3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq
+hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC
+MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls
+XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D
+lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn
+aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ
+YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw==
+-----END CERTIFICATE-----
+
+# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2
+# Subject: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2
+# Label: "Security Communication RootCA2"
+# Serial: 0
+# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43
+# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74
+# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl
+MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe
+U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX
+DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy
+dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj
+YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV
+OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr
+zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM
+VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ
+hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO
+ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw
+awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs
+OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3
+DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF
+coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc
+okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8
+t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy
+1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/
+SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03
+-----END CERTIFICATE-----
+
+# Issuer: CN=EC-ACC O=Agencia Catalana de Certificacio (NIF Q-0801176-I) OU=Serveis Publics de Certificacio/Vegeu https://www.catcert.net/verarrel (c)03/Jerarquia Entitats de Certificacio Catalanes
+# Subject: CN=EC-ACC O=Agencia Catalana de Certificacio (NIF Q-0801176-I) OU=Serveis Publics de Certificacio/Vegeu https://www.catcert.net/verarrel (c)03/Jerarquia Entitats de Certificacio Catalanes
+# Label: "EC-ACC"
+# Serial: -23701579247955709139626555126524820479
+# MD5 Fingerprint: eb:f5:9d:29:0d:61:f9:42:1f:7c:c2:ba:6d:e3:15:09
+# SHA1 Fingerprint: 28:90:3a:63:5b:52:80:fa:e6:77:4c:0b:6d:a7:d6:ba:a6:4a:f2:e8
+# SHA256 Fingerprint: 88:49:7f:01:60:2f:31:54:24:6a:e2:8c:4d:5a:ef:10:f1:d8:7e:bb:76:62:6f:4a:e0:b7:f9:5b:a7:96:87:99
+-----BEGIN CERTIFICATE-----
+MIIFVjCCBD6gAwIBAgIQ7is969Qh3hSoYqwE893EATANBgkqhkiG9w0BAQUFADCB
+8zELMAkGA1UEBhMCRVMxOzA5BgNVBAoTMkFnZW5jaWEgQ2F0YWxhbmEgZGUgQ2Vy
+dGlmaWNhY2lvIChOSUYgUS0wODAxMTc2LUkpMSgwJgYDVQQLEx9TZXJ2ZWlzIFB1
+YmxpY3MgZGUgQ2VydGlmaWNhY2lvMTUwMwYDVQQLEyxWZWdldSBodHRwczovL3d3
+dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbCAoYykwMzE1MDMGA1UECxMsSmVyYXJxdWlh
+IEVudGl0YXRzIGRlIENlcnRpZmljYWNpbyBDYXRhbGFuZXMxDzANBgNVBAMTBkVD
+LUFDQzAeFw0wMzAxMDcyMzAwMDBaFw0zMTAxMDcyMjU5NTlaMIHzMQswCQYDVQQG
+EwJFUzE7MDkGA1UEChMyQWdlbmNpYSBDYXRhbGFuYSBkZSBDZXJ0aWZpY2FjaW8g
+KE5JRiBRLTA4MDExNzYtSSkxKDAmBgNVBAsTH1NlcnZlaXMgUHVibGljcyBkZSBD
+ZXJ0aWZpY2FjaW8xNTAzBgNVBAsTLFZlZ2V1IGh0dHBzOi8vd3d3LmNhdGNlcnQu
+bmV0L3ZlcmFycmVsIChjKTAzMTUwMwYDVQQLEyxKZXJhcnF1aWEgRW50aXRhdHMg
+ZGUgQ2VydGlmaWNhY2lvIENhdGFsYW5lczEPMA0GA1UEAxMGRUMtQUNDMIIBIjAN
+BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsyLHT+KXQpWIR4NA9h0X84NzJB5R
+85iKw5K4/0CQBXCHYMkAqbWUZRkiFRfCQ2xmRJoNBD45b6VLeqpjt4pEndljkYRm
+4CgPukLjbo73FCeTae6RDqNfDrHrZqJyTxIThmV6PttPB/SnCWDaOkKZx7J/sxaV
+HMf5NLWUhdWZXqBIoH7nF2W4onW4HvPlQn2v7fOKSGRdghST2MDk/7NQcvJ29rNd
+QlB50JQ+awwAvthrDk4q7D7SzIKiGGUzE3eeml0aE9jD2z3Il3rucO2n5nzbcc8t
+lGLfbdb1OL4/pYUKGbio2Al1QnDE6u/LDsg0qBIimAy4E5S2S+zw0JDnJwIDAQAB
+o4HjMIHgMB0GA1UdEQQWMBSBEmVjX2FjY0BjYXRjZXJ0Lm5ldDAPBgNVHRMBAf8E
+BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUoMOLRKo3pUW/l4Ba0fF4
+opvpXY0wfwYDVR0gBHgwdjB0BgsrBgEEAfV4AQMBCjBlMCwGCCsGAQUFBwIBFiBo
+dHRwczovL3d3dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbDA1BggrBgEFBQcCAjApGidW
+ZWdldSBodHRwczovL3d3dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbCAwDQYJKoZIhvcN
+AQEFBQADggEBAKBIW4IB9k1IuDlVNZyAelOZ1Vr/sXE7zDkJlF7W2u++AVtd0x7Y
+/X1PzaBB4DSTv8vihpw3kpBWHNzrKQXlxJ7HNd+KDM3FIUPpqojlNcAZQmNaAl6k
+SBg6hW/cnbw/nZzBh7h6YQjpdwt/cKt63dmXLGQehb+8dJahw3oS7AwaboMMPOhy
+Rp/7SNVel+axofjk70YllJyJ22k4vuxcDlbHZVHlUIiIv0LVKz3l+bqeLrPK9HOS
+Agu+TGbrIP65y7WZf+a2E/rKS03Z7lNGBjvGTq2TWoF+bCpLagVFjPIhpDGQh2xl
+nJ2lYJU6Un/10asIbvPuW/mIPX64b24D5EI=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority
+# Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority
+# Label: "Hellenic Academic and Research Institutions RootCA 2011"
+# Serial: 0
+# MD5 Fingerprint: 73:9f:4c:4b:73:5b:79:e9:fa:ba:1c:ef:6e:cb:d5:c9
+# SHA1 Fingerprint: fe:45:65:9b:79:03:5b:98:a1:61:b5:51:2e:ac:da:58:09:48:22:4d
+# SHA256 Fingerprint: bc:10:4f:15:a4:8b:e7:09:dc:a5:42:a7:e1:d4:b9:df:6f:05:45:27:e8:02:ea:a9:2d:59:54:44:25:8a:fe:71
+-----BEGIN CERTIFICATE-----
+MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1Ix
+RDBCBgNVBAoTO0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1
+dGlvbnMgQ2VydC4gQXV0aG9yaXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1p
+YyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIFJvb3RDQSAyMDExMB4XDTExMTIw
+NjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYTAkdSMUQwQgYDVQQK
+EztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIENl
+cnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl
+c2VhcmNoIEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEB
+BQADggEPADCCAQoCggEBAKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPz
+dYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJ
+fel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa71HFK9+WXesyHgLacEns
+bgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u8yBRQlqD
+75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSP
+FEDH3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNV
+HRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp
+5dgTBCPuQSUwRwYDVR0eBEAwPqA8MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQu
+b3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQub3JnMA0GCSqGSIb3DQEBBQUA
+A4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVtXdMiKahsog2p
+6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8
+TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7
+dIsXRSZMFpGD/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8Acys
+Nnq/onN694/BtZqhFLKPM58N7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXI
+l7WdmplNsDz4SgCbZN2fOUvRJ9e4
+-----END CERTIFICATE-----
+
+# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
+# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
+# Label: "Actalis Authentication Root CA"
+# Serial: 6271844772424770508
+# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6
+# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac
+# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66
+-----BEGIN CERTIFICATE-----
+MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE
+BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w
+MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290
+IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC
+SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1
+ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv
+UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX
+4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9
+KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/
+gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb
+rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ
+51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F
+be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe
+KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F
+v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn
+fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7
+jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz
+ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt
+ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL
+e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70
+jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz
+WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V
+SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j
+pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX
+X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok
+fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R
+K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU
+ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU
+LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT
+LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg==
+-----END CERTIFICATE-----
+
+# Issuer: O=Trustis Limited OU=Trustis FPS Root CA
+# Subject: O=Trustis Limited OU=Trustis FPS Root CA
+# Label: "Trustis FPS Root CA"
+# Serial: 36053640375399034304724988975563710553
+# MD5 Fingerprint: 30:c9:e7:1e:6b:e6:14:eb:65:b2:16:69:20:31:67:4d
+# SHA1 Fingerprint: 3b:c0:38:0b:33:c3:f6:a6:0c:86:15:22:93:d9:df:f5:4b:81:c0:04
+# SHA256 Fingerprint: c1:b4:82:99:ab:a5:20:8f:e9:63:0a:ce:55:ca:68:a0:3e:da:5a:51:9c:88:02:a0:d3:a6:73:be:8f:8e:55:7d
+-----BEGIN CERTIFICATE-----
+MIIDZzCCAk+gAwIBAgIQGx+ttiD5JNM2a/fH8YygWTANBgkqhkiG9w0BAQUFADBF
+MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPVHJ1c3RpcyBMaW1pdGVkMRwwGgYDVQQL
+ExNUcnVzdGlzIEZQUyBSb290IENBMB4XDTAzMTIyMzEyMTQwNloXDTI0MDEyMTEx
+MzY1NFowRTELMAkGA1UEBhMCR0IxGDAWBgNVBAoTD1RydXN0aXMgTGltaXRlZDEc
+MBoGA1UECxMTVHJ1c3RpcyBGUFMgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQAD
+ggEPADCCAQoCggEBAMVQe547NdDfxIzNjpvto8A2mfRC6qc+gIMPpqdZh8mQRUN+
+AOqGeSoDvT03mYlmt+WKVoaTnGhLaASMk5MCPjDSNzoiYYkchU59j9WvezX2fihH
+iTHcDnlkH5nSW7r+f2C/revnPDgpai/lkQtV/+xvWNUtyd5MZnGPDNcE2gfmHhjj
+vSkCqPoc4Vu5g6hBSLwacY3nYuUtsuvffM/bq1rKMfFMIvMFE/eC+XN5DL7XSxzA
+0RU8k0Fk0ea+IxciAIleH2ulrG6nS4zto3Lmr2NNL4XSFDWaLk6M6jKYKIahkQlB
+OrTh4/L68MkKokHdqeMDx4gVOxzUGpTXn2RZEm0CAwEAAaNTMFEwDwYDVR0TAQH/
+BAUwAwEB/zAfBgNVHSMEGDAWgBS6+nEleYtXQSUhhgtx67JkDoshZzAdBgNVHQ4E
+FgQUuvpxJXmLV0ElIYYLceuyZA6LIWcwDQYJKoZIhvcNAQEFBQADggEBAH5Y//01
+GX2cGE+esCu8jowU/yyg2kdbw++BLa8F6nRIW/M+TgfHbcWzk88iNVy2P3UnXwmW
+zaD+vkAMXBJV+JOCyinpXj9WV4s4NvdFGkwozZ5BuO1WTISkQMi4sKUraXAEasP4
+1BIy+Q7DsdwyhEQsb8tGD+pmQQ9P8Vilpg0ND2HepZ5dfWWhPBfnqFVO76DH7cZE
+f1T1o+CP8HxVIo8ptoGj4W1OLBuAZ+ytIJ8MYmHVl/9D7S3B2l0pKoU/rGXuhg8F
+jZBf3+6f9L/uHfuY5H+QK4R4EA5sSVPvFVtlRkpdr7r7OnIdzfYliB6XzCGcKQEN
+ZetX2fNXlrtIzYE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
+# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
+# Label: "Buypass Class 2 Root CA"
+# Serial: 2
+# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29
+# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99
+# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48
+-----BEGIN CERTIFICATE-----
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
+MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
+Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow
+TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
+HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
+BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr
+6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV
+L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91
+1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx
+MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ
+QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB
+arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr
+Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi
+FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS
+P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN
+9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP
+AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz
+uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h
+9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s
+A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t
+OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo
++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7
+KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2
+DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us
+H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ
+I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7
+5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h
+3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz
+Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327
+# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327
+# Label: "Buypass Class 3 Root CA"
+# Serial: 2
+# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec
+# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57
+# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d
+-----BEGIN CERTIFICATE-----
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
+MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
+Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow
+TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
+HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
+BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y
+ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E
+N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9
+tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX
+0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c
+/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X
+KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY
+zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS
+O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D
+34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP
+K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3
+AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv
+Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj
+QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV
+cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS
+IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2
+HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa
+O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv
+033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u
+dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE
+kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41
+3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD
+u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq
+4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc=
+-----END CERTIFICATE-----
+
+# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Label: "T-TeleSec GlobalRoot Class 3"
+# Serial: 1
+# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef
+# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1
+# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd
+-----BEGIN CERTIFICATE-----
+MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx
+KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd
+BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl
+YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1
+OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy
+aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50
+ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G
+CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN
+8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/
+RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4
+hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5
+ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM
+EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1
+A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy
+WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ
+1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30
+6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT
+91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml
+e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p
+TpPDpFQUWw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus
+# Subject: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus
+# Label: "EE Certification Centre Root CA"
+# Serial: 112324828676200291871926431888494945866
+# MD5 Fingerprint: 43:5e:88:d4:7d:1a:4a:7e:fd:84:2e:52:eb:01:d4:6f
+# SHA1 Fingerprint: c9:a8:b9:e7:55:80:5e:58:e3:53:77:a7:25:eb:af:c3:7b:27:cc:d7
+# SHA256 Fingerprint: 3e:84:ba:43:42:90:85:16:e7:75:73:c0:99:2f:09:79:ca:08:4e:46:85:68:1f:f1:95:cc:ba:8a:22:9b:8a:76
+-----BEGIN CERTIFICATE-----
+MIIEAzCCAuugAwIBAgIQVID5oHPtPwBMyonY43HmSjANBgkqhkiG9w0BAQUFADB1
+MQswCQYDVQQGEwJFRTEiMCAGA1UECgwZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1
+czEoMCYGA1UEAwwfRUUgQ2VydGlmaWNhdGlvbiBDZW50cmUgUm9vdCBDQTEYMBYG
+CSqGSIb3DQEJARYJcGtpQHNrLmVlMCIYDzIwMTAxMDMwMTAxMDMwWhgPMjAzMDEy
+MTcyMzU5NTlaMHUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKDBlBUyBTZXJ0aWZpdHNl
+ZXJpbWlza2Vza3VzMSgwJgYDVQQDDB9FRSBDZXJ0aWZpY2F0aW9uIENlbnRyZSBS
+b290IENBMRgwFgYJKoZIhvcNAQkBFglwa2lAc2suZWUwggEiMA0GCSqGSIb3DQEB
+AQUAA4IBDwAwggEKAoIBAQDIIMDs4MVLqwd4lfNE7vsLDP90jmG7sWLqI9iroWUy
+euuOF0+W2Ap7kaJjbMeMTC55v6kF/GlclY1i+blw7cNRfdCT5mzrMEvhvH2/UpvO
+bntl8jixwKIy72KyaOBhU8E2lf/slLo2rpwcpzIP5Xy0xm90/XsY6KxX7QYgSzIw
+WFv9zajmofxwvI6Sc9uXp3whrj3B9UiHbCe9nyV0gVWw93X2PaRka9ZP585ArQ/d
+MtO8ihJTmMmJ+xAdTX7Nfh9WDSFwhfYggx/2uh8Ej+p3iDXE/+pOoYtNP2MbRMNE
+1CV2yreN1x5KZmTNXMWcg+HCCIia7E6j8T4cLNlsHaFLAgMBAAGjgYowgYcwDwYD
+VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBLyWj7qVhy/
+zQas8fElyalL1BSZMEUGA1UdJQQ+MDwGCCsGAQUFBwMCBggrBgEFBQcDAQYIKwYB
+BQUHAwMGCCsGAQUFBwMEBggrBgEFBQcDCAYIKwYBBQUHAwkwDQYJKoZIhvcNAQEF
+BQADggEBAHv25MANqhlHt01Xo/6tu7Fq1Q+e2+RjxY6hUFaTlrg4wCQiZrxTFGGV
+v9DHKpY5P30osxBAIWrEr7BSdxjhlthWXePdNl4dp1BUoMUq5KqMlIpPnTX/dqQG
+E5Gion0ARD9V04I8GtVbvFZMIi5GQ4okQC3zErg7cBqklrkar4dBGmoYDQZPxz5u
+uSlNDUmJEYcyW+ZLBMjkXOZ0c5RdFpgTlf7727FE5TpwrDdr5rMzcijJs1eg9gIW
+iAYLtqZLICjU3j2LrTcFU3T+bsy8QxdxXvnFzBqpYe73dgzzcvRyrc9yAjYHR8/v
+GVCJYMzpJJUPwssd8m92kMfMdcGWxZ0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
+# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
+# Label: "D-TRUST Root Class 3 CA 2 2009"
+# Serial: 623603
+# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f
+# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0
+# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1
+-----BEGIN CERTIFICATE-----
+MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF
+MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD
+bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha
+ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM
+HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB
+BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03
+UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42
+tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R
+ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM
+lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp
+/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G
+A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G
+A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj
+dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy
+MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl
+cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js
+L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL
+BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni
+acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0
+o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K
+zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8
+PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y
+Johw1+qRzT65ysCQblrGXnRl11z+o+I=
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH
+# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH
+# Label: "D-TRUST Root Class 3 CA 2 EV 2009"
+# Serial: 623604
+# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6
+# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83
+# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81
+-----BEGIN CERTIFICATE-----
+MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF
+MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD
+bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw
+NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV
+BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn
+ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0
+3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z
+qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR
+p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8
+HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw
+ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea
+HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw
+Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh
+c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E
+RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt
+dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku
+Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp
+3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05
+nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF
+CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na
+xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX
+KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1
+-----END CERTIFICATE-----
+
+# Issuer: CN=CA Disig Root R2 O=Disig a.s.
+# Subject: CN=CA Disig Root R2 O=Disig a.s.
+# Label: "CA Disig Root R2"
+# Serial: 10572350602393338211
+# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03
+# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71
+# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03
+-----BEGIN CERTIFICATE-----
+MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV
+BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu
+MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy
+MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx
+EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw
+ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe
+NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH
+PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I
+x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe
+QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR
+yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO
+QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912
+H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ
+QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD
+i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs
+nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1
+rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud
+DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI
+hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM
+tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf
+GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb
+lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka
++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal
+TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i
+nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3
+gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr
+G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os
+zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x
+L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL
+-----END CERTIFICATE-----
+
+# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
+# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
+# Label: "ACCVRAIZ1"
+# Serial: 6828503384748696800
+# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02
+# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17
+# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13
+-----BEGIN CERTIFICATE-----
+MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE
+AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw
+CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ
+BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND
+VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb
+qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY
+HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo
+G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA
+lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr
+IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/
+0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH
+k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47
+4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO
+m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa
+cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl
+uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI
+KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls
+ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG
+AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2
+VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT
+VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG
+CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA
+cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA
+QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA
+7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA
+cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA
+QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA
+czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu
+aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt
+aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud
+DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF
+BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp
+D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU
+JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m
+AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD
+vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms
+tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH
+7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h
+I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA
+h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF
+d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H
+pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7
+-----END CERTIFICATE-----
+
+# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
+# Label: "TWCA Global Root CA"
+# Serial: 3262
+# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96
+# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65
+# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b
+-----BEGIN CERTIFICATE-----
+MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx
+EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT
+VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5
+NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT
+B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF
+10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz
+0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh
+MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH
+zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc
+46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2
+yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi
+laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP
+oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA
+BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE
+qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm
+4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB
+/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL
+1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn
+LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF
+H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo
+RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+
+nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh
+15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW
+6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW
+nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j
+wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz
+aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy
+KwbQBM0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera
+# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera
+# Label: "TeliaSonera Root CA v1"
+# Serial: 199041966741090107964904287217786801558
+# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c
+# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37
+# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89
+-----BEGIN CERTIFICATE-----
+MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw
+NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv
+b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD
+VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2
+MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F
+VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1
+7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X
+Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+
+/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs
+81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm
+dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe
+Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu
+sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4
+pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs
+slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ
+arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD
+VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG
+9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl
+dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx
+0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj
+TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed
+Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7
+Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI
+OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7
+vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW
+t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn
+HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx
+SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=E-Tugra Certification Authority O=E-Tuğra EBG Bilişim Teknolojileri ve Hizmetleri A.Ş. OU=E-Tugra Sertifikasyon Merkezi
+# Subject: CN=E-Tugra Certification Authority O=E-Tuğra EBG Bilişim Teknolojileri ve Hizmetleri A.Ş. OU=E-Tugra Sertifikasyon Merkezi
+# Label: "E-Tugra Certification Authority"
+# Serial: 7667447206703254355
+# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49
+# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39
+# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c
+-----BEGIN CERTIFICATE-----
+MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV
+BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC
+aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV
+BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1
+Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz
+MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+
+BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp
+em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN
+ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY
+B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH
+D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF
+Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo
+q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D
+k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH
+fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut
+dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM
+ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8
+zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn
+rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX
+U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6
+Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5
+XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF
+Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR
+HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY
+GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c
+77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3
++GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK
+vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6
+FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl
+yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P
+AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD
+y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d
+NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Label: "T-TeleSec GlobalRoot Class 2"
+# Serial: 1
+# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a
+# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9
+# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52
+-----BEGIN CERTIFICATE-----
+MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx
+KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd
+BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl
+YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1
+OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy
+aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50
+ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G
+CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd
+AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC
+FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi
+1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq
+jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ
+wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/
+WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy
+NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC
+uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw
+IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6
+g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN
+9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP
+BSeOE6Fuwg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Atos TrustedRoot 2011 O=Atos
+# Subject: CN=Atos TrustedRoot 2011 O=Atos
+# Label: "Atos TrustedRoot 2011"
+# Serial: 6643877497813316402
+# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56
+# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21
+# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE
+AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG
+EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM
+FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC
+REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp
+Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM
+VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+
+SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ
+4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L
+cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi
+eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV
+HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG
+A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3
+DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j
+vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP
+DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc
+maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D
+lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv
+KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 1 G3"
+# Serial: 687049649626669250736271037606554624078720034195
+# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab
+# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67
+# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00
+MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV
+wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe
+rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341
+68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh
+4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp
+UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o
+abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc
+3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G
+KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt
+hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO
+Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt
+zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD
+ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC
+MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2
+cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN
+qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5
+YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv
+b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2
+8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k
+NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj
+ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp
+q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt
+nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 2 G3"
+# Serial: 390156079458959257446133169266079962026824725800
+# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06
+# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36
+# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00
+MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf
+qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW
+n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym
+c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+
+O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1
+o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j
+IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq
+IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz
+8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh
+vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l
+7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG
+cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD
+ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66
+AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC
+roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga
+W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n
+lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE
++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV
+csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd
+dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg
+KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM
+HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4
+WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 3 G3"
+# Serial: 268090761170461462463995952157327242137089239581
+# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7
+# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d
+# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00
+MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR
+/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu
+FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR
+U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c
+ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR
+FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k
+A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw
+eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl
+sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp
+VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q
+A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+
+ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD
+ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px
+KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI
+FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv
+oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg
+u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP
+0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf
+3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl
+8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+
+DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN
+PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/
+ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root G2"
+# Serial: 15385348160840213938643033620894905419
+# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d
+# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f
+# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85
+-----BEGIN CERTIFICATE-----
+MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
+b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG
+EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
+cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA
+n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc
+biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp
+EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA
+bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu
+YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB
+AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW
+BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI
+QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I
+0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni
+lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9
+B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv
+ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo
+IhNzbM8m9Yop5w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root G3"
+# Serial: 15459312981008553731928384953135426796
+# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb
+# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89
+# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2
+-----BEGIN CERTIFICATE-----
+MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw
+CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu
+ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg
+RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV
+UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu
+Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq
+hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf
+Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q
+RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
+BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD
+AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY
+JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv
+6pZjamVFkpUBtA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root G2"
+# Serial: 4293743540046975378534879503202253541
+# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44
+# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4
+# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f
+-----BEGIN CERTIFICATE-----
+MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH
+MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT
+MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI
+2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx
+1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ
+q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz
+tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ
+vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP
+BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV
+5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY
+1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4
+NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG
+Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91
+8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe
+pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl
+MrY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root G3"
+# Serial: 7089244469030293291760083333884364146
+# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca
+# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e
+# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0
+-----BEGIN CERTIFICATE-----
+MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw
+CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu
+ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe
+Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw
+EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x
+IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF
+K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG
+fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO
+Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd
+BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx
+AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/
+oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8
+sycX
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Trusted Root G4"
+# Serial: 7451500558977370777930084869016614236
+# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49
+# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4
+# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88
+-----BEGIN CERTIFICATE-----
+MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg
+RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV
+UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu
+Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y
+ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If
+xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV
+ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO
+DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ
+jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/
+CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi
+EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM
+fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY
+uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK
+chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t
+9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD
+ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2
+SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd
++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc
+fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa
+sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N
+cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N
+0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie
+4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI
+r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1
+/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm
+gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited
+# Label: "COMODO RSA Certification Authority"
+# Serial: 101909084537582093308941363524873193117
+# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18
+# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4
+# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34
+-----BEGIN CERTIFICATE-----
+MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB
+hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV
+BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5
+MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT
+EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR
+Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh
+dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR
+6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X
+pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC
+9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV
+/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf
+Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z
++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w
+qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah
+SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC
+u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf
+Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq
+crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E
+FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB
+/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl
+wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM
+4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV
+2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna
+FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ
+CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK
+boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke
+jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL
+S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb
+QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl
+0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB
+NVOFBkpdn627G190
+-----END CERTIFICATE-----
+
+# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network
+# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network
+# Label: "USERTrust RSA Certification Authority"
+# Serial: 2645093764781058787591871645665788717
+# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5
+# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e
+# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2
+-----BEGIN CERTIFICATE-----
+MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB
+iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl
+cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV
+BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw
+MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV
+BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU
+aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy
+dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B
+3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY
+tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/
+Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2
+VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT
+79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6
+c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT
+Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l
+c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee
+UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE
+Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd
+BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G
+A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF
+Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO
+VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3
+ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs
+8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR
+iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze
+Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ
+XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/
+qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB
+VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB
+L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG
+jjxDah2nGN59PRbxYvnKkKj9
+-----END CERTIFICATE-----
+
+# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network
+# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network
+# Label: "USERTrust ECC Certification Authority"
+# Serial: 123013823720199481456569720443997572134
+# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1
+# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0
+# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a
+-----BEGIN CERTIFICATE-----
+MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL
+MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl
+eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT
+JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx
+MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT
+Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg
+VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm
+aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo
+I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng
+o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G
+A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB
+zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW
+RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4
+# Label: "GlobalSign ECC Root CA - R4"
+# Serial: 14367148294922964480859022125800977897474
+# MD5 Fingerprint: 20:f0:27:68:d1:7e:a0:9d:0e:e6:2a:ca:df:5c:89:8e
+# SHA1 Fingerprint: 69:69:56:2e:40:80:f4:24:a1:e7:19:9f:14:ba:f3:ee:58:ab:6a:bb
+# SHA256 Fingerprint: be:c9:49:11:c2:95:56:76:db:6c:0a:55:09:86:d7:6e:3b:a0:05:66:7c:44:2c:97:62:b4:fb:b7:73:de:22:8c
+-----BEGIN CERTIFICATE-----
+MIIB4TCCAYegAwIBAgIRKjikHJYKBN5CsiilC+g0mAIwCgYIKoZIzj0EAwIwUDEk
+MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpH
+bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX
+DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD
+QSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu
+MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuMZ5049sJQ6fLjkZHAOkrprlOQcJ
+FspjsbmG+IpXwVfOQvpzofdlQv8ewQCybnMO/8ch5RikqtlxP6jUuc6MHaNCMEAw
+DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFFSwe61F
+uOJAf/sKbvu+M8k8o4TVMAoGCCqGSM49BAMCA0gAMEUCIQDckqGgE6bPA7DmxCGX
+kPoUVy0D7O48027KqGx2vKLeuwIgJ6iFJzWbVsaj8kfSt24bAgAXqmemFZHe+pTs
+ewv4n4Q=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5
+# Label: "GlobalSign ECC Root CA - R5"
+# Serial: 32785792099990507226680698011560947931244
+# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08
+# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa
+# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24
+-----BEGIN CERTIFICATE-----
+MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk
+MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH
+bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX
+DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD
+QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu
+MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc
+8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke
+hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI
+KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg
+515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO
+xwy8p2Fp8fc74SrL+SvzZpA3
+-----END CERTIFICATE-----
+
+# Issuer: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden
+# Subject: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden
+# Label: "Staat der Nederlanden Root CA - G3"
+# Serial: 10003001
+# MD5 Fingerprint: 0b:46:67:07:db:10:2f:19:8c:35:50:60:d1:0b:f4:37
+# SHA1 Fingerprint: d8:eb:6b:41:51:92:59:e0:f3:e7:85:00:c0:3d:b6:88:97:c9:ee:fc
+# SHA256 Fingerprint: 3c:4f:b0:b9:5a:b8:b3:00:32:f4:32:b8:6f:53:5f:e1:72:c1:85:d0:fd:39:86:58:37:cf:36:18:7f:a6:f4:28
+-----BEGIN CERTIFICATE-----
+MIIFdDCCA1ygAwIBAgIEAJiiOTANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO
+TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh
+dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEczMB4XDTEzMTExNDExMjg0MloX
+DTI4MTExMzIzMDAwMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl
+ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv
+b3QgQ0EgLSBHMzCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAL4yolQP
+cPssXFnrbMSkUeiFKrPMSjTysF/zDsccPVMeiAho2G89rcKezIJnByeHaHE6n3WW
+IkYFsO2tx1ueKt6c/DrGlaf1F2cY5y9JCAxcz+bMNO14+1Cx3Gsy8KL+tjzk7FqX
+xz8ecAgwoNzFs21v0IJyEavSgWhZghe3eJJg+szeP4TrjTgzkApyI/o1zCZxMdFy
+KJLZWyNtZrVtB0LrpjPOktvA9mxjeM3KTj215VKb8b475lRgsGYeCasH/lSJEULR
+9yS6YHgamPfJEf0WwTUaVHXvQ9Plrk7O53vDxk5hUUurmkVLoR9BvUhTFXFkC4az
+5S6+zqQbwSmEorXLCCN2QyIkHxcE1G6cxvx/K2Ya7Irl1s9N9WMJtxU51nus6+N8
+6U78dULI7ViVDAZCopz35HCz33JvWjdAidiFpNfxC95DGdRKWCyMijmev4SH8RY7
+Ngzp07TKbBlBUgmhHbBqv4LvcFEhMtwFdozL92TkA1CvjJFnq8Xy7ljY3r735zHP
+bMk7ccHViLVlvMDoFxcHErVc0qsgk7TmgoNwNsXNo42ti+yjwUOH5kPiNL6VizXt
+BznaqB16nzaeErAMZRKQFWDZJkBE41ZgpRDUajz9QdwOWke275dhdU/Z/seyHdTt
+XUmzqWrLZoQT1Vyg3N9udwbRcXXIV2+vD3dbAgMBAAGjQjBAMA8GA1UdEwEB/wQF
+MAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRUrfrHkleuyjWcLhL75Lpd
+INyUVzANBgkqhkiG9w0BAQsFAAOCAgEAMJmdBTLIXg47mAE6iqTnB/d6+Oea31BD
+U5cqPco8R5gu4RV78ZLzYdqQJRZlwJ9UXQ4DO1t3ApyEtg2YXzTdO2PCwyiBwpwp
+LiniyMMB8jPqKqrMCQj3ZWfGzd/TtiunvczRDnBfuCPRy5FOCvTIeuXZYzbB1N/8
+Ipf3YF3qKS9Ysr1YvY2WTxB1v0h7PVGHoTx0IsL8B3+A3MSs/mrBcDCw6Y5p4ixp
+gZQJut3+TcCDjJRYwEYgr5wfAvg1VUkvRtTA8KCWAg8zxXHzniN9lLf9OtMJgwYh
+/WA9rjLA0u6NpvDntIJ8CsxwyXmA+P5M9zWEGYox+wrZ13+b8KKaa8MFSu1BYBQw
+0aoRQm7TIwIEC8Zl3d1Sd9qBa7Ko+gE4uZbqKmxnl4mUnrzhVNXkanjvSr0rmj1A
+fsbAddJu+2gw7OyLnflJNZoaLNmzlTnVHpL3prllL+U9bTpITAjc5CgSKL59NVzq
+4BZ+Extq1z7XnvwtdbLBFNUjA9tbbws+eC8N3jONFrdI54OagQ97wUNNVQQXOEpR
+1VmiiXTTn74eS9fGbbeIJG9gkaSChVtWQbzQRKtqE77RLFi3EjNYsjdj3BP1lB0/
+QFH1T/U67cjF68IeHRaVesd+QnGTbksVtzDfqu1XhUisHWrdOWnk4Xl4vs4Fv6EM
+94B7IWcnMFk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden
+# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden
+# Label: "Staat der Nederlanden EV Root CA"
+# Serial: 10000013
+# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba
+# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb
+# SHA256 Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a
+-----BEGIN CERTIFICATE-----
+MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO
+TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh
+dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y
+MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg
+TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS
+b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS
+M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC
+UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d
+Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p
+rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l
+pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb
+j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC
+KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS
+/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X
+cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH
+1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP
+px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB
+/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7
+MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI
+eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u
+2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS
+v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC
+wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy
+CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e
+vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6
+Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa
+Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL
+eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8
+FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc
+7uzXLg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
+# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
+# Label: "IdenTrust Commercial Root CA 1"
+# Serial: 13298821034946342390520003877796839426
+# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7
+# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25
+# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK
+MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu
+VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw
+MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw
+JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT
+3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU
++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp
+S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1
+bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi
+T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL
+vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK
+Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK
+dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT
+c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv
+l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N
+iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB
+/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD
+ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH
+6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt
+LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93
+nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3
++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK
+W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT
+AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq
+l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG
+4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ
+mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A
+7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H
+-----END CERTIFICATE-----
+
+# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust
+# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust
+# Label: "IdenTrust Public Sector Root CA 1"
+# Serial: 13298821034946342390521976156843933698
+# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba
+# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd
+# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f
+-----BEGIN CERTIFICATE-----
+MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN
+MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu
+VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN
+MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0
+MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7
+ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy
+RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS
+bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF
+/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R
+3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw
+EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy
+9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V
+GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ
+2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV
+WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD
+W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
+BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN
+AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj
+t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV
+DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9
+TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G
+lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW
+mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df
+WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5
++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ
+tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA
+GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv
+8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - G2"
+# Serial: 1246989352
+# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2
+# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4
+# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39
+-----BEGIN CERTIFICATE-----
+MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC
+VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50
+cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs
+IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz
+dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy
+NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu
+dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt
+dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0
+aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
+AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T
+RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN
+cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW
+wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1
+U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0
+jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP
+BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN
+BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/
+jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ
+Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v
+1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R
+nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH
+VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - EC1"
+# Serial: 51543124481930649114116133369
+# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc
+# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47
+# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5
+-----BEGIN CERTIFICATE-----
+MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG
+A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3
+d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu
+dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq
+RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy
+MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD
+VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0
+L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g
+Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD
+ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi
+A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt
+ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH
+Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O
+BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC
+R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX
+hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G
+-----END CERTIFICATE-----
+
+# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority
+# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority
+# Label: "CFCA EV ROOT"
+# Serial: 407555286
+# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30
+# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83
+# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd
+-----BEGIN CERTIFICATE-----
+MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD
+TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx
+MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j
+aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP
+T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03
+sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL
+TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5
+/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp
+7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz
+EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt
+hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP
+a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot
+aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg
+TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV
+PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv
+cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL
+tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd
+BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB
+ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT
+ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL
+jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS
+ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy
+P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19
+xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d
+Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN
+5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe
+/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z
+AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ
+5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su
+-----END CERTIFICATE-----
+
+# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed
+# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed
+# Label: "OISTE WISeKey Global Root GB CA"
+# Serial: 157768595616588414422159278966750757568
+# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d
+# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed
+# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6
+-----BEGIN CERTIFICATE-----
+MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt
+MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg
+Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i
+YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x
+CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG
+b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh
+bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3
+HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx
+WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX
+1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk
+u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P
+99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r
+M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw
+AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB
+BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh
+cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5
+gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO
+ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf
+aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic
+Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A.
+# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A.
+# Label: "SZAFIR ROOT CA2"
+# Serial: 357043034767186914217277344587386743377558296292
+# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99
+# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de
+# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe
+-----BEGIN CERTIFICATE-----
+MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL
+BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6
+ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw
+NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L
+cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg
+Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN
+QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT
+3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw
+3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6
+3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5
+BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN
+XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
+AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF
+AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw
+8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG
+nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP
+oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy
+d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg
+LvWpCz/UXeHPhJ/iGcJfitYgHuNztw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Label: "Certum Trusted Network CA 2"
+# Serial: 44979900017204383099463764357512596969
+# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2
+# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92
+# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04
+-----BEGIN CERTIFICATE-----
+MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB
+gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu
+QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG
+A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz
+OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ
+VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3
+b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA
+DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn
+0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB
+OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE
+fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E
+Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m
+o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i
+sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW
+OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez
+Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS
+adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n
+3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD
+AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC
+AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ
+F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf
+CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29
+XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm
+djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/
+WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb
+AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq
+P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko
+b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj
+XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P
+5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi
+DrW5viSP
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Label: "Hellenic Academic and Research Institutions RootCA 2015"
+# Serial: 0
+# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce
+# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6
+# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36
+-----BEGIN CERTIFICATE-----
+MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix
+DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k
+IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT
+N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v
+dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG
+A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh
+ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx
+QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1
+dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC
+AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA
+4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0
+AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10
+4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C
+ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV
+9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD
+gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6
+Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq
+NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko
+LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc
+Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV
+HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd
+ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I
+XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI
+M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot
+9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V
+Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea
+j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh
+X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ
+l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf
+bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4
+pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK
+e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0
+vm9qp/UsQu0yrbYhnr68
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015"
+# Serial: 0
+# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef
+# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66
+# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33
+-----BEGIN CERTIFICATE-----
+MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN
+BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl
+c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl
+bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv
+b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ
+BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj
+YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5
+MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0
+dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg
+QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa
+jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC
+MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi
+C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep
+lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof
+TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR
+-----END CERTIFICATE-----
+
+# Issuer: CN=ISRG Root X1 O=Internet Security Research Group
+# Subject: CN=ISRG Root X1 O=Internet Security Research Group
+# Label: "ISRG Root X1"
+# Serial: 172886928669790476064670243504169061120
+# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e
+# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8
+# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6
+-----BEGIN CERTIFICATE-----
+MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw
+TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh
+cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4
+WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu
+ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY
+MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc
+h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+
+0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U
+A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW
+T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH
+B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC
+B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv
+KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn
+OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn
+jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw
+qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI
+rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq
+hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL
+ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ
+3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK
+NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5
+ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur
+TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC
+jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc
+oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq
+4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA
+mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d
+emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc=
+-----END CERTIFICATE-----
+
+# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM
+# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM
+# Label: "AC RAIZ FNMT-RCM"
+# Serial: 485876308206448804701554682760554759
+# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d
+# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20
+# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa
+-----BEGIN CERTIFICATE-----
+MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx
+CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ
+WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ
+BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG
+Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/
+yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf
+BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz
+WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF
+tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z
+374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC
+IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL
+mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7
+wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS
+MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2
+ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet
+UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw
+AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H
+YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3
+LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD
+nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1
+RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM
+LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf
+77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N
+JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm
+fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp
+6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp
+1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B
+9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok
+RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv
+uu8wd+RU4riEmViAqhOLUTpPSPaLtrM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 1 O=Amazon
+# Subject: CN=Amazon Root CA 1 O=Amazon
+# Label: "Amazon Root CA 1"
+# Serial: 143266978916655856878034712317230054538369994
+# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6
+# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16
+# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e
+-----BEGIN CERTIFICATE-----
+MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF
+ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6
+b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL
+MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv
+b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj
+ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM
+9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw
+IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6
+VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L
+93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm
+jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
+AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA
+A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI
+U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs
+N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv
+o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU
+5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy
+rqXRfboQnoZsG4q5WTP468SQvvG5
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 2 O=Amazon
+# Subject: CN=Amazon Root CA 2 O=Amazon
+# Label: "Amazon Root CA 2"
+# Serial: 143266982885963551818349160658925006970653239
+# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66
+# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a
+# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4
+-----BEGIN CERTIFICATE-----
+MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF
+ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6
+b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL
+MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv
+b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK
+gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ
+W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg
+1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K
+8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r
+2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me
+z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR
+8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj
+mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz
+7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6
++XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI
+0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB
+Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm
+UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2
+LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY
++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS
+k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl
+7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm
+btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl
+urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+
+fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63
+n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE
+76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H
+9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT
+4PsJYGw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 3 O=Amazon
+# Subject: CN=Amazon Root CA 3 O=Amazon
+# Label: "Amazon Root CA 3"
+# Serial: 143266986699090766294700635381230934788665930
+# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87
+# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e
+# SHA256 Fingerprint: 18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4
+-----BEGIN CERTIFICATE-----
+MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5
+MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g
+Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG
+A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg
+Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl
+ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr
+ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr
+BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM
+YyRIHN8wfdVoOw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 4 O=Amazon
+# Subject: CN=Amazon Root CA 4 O=Amazon
+# Label: "Amazon Root CA 4"
+# Serial: 143266989758080763974105200630763877849284878
+# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd
+# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be
+# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92
+-----BEGIN CERTIFICATE-----
+MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5
+MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g
+Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG
+A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg
+Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi
+9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk
+M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB
+/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB
+MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw
+CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW
+1KyLa2tJElMzrdfkviT8tQp21KW8EA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM
+# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM
+# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1"
+# Serial: 1
+# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49
+# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca
+# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16
+-----BEGIN CERTIFICATE-----
+MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx
+GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp
+bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w
+KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0
+BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy
+dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG
+EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll
+IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU
+QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT
+TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg
+LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7
+a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr
+LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr
+N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X
+YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/
+iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f
+AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH
+V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL
+BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh
+AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf
+IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4
+lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c
+8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf
+lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD.
+# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD.
+# Label: "GDCA TrustAUTH R5 ROOT"
+# Serial: 9009899650740120186
+# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4
+# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4
+# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93
+-----BEGIN CERTIFICATE-----
+MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE
+BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ
+IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0
+MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV
+BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w
+HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF
+AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj
+Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj
+TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u
+KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj
+qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm
+MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12
+ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP
+zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk
+L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC
+jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA
+HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC
+AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB
+/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg
+p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm
+DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5
+COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry
+L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf
+JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg
+IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io
+2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV
+09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ
+XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq
+T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe
+MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Subject: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Label: "TrustCor RootCert CA-1"
+# Serial: 15752444095811006489
+# MD5 Fingerprint: 6e:85:f1:dc:1a:00:d3:22:d5:b2:b2:ac:6b:37:05:45
+# SHA1 Fingerprint: ff:bd:cd:e7:82:c8:43:5e:3c:6f:26:86:5c:ca:a8:3a:45:5b:c3:0a
+# SHA256 Fingerprint: d4:0e:9c:86:cd:8f:e4:68:c1:77:69:59:f4:9e:a7:74:fa:54:86:84:b6:c4:06:f3:90:92:61:f4:dc:e2:57:5c
+-----BEGIN CERTIFICATE-----
+MIIEMDCCAxigAwIBAgIJANqb7HHzA7AZMA0GCSqGSIb3DQEBCwUAMIGkMQswCQYD
+VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk
+MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U
+cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRydXN0Q29y
+IFJvb3RDZXJ0IENBLTEwHhcNMTYwMjA0MTIzMjE2WhcNMjkxMjMxMTcyMzE2WjCB
+pDELMAkGA1UEBhMCUEExDzANBgNVBAgMBlBhbmFtYTEUMBIGA1UEBwwLUGFuYW1h
+IENpdHkxJDAiBgNVBAoMG1RydXN0Q29yIFN5c3RlbXMgUy4gZGUgUi5MLjEnMCUG
+A1UECwweVHJ1c3RDb3IgQ2VydGlmaWNhdGUgQXV0aG9yaXR5MR8wHQYDVQQDDBZU
+cnVzdENvciBSb290Q2VydCBDQS0xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB
+CgKCAQEAv463leLCJhJrMxnHQFgKq1mqjQCj/IDHUHuO1CAmujIS2CNUSSUQIpid
+RtLByZ5OGy4sDjjzGiVoHKZaBeYei0i/mJZ0PmnK6bV4pQa81QBeCQryJ3pS/C3V
+seq0iWEk8xoT26nPUu0MJLq5nux+AHT6k61sKZKuUbS701e/s/OojZz0JEsq1pme
+9J7+wH5COucLlVPat2gOkEz7cD+PSiyU8ybdY2mplNgQTsVHCJCZGxdNuWxu72CV
+EY4hgLW9oHPY0LJ3xEXqWib7ZnZ2+AYfYW0PVcWDtxBWcgYHpfOxGgMFZA6dWorW
+hnAbJN7+KIor0Gqw/Hqi3LJ5DotlDwIDAQABo2MwYTAdBgNVHQ4EFgQU7mtJPHo/
+DeOxCbeKyKsZn3MzUOcwHwYDVR0jBBgwFoAU7mtJPHo/DeOxCbeKyKsZn3MzUOcw
+DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQAD
+ggEBACUY1JGPE+6PHh0RU9otRCkZoB5rMZ5NDp6tPVxBb5UrJKF5mDo4Nvu7Zp5I
+/5CQ7z3UuJu0h3U/IJvOcs+hVcFNZKIZBqEHMwwLKeXx6quj7LUKdJDHfXLy11yf
+ke+Ri7fc7Waiz45mO7yfOgLgJ90WmMCV1Aqk5IGadZQ1nJBfiDcGrVmVCrDRZ9MZ
+yonnMlo2HD6CqFqTvsbQZJG2z9m2GM/bftJlo6bEjhcxwft+dtvTheNYsnd6djts
+L1Ac59v2Z3kf9YKVmgenFK+P3CghZwnS1k1aHBkcjndcw5QkPTJrS37UeJSDvjdN
+zl/HHk484IkzlQsPpTLWPFp5LBk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Subject: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Label: "TrustCor RootCert CA-2"
+# Serial: 2711694510199101698
+# MD5 Fingerprint: a2:e1:f8:18:0b:ba:45:d5:c7:41:2a:bb:37:52:45:64
+# SHA1 Fingerprint: b8:be:6d:cb:56:f1:55:b9:63:d4:12:ca:4e:06:34:c7:94:b2:1c:c0
+# SHA256 Fingerprint: 07:53:e9:40:37:8c:1b:d5:e3:83:6e:39:5d:ae:a5:cb:83:9e:50:46:f1:bd:0e:ae:19:51:cf:10:fe:c7:c9:65
+-----BEGIN CERTIFICATE-----
+MIIGLzCCBBegAwIBAgIIJaHfyjPLWQIwDQYJKoZIhvcNAQELBQAwgaQxCzAJBgNV
+BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw
+IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy
+dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEfMB0GA1UEAwwWVHJ1c3RDb3Ig
+Um9vdENlcnQgQ0EtMjAeFw0xNjAyMDQxMjMyMjNaFw0zNDEyMzExNzI2MzlaMIGk
+MQswCQYDVQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEg
+Q2l0eTEkMCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYD
+VQQLDB5UcnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRy
+dXN0Q29yIFJvb3RDZXJ0IENBLTIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCnIG7CKqJiJJWQdsg4foDSq8GbZQWU9MEKENUCrO2fk8eHyLAnK0IMPQo+
+QVqedd2NyuCb7GgypGmSaIwLgQ5WoD4a3SwlFIIvl9NkRvRUqdw6VC0xK5mC8tkq
+1+9xALgxpL56JAfDQiDyitSSBBtlVkxs1Pu2YVpHI7TYabS3OtB0PAx1oYxOdqHp
+2yqlO/rOsP9+aij9JxzIsekp8VduZLTQwRVtDr4uDkbIXvRR/u8OYzo7cbrPb1nK
+DOObXUm4TOJXsZiKQlecdu/vvdFoqNL0Cbt3Nb4lggjEFixEIFapRBF37120Hape
+az6LMvYHL1cEksr1/p3C6eizjkxLAjHZ5DxIgif3GIJ2SDpxsROhOdUuxTTCHWKF
+3wP+TfSvPd9cW436cOGlfifHhi5qjxLGhF5DUVCcGZt45vz27Ud+ez1m7xMTiF88
+oWP7+ayHNZ/zgp6kPwqcMWmLmaSISo5uZk3vFsQPeSghYA2FFn3XVDjxklb9tTNM
+g9zXEJ9L/cb4Qr26fHMC4P99zVvh1Kxhe1fVSntb1IVYJ12/+CtgrKAmrhQhJ8Z3
+mjOAPF5GP/fDsaOGM8boXg25NSyqRsGFAnWAoOsk+xWq5Gd/bnc/9ASKL3x74xdh
+8N0JqSDIvgmk0H5Ew7IwSjiqqewYmgeCK9u4nBit2uBGF6zPXQIDAQABo2MwYTAd
+BgNVHQ4EFgQU2f4hQG6UnrybPZx9mCAZ5YwwYrIwHwYDVR0jBBgwFoAU2f4hQG6U
+nrybPZx9mCAZ5YwwYrIwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYw
+DQYJKoZIhvcNAQELBQADggIBAJ5Fngw7tu/hOsh80QA9z+LqBrWyOrsGS2h60COX
+dKcs8AjYeVrXWoSK2BKaG9l9XE1wxaX5q+WjiYndAfrs3fnpkpfbsEZC89NiqpX+
+MWcUaViQCqoL7jcjx1BRtPV+nuN79+TMQjItSQzL/0kMmx40/W5ulop5A7Zv2wnL
+/V9lFDfhOPXzYRZY5LVtDQsEGz9QLX+zx3oaFoBg+Iof6Rsqxvm6ARppv9JYx1RX
+CI/hOWB3S6xZhBqI8d3LT3jX5+EzLfzuQfogsL7L9ziUwOHQhQ+77Sxzq+3+knYa
+ZH9bDTMJBzN7Bj8RpFxwPIXAz+OQqIN3+tvmxYxoZxBnpVIt8MSZj3+/0WvitUfW
+2dCFmU2Umw9Lje4AWkcdEQOsQRivh7dvDDqPys/cA8GiCcjl/YBeyGBCARsaU1q7
+N6a3vLqE6R5sGtRk2tRD/pOLS/IseRYQ1JMLiI+h2IYURpFHmygk71dSTlxCnKr3
+Sewn6EAes6aJInKc9Q0ztFijMDvd1GpUk74aTfOTlPf8hAs/hCBcNANExdqtvArB
+As8e5ZTZ845b2EzwnexhF7sUMlQMAimTHpKG9n/v55IFDlndmQguLvqcAFLTxWYp
+5KeXRKQOKIETNcX2b2TmQcTVL8w0RSXPQQCWPUouwpaYT05KnJe32x+SMsj/D1Fu
+1uwJ
+-----END CERTIFICATE-----
+
+# Issuer: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Subject: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Label: "TrustCor ECA-1"
+# Serial: 9548242946988625984
+# MD5 Fingerprint: 27:92:23:1d:0a:f5:40:7c:e9:e6:6b:9d:d8:f5:e7:6c
+# SHA1 Fingerprint: 58:d1:df:95:95:67:6b:63:c0:f0:5b:1c:17:4d:8b:84:0b:c8:78:bd
+# SHA256 Fingerprint: 5a:88:5d:b1:9c:01:d9:12:c5:75:93:88:93:8c:af:bb:df:03:1a:b2:d4:8e:91:ee:15:58:9b:42:97:1d:03:9c
+-----BEGIN CERTIFICATE-----
+MIIEIDCCAwigAwIBAgIJAISCLF8cYtBAMA0GCSqGSIb3DQEBCwUAMIGcMQswCQYD
+VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk
+MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U
+cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxFzAVBgNVBAMMDlRydXN0Q29y
+IEVDQS0xMB4XDTE2MDIwNDEyMzIzM1oXDTI5MTIzMTE3MjgwN1owgZwxCzAJBgNV
+BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw
+IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy
+dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEXMBUGA1UEAwwOVHJ1c3RDb3Ig
+RUNBLTEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPj+ARtZ+odnbb
+3w9U73NjKYKtR8aja+3+XzP4Q1HpGjORMRegdMTUpwHmspI+ap3tDvl0mEDTPwOA
+BoJA6LHip1GnHYMma6ve+heRK9jGrB6xnhkB1Zem6g23xFUfJ3zSCNV2HykVh0A5
+3ThFEXXQmqc04L/NyFIduUd+Dbi7xgz2c1cWWn5DkR9VOsZtRASqnKmcp0yJF4Ou
+owReUoCLHhIlERnXDH19MURB6tuvsBzvgdAsxZohmz3tQjtQJvLsznFhBmIhVE5/
+wZ0+fyCMgMsq2JdiyIMzkX2woloPV+g7zPIlstR8L+xNxqE6FXrntl019fZISjZF
+ZtS6mFjBAgMBAAGjYzBhMB0GA1UdDgQWBBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAf
+BgNVHSMEGDAWgBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAPBgNVHRMBAf8EBTADAQH/
+MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAQEABT41XBVwm8nHc2Fv
+civUwo/yQ10CzsSUuZQRg2dd4mdsdXa/uwyqNsatR5Nj3B5+1t4u/ukZMjgDfxT2
+AHMsWbEhBuH7rBiVDKP/mZb3Kyeb1STMHd3BOuCYRLDE5D53sXOpZCz2HAF8P11F
+hcCF5yWPldwX8zyfGm6wyuMdKulMY/okYWLW2n62HGz1Ah3UKt1VkOsqEUc8Ll50
+soIipX1TH0XsJ5F95yIW6MBoNtjG8U+ARDL54dHRHareqKucBK+tIA5kmE2la8BI
+WJZpTdwHjFGTot+fDz2LYLSCjaoITmJF4PkL0uDgPFveXHEnJcLmA4GLEFPjx1Wi
+tJ/X5g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation
+# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation
+# Label: "SSL.com Root Certification Authority RSA"
+# Serial: 8875640296558310041
+# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29
+# SHA1 Fingerprint: b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb
+# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69
+-----BEGIN CERTIFICATE-----
+MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE
+BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK
+DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz
+OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv
+dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv
+bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN
+AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R
+xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX
+qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC
+C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3
+6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh
+/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF
+YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E
+JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc
+US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8
+ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm
++Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi
+M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV
+HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G
+A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV
+cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc
+Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs
+PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/
+q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0
+cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr
+a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I
+H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y
+K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu
+nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf
+oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY
+Ic2wBlX7Jz9TkHCpBB5XJ7k=
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation
+# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation
+# Label: "SSL.com Root Certification Authority ECC"
+# Serial: 8495723813297216424
+# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e
+# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a
+# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65
+-----BEGIN CERTIFICATE-----
+MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T
+U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0
+aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz
+WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0
+b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS
+b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB
+BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI
+7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg
+CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud
+EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD
+VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T
+kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+
+gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation
+# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation
+# Label: "SSL.com EV Root Certification Authority RSA R2"
+# Serial: 6248227494352943350
+# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95
+# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a
+# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c
+-----BEGIN CERTIFICATE-----
+MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV
+BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE
+CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy
+dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy
+MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G
+A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD
+DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq
+M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf
+OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa
+4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9
+HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR
+aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA
+b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ
+Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV
+PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO
+pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu
+UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY
+MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV
+HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4
+9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW
+s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5
+Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg
+cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM
+79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz
+/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt
+ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm
+Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK
+QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ
+w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi
+S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07
+mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation
+# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation
+# Label: "SSL.com EV Root Certification Authority ECC"
+# Serial: 3182246526754555285
+# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90
+# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d
+# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8
+-----BEGIN CERTIFICATE-----
+MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T
+U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx
+NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv
+dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv
+bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49
+AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA
+VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku
+WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP
+MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX
+5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ
+ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg
+h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6
+# Label: "GlobalSign Root CA - R6"
+# Serial: 1417766617973444989252670301619537
+# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae
+# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1
+# SHA256 Fingerprint: 2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69
+-----BEGIN CERTIFICATE-----
+MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg
+MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh
+bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx
+MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET
+MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ
+KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI
+xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k
+ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD
+aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw
+LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw
+1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX
+k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2
+SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h
+bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n
+WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY
+rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce
+MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD
+AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu
+bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN
+nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt
+Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61
+55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj
+vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf
+cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz
+oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp
+nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs
+pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v
+JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R
+8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4
+5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA=
+-----END CERTIFICATE-----
+
+# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed
+# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed
+# Label: "OISTE WISeKey Global Root GC CA"
+# Serial: 44084345621038548146064804565436152554
+# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23
+# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31
+# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d
+-----BEGIN CERTIFICATE-----
+MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw
+CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91
+bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg
+Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ
+BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu
+ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS
+b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni
+eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W
+p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E
+BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T
+rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV
+57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg
+Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTS Root R1 O=Google Trust Services LLC
+# Subject: CN=GTS Root R1 O=Google Trust Services LLC
+# Label: "GTS Root R1"
+# Serial: 146587175971765017618439757810265552097
+# MD5 Fingerprint: 82:1a:ef:d4:d2:4a:f2:9f:e2:3d:97:06:14:70:72:85
+# SHA1 Fingerprint: e1:c9:50:e6:ef:22:f8:4c:56:45:72:8b:92:20:60:d7:d5:a7:a3:e8
+# SHA256 Fingerprint: 2a:57:54:71:e3:13:40:bc:21:58:1c:bd:2c:f1:3e:15:84:63:20:3e:ce:94:bc:f9:d3:cc:19:6b:f0:9a:54:72
+-----BEGIN CERTIFICATE-----
+MIIFWjCCA0KgAwIBAgIQbkepxUtHDA3sM9CJuRz04TANBgkqhkiG9w0BAQwFADBH
+MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM
+QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy
+MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl
+cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEB
+AQUAA4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaM
+f/vo27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vX
+mX7wCl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7
+zUjwTcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0P
+fyblqAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtc
+vfaHszVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4
+Zor8Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUsp
+zBmkMiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOO
+Rc92wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYW
+k70paDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+
+DVrNVjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgF
+lQIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
+HQ4EFgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBADiW
+Cu49tJYeX++dnAsznyvgyv3SjgofQXSlfKqE1OXyHuY3UjKcC9FhHb8owbZEKTV1
+d5iyfNm9dKyKaOOpMQkpAWBz40d8U6iQSifvS9efk+eCNs6aaAyC58/UEBZvXw6Z
+XPYfcX3v73svfuo21pdwCxXu11xWajOl40k4DLh9+42FpLFZXvRq4d2h9mREruZR
+gyFmxhE+885H7pwoHyXa/6xmld01D1zvICxi/ZG6qcz8WpyTgYMpl0p8WnK0OdC3
+d8t5/Wk6kjftbjhlRn7pYL15iJdfOBL07q9bgsiG1eGZbYwE8na6SfZu6W0eX6Dv
+J4J2QPim01hcDyxC2kLGe4g0x8HYRZvBPsVhHdljUEn2NIVq4BjFbkerQUIpm/Zg
+DdIx02OYI5NaAIFItO/Nis3Jz5nu2Z6qNuFoS3FJFDYoOj0dzpqPJeaAcWErtXvM
++SUWgeExX6GjfhaknBZqlxi9dnKlC54dNuYvoS++cJEPqOba+MSSQGwlfnuzCdyy
+F62ARPBopY+Udf90WuioAnwMCeKpSwughQtiue+hMZL77/ZRBIls6Kl0obsXs7X9
+SQ98POyDGCBDTtWTurQ0sR8WNh8M5mQ5Fkzc4P4dyKliPUDqysU0ArSuiYgzNdws
+E3PYJ/HQcu51OyLemGhmW/HGY0dVHLqlCFF1pkgl
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTS Root R2 O=Google Trust Services LLC
+# Subject: CN=GTS Root R2 O=Google Trust Services LLC
+# Label: "GTS Root R2"
+# Serial: 146587176055767053814479386953112547951
+# MD5 Fingerprint: 44:ed:9a:0e:a4:09:3b:00:f2:ae:4c:a3:c6:61:b0:8b
+# SHA1 Fingerprint: d2:73:96:2a:2a:5e:39:9f:73:3f:e1:c7:1e:64:3f:03:38:34:fc:4d
+# SHA256 Fingerprint: c4:5d:7b:b0:8e:6d:67:e6:2e:42:35:11:0b:56:4e:5f:78:fd:92:ef:05:8c:84:0a:ea:4e:64:55:d7:58:5c:60
+-----BEGIN CERTIFICATE-----
+MIIFWjCCA0KgAwIBAgIQbkepxlqz5yDFMJo/aFLybzANBgkqhkiG9w0BAQwFADBH
+MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM
+QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy
+MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl
+cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEB
+AQUAA4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3Lv
+CvptnfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3Kg
+GjSY6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9Bu
+XvAuMC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOd
+re7kRXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXu
+PuWgf9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1
+mKPV+3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K
+8YzodDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqj
+x5RWIr9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsR
+nTKaG73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0
+kzCqgc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9Ok
+twIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
+HQ4EFgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBALZp
+8KZ3/p7uC4Gt4cCpx/k1HUCCq+YEtN/L9x0Pg/B+E02NjO7jMyLDOfxA325BS0JT
+vhaI8dI4XsRomRyYUpOM52jtG2pzegVATX9lO9ZY8c6DR2Dj/5epnGB3GFW1fgiT
+z9D2PGcDFWEJ+YF59exTpJ/JjwGLc8R3dtyDovUMSRqodt6Sm2T4syzFJ9MHwAiA
+pJiS4wGWAqoC7o87xdFtCjMwc3i5T1QWvwsHoaRc5svJXISPD+AVdyx+Jn7axEvb
+pxZ3B7DNdehyQtaVhJ2Gg/LkkM0JR9SLA3DaWsYDQvTtN6LwG1BUSw7YhN4ZKJmB
+R64JGz9I0cNv4rBgF/XuIwKl2gBbbZCr7qLpGzvpx0QnRY5rn/WkhLx3+WuXrD5R
+RaIRpsyF7gpo8j5QOHokYh4XIDdtak23CZvJ/KRY9bb7nE4Yu5UC56GtmwfuNmsk
+0jmGwZODUNKBRqhfYlcsu2xkiAhu7xNUX90txGdj08+JN7+dIPT7eoOboB6BAFDC
+5AwiWVIQ7UNWhwD4FFKnHYuTjKJNRn8nxnGbJN7k2oaLDX5rIMHAnuFl2GqjpuiF
+izoHCBy69Y9Vmhh1fuXsgWbRIXOhNUQLgD1bnF5vKheW0YMjiGZt5obicDIvUiLn
+yOd/xCxgXS/Dr55FBcOEArf9LAhST4Ldo/DUhgkC
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTS Root R3 O=Google Trust Services LLC
+# Subject: CN=GTS Root R3 O=Google Trust Services LLC
+# Label: "GTS Root R3"
+# Serial: 146587176140553309517047991083707763997
+# MD5 Fingerprint: 1a:79:5b:6b:04:52:9c:5d:c7:74:33:1b:25:9a:f9:25
+# SHA1 Fingerprint: 30:d4:24:6f:07:ff:db:91:89:8a:0b:e9:49:66:11:eb:8c:5e:46:e5
+# SHA256 Fingerprint: 15:d5:b8:77:46:19:ea:7d:54:ce:1c:a6:d0:b0:c4:03:e0:37:a9:17:f1:31:e8:a0:4e:1e:6b:7a:71:ba:bc:e5
+-----BEGIN CERTIFICATE-----
+MIICDDCCAZGgAwIBAgIQbkepx2ypcyRAiQ8DVd2NHTAKBggqhkjOPQQDAzBHMQsw
+CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU
+MBIGA1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw
+MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp
+Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQA
+IgNiAAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout
+736GjOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2A
+DDL24CejQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud
+DgQWBBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEAgFuk
+fCPAlaUs3L6JbyO5o91lAFJekazInXJ0glMLfalAvWhgxeG4VDvBNhcl2MG9AjEA
+njWSdIUlUfUk7GRSJFClH9voy8l27OyCbvWFGFPouOOaKaqW04MjyaR7YbPMAuhd
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTS Root R4 O=Google Trust Services LLC
+# Subject: CN=GTS Root R4 O=Google Trust Services LLC
+# Label: "GTS Root R4"
+# Serial: 146587176229350439916519468929765261721
+# MD5 Fingerprint: 5d:b6:6a:c4:60:17:24:6a:1a:99:a8:4b:ee:5e:b4:26
+# SHA1 Fingerprint: 2a:1d:60:27:d9:4a:b1:0a:1c:4d:91:5c:cd:33:a0:cb:3e:2d:54:cb
+# SHA256 Fingerprint: 71:cc:a5:39:1f:9e:79:4b:04:80:25:30:b3:63:e1:21:da:8a:30:43:bb:26:66:2f:ea:4d:ca:7f:c9:51:a4:bd
+-----BEGIN CERTIFICATE-----
+MIICCjCCAZGgAwIBAgIQbkepyIuUtui7OyrYorLBmTAKBggqhkjOPQQDAzBHMQsw
+CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU
+MBIGA1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw
+MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp
+Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQA
+IgNiAATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzu
+hXyiQHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/l
+xKvRHYqjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud
+DgQWBBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNnADBkAjBqUFJ0
+CMRw3J5QdCHojXohw0+WbhXRIjVhLfoIN+4Zba3bssx9BzT1YBkstTTZbyACMANx
+sbqjYAuG7ZoIapVon+Kz4ZNkfF6Tpt95LY2F45TPI11xzPKwTdb+mciUqXWi4w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=UCA Global G2 Root O=UniTrust
+# Subject: CN=UCA Global G2 Root O=UniTrust
+# Label: "UCA Global G2 Root"
+# Serial: 124779693093741543919145257850076631279
+# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8
+# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a
+# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c
+-----BEGIN CERTIFICATE-----
+MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9
+MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH
+bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x
+CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds
+b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr
+b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9
+kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm
+VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R
+VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc
+C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj
+tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY
+D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv
+j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl
+NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6
+iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP
+O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/
+BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV
+ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj
+L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5
+1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl
+1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU
+b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV
+PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj
+y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb
+EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg
+DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI
++Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy
+YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX
+UB+K+wb1whnw0A==
+-----END CERTIFICATE-----
+
+# Issuer: CN=UCA Extended Validation Root O=UniTrust
+# Subject: CN=UCA Extended Validation Root O=UniTrust
+# Label: "UCA Extended Validation Root"
+# Serial: 106100277556486529736699587978573607008
+# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2
+# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a
+# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24
+-----BEGIN CERTIFICATE-----
+MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH
+MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF
+eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx
+MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV
+BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB
+AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog
+D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS
+sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop
+O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk
+sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi
+c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj
+VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz
+KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/
+TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G
+sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs
+1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD
+fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T
+AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN
+l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR
+ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ
+VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5
+c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp
+4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s
+t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj
+2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO
+vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C
+xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx
+cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM
+fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036
+# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036
+# Label: "Certigna Root CA"
+# Serial: 269714418870597844693661054334862075617
+# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77
+# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43
+# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68
+-----BEGIN CERTIFICATE-----
+MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw
+WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw
+MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x
+MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD
+VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX
+BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw
+ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO
+ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M
+CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu
+I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm
+TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh
+C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf
+ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz
+IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT
+Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k
+JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5
+hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB
+GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE
+FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of
+1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov
+L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo
+dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr
+aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq
+hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L
+6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG
+HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6
+0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB
+lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi
+o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1
+gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v
+faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63
+Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh
+jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw
+3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI
+# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI
+# Label: "emSign Root CA - G1"
+# Serial: 235931866688319308814040
+# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac
+# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c
+# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67
+-----BEGIN CERTIFICATE-----
+MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD
+VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU
+ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH
+MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO
+MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv
+Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN
+BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz
+f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO
+8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq
+d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM
+tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt
+Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB
+o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD
+AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x
+PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM
+wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d
+GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH
+6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby
+RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx
+iN66zB+Afko=
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI
+# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI
+# Label: "emSign ECC Root CA - G3"
+# Serial: 287880440101571086945156
+# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40
+# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1
+# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b
+-----BEGIN CERTIFICATE-----
+MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG
+EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo
+bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g
+RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ
+TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s
+b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw
+djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0
+WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS
+fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB
+zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq
+hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB
+CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD
++JbNR6iC8hZVdyR+EhCVBCyj
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI
+# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI
+# Label: "emSign Root CA - C1"
+# Serial: 825510296613316004955058
+# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68
+# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01
+# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f
+-----BEGIN CERTIFICATE-----
+MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG
+A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg
+SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw
+MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln
+biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v
+dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ
+BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ
+HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH
+3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH
+GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c
+xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1
+aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq
+TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL
+BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87
+/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4
+kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG
+YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT
++xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo
+WXzhriKi4gp6D/piq1JM4fHfyr6DDUI=
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI
+# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI
+# Label: "emSign ECC Root CA - C3"
+# Serial: 582948710642506000014504
+# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5
+# SHA1 Fingerprint: b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66
+# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3
+-----BEGIN CERTIFICATE-----
+MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG
+EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx
+IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw
+MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln
+biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND
+IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci
+MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti
+sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O
+BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB
+Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c
+3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J
+0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post
+# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post
+# Label: "Hongkong Post Root CA 3"
+# Serial: 46170865288971385588281144162979347873371282084
+# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0
+# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02
+# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6
+-----BEGIN CERTIFICATE-----
+MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL
+BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ
+SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n
+a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5
+NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT
+CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u
+Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO
+dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI
+VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV
+9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY
+2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY
+vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt
+bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb
+x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+
+l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK
+TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj
+Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP
+BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e
+i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw
+DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG
+7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk
+MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr
+gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk
+GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS
+3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm
+Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+
+l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c
+JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP
+L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa
+LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG
+mpv0
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - G4"
+# Serial: 289383649854506086828220374796556676440
+# MD5 Fingerprint: 89:53:f1:83:23:b7:7c:8e:05:f1:8c:71:38:4e:1f:88
+# SHA1 Fingerprint: 14:88:4e:86:26:37:b0:26:af:59:62:5c:40:77:ec:35:29:ba:96:01
+# SHA256 Fingerprint: db:35:17:d1:f6:73:2a:2d:5a:b9:7c:53:3e:c7:07:79:ee:32:70:a6:2f:b4:ac:42:38:37:24:60:e6:f0:1e:88
+-----BEGIN CERTIFICATE-----
+MIIGSzCCBDOgAwIBAgIRANm1Q3+vqTkPAAAAAFVlrVgwDQYJKoZIhvcNAQELBQAw
+gb4xCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQL
+Ex9TZWUgd3d3LmVudHJ1c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykg
+MjAxNSBFbnRydXN0LCBJbmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAw
+BgNVBAMTKUVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0
+MB4XDTE1MDUyNzExMTExNloXDTM3MTIyNzExNDExNlowgb4xCzAJBgNVBAYTAlVT
+MRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1
+c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxNSBFbnRydXN0LCBJ
+bmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAwBgNVBAMTKUVudHJ1c3Qg
+Um9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0MIICIjANBgkqhkiG9w0B
+AQEFAAOCAg8AMIICCgKCAgEAsewsQu7i0TD/pZJH4i3DumSXbcr3DbVZwbPLqGgZ
+2K+EbTBwXX7zLtJTmeH+H17ZSK9dE43b/2MzTdMAArzE+NEGCJR5WIoV3imz/f3E
+T+iq4qA7ec2/a0My3dl0ELn39GjUu9CH1apLiipvKgS1sqbHoHrmSKvS0VnM1n4j
+5pds8ELl3FFLFUHtSUrJ3hCX1nbB76W1NhSXNdh4IjVS70O92yfbYVaCNNzLiGAM
+C1rlLAHGVK/XqsEQe9IFWrhAnoanw5CGAlZSCXqc0ieCU0plUmr1POeo8pyvi73T
+DtTUXm6Hnmo9RR3RXRv06QqsYJn7ibT/mCzPfB3pAqoEmh643IhuJbNsZvc8kPNX
+wbMv9W3y+8qh+CmdRouzavbmZwe+LGcKKh9asj5XxNMhIWNlUpEbsZmOeX7m640A
+2Vqq6nPopIICR5b+W45UYaPrL0swsIsjdXJ8ITzI9vF01Bx7owVV7rtNOzK+mndm
+nqxpkCIHH2E6lr7lmk/MBTwoWdPBDFSoWWG9yHJM6Nyfh3+9nEg2XpWjDrk4JFX8
+dWbrAuMINClKxuMrLzOg2qOGpRKX/YAr2hRC45K9PvJdXmd0LhyIRyk0X+IyqJwl
+N4y6mACXi0mWHv0liqzc2thddG5msP9E36EYxr5ILzeUePiVSj9/E15dWf10hkNj
+c0kCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD
+VR0OBBYEFJ84xFYjwznooHFs6FRM5Og6sb9nMA0GCSqGSIb3DQEBCwUAA4ICAQAS
+5UKme4sPDORGpbZgQIeMJX6tuGguW8ZAdjwD+MlZ9POrYs4QjbRaZIxowLByQzTS
+Gwv2LFPSypBLhmb8qoMi9IsabyZIrHZ3CL/FmFz0Jomee8O5ZDIBf9PD3Vht7LGr
+hFV0d4QEJ1JrhkzO3bll/9bGXp+aEJlLdWr+aumXIOTkdnrG0CSqkM0gkLpHZPt/
+B7NTeLUKYvJzQ85BK4FqLoUWlFPUa19yIqtRLULVAJyZv967lDtX/Zr1hstWO1uI
+AeV8KEsD+UmDfLJ/fOPtjqF/YFOOVZ1QNBIPt5d7bIdKROf1beyAN/BYGW5KaHbw
+H5Lk6rWS02FREAutp9lfx1/cH6NcjKF+m7ee01ZvZl4HliDtC3T7Zk6LERXpgUl+
+b7DUUH8i119lAg2m9IUe2K4GS0qn0jFmwvjO5QimpAKWRGhXxNUzzxkvFMSUHHuk
+2fCfDrGA4tGeEWSpiBE6doLlYsKA2KSD7ZPvfC+QsDJMlhVoSFLUmQjAJOgc47Ol
+IQ6SwJAfzyBfyjs4x7dtOvPmRLgOMWuIjnDrnBdSqEGULoe256YSxXXfW8AKbnuk
+5F6G+TaU33fD6Q3AOfF5u0aOq0NZJ7cguyPpVkAh7DE9ZapD8j3fcEThuk0mEDuY
+n/PIjhs4ViFqUZPTkcpG2om3PVODLAgfi49T3f+sHw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation
+# Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation
+# Label: "Microsoft ECC Root Certificate Authority 2017"
+# Serial: 136839042543790627607696632466672567020
+# MD5 Fingerprint: dd:a1:03:e6:4a:93:10:d1:bf:f0:19:42:cb:fe:ed:67
+# SHA1 Fingerprint: 99:9a:64:c3:7f:f4:7d:9f:ab:95:f1:47:69:89:14:60:ee:c4:c3:c5
+# SHA256 Fingerprint: 35:8d:f3:9d:76:4a:f9:e1:b7:66:e9:c9:72:df:35:2e:e1:5c:fa:c2:27:af:6a:d1:d7:0e:8e:4a:6e:dc:ba:02
+-----BEGIN CERTIFICATE-----
+MIICWTCCAd+gAwIBAgIQZvI9r4fei7FK6gxXMQHC7DAKBggqhkjOPQQDAzBlMQsw
+CQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYD
+VQQDEy1NaWNyb3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIw
+MTcwHhcNMTkxMjE4MjMwNjQ1WhcNNDIwNzE4MjMxNjA0WjBlMQswCQYDVQQGEwJV
+UzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1NaWNy
+b3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwdjAQBgcq
+hkjOPQIBBgUrgQQAIgNiAATUvD0CQnVBEyPNgASGAlEvaqiBYgtlzPbKnR5vSmZR
+ogPZnZH6thaxjG7efM3beaYvzrvOcS/lpaso7GMEZpn4+vKTEAXhgShC48Zo9OYb
+hGBKia/teQ87zvH2RPUBeMCjVDBSMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8E
+BTADAQH/MB0GA1UdDgQWBBTIy5lycFIM+Oa+sgRXKSrPQhDtNTAQBgkrBgEEAYI3
+FQEEAwIBADAKBggqhkjOPQQDAwNoADBlAjBY8k3qDPlfXu5gKcs68tvWMoQZP3zV
+L8KxzJOuULsJMsbG7X7JNpQS5GiFBqIb0C8CMQCZ6Ra0DvpWSNSkMBaReNtUjGUB
+iudQZsIxtzm6uBoiB078a1QWIP8rtedMDE2mT3M=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation
+# Subject: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation
+# Label: "Microsoft RSA Root Certificate Authority 2017"
+# Serial: 40975477897264996090493496164228220339
+# MD5 Fingerprint: 10:ff:00:ff:cf:c9:f8:c7:7a:c0:ee:35:8e:c9:0f:47
+# SHA1 Fingerprint: 73:a5:e6:4a:3b:ff:83:16:ff:0e:dc:cc:61:8a:90:6e:4e:ae:4d:74
+# SHA256 Fingerprint: c7:41:f7:0f:4b:2a:8d:88:bf:2e:71:c1:41:22:ef:53:ef:10:eb:a0:cf:a5:e6:4c:fa:20:f4:18:85:30:73:e0
+-----BEGIN CERTIFICATE-----
+MIIFqDCCA5CgAwIBAgIQHtOXCV/YtLNHcB6qvn9FszANBgkqhkiG9w0BAQwFADBl
+MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYw
+NAYDVQQDEy1NaWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5
+IDIwMTcwHhcNMTkxMjE4MjI1MTIyWhcNNDIwNzE4MjMwMDIzWjBlMQswCQYDVQQG
+EwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1N
+aWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKW76UM4wplZEWCpW9R2LBifOZ
+Nt9GkMml7Xhqb0eRaPgnZ1AzHaGm++DlQ6OEAlcBXZxIQIJTELy/xztokLaCLeX0
+ZdDMbRnMlfl7rEqUrQ7eS0MdhweSE5CAg2Q1OQT85elss7YfUJQ4ZVBcF0a5toW1
+HLUX6NZFndiyJrDKxHBKrmCk3bPZ7Pw71VdyvD/IybLeS2v4I2wDwAW9lcfNcztm
+gGTjGqwu+UcF8ga2m3P1eDNbx6H7JyqhtJqRjJHTOoI+dkC0zVJhUXAoP8XFWvLJ
+jEm7FFtNyP9nTUwSlq31/niol4fX/V4ggNyhSyL71Imtus5Hl0dVe49FyGcohJUc
+aDDv70ngNXtk55iwlNpNhTs+VcQor1fznhPbRiefHqJeRIOkpcrVE7NLP8TjwuaG
+YaRSMLl6IE9vDzhTyzMMEyuP1pq9KsgtsRx9S1HKR9FIJ3Jdh+vVReZIZZ2vUpC6
+W6IYZVcSn2i51BVrlMRpIpj0M+Dt+VGOQVDJNE92kKz8OMHY4Xu54+OU4UZpyw4K
+UGsTuqwPN1q3ErWQgR5WrlcihtnJ0tHXUeOrO8ZV/R4O03QK0dqq6mm4lyiPSMQH
++FJDOvTKVTUssKZqwJz58oHhEmrARdlns87/I6KJClTUFLkqqNfs+avNJVgyeY+Q
+W5g5xAgGwax/Dj0ApQIDAQABo1QwUjAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/
+BAUwAwEB/zAdBgNVHQ4EFgQUCctZf4aycI8awznjwNnpv7tNsiMwEAYJKwYBBAGC
+NxUBBAMCAQAwDQYJKoZIhvcNAQEMBQADggIBAKyvPl3CEZaJjqPnktaXFbgToqZC
+LgLNFgVZJ8og6Lq46BrsTaiXVq5lQ7GPAJtSzVXNUzltYkyLDVt8LkS/gxCP81OC
+gMNPOsduET/m4xaRhPtthH80dK2Jp86519efhGSSvpWhrQlTM93uCupKUY5vVau6
+tZRGrox/2KJQJWVggEbbMwSubLWYdFQl3JPk+ONVFT24bcMKpBLBaYVu32TxU5nh
+SnUgnZUP5NbcA/FZGOhHibJXWpS2qdgXKxdJ5XbLwVaZOjex/2kskZGT4d9Mozd2
+TaGf+G0eHdP67Pv0RR0Tbc/3WeUiJ3IrhvNXuzDtJE3cfVa7o7P4NHmJweDyAmH3
+pvwPuxwXC65B2Xy9J6P9LjrRk5Sxcx0ki69bIImtt2dmefU6xqaWM/5TkshGsRGR
+xpl/j8nWZjEgQRCHLQzWwa80mMpkg/sTV9HB8Dx6jKXB/ZUhoHHBk2dxEuqPiApp
+GWSZI1b7rCoucL5mxAyE7+WL85MB+GqQk2dLsmijtWKP6T+MejteD+eMuMZ87zf9
+dOLITzNy4ZQ5bb0Sr74MTnB8G2+NszKTc0QWbej09+CVgI+WXTik9KveCjCHk9hN
+AHFiRSdLOkKEW39lt2c0Ui2cFmuqqNh7o0JMcccMyj6D5KbvtwEwXlGjefVwaaZB
+RA+GsCyRxj3qrg+E
+-----END CERTIFICATE-----
+
+# Issuer: CN=e-Szigno Root CA 2017 O=Microsec Ltd.
+# Subject: CN=e-Szigno Root CA 2017 O=Microsec Ltd.
+# Label: "e-Szigno Root CA 2017"
+# Serial: 411379200276854331539784714
+# MD5 Fingerprint: de:1f:f6:9e:84:ae:a7:b4:21:ce:1e:58:7d:d1:84:98
+# SHA1 Fingerprint: 89:d4:83:03:4f:9e:9a:48:80:5f:72:37:d4:a9:a6:ef:cb:7c:1f:d1
+# SHA256 Fingerprint: be:b0:0b:30:83:9b:9b:c3:2c:32:e4:44:79:05:95:06:41:f2:64:21:b1:5e:d0:89:19:8b:51:8a:e2:ea:1b:99
+-----BEGIN CERTIFICATE-----
+MIICQDCCAeWgAwIBAgIMAVRI7yH9l1kN9QQKMAoGCCqGSM49BAMCMHExCzAJBgNV
+BAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMgTHRk
+LjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25vIFJv
+b3QgQ0EgMjAxNzAeFw0xNzA4MjIxMjA3MDZaFw00MjA4MjIxMjA3MDZaMHExCzAJ
+BgNVBAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMg
+THRkLjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25v
+IFJvb3QgQ0EgMjAxNzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABJbcPYrYsHtv
+xie+RJCxs1YVe45DJH0ahFnuY2iyxl6H0BVIHqiQrb1TotreOpCmYF9oMrWGQd+H
+Wyx7xf58etqjYzBhMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G
+A1UdDgQWBBSHERUI0arBeAyxr87GyZDvvzAEwDAfBgNVHSMEGDAWgBSHERUI0arB
+eAyxr87GyZDvvzAEwDAKBggqhkjOPQQDAgNJADBGAiEAtVfd14pVCzbhhkT61Nlo
+jbjcI4qKDdQvfepz7L9NbKgCIQDLpbQS+ue16M9+k/zzNY9vTlp8tLxOsvxyqltZ
++efcMQ==
+-----END CERTIFICATE-----
+
+# Issuer: O=CERTSIGN SA OU=certSIGN ROOT CA G2
+# Subject: O=CERTSIGN SA OU=certSIGN ROOT CA G2
+# Label: "certSIGN Root CA G2"
+# Serial: 313609486401300475190
+# MD5 Fingerprint: 8c:f1:75:8a:c6:19:cf:94:b7:f7:65:20:87:c3:97:c7
+# SHA1 Fingerprint: 26:f9:93:b4:ed:3d:28:27:b0:b9:4b:a7:e9:15:1d:a3:8d:92:e5:32
+# SHA256 Fingerprint: 65:7c:fe:2f:a7:3f:aa:38:46:25:71:f3:32:a2:36:3a:46:fc:e7:02:09:51:71:07:02:cd:fb:b6:ee:da:33:05
+-----BEGIN CERTIFICATE-----
+MIIFRzCCAy+gAwIBAgIJEQA0tk7GNi02MA0GCSqGSIb3DQEBCwUAMEExCzAJBgNV
+BAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJR04g
+Uk9PVCBDQSBHMjAeFw0xNzAyMDYwOTI3MzVaFw00MjAyMDYwOTI3MzVaMEExCzAJ
+BgNVBAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJ
+R04gUk9PVCBDQSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDF
+dRmRfUR0dIf+DjuW3NgBFszuY5HnC2/OOwppGnzC46+CjobXXo9X69MhWf05N0Iw
+vlDqtg+piNguLWkh59E3GE59kdUWX2tbAMI5Qw02hVK5U2UPHULlj88F0+7cDBrZ
+uIt4ImfkabBoxTzkbFpG583H+u/E7Eu9aqSs/cwoUe+StCmrqzWaTOTECMYmzPhp
+n+Sc8CnTXPnGFiWeI8MgwT0PPzhAsP6CRDiqWhqKa2NYOLQV07YRaXseVO6MGiKs
+cpc/I1mbySKEwQdPzH/iV8oScLumZfNpdWO9lfsbl83kqK/20U6o2YpxJM02PbyW
+xPFsqa7lzw1uKA2wDrXKUXt4FMMgL3/7FFXhEZn91QqhngLjYl/rNUssuHLoPj1P
+rCy7Lobio3aP5ZMqz6WryFyNSwb/EkaseMsUBzXgqd+L6a8VTxaJW732jcZZroiF
+DsGJ6x9nxUWO/203Nit4ZoORUSs9/1F3dmKh7Gc+PoGD4FapUB8fepmrY7+EF3fx
+DTvf95xhszWYijqy7DwaNz9+j5LP2RIUZNoQAhVB/0/E6xyjyfqZ90bp4RjZsbgy
+LcsUDFDYg2WD7rlcz8sFWkz6GZdr1l0T08JcVLwyc6B49fFtHsufpaafItzRUZ6C
+eWRgKRM+o/1Pcmqr4tTluCRVLERLiohEnMqE0yo7AgMBAAGjQjBAMA8GA1UdEwEB
+/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSCIS1mxteg4BXrzkwJ
+d8RgnlRuAzANBgkqhkiG9w0BAQsFAAOCAgEAYN4auOfyYILVAzOBywaK8SJJ6ejq
+kX/GM15oGQOGO0MBzwdw5AgeZYWR5hEit/UCI46uuR59H35s5r0l1ZUa8gWmr4UC
+b6741jH/JclKyMeKqdmfS0mbEVeZkkMR3rYzpMzXjWR91M08KCy0mpbqTfXERMQl
+qiCA2ClV9+BB/AYm/7k29UMUA2Z44RGx2iBfRgB4ACGlHgAoYXhvqAEBj500mv/0
+OJD7uNGzcgbJceaBxXntC6Z58hMLnPddDnskk7RI24Zf3lCGeOdA5jGokHZwYa+c
+NywRtYK3qq4kNFtyDGkNzVmf9nGvnAvRCjj5BiKDUyUM/FHE5r7iOZULJK2v0ZXk
+ltd0ZGtxTgI8qoXzIKNDOXZbbFD+mpwUHmUUihW9o4JFWklWatKcsWMy5WHgUyIO
+pwpJ6st+H6jiYoD2EEVSmAYY3qXNL3+q1Ok+CHLsIwMCPKaq2LxndD0UF/tUSxfj
+03k9bWtJySgOLnRQvwzZRjoQhsmnP+mg7H/rpXdYaXHmgwo38oZJar55CJD2AhZk
+PuXaTH4MNMn5X7azKFGnpyuqSfqNZSlO42sTp5SjLVFteAxEy9/eCG/Oo2Sr05WE
+1LlSVHJ7liXMvGnjSG4N0MedJ5qq+BOS3R7fY581qRY27Iy4g/Q9iY/NtBde17MX
+QRBdJ3NghVdJIgc=
+-----END CERTIFICATE-----
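The MD5/SHA1/SHA256 fingerprint lines in the comment headers above are hashes of each certificate's DER encoding. A minimal sketch, using only the standard library plus the certifi package this diff vendors, of how such a fingerprint can be recomputed from the first PEM block in the bundle:

    import hashlib
    import ssl
    import certifi

    # Pull the first PEM block out of the bundled cacert.pem.
    bundle = certifi.contents()
    start = bundle.index("-----BEGIN CERTIFICATE-----")
    end = bundle.index("-----END CERTIFICATE-----") + len("-----END CERTIFICATE-----")
    first_pem = bundle[start:end]

    # Decode the base64 PEM body to raw DER bytes, then hash the DER.
    der = ssl.PEM_cert_to_DER_cert(first_pem)
    digest = hashlib.sha256(der).hexdigest()

    # Render in the colon-separated form used by the headers above.
    print(":".join(digest[i:i + 2] for i in range(0, len(digest), 2)))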
diff --git a/venv/Lib/site-packages/certifi/core.py b/venv/Lib/site-packages/certifi/core.py
new file mode 100644
index 000000000..5d2b8cd32
--- /dev/null
+++ b/venv/Lib/site-packages/certifi/core.py
@@ -0,0 +1,60 @@
+# -*- coding: utf-8 -*-
+
+"""
+certifi.py
+~~~~~~~~~~
+
+This module returns the installation location of cacert.pem or its contents.
+"""
+import os
+
+try:
+ from importlib.resources import path as get_path, read_text
+
+ _CACERT_CTX = None
+ _CACERT_PATH = None
+
+ def where():
+        # This is slightly terrible, but when we're inside a zipimport
+        # situation we want to delay extracting the file until someone
+        # actually calls where(). We also don't want to re-extract the file
+        # on every call of where(), so we'll do it once and then store it in
+        # a global variable.
+ global _CACERT_CTX
+ global _CACERT_PATH
+ if _CACERT_PATH is None:
+            # This is slightly janky: the importlib.resources API wants you
+            # to manage the cleanup of this file, so it doesn't actually
+            # return a path. Instead it returns a context manager that gives
+            # you the path when you enter it and does any cleanup when you
+            # leave it. In the common case of not needing a temporary file,
+            # it will just return the file system location and the
+            # __exit__() is a no-op.
+            #
+            # We also have to hold onto the actual context manager, because
+            # it will do the cleanup whenever it gets garbage collected, so
+            # we store that at the global level as well.
+ _CACERT_CTX = get_path("certifi", "cacert.pem")
+ _CACERT_PATH = str(_CACERT_CTX.__enter__())
+
+ return _CACERT_PATH
+
+
+except ImportError:
+    # This fallback will work for Python versions prior to 3.7 that lack the
+    # importlib.resources module. It relies on the existing `where` function,
+    # so it won't address issues with environments like PyOxidizer that don't
+    # set __file__ on modules.
+ def read_text(_module, _path, encoding="ascii"):
+ with open(where(), "r", encoding=encoding) as data:
+ return data.read()
+
+ # If we don't have importlib.resources, then we will just do the old logic
+ # of assuming we're on the filesystem and munge the path directly.
+ def where():
+ f = os.path.dirname(__file__)
+
+ return os.path.join(f, "cacert.pem")
+
+
+def contents():
+ return read_text("certifi", "cacert.pem", encoding="ascii")
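For context on how this module is consumed: where() is handed to anything that accepts a CA-bundle path, and contents() to APIs that take PEM data directly. A minimal sketch using only the standard library:

    import ssl
    import certifi

    # Verify TLS peers against the bundled cacert.pem by file path.
    ctx = ssl.create_default_context(cafile=certifi.where())

    # Or feed the bundle's text to an API that accepts PEM data.
    ctx2 = ssl.create_default_context()
    ctx2.load_verify_locations(cadata=certifi.contents())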
diff --git a/venv/Lib/site-packages/cffi-1.14.3.dist-info/INSTALLER b/venv/Lib/site-packages/cffi-1.14.3.dist-info/INSTALLER
new file mode 100644
index 000000000..a1b589e38
--- /dev/null
+++ b/venv/Lib/site-packages/cffi-1.14.3.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/Lib/site-packages/cffi-1.14.3.dist-info/LICENSE b/venv/Lib/site-packages/cffi-1.14.3.dist-info/LICENSE
new file mode 100644
index 000000000..29225eee9
--- /dev/null
+++ b/venv/Lib/site-packages/cffi-1.14.3.dist-info/LICENSE
@@ -0,0 +1,26 @@
+
+Except when otherwise stated (look for LICENSE files in directories or
+information at the beginning of each file) all software and
+documentation is licensed as follows:
+
+ The MIT License
+
+ Permission is hereby granted, free of charge, to any person
+ obtaining a copy of this software and associated documentation
+ files (the "Software"), to deal in the Software without
+ restriction, including without limitation the rights to use,
+ copy, modify, merge, publish, distribute, sublicense, and/or
+ sell copies of the Software, and to permit persons to whom the
+ Software is furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included
+ in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+ OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+ THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ DEALINGS IN THE SOFTWARE.
+
diff --git a/venv/Lib/site-packages/cffi-1.14.3.dist-info/METADATA b/venv/Lib/site-packages/cffi-1.14.3.dist-info/METADATA
new file mode 100644
index 000000000..d3c31dae6
--- /dev/null
+++ b/venv/Lib/site-packages/cffi-1.14.3.dist-info/METADATA
@@ -0,0 +1,37 @@
+Metadata-Version: 2.1
+Name: cffi
+Version: 1.14.3
+Summary: Foreign Function Interface for Python calling C code.
+Home-page: http://cffi.readthedocs.org
+Author: Armin Rigo, Maciej Fijalkowski
+Author-email: python-cffi@googlegroups.com
+License: MIT
+Platform: UNKNOWN
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.2
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: License :: OSI Approved :: MIT License
+Requires-Dist: pycparser
+
+
+CFFI
+====
+
+Foreign Function Interface for Python calling C code.
+Please see the `Documentation <http://cffi.readthedocs.org/>`_.
+
+Contact
+-------
+
+`Mailing list <https://groups.google.com/forum/#!forum/python-cffi>`_
+
+
diff --git a/venv/Lib/site-packages/cffi-1.14.3.dist-info/RECORD b/venv/Lib/site-packages/cffi-1.14.3.dist-info/RECORD
new file mode 100644
index 000000000..9607bddfa
--- /dev/null
+++ b/venv/Lib/site-packages/cffi-1.14.3.dist-info/RECORD
@@ -0,0 +1,44 @@
+_cffi_backend.cp36-win32.pyd,sha256=G6qu3i9zJLMfZ_PT7JiYLHWlQ4oeg6Xdtt0U2fz452g,146432
+cffi-1.14.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+cffi-1.14.3.dist-info/LICENSE,sha256=esEZUOct9bRcUXFqeyLnuzSzJNZ_Bl4pOBUt1HLEgV8,1320
+cffi-1.14.3.dist-info/METADATA,sha256=SOxoTo-W2jbz8qj7XHL2Ry3Shfpbeg6IHx7crTt4NRs,1191
+cffi-1.14.3.dist-info/RECORD,,
+cffi-1.14.3.dist-info/WHEEL,sha256=AV2Nvbg-pC-zIZdxGMBky9Ya05hYOPClxAaFNPEQVgE,102
+cffi-1.14.3.dist-info/entry_points.txt,sha256=Q9f5C9IpjYxo0d2PK9eUcnkgxHc9pHWwjEMaANPKNCI,76
+cffi-1.14.3.dist-info/top_level.txt,sha256=rE7WR3rZfNKxWI9-jn6hsHCAl7MDkB-FmuQbxWjFehQ,19
+cffi/__init__.py,sha256=r5h8errFupyJOsTuicf5dYchgmL9Hwsy-GzAH5AAMOw,527
+cffi/__pycache__/__init__.cpython-36.pyc,,
+cffi/__pycache__/api.cpython-36.pyc,,
+cffi/__pycache__/backend_ctypes.cpython-36.pyc,,
+cffi/__pycache__/cffi_opcode.cpython-36.pyc,,
+cffi/__pycache__/commontypes.cpython-36.pyc,,
+cffi/__pycache__/cparser.cpython-36.pyc,,
+cffi/__pycache__/error.cpython-36.pyc,,
+cffi/__pycache__/ffiplatform.cpython-36.pyc,,
+cffi/__pycache__/lock.cpython-36.pyc,,
+cffi/__pycache__/model.cpython-36.pyc,,
+cffi/__pycache__/pkgconfig.cpython-36.pyc,,
+cffi/__pycache__/recompiler.cpython-36.pyc,,
+cffi/__pycache__/setuptools_ext.cpython-36.pyc,,
+cffi/__pycache__/vengine_cpy.cpython-36.pyc,,
+cffi/__pycache__/vengine_gen.cpython-36.pyc,,
+cffi/__pycache__/verifier.cpython-36.pyc,,
+cffi/_cffi_errors.h,sha256=INd0GxZQna8TTRYNOOr9_iFy0FZa84I_KH1qlmPgulQ,4003
+cffi/_cffi_include.h,sha256=H7cgdZR-POwmUFrIup4jOGzmje8YoQHhN99gVFg7w08,15185
+cffi/_embedding.h,sha256=4vlPtC1Zof0KjwnohsyLcWSDv-Kxa_bLmkobilrHbx0,18108
+cffi/api.py,sha256=Xs_dAN5x1ehfnn_F9ZTdA3Ce0bmPrqeIOkO4Ya1tfbQ,43029
+cffi/backend_ctypes.py,sha256=BHN3q2giL2_Y8wMDST2CIcc_qoMrs65qV9Ob5JvxBZ4,43575
+cffi/cffi_opcode.py,sha256=57P2NHLZkuTWueZybu5iosWljb6ocQmUXzGrCplrnyE,5911
+cffi/commontypes.py,sha256=mEZD4g0qtadnv6O6CEXvMQaJ1K6SRbG5S1h4YvVZHOU,2769
+cffi/cparser.py,sha256=CwVk2V3ATYlCoywG6zN35w6UQ7zj2EWX68KjoJp2Mzk,45237
+cffi/error.py,sha256=Bka7fSV22aIglTQDPIDfpnxTc1aWZLMQdQOJY-h_PUA,908
+cffi/ffiplatform.py,sha256=qioydJeC63dEvrQ3ht5_BPmSs7wzzzuWnZAJtfhic7I,4173
+cffi/lock.py,sha256=vnbsel7392Ib8gGBifIfAfc7MHteSwd3nP725pvc25Q,777
+cffi/model.py,sha256=HRD0WEYHF2Vr6RjS-4wyncElrZxU2256zY0fbMkSKec,22385
+cffi/parse_c_type.h,sha256=fKYNqWNX5f9kZNNhbXcRLTOlpRGRhh8eCLyHmTXIZnQ,6157
+cffi/pkgconfig.py,sha256=9zDcDf0XKIJaxFHLg7e-W8-Xb8Yq5hdhqH7kLg-ugRo,4495
+cffi/recompiler.py,sha256=_Hti-7dC_XumeGfj8tnodXwg1KplG_Iv-7P_5Xl41pA,65632
+cffi/setuptools_ext.py,sha256=8y14TOlRAkgdczmwtPOahyFXJHNyIqhLjUHMYQmjOHs,9150
+cffi/vengine_cpy.py,sha256=ukugKCIsURxJzHxlxS265tGjQfPTFDbThwsqBrwKh-A,44396
+cffi/vengine_gen.py,sha256=mykUhLFJIcV6AyQ5cMJ3n_7dbqw0a9WEjXW0E-WfgiI,27359
+cffi/verifier.py,sha256=La8rdbEkvdvbqAHDzTk5lsNUvdkqB_GcFnO7wXI6Mgk,11513
diff --git a/venv/Lib/site-packages/cffi-1.14.3.dist-info/WHEEL b/venv/Lib/site-packages/cffi-1.14.3.dist-info/WHEEL
new file mode 100644
index 000000000..9ed4ab625
--- /dev/null
+++ b/venv/Lib/site-packages/cffi-1.14.3.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.35.1)
+Root-Is-Purelib: false
+Tag: cp36-cp36m-win32
+
diff --git a/venv/Lib/site-packages/cffi-1.14.3.dist-info/entry_points.txt b/venv/Lib/site-packages/cffi-1.14.3.dist-info/entry_points.txt
new file mode 100644
index 000000000..eee7e0fb1
--- /dev/null
+++ b/venv/Lib/site-packages/cffi-1.14.3.dist-info/entry_points.txt
@@ -0,0 +1,3 @@
+[distutils.setup_keywords]
+cffi_modules = cffi.setuptools_ext:cffi_modules
+
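This entry point registers the cffi_modules setup keyword with distutils/setuptools. A hypothetical sketch of a setup.py that would use it — the project name, build-script filename, and ffibuilder variable are assumptions, following cffi's documented "filename:ffi_variable" convention:

    from setuptools import setup

    setup(
        name="example",  # hypothetical project
        setup_requires=["cffi>=1.0.0"],
        cffi_modules=["example_build.py:ffibuilder"],  # "filename:ffi_variable"
        install_requires=["cffi>=1.0.0"],
    )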
diff --git a/venv/Lib/site-packages/cffi-1.14.3.dist-info/top_level.txt b/venv/Lib/site-packages/cffi-1.14.3.dist-info/top_level.txt
new file mode 100644
index 000000000..f64577957
--- /dev/null
+++ b/venv/Lib/site-packages/cffi-1.14.3.dist-info/top_level.txt
@@ -0,0 +1,2 @@
+_cffi_backend
+cffi
diff --git a/venv/Lib/site-packages/cffi/__init__.py b/venv/Lib/site-packages/cffi/__init__.py
new file mode 100644
index 000000000..264afa162
--- /dev/null
+++ b/venv/Lib/site-packages/cffi/__init__.py
@@ -0,0 +1,14 @@
+__all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError',
+ 'FFIError']
+
+from .api import FFI
+from .error import CDefError, FFIError, VerificationError, VerificationMissing
+from .error import PkgConfigError
+
+__version__ = "1.14.3"
+__version_info__ = (1, 14, 3)
+
+# The verifier module file names are based on the CRC32 of a string that
+# contains the following version number. It may be older than __version__
+# if nothing is clearly incompatible.
+__version_verifier_modules__ = "0.8.6"
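+
+# Editor's note: a minimal usage sketch (not part of the upstream file)
+# showing how the names exported above are typically consumed:
+#
+#     import cffi
+#     assert cffi.__version_info__ >= (1, 14), cffi.__version__
+#     ffi = cffi.FFI()   # the main entry point re-exported from .api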
diff --git a/venv/Lib/site-packages/cffi/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/cffi/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..6defb7f4c
Binary files /dev/null and b/venv/Lib/site-packages/cffi/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cffi/__pycache__/api.cpython-36.pyc b/venv/Lib/site-packages/cffi/__pycache__/api.cpython-36.pyc
new file mode 100644
index 000000000..29bc1b3f7
Binary files /dev/null and b/venv/Lib/site-packages/cffi/__pycache__/api.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cffi/__pycache__/backend_ctypes.cpython-36.pyc b/venv/Lib/site-packages/cffi/__pycache__/backend_ctypes.cpython-36.pyc
new file mode 100644
index 000000000..92f4e6b8d
Binary files /dev/null and b/venv/Lib/site-packages/cffi/__pycache__/backend_ctypes.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cffi/__pycache__/cffi_opcode.cpython-36.pyc b/venv/Lib/site-packages/cffi/__pycache__/cffi_opcode.cpython-36.pyc
new file mode 100644
index 000000000..f496e18c1
Binary files /dev/null and b/venv/Lib/site-packages/cffi/__pycache__/cffi_opcode.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cffi/__pycache__/commontypes.cpython-36.pyc b/venv/Lib/site-packages/cffi/__pycache__/commontypes.cpython-36.pyc
new file mode 100644
index 000000000..c7197a99d
Binary files /dev/null and b/venv/Lib/site-packages/cffi/__pycache__/commontypes.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cffi/__pycache__/cparser.cpython-36.pyc b/venv/Lib/site-packages/cffi/__pycache__/cparser.cpython-36.pyc
new file mode 100644
index 000000000..c60a681c0
Binary files /dev/null and b/venv/Lib/site-packages/cffi/__pycache__/cparser.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cffi/__pycache__/error.cpython-36.pyc b/venv/Lib/site-packages/cffi/__pycache__/error.cpython-36.pyc
new file mode 100644
index 000000000..6968c4677
Binary files /dev/null and b/venv/Lib/site-packages/cffi/__pycache__/error.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cffi/__pycache__/ffiplatform.cpython-36.pyc b/venv/Lib/site-packages/cffi/__pycache__/ffiplatform.cpython-36.pyc
new file mode 100644
index 000000000..5b5f8a724
Binary files /dev/null and b/venv/Lib/site-packages/cffi/__pycache__/ffiplatform.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cffi/__pycache__/lock.cpython-36.pyc b/venv/Lib/site-packages/cffi/__pycache__/lock.cpython-36.pyc
new file mode 100644
index 000000000..8febefa2f
Binary files /dev/null and b/venv/Lib/site-packages/cffi/__pycache__/lock.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cffi/__pycache__/model.cpython-36.pyc b/venv/Lib/site-packages/cffi/__pycache__/model.cpython-36.pyc
new file mode 100644
index 000000000..4ea8028f2
Binary files /dev/null and b/venv/Lib/site-packages/cffi/__pycache__/model.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cffi/__pycache__/pkgconfig.cpython-36.pyc b/venv/Lib/site-packages/cffi/__pycache__/pkgconfig.cpython-36.pyc
new file mode 100644
index 000000000..24e406070
Binary files /dev/null and b/venv/Lib/site-packages/cffi/__pycache__/pkgconfig.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cffi/__pycache__/recompiler.cpython-36.pyc b/venv/Lib/site-packages/cffi/__pycache__/recompiler.cpython-36.pyc
new file mode 100644
index 000000000..f50436607
Binary files /dev/null and b/venv/Lib/site-packages/cffi/__pycache__/recompiler.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cffi/__pycache__/setuptools_ext.cpython-36.pyc b/venv/Lib/site-packages/cffi/__pycache__/setuptools_ext.cpython-36.pyc
new file mode 100644
index 000000000..a9d39745e
Binary files /dev/null and b/venv/Lib/site-packages/cffi/__pycache__/setuptools_ext.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cffi/__pycache__/vengine_cpy.cpython-36.pyc b/venv/Lib/site-packages/cffi/__pycache__/vengine_cpy.cpython-36.pyc
new file mode 100644
index 000000000..d36acb008
Binary files /dev/null and b/venv/Lib/site-packages/cffi/__pycache__/vengine_cpy.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cffi/__pycache__/vengine_gen.cpython-36.pyc b/venv/Lib/site-packages/cffi/__pycache__/vengine_gen.cpython-36.pyc
new file mode 100644
index 000000000..4581b7dde
Binary files /dev/null and b/venv/Lib/site-packages/cffi/__pycache__/vengine_gen.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cffi/__pycache__/verifier.cpython-36.pyc b/venv/Lib/site-packages/cffi/__pycache__/verifier.cpython-36.pyc
new file mode 100644
index 000000000..04329c610
Binary files /dev/null and b/venv/Lib/site-packages/cffi/__pycache__/verifier.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/cffi/_cffi_errors.h b/venv/Lib/site-packages/cffi/_cffi_errors.h
new file mode 100644
index 000000000..83cdad068
--- /dev/null
+++ b/venv/Lib/site-packages/cffi/_cffi_errors.h
@@ -0,0 +1,147 @@
+#ifndef CFFI_MESSAGEBOX
+# ifdef _MSC_VER
+# define CFFI_MESSAGEBOX 1
+# else
+# define CFFI_MESSAGEBOX 0
+# endif
+#endif
+
+
+#if CFFI_MESSAGEBOX
+/* Windows only: logic to take the Python-CFFI embedding logic
+ initialization errors and display them in a background thread
+ with MessageBox. The idea is that if the whole program closes
+ as a result of this problem, then likely it is already a console
+ program and you can read the stderr output in the console too.
+ If it is not a console program, then it will likely show its own
+ dialog to complain, or generally not abruptly close, and for this
+ case the background thread should stay alive.
+*/
+static void *volatile _cffi_bootstrap_text;
+
+static PyObject *_cffi_start_error_capture(void)
+{
+ PyObject *result = NULL;
+ PyObject *x, *m, *bi;
+
+ if (InterlockedCompareExchangePointer(&_cffi_bootstrap_text,
+ (void *)1, NULL) != NULL)
+ return (PyObject *)1;
+
+ m = PyImport_AddModule("_cffi_error_capture");
+ if (m == NULL)
+ goto error;
+
+ result = PyModule_GetDict(m);
+ if (result == NULL)
+ goto error;
+
+#if PY_MAJOR_VERSION >= 3
+ bi = PyImport_ImportModule("builtins");
+#else
+ bi = PyImport_ImportModule("__builtin__");
+#endif
+ if (bi == NULL)
+ goto error;
+ PyDict_SetItemString(result, "__builtins__", bi);
+ Py_DECREF(bi);
+
+ x = PyRun_String(
+ "import sys\n"
+ "class FileLike:\n"
+ " def write(self, x):\n"
+ " try:\n"
+ " of.write(x)\n"
+ " except: pass\n"
+ " self.buf += x\n"
+ "fl = FileLike()\n"
+ "fl.buf = ''\n"
+ "of = sys.stderr\n"
+ "sys.stderr = fl\n"
+ "def done():\n"
+ " sys.stderr = of\n"
+ " return fl.buf\n", /* make sure the returned value stays alive */
+ Py_file_input,
+ result, result);
+ Py_XDECREF(x);
+
+ error:
+ if (PyErr_Occurred())
+ {
+ PyErr_WriteUnraisable(Py_None);
+ PyErr_Clear();
+ }
+ return result;
+}
+
+#pragma comment(lib, "user32.lib")
+
+static DWORD WINAPI _cffi_bootstrap_dialog(LPVOID ignored)
+{
+ Sleep(666); /* may be interrupted if the whole process is closing */
+#if PY_MAJOR_VERSION >= 3
+ MessageBoxW(NULL, (wchar_t *)_cffi_bootstrap_text,
+ L"Python-CFFI error",
+ MB_OK | MB_ICONERROR);
+#else
+ MessageBoxA(NULL, (char *)_cffi_bootstrap_text,
+ "Python-CFFI error",
+ MB_OK | MB_ICONERROR);
+#endif
+ _cffi_bootstrap_text = NULL;
+ return 0;
+}
+
+static void _cffi_stop_error_capture(PyObject *ecap)
+{
+ PyObject *s;
+ void *text;
+
+ if (ecap == (PyObject *)1)
+ return;
+
+ if (ecap == NULL)
+ goto error;
+
+ s = PyRun_String("done()", Py_eval_input, ecap, ecap);
+ if (s == NULL)
+ goto error;
+
+ /* Show a dialog box, but in a background thread, and
+ never show multiple dialog boxes at once. */
+#if PY_MAJOR_VERSION >= 3
+ text = PyUnicode_AsWideCharString(s, NULL);
+#else
+ text = PyString_AsString(s);
+#endif
+
+ _cffi_bootstrap_text = text;
+
+ if (text != NULL)
+ {
+ HANDLE h;
+ h = CreateThread(NULL, 0, _cffi_bootstrap_dialog,
+ NULL, 0, NULL);
+ if (h != NULL)
+ CloseHandle(h);
+ }
+ /* decref the string, but it should stay alive as 'fl.buf'
+ in the small module above. It will really be freed only if
+ we later get another similar error. So it's a leak of at
+ most one copy of the small module. That's fine for this
+ situation which is usually a "fatal error" anyway. */
+ Py_DECREF(s);
+ PyErr_Clear();
+ return;
+
+ error:
+ _cffi_bootstrap_text = NULL;
+ PyErr_Clear();
+}
+
+#else
+
+static PyObject *_cffi_start_error_capture(void) { return NULL; }
+static void _cffi_stop_error_capture(PyObject *ecap) { }
+
+#endif
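+
+/* Editor's note: a minimal Python sketch (not part of cffi) of the same
+   stderr-capture idea used by the PyRun_String() snippet above: a file-like
+   object that forwards writes to the real stderr while buffering a copy for
+   the MessageBox text.
+
+       import sys
+
+       class FileLike:
+           def __init__(self, of):
+               self.of, self.buf = of, ''
+           def write(self, x):
+               try:
+                   self.of.write(x)   # still echo to the real stderr
+               except Exception:
+                   pass
+               self.buf += x          # keep a copy for the dialog text
+
+       fl = FileLike(sys.stderr)
+       sys.stderr = fl
+       def done():
+           sys.stderr = fl.of         # restore stderr, return the capture
+           return fl.buf
+*/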
diff --git a/venv/Lib/site-packages/cffi/_cffi_include.h b/venv/Lib/site-packages/cffi/_cffi_include.h
new file mode 100644
index 000000000..e4c0a6724
--- /dev/null
+++ b/venv/Lib/site-packages/cffi/_cffi_include.h
@@ -0,0 +1,385 @@
+#define _CFFI_
+
+/* We try to define Py_LIMITED_API before including Python.h.
+
+ Mess: we can only define it if Py_DEBUG, Py_TRACE_REFS and
+ Py_REF_DEBUG are not defined. This is a best-effort approximation:
+ we can learn about Py_DEBUG from pyconfig.h, but it is unclear if
+ the same works for the other two macros. Py_DEBUG implies them,
+ but not the other way around.
+
+ The implementation is messy (issue #350): on Windows, with _MSC_VER,
+ we have to define Py_LIMITED_API even before including pyconfig.h.
+ In that case, we guess what pyconfig.h will do to the macros above,
+ and check our guess after the #include.
+
+ Note that on Windows, with CPython 3.x, you need >= 3.5 and virtualenv
+ version >= 16.0.0. With older versions of either, you don't get a
+ copy of PYTHON3.DLL in the virtualenv. We can't check the version of
+ CPython *before* we even include pyconfig.h. ffi.set_source() puts
+ a ``#define _CFFI_NO_LIMITED_API'' at the start of this file if it is
+ running on Windows < 3.5, as an attempt at fixing it, but that's
+ arguably wrong because it may not be the target version of Python.
+ Still better than nothing I guess. As another workaround, you can
+ remove the definition of Py_LIMITED_API here.
+
+ See also 'py_limited_api' in cffi/setuptools_ext.py.
+*/
+#if !defined(_CFFI_USE_EMBEDDING) && !defined(Py_LIMITED_API)
+# ifdef _MSC_VER
+# if !defined(_DEBUG) && !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) && !defined(_CFFI_NO_LIMITED_API)
+# define Py_LIMITED_API
+# endif
+# include <pyconfig.h>
+ /* sanity-check: Py_LIMITED_API will cause crashes if any of these
+ are also defined. Normally, the Python file PC/pyconfig.h does not
+ cause any of these to be defined, with the exception that _DEBUG
+ causes Py_DEBUG. Double-check that. */
+# ifdef Py_LIMITED_API
+# if defined(Py_DEBUG)
+# error "pyconfig.h unexpectedly defines Py_DEBUG, but Py_LIMITED_API is set"
+# endif
+# if defined(Py_TRACE_REFS)
+# error "pyconfig.h unexpectedly defines Py_TRACE_REFS, but Py_LIMITED_API is set"
+# endif
+# if defined(Py_REF_DEBUG)
+# error "pyconfig.h unexpectedly defines Py_REF_DEBUG, but Py_LIMITED_API is set"
+# endif
+# endif
+# else
+# include <pyconfig.h>
+# if !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) && !defined(_CFFI_NO_LIMITED_API)
+# define Py_LIMITED_API
+# endif
+# endif
+#endif
+
+#include <Python.h>
+#ifdef __cplusplus
+extern "C" {
+#endif
+#include <stddef.h>
+#include "parse_c_type.h"
+
+/* this block of #ifs should be kept exactly identical between
+ c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py
+ and cffi/_cffi_include.h */
+#if defined(_MSC_VER)
+# include <malloc.h>   /* for alloca() */
+# if _MSC_VER < 1600 /* MSVC < 2010 */
+ typedef __int8 int8_t;
+ typedef __int16 int16_t;
+ typedef __int32 int32_t;
+ typedef __int64 int64_t;
+ typedef unsigned __int8 uint8_t;
+ typedef unsigned __int16 uint16_t;
+ typedef unsigned __int32 uint32_t;
+ typedef unsigned __int64 uint64_t;
+ typedef __int8 int_least8_t;
+ typedef __int16 int_least16_t;
+ typedef __int32 int_least32_t;
+ typedef __int64 int_least64_t;
+ typedef unsigned __int8 uint_least8_t;
+ typedef unsigned __int16 uint_least16_t;
+ typedef unsigned __int32 uint_least32_t;
+ typedef unsigned __int64 uint_least64_t;
+ typedef __int8 int_fast8_t;
+ typedef __int16 int_fast16_t;
+ typedef __int32 int_fast32_t;
+ typedef __int64 int_fast64_t;
+ typedef unsigned __int8 uint_fast8_t;
+ typedef unsigned __int16 uint_fast16_t;
+ typedef unsigned __int32 uint_fast32_t;
+ typedef unsigned __int64 uint_fast64_t;
+ typedef __int64 intmax_t;
+ typedef unsigned __int64 uintmax_t;
+# else
+# include <stdint.h>
+# endif
+# if _MSC_VER < 1800 /* MSVC < 2013 */
+# ifndef __cplusplus
+ typedef unsigned char _Bool;
+# endif
+# endif
+#else
+# include <stdint.h>
+# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux)
+# include <alloca.h>
+# endif
+#endif
+
+#ifdef __GNUC__
+# define _CFFI_UNUSED_FN __attribute__((unused))
+#else
+# define _CFFI_UNUSED_FN /* nothing */
+#endif
+
+#ifdef __cplusplus
+# ifndef _Bool
+ typedef bool _Bool; /* semi-hackish: C++ has no _Bool; bool is builtin */
+# endif
+#endif
+
+/********** CPython-specific section **********/
+#ifndef PYPY_VERSION
+
+
+#if PY_MAJOR_VERSION >= 3
+# define PyInt_FromLong PyLong_FromLong
+#endif
+
+#define _cffi_from_c_double PyFloat_FromDouble
+#define _cffi_from_c_float PyFloat_FromDouble
+#define _cffi_from_c_long PyInt_FromLong
+#define _cffi_from_c_ulong PyLong_FromUnsignedLong
+#define _cffi_from_c_longlong PyLong_FromLongLong
+#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong
+#define _cffi_from_c__Bool PyBool_FromLong
+
+#define _cffi_to_c_double PyFloat_AsDouble
+#define _cffi_to_c_float PyFloat_AsDouble
+
+#define _cffi_from_c_int(x, type) \
+ (((type)-1) > 0 ? /* unsigned */ \
+ (sizeof(type) < sizeof(long) ? \
+ PyInt_FromLong((long)x) : \
+ sizeof(type) == sizeof(long) ? \
+ PyLong_FromUnsignedLong((unsigned long)x) : \
+ PyLong_FromUnsignedLongLong((unsigned long long)x)) : \
+ (sizeof(type) <= sizeof(long) ? \
+ PyInt_FromLong((long)x) : \
+ PyLong_FromLongLong((long long)x)))
+
+#define _cffi_to_c_int(o, type) \
+ ((type)( \
+ sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \
+ : (type)_cffi_to_c_i8(o)) : \
+ sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \
+ : (type)_cffi_to_c_i16(o)) : \
+ sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \
+ : (type)_cffi_to_c_i32(o)) : \
+ sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \
+ : (type)_cffi_to_c_i64(o)) : \
+ (Py_FatalError("unsupported size for type " #type), (type)0)))
+
+#define _cffi_to_c_i8 \
+ ((int(*)(PyObject *))_cffi_exports[1])
+#define _cffi_to_c_u8 \
+ ((int(*)(PyObject *))_cffi_exports[2])
+#define _cffi_to_c_i16 \
+ ((int(*)(PyObject *))_cffi_exports[3])
+#define _cffi_to_c_u16 \
+ ((int(*)(PyObject *))_cffi_exports[4])
+#define _cffi_to_c_i32 \
+ ((int(*)(PyObject *))_cffi_exports[5])
+#define _cffi_to_c_u32 \
+ ((unsigned int(*)(PyObject *))_cffi_exports[6])
+#define _cffi_to_c_i64 \
+ ((long long(*)(PyObject *))_cffi_exports[7])
+#define _cffi_to_c_u64 \
+ ((unsigned long long(*)(PyObject *))_cffi_exports[8])
+#define _cffi_to_c_char \
+ ((int(*)(PyObject *))_cffi_exports[9])
+#define _cffi_from_c_pointer \
+ ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[10])
+#define _cffi_to_c_pointer \
+ ((char *(*)(PyObject *, struct _cffi_ctypedescr *))_cffi_exports[11])
+#define _cffi_get_struct_layout \
+ not used any more
+#define _cffi_restore_errno \
+ ((void(*)(void))_cffi_exports[13])
+#define _cffi_save_errno \
+ ((void(*)(void))_cffi_exports[14])
+#define _cffi_from_c_char \
+ ((PyObject *(*)(char))_cffi_exports[15])
+#define _cffi_from_c_deref \
+ ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[16])
+#define _cffi_to_c \
+ ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[17])
+#define _cffi_from_c_struct \
+ ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[18])
+#define _cffi_to_c_wchar_t \
+ ((_cffi_wchar_t(*)(PyObject *))_cffi_exports[19])
+#define _cffi_from_c_wchar_t \
+ ((PyObject *(*)(_cffi_wchar_t))_cffi_exports[20])
+#define _cffi_to_c_long_double \
+ ((long double(*)(PyObject *))_cffi_exports[21])
+#define _cffi_to_c__Bool \
+ ((_Bool(*)(PyObject *))_cffi_exports[22])
+#define _cffi_prepare_pointer_call_argument \
+ ((Py_ssize_t(*)(struct _cffi_ctypedescr *, \
+ PyObject *, char **))_cffi_exports[23])
+#define _cffi_convert_array_from_object \
+ ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[24])
+#define _CFFI_CPIDX 25
+#define _cffi_call_python \
+ ((void(*)(struct _cffi_externpy_s *, char *))_cffi_exports[_CFFI_CPIDX])
+#define _cffi_to_c_wchar3216_t \
+ ((int(*)(PyObject *))_cffi_exports[26])
+#define _cffi_from_c_wchar3216_t \
+ ((PyObject *(*)(int))_cffi_exports[27])
+#define _CFFI_NUM_EXPORTS 28
+
+struct _cffi_ctypedescr;
+
+static void *_cffi_exports[_CFFI_NUM_EXPORTS];
+
+#define _cffi_type(index) ( \
+ assert((((uintptr_t)_cffi_types[index]) & 1) == 0), \
+ (struct _cffi_ctypedescr *)_cffi_types[index])
+
+static PyObject *_cffi_init(const char *module_name, Py_ssize_t version,
+ const struct _cffi_type_context_s *ctx)
+{
+ PyObject *module, *o_arg, *new_module;
+ void *raw[] = {
+ (void *)module_name,
+ (void *)version,
+ (void *)_cffi_exports,
+ (void *)ctx,
+ };
+
+ module = PyImport_ImportModule("_cffi_backend");
+ if (module == NULL)
+ goto failure;
+
+ o_arg = PyLong_FromVoidPtr((void *)raw);
+ if (o_arg == NULL)
+ goto failure;
+
+ new_module = PyObject_CallMethod(
+ module, (char *)"_init_cffi_1_0_external_module", (char *)"O", o_arg);
+
+ Py_DECREF(o_arg);
+ Py_DECREF(module);
+ return new_module;
+
+ failure:
+ Py_XDECREF(module);
+ return NULL;
+}
+
+
+#ifdef HAVE_WCHAR_H
+typedef wchar_t _cffi_wchar_t;
+#else
+typedef uint16_t _cffi_wchar_t; /* same random pick as _cffi_backend.c */
+#endif
+
+_CFFI_UNUSED_FN static uint16_t _cffi_to_c_char16_t(PyObject *o)
+{
+ if (sizeof(_cffi_wchar_t) == 2)
+ return (uint16_t)_cffi_to_c_wchar_t(o);
+ else
+ return (uint16_t)_cffi_to_c_wchar3216_t(o);
+}
+
+_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char16_t(uint16_t x)
+{
+ if (sizeof(_cffi_wchar_t) == 2)
+ return _cffi_from_c_wchar_t((_cffi_wchar_t)x);
+ else
+ return _cffi_from_c_wchar3216_t((int)x);
+}
+
+_CFFI_UNUSED_FN static int _cffi_to_c_char32_t(PyObject *o)
+{
+ if (sizeof(_cffi_wchar_t) == 4)
+ return (int)_cffi_to_c_wchar_t(o);
+ else
+ return (int)_cffi_to_c_wchar3216_t(o);
+}
+
+_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char32_t(unsigned int x)
+{
+ if (sizeof(_cffi_wchar_t) == 4)
+ return _cffi_from_c_wchar_t((_cffi_wchar_t)x);
+ else
+ return _cffi_from_c_wchar3216_t((int)x);
+}
+
+union _cffi_union_alignment_u {
+ unsigned char m_char;
+ unsigned short m_short;
+ unsigned int m_int;
+ unsigned long m_long;
+ unsigned long long m_longlong;
+ float m_float;
+ double m_double;
+ long double m_longdouble;
+};
+
+struct _cffi_freeme_s {
+ struct _cffi_freeme_s *next;
+ union _cffi_union_alignment_u alignment;
+};
+
+_CFFI_UNUSED_FN static int
+_cffi_convert_array_argument(struct _cffi_ctypedescr *ctptr, PyObject *arg,
+ char **output_data, Py_ssize_t datasize,
+ struct _cffi_freeme_s **freeme)
+{
+ char *p;
+ if (datasize < 0)
+ return -1;
+
+ p = *output_data;
+ if (p == NULL) {
+ struct _cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc(
+ offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize);
+ if (fp == NULL)
+ return -1;
+ fp->next = *freeme;
+ *freeme = fp;
+ p = *output_data = (char *)&fp->alignment;
+ }
+ memset((void *)p, 0, (size_t)datasize);
+ return _cffi_convert_array_from_object(p, ctptr, arg);
+}
+
+_CFFI_UNUSED_FN static void
+_cffi_free_array_arguments(struct _cffi_freeme_s *freeme)
+{
+ do {
+ void *p = (void *)freeme;
+ freeme = freeme->next;
+ PyObject_Free(p);
+ } while (freeme != NULL);
+}
+
+/********** end CPython-specific section **********/
+#else
+_CFFI_UNUSED_FN
+static void (*_cffi_call_python_org)(struct _cffi_externpy_s *, char *);
+# define _cffi_call_python _cffi_call_python_org
+#endif
+
+
+#define _cffi_array_len(array) (sizeof(array) / sizeof((array)[0]))
+
+#define _cffi_prim_int(size, sign) \
+ ((size) == 1 ? ((sign) ? _CFFI_PRIM_INT8 : _CFFI_PRIM_UINT8) : \
+ (size) == 2 ? ((sign) ? _CFFI_PRIM_INT16 : _CFFI_PRIM_UINT16) : \
+ (size) == 4 ? ((sign) ? _CFFI_PRIM_INT32 : _CFFI_PRIM_UINT32) : \
+ (size) == 8 ? ((sign) ? _CFFI_PRIM_INT64 : _CFFI_PRIM_UINT64) : \
+ _CFFI__UNKNOWN_PRIM)
+
+#define _cffi_prim_float(size) \
+ ((size) == sizeof(float) ? _CFFI_PRIM_FLOAT : \
+ (size) == sizeof(double) ? _CFFI_PRIM_DOUBLE : \
+ (size) == sizeof(long double) ? _CFFI__UNKNOWN_LONG_DOUBLE : \
+ _CFFI__UNKNOWN_FLOAT_PRIM)
+
+#define _cffi_check_int(got, got_nonpos, expected) \
+ ((got_nonpos) == (expected <= 0) && \
+ (got) == (unsigned long long)expected)
+
+#ifdef MS_WIN32
+# define _cffi_stdcall __stdcall
+#else
+# define _cffi_stdcall /* nothing */
+#endif
+
+#ifdef __cplusplus
+}
+#endif
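+
+/* Editor's note: a hedged Python sketch (not part of cffi) of the
+   (size, signedness) dispatch that the _cffi_prim_int macro above performs
+   with the preprocessor: the two properties of the integer type jointly
+   select a primitive kind, with a fallback for unknown sizes.
+
+       PRIM = {(1, True): 'INT8',  (1, False): 'UINT8',
+               (2, True): 'INT16', (2, False): 'UINT16',
+               (4, True): 'INT32', (4, False): 'UINT32',
+               (8, True): 'INT64', (8, False): 'UINT64'}
+
+       def prim_int(size, signed):
+           # unknown sizes map to an "unknown primitive" marker,
+           # mirroring _CFFI__UNKNOWN_PRIM
+           return PRIM.get((size, signed), 'UNKNOWN_PRIM')
+*/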
diff --git a/venv/Lib/site-packages/cffi/_embedding.h b/venv/Lib/site-packages/cffi/_embedding.h
new file mode 100644
index 000000000..fdce22286
--- /dev/null
+++ b/venv/Lib/site-packages/cffi/_embedding.h
@@ -0,0 +1,527 @@
+
+/***** Support code for embedding *****/
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+#if defined(_WIN32)
+# define CFFI_DLLEXPORT __declspec(dllexport)
+#elif defined(__GNUC__)
+# define CFFI_DLLEXPORT __attribute__((visibility("default")))
+#else
+# define CFFI_DLLEXPORT /* nothing */
+#endif
+
+
+/* There are two global variables of type _cffi_call_python_fnptr:
+
+ * _cffi_call_python, which we declare just below, is the one called
+ by ``extern "Python"`` implementations.
+
+ * _cffi_call_python_org, which on CPython is actually part of the
+ _cffi_exports[] array, is the function pointer copied from
+ _cffi_backend.
+
+ After initialization is complete, both are equal. However, the
+ first one remains equal to &_cffi_start_and_call_python until the
+ very end of initialization, when we are (or should be) sure that
+ concurrent threads also see a completely initialized world, and
+ only then is it changed.
+*/
+#undef _cffi_call_python
+typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *);
+static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *);
+static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python;
+
+
+#ifndef _MSC_VER
+ /* --- Assuming a GCC not infinitely old --- */
+# define cffi_compare_and_swap(l,o,n) __sync_bool_compare_and_swap(l,o,n)
+# define cffi_write_barrier() __sync_synchronize()
+# if !defined(__amd64__) && !defined(__x86_64__) && \
+ !defined(__i386__) && !defined(__i386)
+# define cffi_read_barrier() __sync_synchronize()
+# else
+# define cffi_read_barrier() (void)0
+# endif
+#else
+ /* --- Windows threads version --- */
+# include <windows.h>
+# define cffi_compare_and_swap(l,o,n) \
+ (InterlockedCompareExchangePointer(l,n,o) == (o))
+# define cffi_write_barrier() InterlockedCompareExchange(&_cffi_dummy,0,0)
+# define cffi_read_barrier() (void)0
+static volatile LONG _cffi_dummy;
+#endif
+
+#ifdef WITH_THREAD
+# ifndef _MSC_VER
+# include <pthread.h>
+ static pthread_mutex_t _cffi_embed_startup_lock;
+# else
+ static CRITICAL_SECTION _cffi_embed_startup_lock;
+# endif
+ static char _cffi_embed_startup_lock_ready = 0;
+#endif
+
+static void _cffi_acquire_reentrant_mutex(void)
+{
+ static void *volatile lock = NULL;
+
+ while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) {
+ /* should ideally do a spin loop instruction here, but
+ hard to do it portably and doesn't really matter I
+ think: pthread_mutex_init() should be very fast, and
+ this is only run at start-up anyway. */
+ }
+
+#ifdef WITH_THREAD
+ if (!_cffi_embed_startup_lock_ready) {
+# ifndef _MSC_VER
+ pthread_mutexattr_t attr;
+ pthread_mutexattr_init(&attr);
+ pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE);
+ pthread_mutex_init(&_cffi_embed_startup_lock, &attr);
+# else
+ InitializeCriticalSection(&_cffi_embed_startup_lock);
+# endif
+ _cffi_embed_startup_lock_ready = 1;
+ }
+#endif
+
+ while (!cffi_compare_and_swap(&lock, (void *)1, NULL))
+ ;
+
+#ifndef _MSC_VER
+ pthread_mutex_lock(&_cffi_embed_startup_lock);
+#else
+ EnterCriticalSection(&_cffi_embed_startup_lock);
+#endif
+}
+
+static void _cffi_release_reentrant_mutex(void)
+{
+#ifndef _MSC_VER
+ pthread_mutex_unlock(&_cffi_embed_startup_lock);
+#else
+ LeaveCriticalSection(&_cffi_embed_startup_lock);
+#endif
+}
+
+
+/********** CPython-specific section **********/
+#ifndef PYPY_VERSION
+
+#include "_cffi_errors.h"
+
+
+#define _cffi_call_python_org _cffi_exports[_CFFI_CPIDX]
+
+PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void); /* forward */
+
+static void _cffi_py_initialize(void)
+{
+ /* XXX use initsigs=0, which "skips initialization registration of
+ signal handlers, which might be useful when Python is
+ embedded" according to the Python docs. But review and think
+ if it should be a user-controllable setting.
+
+ XXX we should also give a way to write errors to a buffer
+ instead of to stderr.
+
+ XXX if importing 'site' fails, CPython (any version) calls
+ exit(). Should we try to work around this behavior here?
+ */
+ Py_InitializeEx(0);
+}
+
+static int _cffi_initialize_python(void)
+{
+ /* This initializes Python, imports _cffi_backend, and then the
+ present .dll/.so is set up as a CPython C extension module.
+ */
+ int result;
+ PyGILState_STATE state;
+ PyObject *pycode=NULL, *global_dict=NULL, *x;
+ PyObject *builtins;
+
+ state = PyGILState_Ensure();
+
+ /* Call the initxxx() function from the present module. It will
+ create and initialize us as a CPython extension module, instead
+ of letting the startup Python code do it---it might reimport
+ the same .dll/.so and get maybe confused on some platforms.
+ It might also have troubles locating the .dll/.so again for all
+ I know.
+ */
+ (void)_CFFI_PYTHON_STARTUP_FUNC();
+ if (PyErr_Occurred())
+ goto error;
+
+ /* Now run the Python code provided to ffi.embedding_init_code().
+ */
+ pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE,
+ "",
+ Py_file_input);
+ if (pycode == NULL)
+ goto error;
+ global_dict = PyDict_New();
+ if (global_dict == NULL)
+ goto error;
+ builtins = PyEval_GetBuiltins();
+ if (builtins == NULL)
+ goto error;
+ if (PyDict_SetItemString(global_dict, "__builtins__", builtins) < 0)
+ goto error;
+ x = PyEval_EvalCode(
+#if PY_MAJOR_VERSION < 3
+ (PyCodeObject *)
+#endif
+ pycode, global_dict, global_dict);
+ if (x == NULL)
+ goto error;
+ Py_DECREF(x);
+
+ /* Done! Now if we've been called from
+ _cffi_start_and_call_python() in an ``extern "Python"``, we can
+ only hope that the Python code did correctly set up the
+ corresponding @ffi.def_extern() function. Otherwise, the
+ general logic of ``extern "Python"`` functions (inside the
+ _cffi_backend module) will find that the reference is still
+ missing and print an error.
+ */
+ result = 0;
+ done:
+ Py_XDECREF(pycode);
+ Py_XDECREF(global_dict);
+ PyGILState_Release(state);
+ return result;
+
+ error:;
+ {
+ /* Print as much information as potentially useful.
+ Debugging load-time failures with embedding is not fun
+ */
+ PyObject *ecap;
+ PyObject *exception, *v, *tb, *f, *modules, *mod;
+ PyErr_Fetch(&exception, &v, &tb);
+ ecap = _cffi_start_error_capture();
+ f = PySys_GetObject((char *)"stderr");
+ if (f != NULL && f != Py_None) {
+ PyFile_WriteString(
+ "Failed to initialize the Python-CFFI embedding logic:\n\n", f);
+ }
+
+ if (exception != NULL) {
+ PyErr_NormalizeException(&exception, &v, &tb);
+ PyErr_Display(exception, v, tb);
+ }
+ Py_XDECREF(exception);
+ Py_XDECREF(v);
+ Py_XDECREF(tb);
+
+ if (f != NULL && f != Py_None) {
+ PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME
+ "\ncompiled with cffi version: 1.14.3"
+ "\n_cffi_backend module: ", f);
+ modules = PyImport_GetModuleDict();
+ mod = PyDict_GetItemString(modules, "_cffi_backend");
+ if (mod == NULL) {
+ PyFile_WriteString("not loaded", f);
+ }
+ else {
+ v = PyObject_GetAttrString(mod, "__file__");
+ PyFile_WriteObject(v, f, 0);
+ Py_XDECREF(v);
+ }
+ PyFile_WriteString("\nsys.path: ", f);
+ PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0);
+ PyFile_WriteString("\n\n", f);
+ }
+ _cffi_stop_error_capture(ecap);
+ }
+ result = -1;
+ goto done;
+}
+
+#if PY_VERSION_HEX < 0x03080000
+PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */
+#endif
+
+static int _cffi_carefully_make_gil(void)
+{
+ /* This does the basic initialization of Python. It can be called
+ completely concurrently from unrelated threads. It assumes
+ that we don't hold the GIL before (if it exists), and we don't
+ hold it afterwards.
+
+ (What it really does used to be completely different in Python 2
+ and Python 3, with the Python 2 solution avoiding the spin-lock
+ around the Py_InitializeEx() call. However, after recent changes
+ to CPython 2.7 (issue #358) it no longer works. So we use the
+ Python 3 solution everywhere.)
+
+ This initializes Python by calling Py_InitializeEx().
+ Important: this must not be called concurrently at all.
+ So we use a global variable as a simple spin lock. This global
+ variable must be from 'libpythonX.Y.so', not from this
+ cffi-based extension module, because it must be shared from
+ different cffi-based extension modules.
+
+ In Python < 3.8, we choose
+ _PyParser_TokenNames[0] as a completely arbitrary pointer value
+ that is never written to. The default is to point to the
+ string "ENDMARKER". We change it temporarily to point to the
+ next character in that string. (Yes, I know it's REALLY
+ obscure.)
+
+ In Python >= 3.8, this string array is no longer writable, so
+ instead we pick PyCapsuleType.tp_version_tag. We can't change
+ Python < 3.8 because someone might use a mixture of cffi
+ embedded modules, some of which were compiled before this file
+ changed.
+ */
+
+#ifdef WITH_THREAD
+# if PY_VERSION_HEX < 0x03080000
+ char *volatile *lock = (char *volatile *)_PyParser_TokenNames;
+ char *old_value, *locked_value;
+
+ while (1) { /* spin loop */
+ old_value = *lock;
+ locked_value = old_value + 1;
+ if (old_value[0] == 'E') {
+ assert(old_value[1] == 'N');
+ if (cffi_compare_and_swap(lock, old_value, locked_value))
+ break;
+ }
+ else {
+ assert(old_value[0] == 'N');
+ /* should ideally do a spin loop instruction here, but
+ hard to do it portably and doesn't really matter I
+ think: PyEval_InitThreads() should be very fast, and
+ this is only run at start-up anyway. */
+ }
+ }
+# else
+ int volatile *lock = (int volatile *)&PyCapsule_Type.tp_version_tag;
+ int old_value, locked_value;
+ assert(!(PyCapsule_Type.tp_flags & Py_TPFLAGS_HAVE_VERSION_TAG));
+
+ while (1) { /* spin loop */
+ old_value = *lock;
+ locked_value = -42;
+ if (old_value == 0) {
+ if (cffi_compare_and_swap(lock, old_value, locked_value))
+ break;
+ }
+ else {
+ assert(old_value == locked_value);
+ /* should ideally do a spin loop instruction here, but
+ hard to do it portably and doesn't really matter I
+ think: PyEval_InitThreads() should be very fast, and
+ this is only run at start-up anyway. */
+ }
+ }
+# endif
+#endif
+
+ /* call Py_InitializeEx() */
+ if (!Py_IsInitialized()) {
+ _cffi_py_initialize();
+#if PY_VERSION_HEX < 0x03070000
+ PyEval_InitThreads();
+#endif
+ PyEval_SaveThread(); /* release the GIL */
+ /* the returned tstate must be the one that has been stored into the
+ autoTLSkey by _PyGILState_Init() called from Py_Initialize(). */
+ }
+ else {
+#if PY_VERSION_HEX < 0x03070000
+ /* PyEval_InitThreads() is always a no-op from CPython 3.7 */
+ PyGILState_STATE state = PyGILState_Ensure();
+ PyEval_InitThreads();
+ PyGILState_Release(state);
+#endif
+ }
+
+#ifdef WITH_THREAD
+ /* release the lock */
+ while (!cffi_compare_and_swap(lock, locked_value, old_value))
+ ;
+#endif
+
+ return 0;
+}
+
+/********** end CPython-specific section **********/
+
+
+#else
+
+
+/********** PyPy-specific section **********/
+
+PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */
+
+static struct _cffi_pypy_init_s {
+ const char *name;
+ void *func; /* function pointer */
+ const char *code;
+} _cffi_pypy_init = {
+ _CFFI_MODULE_NAME,
+ _CFFI_PYTHON_STARTUP_FUNC,
+ _CFFI_PYTHON_STARTUP_CODE,
+};
+
+extern int pypy_carefully_make_gil(const char *);
+extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *);
+
+static int _cffi_carefully_make_gil(void)
+{
+ return pypy_carefully_make_gil(_CFFI_MODULE_NAME);
+}
+
+static int _cffi_initialize_python(void)
+{
+ return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init);
+}
+
+/********** end PyPy-specific section **********/
+
+
+#endif
+
+
+#ifdef __GNUC__
+__attribute__((noinline))
+#endif
+static _cffi_call_python_fnptr _cffi_start_python(void)
+{
+ /* Delicate logic to initialize Python. This function can be
+ called multiple times concurrently, e.g. when the process calls
+ its first ``extern "Python"`` functions in multiple threads at
+ once. It can also be called recursively, in which case we must
+ ignore it. We also have to consider what occurs if several
+ different cffi-based extensions reach this code in parallel
+ threads---it is a different copy of the code, then, and we
+ can't have any shared global variable unless it comes from
+ 'libpythonX.Y.so'.
+
+ Idea:
+
+ * _cffi_carefully_make_gil(): "carefully" call
+ PyEval_InitThreads() (possibly with Py_InitializeEx() first).
+
+ * then we use a (local) custom lock to make sure that a call to this
+ cffi-based extension will wait if another call to the *same*
+ extension is running the initialization in another thread.
+ It is reentrant, so that a recursive call will not block, but
+ only one from a different thread.
+
+ * then we grab the GIL and (Python 2) we call Py_InitializeEx().
+ At this point, concurrent calls to Py_InitializeEx() are not
+ possible: we have the GIL.
+
+ * do the rest of the specific initialization, which may
+ temporarily release the GIL but not the custom lock.
+ Only release the custom lock when we are done.
+ */
+ static char called = 0;
+
+ if (_cffi_carefully_make_gil() != 0)
+ return NULL;
+
+ _cffi_acquire_reentrant_mutex();
+
+ /* Here the GIL exists, but we don't have it. We're only protected
+ from concurrency by the reentrant mutex. */
+
+ /* This file only initializes the embedded module once, the first
+ time this is called, even if there are subinterpreters. */
+ if (!called) {
+ called = 1; /* invoke _cffi_initialize_python() only once,
+ but don't set '_cffi_call_python' right now,
+ otherwise concurrent threads won't call
+ this function at all (we need them to wait) */
+ if (_cffi_initialize_python() == 0) {
+ /* now initialization is finished. Switch to the fast-path. */
+
+ /* We would like nobody to see the new value of
+ '_cffi_call_python' without also seeing the rest of the
+ data initialized. However, this is not possible. But
+ the new value of '_cffi_call_python' is the function
+ 'cffi_call_python()' from _cffi_backend. So: */
+ cffi_write_barrier();
+ /* ^^^ we put a write barrier here, and a corresponding
+ read barrier at the start of cffi_call_python(). This
+ ensures that after that read barrier, we see everything
+ done here before the write barrier.
+ */
+
+ assert(_cffi_call_python_org != NULL);
+ _cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org;
+ }
+ else {
+ /* initialization failed. Reset this to NULL, even if it was
+ already set to some other value. Future calls to
+ _cffi_start_python() are still forced to occur, and will
+ always return NULL from now on. */
+ _cffi_call_python_org = NULL;
+ }
+ }
+
+ _cffi_release_reentrant_mutex();
+
+ return (_cffi_call_python_fnptr)_cffi_call_python_org;
+}
+
+static
+void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args)
+{
+ _cffi_call_python_fnptr fnptr;
+ int current_err = errno;
+#ifdef _MSC_VER
+ int current_lasterr = GetLastError();
+#endif
+ fnptr = _cffi_start_python();
+ if (fnptr == NULL) {
+ fprintf(stderr, "function %s() called, but initialization code "
+ "failed. Returning 0.\n", externpy->name);
+ memset(args, 0, externpy->size_of_result);
+ }
+#ifdef _MSC_VER
+ SetLastError(current_lasterr);
+#endif
+ errno = current_err;
+
+ if (fnptr != NULL)
+ fnptr(externpy, args);
+}
+
+
+/* The cffi_start_python() function makes sure Python is initialized
+ and our cffi module is set up. It can be called manually from the
+ user C code. The same effect is obtained automatically from any
+ dll-exported ``extern "Python"`` function. This function returns
+ -1 if initialization failed, 0 if all is OK. */
+_CFFI_UNUSED_FN
+static int cffi_start_python(void)
+{
+ if (_cffi_call_python == &_cffi_start_and_call_python) {
+ if (_cffi_start_python() == NULL)
+ return -1;
+ }
+ cffi_read_barrier();
+ return 0;
+}
+
+#undef cffi_compare_and_swap
+#undef cffi_write_barrier
+#undef cffi_read_barrier
+
+#ifdef __cplusplus
+}
+#endif
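+
+/* Editor's note: a simplified Python analogue (not part of cffi) of the
+   start-up discipline implemented above: a reentrant lock plus a 'called'
+   flag make the expensive initialization run exactly once, even under
+   concurrent or recursive calls.
+
+       import threading
+
+       _lock = threading.RLock()      # reentrant, like the mutex above
+       _called = False
+
+       def start_once(initialize):
+           global _called
+           with _lock:
+               if not _called:
+                   _called = True     # set first: a failed init stays failed
+                   initialize()
+*/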
diff --git a/venv/Lib/site-packages/cffi/api.py b/venv/Lib/site-packages/cffi/api.py
new file mode 100644
index 000000000..999a8aefc
--- /dev/null
+++ b/venv/Lib/site-packages/cffi/api.py
@@ -0,0 +1,965 @@
+import sys, types
+from .lock import allocate_lock
+from .error import CDefError
+from . import model
+
+try:
+ callable
+except NameError:
+ # Python 3.1
+ from collections import Callable
+ callable = lambda x: isinstance(x, Callable)
+
+try:
+ basestring
+except NameError:
+ # Python 3.x
+ basestring = str
+
+_unspecified = object()
+
+
+
+class FFI(object):
+ r'''
+ The main top-level class that you instantiate once, or once per module.
+
+ Example usage:
+
+ ffi = FFI()
+ ffi.cdef("""
+ int printf(const char *, ...);
+ """)
+
+ C = ffi.dlopen(None) # standard library
+ -or-
+ C = ffi.verify() # use a C compiler: verify the decl above is right
+
+ C.printf("hello, %s!\n", ffi.new("char[]", "world"))
+ '''
+
+ def __init__(self, backend=None):
+ """Create an FFI instance. The 'backend' argument is used to
+ select a non-default backend, mostly for tests.
+ """
+ if backend is None:
+ # You need PyPy (>= 2.0 beta), or a CPython (>= 2.6) with
+ # _cffi_backend.so compiled.
+ import _cffi_backend as backend
+ from . import __version__
+ if backend.__version__ != __version__:
+ # bad version! Try to be as explicit as possible.
+ if hasattr(backend, '__file__'):
+ # CPython
+ raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. When we import the top-level '_cffi_backend' extension module, we get version %s, located in %r. The two versions should be equal; check your installation." % (
+ __version__, __file__,
+ backend.__version__, backend.__file__))
+ else:
+ # PyPy
+ raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. This interpreter comes with a built-in '_cffi_backend' module, which is version %s. The two versions should be equal; check your installation." % (
+ __version__, __file__, backend.__version__))
+ # (If you insist you can also try to pass the option
+ # 'backend=backend_ctypes.CTypesBackend()', but don't
+ # rely on it! It's probably not going to work well.)
+
+ from . import cparser
+ self._backend = backend
+ self._lock = allocate_lock()
+ self._parser = cparser.Parser()
+ self._cached_btypes = {}
+ self._parsed_types = types.ModuleType('parsed_types').__dict__
+ self._new_types = types.ModuleType('new_types').__dict__
+ self._function_caches = []
+ self._libraries = []
+ self._cdefsources = []
+ self._included_ffis = []
+ self._windows_unicode = None
+ self._init_once_cache = {}
+ self._cdef_version = None
+ self._embedding = None
+ self._typecache = model.get_typecache(backend)
+ if hasattr(backend, 'set_ffi'):
+ backend.set_ffi(self)
+ for name in list(backend.__dict__):
+ if name.startswith('RTLD_'):
+ setattr(self, name, getattr(backend, name))
+ #
+ with self._lock:
+ self.BVoidP = self._get_cached_btype(model.voidp_type)
+ self.BCharA = self._get_cached_btype(model.char_array_type)
+ if isinstance(backend, types.ModuleType):
+ # _cffi_backend: attach these constants to the class
+ if not hasattr(FFI, 'NULL'):
+ FFI.NULL = self.cast(self.BVoidP, 0)
+ FFI.CData, FFI.CType = backend._get_types()
+ else:
+ # ctypes backend: attach these constants to the instance
+ self.NULL = self.cast(self.BVoidP, 0)
+ self.CData, self.CType = backend._get_types()
+ self.buffer = backend.buffer
+
+ def cdef(self, csource, override=False, packed=False, pack=None):
+ """Parse the given C source. This registers all declared functions,
+ types, and global variables. The functions and global variables can
+ then be accessed via either 'ffi.dlopen()' or 'ffi.verify()'.
+ The types can be used in 'ffi.new()' and other functions.
+ If 'packed' is specified as True, all structs declared inside this
+ cdef are packed, i.e. laid out without any field alignment at all.
+ Alternatively, 'pack' can be a small integer, and requests for
+ alignment greater than that are ignored (pack=1 is equivalent to
+ packed=True).
+ """
+ self._cdef(csource, override=override, packed=packed, pack=pack)
+
+ def embedding_api(self, csource, packed=False, pack=None):
+ self._cdef(csource, packed=packed, pack=pack, dllexport=True)
+ if self._embedding is None:
+ self._embedding = ''
+
+ def _cdef(self, csource, override=False, **options):
+ if not isinstance(csource, str): # unicode, on Python 2
+ if not isinstance(csource, basestring):
+ raise TypeError("cdef() argument must be a string")
+ csource = csource.encode('ascii')
+ with self._lock:
+ self._cdef_version = object()
+ self._parser.parse(csource, override=override, **options)
+ self._cdefsources.append(csource)
+ if override:
+ for cache in self._function_caches:
+ cache.clear()
+ finishlist = self._parser._recomplete
+ if finishlist:
+ self._parser._recomplete = []
+ for tp in finishlist:
+ tp.finish_backend_type(self, finishlist)
+
+ def dlopen(self, name, flags=0):
+ """Load and return a dynamic library identified by 'name'.
+ The standard C library can be loaded by passing None.
+ Note that functions and types declared by 'ffi.cdef()' are not
+ linked to a particular library, just like C headers; in the
+ library we only look for the actual (untyped) symbols.
+ """
+ if not (isinstance(name, basestring) or
+ name is None or
+ isinstance(name, self.CData)):
+ raise TypeError("dlopen(name): name must be a file name, None, "
+ "or an already-opened 'void *' handle")
+ with self._lock:
+ lib, function_cache = _make_ffi_library(self, name, flags)
+ self._function_caches.append(function_cache)
+ self._libraries.append(lib)
+ return lib
+
+ def dlclose(self, lib):
+ """Close a library obtained with ffi.dlopen(). After this call,
+ access to functions or variables from the library will fail
+ (possibly with a segmentation fault).
+ """
+ type(lib).__cffi_close__(lib)
+
+ def _typeof_locked(self, cdecl):
+ # call me with the lock!
+ key = cdecl
+ if key in self._parsed_types:
+ return self._parsed_types[key]
+ #
+ if not isinstance(cdecl, str): # unicode, on Python 2
+ cdecl = cdecl.encode('ascii')
+ #
+ type = self._parser.parse_type(cdecl)
+ really_a_function_type = type.is_raw_function
+ if really_a_function_type:
+ type = type.as_function_pointer()
+ btype = self._get_cached_btype(type)
+ result = btype, really_a_function_type
+ self._parsed_types[key] = result
+ return result
+
+ def _typeof(self, cdecl, consider_function_as_funcptr=False):
+ # string -> ctype object
+ try:
+ result = self._parsed_types[cdecl]
+ except KeyError:
+ with self._lock:
+ result = self._typeof_locked(cdecl)
+ #
+ btype, really_a_function_type = result
+ if really_a_function_type and not consider_function_as_funcptr:
+ raise CDefError("the type %r is a function type, not a "
+ "pointer-to-function type" % (cdecl,))
+ return btype
+
+ def typeof(self, cdecl):
+ """Parse the C type given as a string and return the
+ corresponding <ctype> object.
+ It can also be used on 'cdata' instance to get its C type.
+ """
+ if isinstance(cdecl, basestring):
+ return self._typeof(cdecl)
+ if isinstance(cdecl, self.CData):
+ return self._backend.typeof(cdecl)
+ if isinstance(cdecl, types.BuiltinFunctionType):
+ res = _builtin_function_type(cdecl)
+ if res is not None:
+ return res
+ if (isinstance(cdecl, types.FunctionType)
+ and hasattr(cdecl, '_cffi_base_type')):
+ with self._lock:
+ return self._get_cached_btype(cdecl._cffi_base_type)
+ raise TypeError(type(cdecl))
+
+ def sizeof(self, cdecl):
+ """Return the size in bytes of the argument. It can be a
+ string naming a C type, or a 'cdata' instance.
+ """
+ if isinstance(cdecl, basestring):
+ BType = self._typeof(cdecl)
+ return self._backend.sizeof(BType)
+ else:
+ return self._backend.sizeof(cdecl)
+
+ def alignof(self, cdecl):
+ """Return the natural alignment size in bytes of the C type
+ given as a string.
+ """
+ if isinstance(cdecl, basestring):
+ cdecl = self._typeof(cdecl)
+ return self._backend.alignof(cdecl)
+
+ def offsetof(self, cdecl, *fields_or_indexes):
+ """Return the offset of the named field inside the given
+ structure or array, which must be given as a C type name.
+ You can give several field names in case of nested structures.
+ You can also give numeric values which correspond to array
+ items, in case of an array type.
+ """
+ if isinstance(cdecl, basestring):
+ cdecl = self._typeof(cdecl)
+ return self._typeoffsetof(cdecl, *fields_or_indexes)[1]
+
+ def new(self, cdecl, init=None):
+ """Allocate an instance according to the specified C type and
+ return a pointer to it. The specified C type must be either a
+ pointer or an array: ``new('X *')`` allocates an X and returns
+ a pointer to it, whereas ``new('X[n]')`` allocates an array of
+ n X'es and returns an array referencing it (which works
+ mostly like a pointer, like in C). You can also use
+ ``new('X[]', n)`` to allocate an array of a non-constant
+ length n.
+
+ The memory is initialized following the rules of declaring a
+ global variable in C: by default it is zero-initialized, but
+ an explicit initializer can be given which can be used to
+ fill all or part of the memory.
+
+ When the returned object goes out of scope, the memory
+ is freed. In other words the returned object has
+ ownership of the value of type 'cdecl' that it points to. This
+ means that the raw data can be used as long as this object is
+ kept alive, but must not be used for a longer time. Be careful
+ about that when copying the pointer to the memory somewhere
+ else, e.g. into another structure.
+ """
+ if isinstance(cdecl, basestring):
+ cdecl = self._typeof(cdecl)
+ return self._backend.newp(cdecl, init)
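+
+ # Editor's note: a hedged usage sketch based on the docstring above;
+ # the returned object owns its memory and frees it when collected.
+ #
+ #     p = ffi.new("int[4]")        # zero-initialized array of 4 ints
+ #     p[0] = 42
+ #     q = ffi.new("int *", 100)    # single int, explicitly initialized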
+
+ def new_allocator(self, alloc=None, free=None,
+ should_clear_after_alloc=True):
+ """Return a new allocator, i.e. a function that behaves like ffi.new()
+ but uses the provided low-level 'alloc' and 'free' functions.
+
+ 'alloc' is called with the size as argument. If it returns NULL, a
+ MemoryError is raised. 'free' is called with the result of 'alloc'
+ as argument. Both can be either Python function or directly C
+ functions. If 'free' is None, then no free function is called.
+ If both 'alloc' and 'free' are None, the default is used.
+
+ If 'should_clear_after_alloc' is set to False, then the memory
+ returned by 'alloc' is assumed to be already cleared (or you are
+ fine with garbage); otherwise CFFI will clear it.
+ """
+ compiled_ffi = self._backend.FFI()
+ allocator = compiled_ffi.new_allocator(alloc, free,
+ should_clear_after_alloc)
+ def allocate(cdecl, init=None):
+ if isinstance(cdecl, basestring):
+ cdecl = self._typeof(cdecl)
+ return allocator(cdecl, init)
+ return allocate
+
+ def cast(self, cdecl, source):
+ """Similar to a C cast: returns an instance of the named C
+ type initialized with the given 'source'. The source is
+ casted between integers or pointers of any type.
+ """
+ if isinstance(cdecl, basestring):
+ cdecl = self._typeof(cdecl)
+ return self._backend.cast(cdecl, source)
+
+ def string(self, cdata, maxlen=-1):
+ """Return a Python string (or unicode string) from the 'cdata'.
+ If 'cdata' is a pointer or array of characters or bytes, returns
+ the null-terminated string. The returned string extends until
+ the first null character, or at most 'maxlen' characters. If
+ 'cdata' is an array then 'maxlen' defaults to its length.
+
+ If 'cdata' is a pointer or array of wchar_t, returns a unicode
+ string following the same rules.
+
+ If 'cdata' is a single character or byte or a wchar_t, returns
+ it as a string or unicode string.
+
+ If 'cdata' is an enum, returns the value of the enumerator as a
+ string, or 'NUMBER' if the value is out of range.
+ """
+ return self._backend.string(cdata, maxlen)
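+
+ # Editor's note: a hedged usage sketch based on the docstring above.
+ #
+ #     p = ffi.new("char[]", b"hello\x00world")
+ #     assert ffi.string(p) == b"hello"   # stops at the first null byte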
+
+ def unpack(self, cdata, length):
+ """Unpack an array of C data of the given length,
+ returning a Python string/unicode/list.
+
+ If 'cdata' is a pointer to 'char', returns a byte string.
+ It does not stop at the first null. This is equivalent to:
+ ffi.buffer(cdata, length)[:]
+
+ If 'cdata' is a pointer to 'wchar_t', returns a unicode string.
+ 'length' is measured in wchar_t's; it is not the size in bytes.
+
+ If 'cdata' is a pointer to anything else, returns a list of
+ 'length' items. This is a faster equivalent to:
+ [cdata[i] for i in range(length)]
+ """
+ return self._backend.unpack(cdata, length)
+
+ #def buffer(self, cdata, size=-1):
+ # """Return a read-write buffer object that references the raw C data
+ # pointed to by the given 'cdata'. The 'cdata' must be a pointer or
+ # an array. Can be passed to functions expecting a buffer, or directly
+ # manipulated with:
+ #
+ # buf[:] get a copy of it in a regular string, or
+ # buf[idx] as a single character
+ # buf[:] = ...
+ # buf[idx] = ... change the content
+ # """
+ # note that 'buffer' is a type, set on this instance by __init__
+
+ def from_buffer(self, cdecl, python_buffer=_unspecified,
+ require_writable=False):
+ """Return a cdata of the given type pointing to the data of the
+ given Python object, which must support the buffer interface.
+ Note that this is not meant to be used on the built-in types
+ str or unicode (you can build 'char[]' arrays explicitly)
+ but only on objects containing large quantities of raw data
+ in some other format, like 'array.array' or numpy arrays.
+
+ The first argument is optional and defaults to 'char[]'.
+ """
+ if python_buffer is _unspecified:
+ cdecl, python_buffer = self.BCharA, cdecl
+ elif isinstance(cdecl, basestring):
+ cdecl = self._typeof(cdecl)
+ return self._backend.from_buffer(cdecl, python_buffer,
+ require_writable)
+
+ def memmove(self, dest, src, n):
+ """ffi.memmove(dest, src, n) copies n bytes of memory from src to dest.
+
+ Like the C function memmove(), the memory areas may overlap;
+ apart from that it behaves like the C function memcpy().
+
+ 'src' can be any cdata ptr or array, or any Python buffer object.
+ 'dest' can be any cdata ptr or array, or a writable Python buffer
+ object. The size to copy, 'n', is always measured in bytes.
+
+ Unlike other methods, this one supports all Python buffer including
+ byte strings and bytearrays---but it still does not support
+ non-contiguous buffers.
+ """
+ return self._backend.memmove(dest, src, n)
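+
+ # Editor's note: a hedged usage sketch based on the docstring above;
+ # 'n' is always a byte count, and 'src' may be any Python buffer.
+ #
+ #     buf = ffi.new("char[10]")
+ #     ffi.memmove(buf, b"abcd", 4)   # copy 4 bytes from a byte string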
+
+ def callback(self, cdecl, python_callable=None, error=None, onerror=None):
+ """Return a callback object or a decorator making such a
+ callback object. 'cdecl' must name a C function pointer type.
+ The callback invokes the specified 'python_callable' (which may
+ be provided either directly or via a decorator). Important: the
+ callback object must be manually kept alive for as long as the
+ callback may be invoked from the C level.
+ """
+ def callback_decorator_wrap(python_callable):
+ if not callable(python_callable):
+ raise TypeError("the 'python_callable' argument "
+ "is not callable")
+ return self._backend.callback(cdecl, python_callable,
+ error, onerror)
+ if isinstance(cdecl, basestring):
+ cdecl = self._typeof(cdecl, consider_function_as_funcptr=True)
+ if python_callable is None:
+ return callback_decorator_wrap # decorator mode
+ else:
+ return callback_decorator_wrap(python_callable) # direct mode
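+
+ # Editor's note: a hedged usage sketch based on the docstring above; the
+ # callback object itself must stay referenced while C code may invoke it.
+ #
+ #     @ffi.callback("int(int, int)")
+ #     def add(x, y):
+ #         return x + y
+ #     lib.set_binary_op(add)   # hypothetical C setter; keep 'add' alive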
+
+ def getctype(self, cdecl, replace_with=''):
+ """Return a string giving the C type 'cdecl', which may be itself
+ a string or a <ctype> object. If 'replace_with' is given, it gives
+ extra text to append (or insert for more complicated C types), like
+ a variable name, or '*' to get actually the C type 'pointer-to-cdecl'.
+ """
+ if isinstance(cdecl, basestring):
+ cdecl = self._typeof(cdecl)
+ replace_with = replace_with.strip()
+ if (replace_with.startswith('*')
+ and '&[' in self._backend.getcname(cdecl, '&')):
+ replace_with = '(%s)' % replace_with
+ elif replace_with and not replace_with[0] in '[(':
+ replace_with = ' ' + replace_with
+ return self._backend.getcname(cdecl, replace_with)
+
+ def gc(self, cdata, destructor, size=0):
+ """Return a new cdata object that points to the same
+ data. Later, when this new cdata object is garbage-collected,
+ 'destructor(old_cdata_object)' will be called.
+
+ The optional 'size' gives an estimate of the size, used to
+ trigger the garbage collection more eagerly. So far only used
+ on PyPy. It tells the GC that the returned object keeps alive
+ roughly 'size' bytes of external memory.
+ """
+ return self._backend.gcp(cdata, destructor, size)
+
+ def _get_cached_btype(self, type):
+ assert self._lock.acquire(False) is False
+ # call me with the lock!
+ try:
+ BType = self._cached_btypes[type]
+ except KeyError:
+ finishlist = []
+ BType = type.get_cached_btype(self, finishlist)
+ for type in finishlist:
+ type.finish_backend_type(self, finishlist)
+ return BType
+
+ def verify(self, source='', tmpdir=None, **kwargs):
+ """Verify that the current ffi signatures compile on this
+ machine, and return a dynamic library object. The dynamic
+ library can be used to call functions and access global
+ variables declared in this 'ffi'. The library is compiled
+ by the C compiler: it gives you C-level API compatibility
+ (including calling macros). This is unlike 'ffi.dlopen()',
+ which requires binary compatibility in the signatures.
+ """
+ from .verifier import Verifier, _caller_dir_pycache
+ #
+ # If set_unicode(True) was called, insert the UNICODE and
+ # _UNICODE macro declarations
+ if self._windows_unicode:
+ self._apply_windows_unicode(kwargs)
+ #
+ # Set the tmpdir here, and not in Verifier.__init__: it picks
+ # up the caller's directory, which we want to be the caller of
+ # ffi.verify(), as opposed to the caller of Verifier().
+ tmpdir = tmpdir or _caller_dir_pycache()
+ #
+ # Make a Verifier() and use it to load the library.
+ self.verifier = Verifier(self, source, tmpdir, **kwargs)
+ lib = self.verifier.load_library()
+ #
+ # Save the loaded library for keep-alive purposes, even
+ # if the caller doesn't keep it alive itself (it should).
+ self._libraries.append(lib)
+ return lib
+
+ def _get_errno(self):
+ return self._backend.get_errno()
+ def _set_errno(self, errno):
+ self._backend.set_errno(errno)
+ errno = property(_get_errno, _set_errno, None,
+ "the value of 'errno' from/to the C calls")
+
+ def getwinerror(self, code=-1):
+ return self._backend.getwinerror(code)
+
+ def _pointer_to(self, ctype):
+ with self._lock:
+ return model.pointer_cache(self, ctype)
+
+ def addressof(self, cdata, *fields_or_indexes):
+ """Return the address of a .
+ If 'fields_or_indexes' are given, returns the address of that
+ field or array item in the structure or array, recursively in
+ case of nested structures.
+ """
+ try:
+ ctype = self._backend.typeof(cdata)
+ except TypeError:
+ if '__addressof__' in type(cdata).__dict__:
+ return type(cdata).__addressof__(cdata, *fields_or_indexes)
+ raise
+ if fields_or_indexes:
+ ctype, offset = self._typeoffsetof(ctype, *fields_or_indexes)
+ else:
+ if ctype.kind == "pointer":
+ raise TypeError("addressof(pointer)")
+ offset = 0
+ ctypeptr = self._pointer_to(ctype)
+ return self._backend.rawaddressof(ctypeptr, cdata, offset)
+
+ def _typeoffsetof(self, ctype, field_or_index, *fields_or_indexes):
+ ctype, offset = self._backend.typeoffsetof(ctype, field_or_index)
+ for field1 in fields_or_indexes:
+ ctype, offset1 = self._backend.typeoffsetof(ctype, field1, 1)
+ offset += offset1
+ return ctype, offset
+
+ def include(self, ffi_to_include):
+ """Includes the typedefs, structs, unions and enums defined
+ in another FFI instance. Usage is similar to a #include in C,
+ where a part of the program might include types defined in
+ another part for its own usage. Note that the include()
+ method has no effect on functions, constants and global
+ variables, which must anyway be accessed directly from the
+ lib object returned by the original FFI instance.
+ """
+ if not isinstance(ffi_to_include, FFI):
+ raise TypeError("ffi.include() expects an argument that is also of"
+ " type cffi.FFI, not %r" % (
+ type(ffi_to_include).__name__,))
+ if ffi_to_include is self:
+ raise ValueError("self.include(self)")
+ with ffi_to_include._lock:
+ with self._lock:
+ self._parser.include(ffi_to_include._parser)
+ self._cdefsources.append('[')
+ self._cdefsources.extend(ffi_to_include._cdefsources)
+ self._cdefsources.append(']')
+ self._included_ffis.append(ffi_to_include)
+
+ def new_handle(self, x):
+ return self._backend.newp_handle(self.BVoidP, x)
+
+ def from_handle(self, x):
+ return self._backend.from_handle(x)
+
+ def release(self, x):
+ self._backend.release(x)
+
+ def set_unicode(self, enabled_flag):
+ """Windows: if 'enabled_flag' is True, enable the UNICODE and
+ _UNICODE defines in C, and declare the types like TCHAR and LPTCSTR
+ to be (pointers to) wchar_t. If 'enabled_flag' is False,
+ declare these types to be (pointers to) plain 8-bit characters.
+ This is mostly for backward compatibility; you usually want True.
+ """
+ if self._windows_unicode is not None:
+ raise ValueError("set_unicode() can only be called once")
+ enabled_flag = bool(enabled_flag)
+ if enabled_flag:
+ self.cdef("typedef wchar_t TBYTE;"
+ "typedef wchar_t TCHAR;"
+ "typedef const wchar_t *LPCTSTR;"
+ "typedef const wchar_t *PCTSTR;"
+ "typedef wchar_t *LPTSTR;"
+ "typedef wchar_t *PTSTR;"
+ "typedef TBYTE *PTBYTE;"
+ "typedef TCHAR *PTCHAR;")
+ else:
+ self.cdef("typedef char TBYTE;"
+ "typedef char TCHAR;"
+ "typedef const char *LPCTSTR;"
+ "typedef const char *PCTSTR;"
+ "typedef char *LPTSTR;"
+ "typedef char *PTSTR;"
+ "typedef TBYTE *PTBYTE;"
+ "typedef TCHAR *PTCHAR;")
+ self._windows_unicode = enabled_flag
+
+ def _apply_windows_unicode(self, kwds):
+ defmacros = kwds.get('define_macros', ())
+ if not isinstance(defmacros, (list, tuple)):
+ raise TypeError("'define_macros' must be a list or tuple")
+ defmacros = list(defmacros) + [('UNICODE', '1'),
+ ('_UNICODE', '1')]
+ kwds['define_macros'] = defmacros
+
+ def _apply_embedding_fix(self, kwds):
+ # must include an argument like "-lpython2.7" for the compiler
+ def ensure(key, value):
+ lst = kwds.setdefault(key, [])
+ if value not in lst:
+ lst.append(value)
+ #
+ if '__pypy__' in sys.builtin_module_names:
+ import os
+ if sys.platform == "win32":
+ # we need 'libpypy-c.lib'. Current distributions of
+ # pypy (>= 4.1) contain it as 'libs/python27.lib'.
+ pythonlib = "python{0[0]}{0[1]}".format(sys.version_info)
+ if hasattr(sys, 'prefix'):
+ ensure('library_dirs', os.path.join(sys.prefix, 'libs'))
+ else:
+ # we need 'libpypy-c.{so,dylib}', which should be by
+ # default located in 'sys.prefix/bin' for installed
+ # systems.
+ if sys.version_info < (3,):
+ pythonlib = "pypy-c"
+ else:
+ pythonlib = "pypy3-c"
+ if hasattr(sys, 'prefix'):
+ ensure('library_dirs', os.path.join(sys.prefix, 'bin'))
+ # On uninstalled pypy's, the libpypy-c is typically found in
+ # .../pypy/goal/.
+ if hasattr(sys, 'prefix'):
+ ensure('library_dirs', os.path.join(sys.prefix, 'pypy', 'goal'))
+ else:
+ if sys.platform == "win32":
+ template = "python%d%d"
+ if hasattr(sys, 'gettotalrefcount'):
+ template += '_d'
+ else:
+ try:
+ import sysconfig
+ except ImportError: # 2.6
+ from distutils import sysconfig
+ template = "python%d.%d"
+ if sysconfig.get_config_var('DEBUG_EXT'):
+ template += sysconfig.get_config_var('DEBUG_EXT')
+ pythonlib = (template %
+ (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
+ if hasattr(sys, 'abiflags'):
+ pythonlib += sys.abiflags
+ ensure('libraries', pythonlib)
+ if sys.platform == "win32":
+ ensure('extra_link_args', '/MANIFEST')
+
+ def set_source(self, module_name, source, source_extension='.c', **kwds):
+ import os
+ if hasattr(self, '_assigned_source'):
+ raise ValueError("set_source() cannot be called several times "
+ "per ffi object")
+ if not isinstance(module_name, basestring):
+ raise TypeError("'module_name' must be a string")
+ if os.sep in module_name or (os.altsep and os.altsep in module_name):
+ raise ValueError("'module_name' must not contain '/': use a dotted "
+ "name to make a 'package.module' location")
+ self._assigned_source = (str(module_name), source,
+ source_extension, kwds)
+
+ def set_source_pkgconfig(self, module_name, pkgconfig_libs, source,
+ source_extension='.c', **kwds):
+ from . import pkgconfig
+ if not isinstance(pkgconfig_libs, list):
+ raise TypeError("the pkgconfig_libs argument must be a list "
+ "of package names")
+ kwds2 = pkgconfig.flags_from_pkgconfig(pkgconfig_libs)
+ pkgconfig.merge_flags(kwds, kwds2)
+ self.set_source(module_name, source, source_extension, **kwds)
+
+ def distutils_extension(self, tmpdir='build', verbose=True):
+ from distutils.dir_util import mkpath
+ from .recompiler import recompile
+ #
+ if not hasattr(self, '_assigned_source'):
+ if hasattr(self, 'verifier'): # fallback, 'tmpdir' ignored
+ return self.verifier.get_extension()
+ raise ValueError("set_source() must be called before"
+ " distutils_extension()")
+ module_name, source, source_extension, kwds = self._assigned_source
+ if source is None:
+ raise TypeError("distutils_extension() is only for C extension "
+ "modules, not for dlopen()-style pure Python "
+ "modules")
+ mkpath(tmpdir)
+ ext, updated = recompile(self, module_name,
+ source, tmpdir=tmpdir, extradir=tmpdir,
+ source_extension=source_extension,
+ call_c_compiler=False, **kwds)
+ if verbose:
+ if updated:
+ sys.stderr.write("regenerated: %r\n" % (ext.sources[0],))
+ else:
+ sys.stderr.write("not modified: %r\n" % (ext.sources[0],))
+ return ext
+
+ def emit_c_code(self, filename):
+ from .recompiler import recompile
+ #
+ if not hasattr(self, '_assigned_source'):
+ raise ValueError("set_source() must be called before emit_c_code()")
+ module_name, source, source_extension, kwds = self._assigned_source
+ if source is None:
+ raise TypeError("emit_c_code() is only for C extension modules, "
+ "not for dlopen()-style pure Python modules")
+ recompile(self, module_name, source,
+ c_file=filename, call_c_compiler=False, **kwds)
+
+ def emit_python_code(self, filename):
+ from .recompiler import recompile
+ #
+ if not hasattr(self, '_assigned_source'):
+ raise ValueError("set_source() must be called before emit_c_code()")
+ module_name, source, source_extension, kwds = self._assigned_source
+ if source is not None:
+ raise TypeError("emit_python_code() is only for dlopen()-style "
+ "pure Python modules, not for C extension modules")
+ recompile(self, module_name, source,
+ c_file=filename, call_c_compiler=False, **kwds)
+
+ def compile(self, tmpdir='.', verbose=0, target=None, debug=None):
+ """The 'target' argument gives the final file name of the
+ compiled DLL. Use '*' to force distutils' choice, suitable for
+ regular CPython C API modules. Use a file name ending in '.*'
+ to ask for the system's default extension for dynamic libraries
+ (.so/.dll/.dylib).
+
+ The default is '*' when building a non-embedded C API extension,
+ and (module_name + '.*') when building an embedded library.
+ """
+ from .recompiler import recompile
+ #
+ if not hasattr(self, '_assigned_source'):
+ raise ValueError("set_source() must be called before compile()")
+ module_name, source, source_extension, kwds = self._assigned_source
+ return recompile(self, module_name, source, tmpdir=tmpdir,
+ target=target, source_extension=source_extension,
+ compiler_verbose=verbose, debug=debug, **kwds)
+
+ def init_once(self, func, tag):
+ # Read _init_once_cache[tag], which is either (False, lock) if
+ # we're calling the function now in some thread, or (True, result).
+ # Don't call setdefault() in most cases, to avoid allocating and
+ # immediately freeing a lock; but still use setdefault() to avoid
+ # races.
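+ # Usage sketch: ffi.init_once(setup_func, "tag") runs 'setup_func'
+ # at most once; concurrent callers block on the lock and every
+ # later call returns the same cached result.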
+ try:
+ x = self._init_once_cache[tag]
+ except KeyError:
+ x = self._init_once_cache.setdefault(tag, (False, allocate_lock()))
+ # Common case: we got (True, result), so we return the result.
+ if x[0]:
+ return x[1]
+ # Else, it's a lock. Acquire it to serialize the following tests.
+ with x[1]:
+ # Read again from _init_once_cache the current status.
+ x = self._init_once_cache[tag]
+ if x[0]:
+ return x[1]
+ # Call the function and store the result back.
+ result = func()
+ self._init_once_cache[tag] = (True, result)
+ return result
+
+ def embedding_init_code(self, pysource):
+ if self._embedding:
+ raise ValueError("embedding_init_code() can only be called once")
+ # fix 'pysource' before it gets dumped into the C file:
+ # - remove empty lines at the beginning, so it starts at "line 1"
+ # - dedent, if all non-empty lines are indented
+ # - check for SyntaxErrors
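+ # Hedged caller-side sketch ('_example' is a made-up module name):
+ #   ffi.embedding_init_code("""
+ #       from _example import ffi
+ #       @ffi.def_extern()
+ #       def add(x, y):
+ #           return x + y
+ #   """)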
+ import re
+ match = re.match(r'\s*\n', pysource)
+ if match:
+ pysource = pysource[match.end():]
+ lines = pysource.splitlines() or ['']
+ prefix = re.match(r'\s*', lines[0]).group()
+ for i in range(1, len(lines)):
+ line = lines[i]
+ if line.rstrip():
+ while not line.startswith(prefix):
+ prefix = prefix[:-1]
+ i = len(prefix)
+ lines = [line[i:]+'\n' for line in lines]
+ pysource = ''.join(lines)
+ #
+ compile(pysource, "cffi_init", "exec")
+ #
+ self._embedding = pysource
+
+ def def_extern(self, *args, **kwds):
+ raise ValueError("ffi.def_extern() is only available on API-mode FFI "
+ "objects")
+
+ def list_types(self):
+ """Returns the user type names known to this FFI instance.
+ This returns a tuple containing three lists of names:
+ (typedef_names, names_of_structs, names_of_unions)
+ """
+ typedefs = []
+ structs = []
+ unions = []
+ for key in self._parser._declarations:
+ if key.startswith('typedef '):
+ typedefs.append(key[8:])
+ elif key.startswith('struct '):
+ structs.append(key[7:])
+ elif key.startswith('union '):
+ unions.append(key[6:])
+ typedefs.sort()
+ structs.sort()
+ unions.sort()
+ return (typedefs, structs, unions)
+
+
+def _load_backend_lib(backend, name, flags):
+ import os
+ if not isinstance(name, basestring):
+ if sys.platform != "win32" or name is not None:
+ return backend.load_library(name, flags)
+ name = "c" # Windows: load_library(None) fails, but this works
+ # on Python 2 (backward compatibility hack only)
+ first_error = None
+ if '.' in name or '/' in name or os.sep in name:
+ try:
+ return backend.load_library(name, flags)
+ except OSError as e:
+ first_error = e
+ import ctypes.util
+ path = ctypes.util.find_library(name)
+ if path is None:
+ if name == "c" and sys.platform == "win32" and sys.version_info >= (3,):
+ raise OSError("dlopen(None) cannot work on Windows for Python 3 "
+ "(see http://bugs.python.org/issue23606)")
+ msg = ("ctypes.util.find_library() did not manage "
+ "to locate a library called %r" % (name,))
+ if first_error is not None:
+ msg = "%s. Additionally, %s" % (first_error, msg)
+ raise OSError(msg)
+ return backend.load_library(path, flags)
+
+def _make_ffi_library(ffi, libname, flags):
+ backend = ffi._backend
+ backendlib = _load_backend_lib(backend, libname, flags)
+ #
+ def accessor_function(name):
+ key = 'function ' + name
+ tp, _ = ffi._parser._declarations[key]
+ BType = ffi._get_cached_btype(tp)
+ value = backendlib.load_function(BType, name)
+ library.__dict__[name] = value
+ #
+ def accessor_variable(name):
+ key = 'variable ' + name
+ tp, _ = ffi._parser._declarations[key]
+ BType = ffi._get_cached_btype(tp)
+ read_variable = backendlib.read_variable
+ write_variable = backendlib.write_variable
+ setattr(FFILibrary, name, property(
+ lambda self: read_variable(BType, name),
+ lambda self, value: write_variable(BType, name, value)))
+ #
+ def addressof_var(name):
+ try:
+ return addr_variables[name]
+ except KeyError:
+ with ffi._lock:
+ if name not in addr_variables:
+ key = 'variable ' + name
+ tp, _ = ffi._parser._declarations[key]
+ BType = ffi._get_cached_btype(tp)
+ if BType.kind != 'array':
+ BType = model.pointer_cache(ffi, BType)
+ p = backendlib.load_function(BType, name)
+ addr_variables[name] = p
+ return addr_variables[name]
+ #
+ def accessor_constant(name):
+ raise NotImplementedError("non-integer constant '%s' cannot be "
+ "accessed from a dlopen() library" % (name,))
+ #
+ def accessor_int_constant(name):
+ library.__dict__[name] = ffi._parser._int_constants[name]
+ #
+ accessors = {}
+ accessors_version = [False]
+ addr_variables = {}
+ #
+ def update_accessors():
+ if accessors_version[0] is ffi._cdef_version:
+ return
+ #
+ for key, (tp, _) in ffi._parser._declarations.items():
+ if not isinstance(tp, model.EnumType):
+ tag, name = key.split(' ', 1)
+ if tag == 'function':
+ accessors[name] = accessor_function
+ elif tag == 'variable':
+ accessors[name] = accessor_variable
+ elif tag == 'constant':
+ accessors[name] = accessor_constant
+ else:
+ for i, enumname in enumerate(tp.enumerators):
+ def accessor_enum(name, tp=tp, i=i):
+ tp.check_not_partial()
+ library.__dict__[name] = tp.enumvalues[i]
+ accessors[enumname] = accessor_enum
+ for name in ffi._parser._int_constants:
+ accessors.setdefault(name, accessor_int_constant)
+ accessors_version[0] = ffi._cdef_version
+ #
+ def make_accessor(name):
+ with ffi._lock:
+ if name in library.__dict__ or name in FFILibrary.__dict__:
+ return # added by another thread while waiting for the lock
+ if name not in accessors:
+ update_accessors()
+ if name not in accessors:
+ raise AttributeError(name)
+ accessors[name](name)
+ #
+ class FFILibrary(object):
+ def __getattr__(self, name):
+ make_accessor(name)
+ return getattr(self, name)
+ def __setattr__(self, name, value):
+ try:
+ property = getattr(self.__class__, name)
+ except AttributeError:
+ make_accessor(name)
+ setattr(self, name, value)
+ else:
+ property.__set__(self, value)
+ def __dir__(self):
+ with ffi._lock:
+ update_accessors()
+ return accessors.keys()
+ def __addressof__(self, name):
+ if name in library.__dict__:
+ return library.__dict__[name]
+ if name in FFILibrary.__dict__:
+ return addressof_var(name)
+ make_accessor(name)
+ if name in library.__dict__:
+ return library.__dict__[name]
+ if name in FFILibrary.__dict__:
+ return addressof_var(name)
+ raise AttributeError("cffi library has no function or "
+ "global variable named '%s'" % (name,))
+ def __cffi_close__(self):
+ backendlib.close_lib()
+ self.__dict__.clear()
+ #
+ if isinstance(libname, basestring):
+ try:
+ if not isinstance(libname, str): # unicode, on Python 2
+ libname = libname.encode('utf-8')
+ FFILibrary.__name__ = 'FFILibrary_%s' % libname
+ except UnicodeError:
+ pass
+ library = FFILibrary()
+ return library, library.__dict__
+
+def _builtin_function_type(func):
+ # a hack to make at least ffi.typeof(builtin_function) work,
+ # if the builtin function was obtained by 'vengine_cpy'.
+ import sys
+ try:
+ module = sys.modules[func.__module__]
+ ffi = module._cffi_original_ffi
+ types_of_builtin_funcs = module._cffi_types_of_builtin_funcs
+ tp = types_of_builtin_funcs[func]
+ except (KeyError, AttributeError, TypeError):
+ return None
+ else:
+ with ffi._lock:
+ return ffi._get_cached_btype(tp)
diff --git a/venv/Lib/site-packages/cffi/backend_ctypes.py b/venv/Lib/site-packages/cffi/backend_ctypes.py
new file mode 100644
index 000000000..e7956a79c
--- /dev/null
+++ b/venv/Lib/site-packages/cffi/backend_ctypes.py
@@ -0,0 +1,1121 @@
+import ctypes, ctypes.util, operator, sys
+from . import model
+
+if sys.version_info < (3,):
+ bytechr = chr
+else:
+ unicode = str
+ long = int
+ xrange = range
+ bytechr = lambda num: bytes([num])
+
+class CTypesType(type):
+ pass
+
+class CTypesData(object):
+ __metaclass__ = CTypesType
+ __slots__ = ['__weakref__']
+ __name__ = '<cdata>'
+
+ def __init__(self, *args):
+ raise TypeError("cannot instantiate %r" % (self.__class__,))
+
+ @classmethod
+ def _newp(cls, init):
+ raise TypeError("expected a pointer or array ctype, got '%s'"
+ % (cls._get_c_name(),))
+
+ @staticmethod
+ def _to_ctypes(value):
+ raise TypeError
+
+ @classmethod
+ def _arg_to_ctypes(cls, *value):
+ try:
+ ctype = cls._ctype
+ except AttributeError:
+ raise TypeError("cannot create an instance of %r" % (cls,))
+ if value:
+ res = cls._to_ctypes(*value)
+ if not isinstance(res, ctype):
+ res = cls._ctype(res)
+ else:
+ res = cls._ctype()
+ return res
+
+ @classmethod
+ def _create_ctype_obj(cls, init):
+ if init is None:
+ return cls._arg_to_ctypes()
+ else:
+ return cls._arg_to_ctypes(init)
+
+ @staticmethod
+ def _from_ctypes(ctypes_value):
+ raise TypeError
+
+ @classmethod
+ def _get_c_name(cls, replace_with=''):
+ return cls._reftypename.replace(' &', replace_with)
+
+ @classmethod
+ def _fix_class(cls):
+ cls.__name__ = 'CData<%s>' % (cls._get_c_name(),)
+ cls.__qualname__ = 'CData<%s>' % (cls._get_c_name(),)
+ cls.__module__ = 'ffi'
+
+ def _get_own_repr(self):
+ raise NotImplementedError
+
+ def _addr_repr(self, address):
+ if address == 0:
+ return 'NULL'
+ else:
+ if address < 0:
+ address += 1 << (8*ctypes.sizeof(ctypes.c_void_p))
+ return '0x%x' % address
+
+ def __repr__(self, c_name=None):
+ own = self._get_own_repr()
+ return '<cdata %r %s>' % (c_name or self._get_c_name(), own)
+
+ def _convert_to_address(self, BClass):
+ if BClass is None:
+ raise TypeError("cannot convert %r to an address" % (
+ self._get_c_name(),))
+ else:
+ raise TypeError("cannot convert %r to %r" % (
+ self._get_c_name(), BClass._get_c_name()))
+
+ @classmethod
+ def _get_size(cls):
+ return ctypes.sizeof(cls._ctype)
+
+ def _get_size_of_instance(self):
+ return ctypes.sizeof(self._ctype)
+
+ @classmethod
+ def _cast_from(cls, source):
+ raise TypeError("cannot cast to %r" % (cls._get_c_name(),))
+
+ def _cast_to_integer(self):
+ return self._convert_to_address(None)
+
+ @classmethod
+ def _alignment(cls):
+ return ctypes.alignment(cls._ctype)
+
+ def __iter__(self):
+ raise TypeError("cdata %r does not support iteration" % (
+ self._get_c_name(),))
+
+ def _make_cmp(name):
+ cmpfunc = getattr(operator, name)
+ def cmp(self, other):
+ v_is_ptr = not isinstance(self, CTypesGenericPrimitive)
+ w_is_ptr = (isinstance(other, CTypesData) and
+ not isinstance(other, CTypesGenericPrimitive))
+ if v_is_ptr and w_is_ptr:
+ return cmpfunc(self._convert_to_address(None),
+ other._convert_to_address(None))
+ elif v_is_ptr or w_is_ptr:
+ return NotImplemented
+ else:
+ if isinstance(self, CTypesGenericPrimitive):
+ self = self._value
+ if isinstance(other, CTypesGenericPrimitive):
+ other = other._value
+ return cmpfunc(self, other)
+ cmp.func_name = name
+ return cmp
+
+ __eq__ = _make_cmp('__eq__')
+ __ne__ = _make_cmp('__ne__')
+ __lt__ = _make_cmp('__lt__')
+ __le__ = _make_cmp('__le__')
+ __gt__ = _make_cmp('__gt__')
+ __ge__ = _make_cmp('__ge__')
+
+ def __hash__(self):
+ return hash(self._convert_to_address(None))
+
+ def _to_string(self, maxlen):
+ raise TypeError("string(): %r" % (self,))
+
+
+class CTypesGenericPrimitive(CTypesData):
+ __slots__ = []
+
+ def __hash__(self):
+ return hash(self._value)
+
+ def _get_own_repr(self):
+ return repr(self._from_ctypes(self._value))
+
+
+class CTypesGenericArray(CTypesData):
+ __slots__ = []
+
+ @classmethod
+ def _newp(cls, init):
+ return cls(init)
+
+ def __iter__(self):
+ for i in xrange(len(self)):
+ yield self[i]
+
+ def _get_own_repr(self):
+ return self._addr_repr(ctypes.addressof(self._blob))
+
+
+class CTypesGenericPtr(CTypesData):
+ __slots__ = ['_address', '_as_ctype_ptr']
+ _automatic_casts = False
+ kind = "pointer"
+
+ @classmethod
+ def _newp(cls, init):
+ return cls(init)
+
+ @classmethod
+ def _cast_from(cls, source):
+ if source is None:
+ address = 0
+ elif isinstance(source, CTypesData):
+ address = source._cast_to_integer()
+ elif isinstance(source, (int, long)):
+ address = source
+ else:
+ raise TypeError("bad type for cast to %r: %r" %
+ (cls, type(source).__name__))
+ return cls._new_pointer_at(address)
+
+ @classmethod
+ def _new_pointer_at(cls, address):
+ self = cls.__new__(cls)
+ self._address = address
+ self._as_ctype_ptr = ctypes.cast(address, cls._ctype)
+ return self
+
+ def _get_own_repr(self):
+ try:
+ return self._addr_repr(self._address)
+ except AttributeError:
+ return '???'
+
+ def _cast_to_integer(self):
+ return self._address
+
+ def __nonzero__(self):
+ return bool(self._address)
+ __bool__ = __nonzero__
+
+ @classmethod
+ def _to_ctypes(cls, value):
+ if not isinstance(value, CTypesData):
+ raise TypeError("unexpected %s object" % type(value).__name__)
+ address = value._convert_to_address(cls)
+ return ctypes.cast(address, cls._ctype)
+
+ @classmethod
+ def _from_ctypes(cls, ctypes_ptr):
+ address = ctypes.cast(ctypes_ptr, ctypes.c_void_p).value or 0
+ return cls._new_pointer_at(address)
+
+ @classmethod
+ def _initialize(cls, ctypes_ptr, value):
+ if value:
+ ctypes_ptr.contents = cls._to_ctypes(value).contents
+
+ def _convert_to_address(self, BClass):
+ if (BClass in (self.__class__, None) or BClass._automatic_casts
+ or self._automatic_casts):
+ return self._address
+ else:
+ return CTypesData._convert_to_address(self, BClass)
+
+
+class CTypesBaseStructOrUnion(CTypesData):
+ __slots__ = ['_blob']
+
+ @classmethod
+ def _create_ctype_obj(cls, init):
+ # may be overridden
+ raise TypeError("cannot instantiate opaque type %s" % (cls,))
+
+ def _get_own_repr(self):
+ return self._addr_repr(ctypes.addressof(self._blob))
+
+ @classmethod
+ def _offsetof(cls, fieldname):
+ return getattr(cls._ctype, fieldname).offset
+
+ def _convert_to_address(self, BClass):
+ if getattr(BClass, '_BItem', None) is self.__class__:
+ return ctypes.addressof(self._blob)
+ else:
+ return CTypesData._convert_to_address(self, BClass)
+
+ @classmethod
+ def _from_ctypes(cls, ctypes_struct_or_union):
+ self = cls.__new__(cls)
+ self._blob = ctypes_struct_or_union
+ return self
+
+ @classmethod
+ def _to_ctypes(cls, value):
+ return value._blob
+
+ def __repr__(self, c_name=None):
+ return CTypesData.__repr__(self, c_name or self._get_c_name(' &'))
+
+
+class CTypesBackend(object):
+
+ PRIMITIVE_TYPES = {
+ 'char': ctypes.c_char,
+ 'short': ctypes.c_short,
+ 'int': ctypes.c_int,
+ 'long': ctypes.c_long,
+ 'long long': ctypes.c_longlong,
+ 'signed char': ctypes.c_byte,
+ 'unsigned char': ctypes.c_ubyte,
+ 'unsigned short': ctypes.c_ushort,
+ 'unsigned int': ctypes.c_uint,
+ 'unsigned long': ctypes.c_ulong,
+ 'unsigned long long': ctypes.c_ulonglong,
+ 'float': ctypes.c_float,
+ 'double': ctypes.c_double,
+ '_Bool': ctypes.c_bool,
+ }
+
+ for _name in ['unsigned long long', 'unsigned long',
+ 'unsigned int', 'unsigned short', 'unsigned char']:
+ _size = ctypes.sizeof(PRIMITIVE_TYPES[_name])
+ PRIMITIVE_TYPES['uint%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name]
+ if _size == ctypes.sizeof(ctypes.c_void_p):
+ PRIMITIVE_TYPES['uintptr_t'] = PRIMITIVE_TYPES[_name]
+ if _size == ctypes.sizeof(ctypes.c_size_t):
+ PRIMITIVE_TYPES['size_t'] = PRIMITIVE_TYPES[_name]
+
+ for _name in ['long long', 'long', 'int', 'short', 'signed char']:
+ _size = ctypes.sizeof(PRIMITIVE_TYPES[_name])
+ PRIMITIVE_TYPES['int%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name]
+ if _size == ctypes.sizeof(ctypes.c_void_p):
+ PRIMITIVE_TYPES['intptr_t'] = PRIMITIVE_TYPES[_name]
+ PRIMITIVE_TYPES['ptrdiff_t'] = PRIMITIVE_TYPES[_name]
+ if _size == ctypes.sizeof(ctypes.c_size_t):
+ PRIMITIVE_TYPES['ssize_t'] = PRIMITIVE_TYPES[_name]
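+ # Sketch: on a typical 64-bit Linux, sizeof(long) == 8, so 'int64_t'
+ # maps to ctypes.c_long (the last 8-byte name in the loop wins);
+ # 'intptr_t'/'ptrdiff_t' track the pointer size, 'ssize_t' size_t.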
+
+
+ def __init__(self):
+ self.RTLD_LAZY = 0 # not supported anyway by ctypes
+ self.RTLD_NOW = 0
+ self.RTLD_GLOBAL = ctypes.RTLD_GLOBAL
+ self.RTLD_LOCAL = ctypes.RTLD_LOCAL
+
+ def set_ffi(self, ffi):
+ self.ffi = ffi
+
+ def _get_types(self):
+ return CTypesData, CTypesType
+
+ def load_library(self, path, flags=0):
+ cdll = ctypes.CDLL(path, flags)
+ return CTypesLibrary(self, cdll)
+
+ def new_void_type(self):
+ class CTypesVoid(CTypesData):
+ __slots__ = []
+ _reftypename = 'void &'
+ @staticmethod
+ def _from_ctypes(novalue):
+ return None
+ @staticmethod
+ def _to_ctypes(novalue):
+ if novalue is not None:
+ raise TypeError("None expected, got %s object" %
+ (type(novalue).__name__,))
+ return None
+ CTypesVoid._fix_class()
+ return CTypesVoid
+
+ def new_primitive_type(self, name):
+ if name == 'wchar_t':
+ raise NotImplementedError(name)
+ ctype = self.PRIMITIVE_TYPES[name]
+ if name == 'char':
+ kind = 'char'
+ elif name in ('float', 'double'):
+ kind = 'float'
+ else:
+ if name in ('signed char', 'unsigned char'):
+ kind = 'byte'
+ elif name == '_Bool':
+ kind = 'bool'
+ else:
+ kind = 'int'
+ is_signed = (ctype(-1).value == -1)
+ #
+ def _cast_source_to_int(source):
+ if isinstance(source, (int, long, float)):
+ source = int(source)
+ elif isinstance(source, CTypesData):
+ source = source._cast_to_integer()
+ elif isinstance(source, bytes):
+ source = ord(source)
+ elif source is None:
+ source = 0
+ else:
+ raise TypeError("bad type for cast to %r: %r" %
+ (CTypesPrimitive, type(source).__name__))
+ return source
+ #
+ kind1 = kind
+ class CTypesPrimitive(CTypesGenericPrimitive):
+ __slots__ = ['_value']
+ _ctype = ctype
+ _reftypename = '%s &' % name
+ kind = kind1
+
+ def __init__(self, value):
+ self._value = value
+
+ @staticmethod
+ def _create_ctype_obj(init):
+ if init is None:
+ return ctype()
+ return ctype(CTypesPrimitive._to_ctypes(init))
+
+ if kind == 'int' or kind == 'byte':
+ @classmethod
+ def _cast_from(cls, source):
+ source = _cast_source_to_int(source)
+ source = ctype(source).value # cast within range
+ return cls(source)
+ def __int__(self):
+ return self._value
+
+ if kind == 'bool':
+ @classmethod
+ def _cast_from(cls, source):
+ if not isinstance(source, (int, long, float)):
+ source = _cast_source_to_int(source)
+ return cls(bool(source))
+ def __int__(self):
+ return int(self._value)
+
+ if kind == 'char':
+ @classmethod
+ def _cast_from(cls, source):
+ source = _cast_source_to_int(source)
+ source = bytechr(source & 0xFF)
+ return cls(source)
+ def __int__(self):
+ return ord(self._value)
+
+ if kind == 'float':
+ @classmethod
+ def _cast_from(cls, source):
+ if isinstance(source, float):
+ pass
+ elif isinstance(source, CTypesGenericPrimitive):
+ if hasattr(source, '__float__'):
+ source = float(source)
+ else:
+ source = int(source)
+ else:
+ source = _cast_source_to_int(source)
+ source = ctype(source).value # fix precision
+ return cls(source)
+ def __int__(self):
+ return int(self._value)
+ def __float__(self):
+ return self._value
+
+ _cast_to_integer = __int__
+
+ if kind == 'int' or kind == 'byte' or kind == 'bool':
+ @staticmethod
+ def _to_ctypes(x):
+ if not isinstance(x, (int, long)):
+ if isinstance(x, CTypesData):
+ x = int(x)
+ else:
+ raise TypeError("integer expected, got %s" %
+ type(x).__name__)
+ if ctype(x).value != x:
+ if not is_signed and x < 0:
+ raise OverflowError("%s: negative integer" % name)
+ else:
+ raise OverflowError("%s: integer out of bounds"
+ % name)
+ return x
+
+ if kind == 'char':
+ @staticmethod
+ def _to_ctypes(x):
+ if isinstance(x, bytes) and len(x) == 1:
+ return x
+ if isinstance(x, CTypesPrimitive): # <CData <char>>
+ return x._value
+ raise TypeError("character expected, got %s" %
+ type(x).__name__)
+ def __nonzero__(self):
+ return ord(self._value) != 0
+ else:
+ def __nonzero__(self):
+ return self._value != 0
+ __bool__ = __nonzero__
+
+ if kind == 'float':
+ @staticmethod
+ def _to_ctypes(x):
+ if not isinstance(x, (int, long, float, CTypesData)):
+ raise TypeError("float expected, got %s" %
+ type(x).__name__)
+ return ctype(x).value
+
+ @staticmethod
+ def _from_ctypes(value):
+ return getattr(value, 'value', value)
+
+ @staticmethod
+ def _initialize(blob, init):
+ blob.value = CTypesPrimitive._to_ctypes(init)
+
+ if kind == 'char':
+ def _to_string(self, maxlen):
+ return self._value
+ if kind == 'byte':
+ def _to_string(self, maxlen):
+ return chr(self._value & 0xff)
+ #
+ CTypesPrimitive._fix_class()
+ return CTypesPrimitive
+
+ def new_pointer_type(self, BItem):
+ getbtype = self.ffi._get_cached_btype
+ if BItem is getbtype(model.PrimitiveType('char')):
+ kind = 'charp'
+ elif BItem in (getbtype(model.PrimitiveType('signed char')),
+ getbtype(model.PrimitiveType('unsigned char'))):
+ kind = 'bytep'
+ elif BItem is getbtype(model.void_type):
+ kind = 'voidp'
+ else:
+ kind = 'generic'
+ #
+ class CTypesPtr(CTypesGenericPtr):
+ __slots__ = ['_own']
+ if kind == 'charp':
+ __slots__ += ['__as_strbuf']
+ _BItem = BItem
+ if hasattr(BItem, '_ctype'):
+ _ctype = ctypes.POINTER(BItem._ctype)
+ _bitem_size = ctypes.sizeof(BItem._ctype)
+ else:
+ _ctype = ctypes.c_void_p
+ if issubclass(BItem, CTypesGenericArray):
+ _reftypename = BItem._get_c_name('(* &)')
+ else:
+ _reftypename = BItem._get_c_name(' * &')
+
+ def __init__(self, init):
+ ctypeobj = BItem._create_ctype_obj(init)
+ if kind == 'charp':
+ self.__as_strbuf = ctypes.create_string_buffer(
+ ctypeobj.value + b'\x00')
+ self._as_ctype_ptr = ctypes.cast(
+ self.__as_strbuf, self._ctype)
+ else:
+ self._as_ctype_ptr = ctypes.pointer(ctypeobj)
+ self._address = ctypes.cast(self._as_ctype_ptr,
+ ctypes.c_void_p).value
+ self._own = True
+
+ def __add__(self, other):
+ if isinstance(other, (int, long)):
+ return self._new_pointer_at(self._address +
+ other * self._bitem_size)
+ else:
+ return NotImplemented
+
+ def __sub__(self, other):
+ if isinstance(other, (int, long)):
+ return self._new_pointer_at(self._address -
+ other * self._bitem_size)
+ elif type(self) is type(other):
+ return (self._address - other._address) // self._bitem_size
+ else:
+ return NotImplemented
+
+ def __getitem__(self, index):
+ if getattr(self, '_own', False) and index != 0:
+ raise IndexError
+ return BItem._from_ctypes(self._as_ctype_ptr[index])
+
+ def __setitem__(self, index, value):
+ self._as_ctype_ptr[index] = BItem._to_ctypes(value)
+
+ if kind == 'charp' or kind == 'voidp':
+ @classmethod
+ def _arg_to_ctypes(cls, *value):
+ if value and isinstance(value[0], bytes):
+ return ctypes.c_char_p(value[0])
+ else:
+ return super(CTypesPtr, cls)._arg_to_ctypes(*value)
+
+ if kind == 'charp' or kind == 'bytep':
+ def _to_string(self, maxlen):
+ if maxlen < 0:
+ maxlen = sys.maxsize
+ p = ctypes.cast(self._as_ctype_ptr,
+ ctypes.POINTER(ctypes.c_char))
+ n = 0
+ while n < maxlen and p[n] != b'\x00':
+ n += 1
+ return b''.join([p[i] for i in range(n)])
+
+ def _get_own_repr(self):
+ if getattr(self, '_own', False):
+ return 'owning %d bytes' % (
+ ctypes.sizeof(self._as_ctype_ptr.contents),)
+ return super(CTypesPtr, self)._get_own_repr()
+ #
+ if (BItem is self.ffi._get_cached_btype(model.void_type) or
+ BItem is self.ffi._get_cached_btype(model.PrimitiveType('char'))):
+ CTypesPtr._automatic_casts = True
+ #
+ CTypesPtr._fix_class()
+ return CTypesPtr
+
+ def new_array_type(self, CTypesPtr, length):
+ if length is None:
+ brackets = ' &[]'
+ else:
+ brackets = ' &[%d]' % length
+ BItem = CTypesPtr._BItem
+ getbtype = self.ffi._get_cached_btype
+ if BItem is getbtype(model.PrimitiveType('char')):
+ kind = 'char'
+ elif BItem in (getbtype(model.PrimitiveType('signed char')),
+ getbtype(model.PrimitiveType('unsigned char'))):
+ kind = 'byte'
+ else:
+ kind = 'generic'
+ #
+ class CTypesArray(CTypesGenericArray):
+ __slots__ = ['_blob', '_own']
+ if length is not None:
+ _ctype = BItem._ctype * length
+ else:
+ __slots__.append('_ctype')
+ _reftypename = BItem._get_c_name(brackets)
+ _declared_length = length
+ _CTPtr = CTypesPtr
+
+ def __init__(self, init):
+ if length is None:
+ if isinstance(init, (int, long)):
+ len1 = init
+ init = None
+ elif kind == 'char' and isinstance(init, bytes):
+ len1 = len(init) + 1 # extra null
+ else:
+ init = tuple(init)
+ len1 = len(init)
+ self._ctype = BItem._ctype * len1
+ self._blob = self._ctype()
+ self._own = True
+ if init is not None:
+ self._initialize(self._blob, init)
+
+ @staticmethod
+ def _initialize(blob, init):
+ if isinstance(init, bytes):
+ init = [init[i:i+1] for i in range(len(init))]
+ else:
+ if isinstance(init, CTypesGenericArray):
+ if (len(init) != len(blob) or
+ not isinstance(init, CTypesArray)):
+ raise TypeError("length/type mismatch: %s" % (init,))
+ init = tuple(init)
+ if len(init) > len(blob):
+ raise IndexError("too many initializers")
+ addr = ctypes.cast(blob, ctypes.c_void_p).value
+ PTR = ctypes.POINTER(BItem._ctype)
+ itemsize = ctypes.sizeof(BItem._ctype)
+ for i, value in enumerate(init):
+ p = ctypes.cast(addr + i * itemsize, PTR)
+ BItem._initialize(p.contents, value)
+
+ def __len__(self):
+ return len(self._blob)
+
+ def __getitem__(self, index):
+ if not (0 <= index < len(self._blob)):
+ raise IndexError
+ return BItem._from_ctypes(self._blob[index])
+
+ def __setitem__(self, index, value):
+ if not (0 <= index < len(self._blob)):
+ raise IndexError
+ self._blob[index] = BItem._to_ctypes(value)
+
+ if kind == 'char' or kind == 'byte':
+ def _to_string(self, maxlen):
+ if maxlen < 0:
+ maxlen = len(self._blob)
+ p = ctypes.cast(self._blob,
+ ctypes.POINTER(ctypes.c_char))
+ n = 0
+ while n < maxlen and p[n] != b'\x00':
+ n += 1
+ return b''.join([p[i] for i in range(n)])
+
+ def _get_own_repr(self):
+ if getattr(self, '_own', False):
+ return 'owning %d bytes' % (ctypes.sizeof(self._blob),)
+ return super(CTypesArray, self)._get_own_repr()
+
+ def _convert_to_address(self, BClass):
+ if BClass in (CTypesPtr, None) or BClass._automatic_casts:
+ return ctypes.addressof(self._blob)
+ else:
+ return CTypesData._convert_to_address(self, BClass)
+
+ @staticmethod
+ def _from_ctypes(ctypes_array):
+ self = CTypesArray.__new__(CTypesArray)
+ self._blob = ctypes_array
+ return self
+
+ @staticmethod
+ def _arg_to_ctypes(value):
+ return CTypesPtr._arg_to_ctypes(value)
+
+ def __add__(self, other):
+ if isinstance(other, (int, long)):
+ return CTypesPtr._new_pointer_at(
+ ctypes.addressof(self._blob) +
+ other * ctypes.sizeof(BItem._ctype))
+ else:
+ return NotImplemented
+
+ @classmethod
+ def _cast_from(cls, source):
+ raise NotImplementedError("casting to %r" % (
+ cls._get_c_name(),))
+ #
+ CTypesArray._fix_class()
+ return CTypesArray
+
+ def _new_struct_or_union(self, kind, name, base_ctypes_class):
+ #
+ class struct_or_union(base_ctypes_class):
+ pass
+ struct_or_union.__name__ = '%s_%s' % (kind, name)
+ kind1 = kind
+ #
+ class CTypesStructOrUnion(CTypesBaseStructOrUnion):
+ __slots__ = ['_blob']
+ _ctype = struct_or_union
+ _reftypename = '%s &' % (name,)
+ _kind = kind = kind1
+ #
+ CTypesStructOrUnion._fix_class()
+ return CTypesStructOrUnion
+
+ def new_struct_type(self, name):
+ return self._new_struct_or_union('struct', name, ctypes.Structure)
+
+ def new_union_type(self, name):
+ return self._new_struct_or_union('union', name, ctypes.Union)
+
+ def complete_struct_or_union(self, CTypesStructOrUnion, fields, tp,
+ totalsize=-1, totalalignment=-1, sflags=0,
+ pack=0):
+ if totalsize >= 0 or totalalignment >= 0:
+ raise NotImplementedError("the ctypes backend of CFFI does not support "
+ "structures completed by verify(); please "
+ "compile and install the _cffi_backend module.")
+ struct_or_union = CTypesStructOrUnion._ctype
+ fnames = [fname for (fname, BField, bitsize) in fields]
+ btypes = [BField for (fname, BField, bitsize) in fields]
+ bitfields = [bitsize for (fname, BField, bitsize) in fields]
+ #
+ bfield_types = {}
+ cfields = []
+ for (fname, BField, bitsize) in fields:
+ if bitsize < 0:
+ cfields.append((fname, BField._ctype))
+ bfield_types[fname] = BField
+ else:
+ cfields.append((fname, BField._ctype, bitsize))
+ bfield_types[fname] = Ellipsis
+ if sflags & 8:
+ struct_or_union._pack_ = 1
+ elif pack:
+ struct_or_union._pack_ = pack
+ struct_or_union._fields_ = cfields
+ CTypesStructOrUnion._bfield_types = bfield_types
+ #
+ @staticmethod
+ def _create_ctype_obj(init):
+ result = struct_or_union()
+ if init is not None:
+ initialize(result, init)
+ return result
+ CTypesStructOrUnion._create_ctype_obj = _create_ctype_obj
+ #
+ def initialize(blob, init):
+ if is_union:
+ if len(init) > 1:
+ raise ValueError("union initializer: %d items given, but "
+ "only one supported (use a dict if needed)"
+ % (len(init),))
+ if not isinstance(init, dict):
+ if isinstance(init, (bytes, unicode)):
+ raise TypeError("union initializer: got a str")
+ init = tuple(init)
+ if len(init) > len(fnames):
+ raise ValueError("too many values for %s initializer" %
+ CTypesStructOrUnion._get_c_name())
+ init = dict(zip(fnames, init))
+ addr = ctypes.addressof(blob)
+ for fname, value in init.items():
+ BField, bitsize = name2fieldtype[fname]
+ assert bitsize < 0, \
+ "not implemented: initializer with bit fields"
+ offset = CTypesStructOrUnion._offsetof(fname)
+ PTR = ctypes.POINTER(BField._ctype)
+ p = ctypes.cast(addr + offset, PTR)
+ BField._initialize(p.contents, value)
+ is_union = CTypesStructOrUnion._kind == 'union'
+ name2fieldtype = dict(zip(fnames, zip(btypes, bitfields)))
+ #
+ for fname, BField, bitsize in fields:
+ if fname == '':
+ raise NotImplementedError("nested anonymous structs/unions")
+ if hasattr(CTypesStructOrUnion, fname):
+ raise ValueError("the field name %r conflicts in "
+ "the ctypes backend" % fname)
+ if bitsize < 0:
+ def getter(self, fname=fname, BField=BField,
+ offset=CTypesStructOrUnion._offsetof(fname),
+ PTR=ctypes.POINTER(BField._ctype)):
+ addr = ctypes.addressof(self._blob)
+ p = ctypes.cast(addr + offset, PTR)
+ return BField._from_ctypes(p.contents)
+ def setter(self, value, fname=fname, BField=BField):
+ setattr(self._blob, fname, BField._to_ctypes(value))
+ #
+ if issubclass(BField, CTypesGenericArray):
+ setter = None
+ if BField._declared_length == 0:
+ def getter(self, fname=fname, BFieldPtr=BField._CTPtr,
+ offset=CTypesStructOrUnion._offsetof(fname),
+ PTR=ctypes.POINTER(BField._ctype)):
+ addr = ctypes.addressof(self._blob)
+ p = ctypes.cast(addr + offset, PTR)
+ return BFieldPtr._from_ctypes(p)
+ #
+ else:
+ def getter(self, fname=fname, BField=BField):
+ return BField._from_ctypes(getattr(self._blob, fname))
+ def setter(self, value, fname=fname, BField=BField):
+ # xxx obscure workaround
+ value = BField._to_ctypes(value)
+ oldvalue = getattr(self._blob, fname)
+ setattr(self._blob, fname, value)
+ if value != getattr(self._blob, fname):
+ setattr(self._blob, fname, oldvalue)
+ raise OverflowError("value too large for bitfield")
+ setattr(CTypesStructOrUnion, fname, property(getter, setter))
+ #
+ CTypesPtr = self.ffi._get_cached_btype(model.PointerType(tp))
+ for fname in fnames:
+ if hasattr(CTypesPtr, fname):
+ raise ValueError("the field name %r conflicts in "
+ "the ctypes backend" % fname)
+ def getter(self, fname=fname):
+ return getattr(self[0], fname)
+ def setter(self, value, fname=fname):
+ setattr(self[0], fname, value)
+ setattr(CTypesPtr, fname, property(getter, setter))
+
+ def new_function_type(self, BArgs, BResult, has_varargs):
+ nameargs = [BArg._get_c_name() for BArg in BArgs]
+ if has_varargs:
+ nameargs.append('...')
+ nameargs = ', '.join(nameargs)
+ #
+ class CTypesFunctionPtr(CTypesGenericPtr):
+ __slots__ = ['_own_callback', '_name']
+ _ctype = ctypes.CFUNCTYPE(getattr(BResult, '_ctype', None),
+ *[BArg._ctype for BArg in BArgs],
+ use_errno=True)
+ _reftypename = BResult._get_c_name('(* &)(%s)' % (nameargs,))
+
+ def __init__(self, init, error=None):
+ # create a callback to the Python callable init()
+ import traceback
+ assert not has_varargs, "varargs not supported for callbacks"
+ if getattr(BResult, '_ctype', None) is not None:
+ error = BResult._from_ctypes(
+ BResult._create_ctype_obj(error))
+ else:
+ error = None
+ def callback(*args):
+ args2 = []
+ for arg, BArg in zip(args, BArgs):
+ args2.append(BArg._from_ctypes(arg))
+ try:
+ res2 = init(*args2)
+ res2 = BResult._to_ctypes(res2)
+ except:
+ traceback.print_exc()
+ res2 = error
+ if issubclass(BResult, CTypesGenericPtr):
+ if res2:
+ res2 = ctypes.cast(res2, ctypes.c_void_p).value
+ # .value: http://bugs.python.org/issue1574593
+ else:
+ res2 = None
+ #print repr(res2)
+ return res2
+ if issubclass(BResult, CTypesGenericPtr):
+ # The only pointers callbacks can return are void*s:
+ # http://bugs.python.org/issue5710
+ callback_ctype = ctypes.CFUNCTYPE(
+ ctypes.c_void_p,
+ *[BArg._ctype for BArg in BArgs],
+ use_errno=True)
+ else:
+ callback_ctype = CTypesFunctionPtr._ctype
+ self._as_ctype_ptr = callback_ctype(callback)
+ self._address = ctypes.cast(self._as_ctype_ptr,
+ ctypes.c_void_p).value
+ self._own_callback = init
+
+ @staticmethod
+ def _initialize(ctypes_ptr, value):
+ if value:
+ raise NotImplementedError("ctypes backend: not supported: "
+ "initializers for function pointers")
+
+ def __repr__(self):
+ c_name = getattr(self, '_name', None)
+ if c_name:
+ i = self._reftypename.index('(* &)')
+ if self._reftypename[i-1] not in ' )*':
+ c_name = ' ' + c_name
+ c_name = self._reftypename.replace('(* &)', c_name)
+ return CTypesData.__repr__(self, c_name)
+
+ def _get_own_repr(self):
+ if getattr(self, '_own_callback', None) is not None:
+ return 'calling %r' % (self._own_callback,)
+ return super(CTypesFunctionPtr, self)._get_own_repr()
+
+ def __call__(self, *args):
+ if has_varargs:
+ assert len(args) >= len(BArgs)
+ extraargs = args[len(BArgs):]
+ args = args[:len(BArgs)]
+ else:
+ assert len(args) == len(BArgs)
+ ctypes_args = []
+ for arg, BArg in zip(args, BArgs):
+ ctypes_args.append(BArg._arg_to_ctypes(arg))
+ if has_varargs:
+ for i, arg in enumerate(extraargs):
+ if arg is None:
+ ctypes_args.append(ctypes.c_void_p(0)) # NULL
+ continue
+ if not isinstance(arg, CTypesData):
+ raise TypeError(
+ "argument %d passed in the variadic part "
+ "needs to be a cdata object (got %s)" %
+ (1 + len(BArgs) + i, type(arg).__name__))
+ ctypes_args.append(arg._arg_to_ctypes(arg))
+ result = self._as_ctype_ptr(*ctypes_args)
+ return BResult._from_ctypes(result)
+ #
+ CTypesFunctionPtr._fix_class()
+ return CTypesFunctionPtr
+
+ def new_enum_type(self, name, enumerators, enumvalues, CTypesInt):
+ assert isinstance(name, str)
+ reverse_mapping = dict(zip(reversed(enumvalues),
+ reversed(enumerators)))
+ #
+ class CTypesEnum(CTypesInt):
+ __slots__ = []
+ _reftypename = '%s &' % name
+
+ def _get_own_repr(self):
+ value = self._value
+ try:
+ return '%d: %s' % (value, reverse_mapping[value])
+ except KeyError:
+ return str(value)
+
+ def _to_string(self, maxlen):
+ value = self._value
+ try:
+ return reverse_mapping[value]
+ except KeyError:
+ return str(value)
+ #
+ CTypesEnum._fix_class()
+ return CTypesEnum
+
+ def get_errno(self):
+ return ctypes.get_errno()
+
+ def set_errno(self, value):
+ ctypes.set_errno(value)
+
+ def string(self, b, maxlen=-1):
+ return b._to_string(maxlen)
+
+ def buffer(self, bptr, size=-1):
+ raise NotImplementedError("buffer() with ctypes backend")
+
+ def sizeof(self, cdata_or_BType):
+ if isinstance(cdata_or_BType, CTypesData):
+ return cdata_or_BType._get_size_of_instance()
+ else:
+ assert issubclass(cdata_or_BType, CTypesData)
+ return cdata_or_BType._get_size()
+
+ def alignof(self, BType):
+ assert issubclass(BType, CTypesData)
+ return BType._alignment()
+
+ def newp(self, BType, source):
+ if not issubclass(BType, CTypesData):
+ raise TypeError
+ return BType._newp(source)
+
+ def cast(self, BType, source):
+ return BType._cast_from(source)
+
+ def callback(self, BType, source, error, onerror):
+ assert onerror is None # XXX not implemented
+ return BType(source, error)
+
+ _weakref_cache_ref = None
+
+ def gcp(self, cdata, destructor, size=0):
+ if self._weakref_cache_ref is None:
+ import weakref
+ class MyRef(weakref.ref):
+ def __eq__(self, other):
+ myref = self()
+ return self is other or (
+ myref is not None and myref is other())
+ def __ne__(self, other):
+ return not (self == other)
+ def __hash__(self):
+ try:
+ return self._hash
+ except AttributeError:
+ self._hash = hash(self())
+ return self._hash
+ self._weakref_cache_ref = {}, MyRef
+ weak_cache, MyRef = self._weakref_cache_ref
+
+ if destructor is None:
+ try:
+ del weak_cache[MyRef(cdata)]
+ except KeyError:
+ raise TypeError("Can remove destructor only on a object "
+ "previously returned by ffi.gc()")
+ return None
+
+ def remove(k):
+ cdata, destructor = weak_cache.pop(k, (None, None))
+ if destructor is not None:
+ destructor(cdata)
+
+ new_cdata = self.cast(self.typeof(cdata), cdata)
+ assert new_cdata is not cdata
+ weak_cache[MyRef(new_cdata, remove)] = (cdata, destructor)
+ return new_cdata
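+ # Sketch: p2 = ffi.gc(p, free_func) returns a fresh equal cdata; once
+ # p2 becomes unreachable, the weakref callback fires remove(), which
+ # pops the cache entry and calls free_func(p).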
+
+ typeof = type
+
+ def getcname(self, BType, replace_with):
+ return BType._get_c_name(replace_with)
+
+ def typeoffsetof(self, BType, fieldname, num=0):
+ if isinstance(fieldname, str):
+ if num == 0 and issubclass(BType, CTypesGenericPtr):
+ BType = BType._BItem
+ if not issubclass(BType, CTypesBaseStructOrUnion):
+ raise TypeError("expected a struct or union ctype")
+ BField = BType._bfield_types[fieldname]
+ if BField is Ellipsis:
+ raise TypeError("not supported for bitfields")
+ return (BField, BType._offsetof(fieldname))
+ elif isinstance(fieldname, (int, long)):
+ if issubclass(BType, CTypesGenericArray):
+ BType = BType._CTPtr
+ if not issubclass(BType, CTypesGenericPtr):
+ raise TypeError("expected an array or ptr ctype")
+ BItem = BType._BItem
+ offset = BItem._get_size() * fieldname
+ if offset > sys.maxsize:
+ raise OverflowError
+ return (BItem, offset)
+ else:
+ raise TypeError(type(fieldname))
+
+ def rawaddressof(self, BTypePtr, cdata, offset=None):
+ if isinstance(cdata, CTypesBaseStructOrUnion):
+ ptr = ctypes.pointer(type(cdata)._to_ctypes(cdata))
+ elif isinstance(cdata, CTypesGenericPtr):
+ if offset is None or not issubclass(type(cdata)._BItem,
+ CTypesBaseStructOrUnion):
+ raise TypeError("unexpected cdata type")
+ ptr = type(cdata)._to_ctypes(cdata)
+ elif isinstance(cdata, CTypesGenericArray):
+ ptr = type(cdata)._to_ctypes(cdata)
+ else:
+ raise TypeError("expected a ")
+ if offset:
+ ptr = ctypes.cast(
+ ctypes.c_void_p(
+ ctypes.cast(ptr, ctypes.c_void_p).value + offset),
+ type(ptr))
+ return BTypePtr._from_ctypes(ptr)
+
+
+class CTypesLibrary(object):
+
+ def __init__(self, backend, cdll):
+ self.backend = backend
+ self.cdll = cdll
+
+ def load_function(self, BType, name):
+ c_func = getattr(self.cdll, name)
+ funcobj = BType._from_ctypes(c_func)
+ funcobj._name = name
+ return funcobj
+
+ def read_variable(self, BType, name):
+ try:
+ ctypes_obj = BType._ctype.in_dll(self.cdll, name)
+ except AttributeError as e:
+ raise NotImplementedError(e)
+ return BType._from_ctypes(ctypes_obj)
+
+ def write_variable(self, BType, name, value):
+ new_ctypes_obj = BType._to_ctypes(value)
+ ctypes_obj = BType._ctype.in_dll(self.cdll, name)
+ ctypes.memmove(ctypes.addressof(ctypes_obj),
+ ctypes.addressof(new_ctypes_obj),
+ ctypes.sizeof(BType._ctype))
diff --git a/venv/Lib/site-packages/cffi/cffi_opcode.py b/venv/Lib/site-packages/cffi/cffi_opcode.py
new file mode 100644
index 000000000..a0df98d1c
--- /dev/null
+++ b/venv/Lib/site-packages/cffi/cffi_opcode.py
@@ -0,0 +1,187 @@
+from .error import VerificationError
+
+class CffiOp(object):
+ def __init__(self, op, arg):
+ self.op = op
+ self.arg = arg
+
+ def as_c_expr(self):
+ if self.op is None:
+ assert isinstance(self.arg, str)
+ return '(_cffi_opcode_t)(%s)' % (self.arg,)
+ classname = CLASS_NAME[self.op]
+ return '_CFFI_OP(_CFFI_OP_%s, %s)' % (classname, self.arg)
+
+ def as_python_bytes(self):
+ if self.op is None and self.arg.isdigit():
+ value = int(self.arg) # non-negative: '-' not in self.arg
+ if value >= 2**31:
+ raise OverflowError("cannot emit %r: limited to 2**31-1"
+ % (self.arg,))
+ return format_four_bytes(value)
+ if isinstance(self.arg, str):
+ raise VerificationError("cannot emit to Python: %r" % (self.arg,))
+ return format_four_bytes((self.arg << 8) | self.op)
+
+ def __str__(self):
+ classname = CLASS_NAME.get(self.op, self.op)
+ return '(%s %s)' % (classname, self.arg)
+
+def format_four_bytes(num):
+ return '\\x%02X\\x%02X\\x%02X\\x%02X' % (
+ (num >> 24) & 0xFF,
+ (num >> 16) & 0xFF,
+ (num >> 8) & 0xFF,
+ (num ) & 0xFF)
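+ # Sketch: with OP_POINTER == 3, CffiOp(OP_POINTER, 5).as_python_bytes()
+ # packs (5 << 8) | 3 == 0x503 into the escape text \x00\x00\x05\x03.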
+
+OP_PRIMITIVE = 1
+OP_POINTER = 3
+OP_ARRAY = 5
+OP_OPEN_ARRAY = 7
+OP_STRUCT_UNION = 9
+OP_ENUM = 11
+OP_FUNCTION = 13
+OP_FUNCTION_END = 15
+OP_NOOP = 17
+OP_BITFIELD = 19
+OP_TYPENAME = 21
+OP_CPYTHON_BLTN_V = 23 # varargs
+OP_CPYTHON_BLTN_N = 25 # noargs
+OP_CPYTHON_BLTN_O = 27 # O (i.e. a single arg)
+OP_CONSTANT = 29
+OP_CONSTANT_INT = 31
+OP_GLOBAL_VAR = 33
+OP_DLOPEN_FUNC = 35
+OP_DLOPEN_CONST = 37
+OP_GLOBAL_VAR_F = 39
+OP_EXTERN_PYTHON = 41
+
+PRIM_VOID = 0
+PRIM_BOOL = 1
+PRIM_CHAR = 2
+PRIM_SCHAR = 3
+PRIM_UCHAR = 4
+PRIM_SHORT = 5
+PRIM_USHORT = 6
+PRIM_INT = 7
+PRIM_UINT = 8
+PRIM_LONG = 9
+PRIM_ULONG = 10
+PRIM_LONGLONG = 11
+PRIM_ULONGLONG = 12
+PRIM_FLOAT = 13
+PRIM_DOUBLE = 14
+PRIM_LONGDOUBLE = 15
+
+PRIM_WCHAR = 16
+PRIM_INT8 = 17
+PRIM_UINT8 = 18
+PRIM_INT16 = 19
+PRIM_UINT16 = 20
+PRIM_INT32 = 21
+PRIM_UINT32 = 22
+PRIM_INT64 = 23
+PRIM_UINT64 = 24
+PRIM_INTPTR = 25
+PRIM_UINTPTR = 26
+PRIM_PTRDIFF = 27
+PRIM_SIZE = 28
+PRIM_SSIZE = 29
+PRIM_INT_LEAST8 = 30
+PRIM_UINT_LEAST8 = 31
+PRIM_INT_LEAST16 = 32
+PRIM_UINT_LEAST16 = 33
+PRIM_INT_LEAST32 = 34
+PRIM_UINT_LEAST32 = 35
+PRIM_INT_LEAST64 = 36
+PRIM_UINT_LEAST64 = 37
+PRIM_INT_FAST8 = 38
+PRIM_UINT_FAST8 = 39
+PRIM_INT_FAST16 = 40
+PRIM_UINT_FAST16 = 41
+PRIM_INT_FAST32 = 42
+PRIM_UINT_FAST32 = 43
+PRIM_INT_FAST64 = 44
+PRIM_UINT_FAST64 = 45
+PRIM_INTMAX = 46
+PRIM_UINTMAX = 47
+PRIM_FLOATCOMPLEX = 48
+PRIM_DOUBLECOMPLEX = 49
+PRIM_CHAR16 = 50
+PRIM_CHAR32 = 51
+
+_NUM_PRIM = 52
+_UNKNOWN_PRIM = -1
+_UNKNOWN_FLOAT_PRIM = -2
+_UNKNOWN_LONG_DOUBLE = -3
+
+_IO_FILE_STRUCT = -1
+
+PRIMITIVE_TO_INDEX = {
+ 'char': PRIM_CHAR,
+ 'short': PRIM_SHORT,
+ 'int': PRIM_INT,
+ 'long': PRIM_LONG,
+ 'long long': PRIM_LONGLONG,
+ 'signed char': PRIM_SCHAR,
+ 'unsigned char': PRIM_UCHAR,
+ 'unsigned short': PRIM_USHORT,
+ 'unsigned int': PRIM_UINT,
+ 'unsigned long': PRIM_ULONG,
+ 'unsigned long long': PRIM_ULONGLONG,
+ 'float': PRIM_FLOAT,
+ 'double': PRIM_DOUBLE,
+ 'long double': PRIM_LONGDOUBLE,
+ 'float _Complex': PRIM_FLOATCOMPLEX,
+ 'double _Complex': PRIM_DOUBLECOMPLEX,
+ '_Bool': PRIM_BOOL,
+ 'wchar_t': PRIM_WCHAR,
+ 'char16_t': PRIM_CHAR16,
+ 'char32_t': PRIM_CHAR32,
+ 'int8_t': PRIM_INT8,
+ 'uint8_t': PRIM_UINT8,
+ 'int16_t': PRIM_INT16,
+ 'uint16_t': PRIM_UINT16,
+ 'int32_t': PRIM_INT32,
+ 'uint32_t': PRIM_UINT32,
+ 'int64_t': PRIM_INT64,
+ 'uint64_t': PRIM_UINT64,
+ 'intptr_t': PRIM_INTPTR,
+ 'uintptr_t': PRIM_UINTPTR,
+ 'ptrdiff_t': PRIM_PTRDIFF,
+ 'size_t': PRIM_SIZE,
+ 'ssize_t': PRIM_SSIZE,
+ 'int_least8_t': PRIM_INT_LEAST8,
+ 'uint_least8_t': PRIM_UINT_LEAST8,
+ 'int_least16_t': PRIM_INT_LEAST16,
+ 'uint_least16_t': PRIM_UINT_LEAST16,
+ 'int_least32_t': PRIM_INT_LEAST32,
+ 'uint_least32_t': PRIM_UINT_LEAST32,
+ 'int_least64_t': PRIM_INT_LEAST64,
+ 'uint_least64_t': PRIM_UINT_LEAST64,
+ 'int_fast8_t': PRIM_INT_FAST8,
+ 'uint_fast8_t': PRIM_UINT_FAST8,
+ 'int_fast16_t': PRIM_INT_FAST16,
+ 'uint_fast16_t': PRIM_UINT_FAST16,
+ 'int_fast32_t': PRIM_INT_FAST32,
+ 'uint_fast32_t': PRIM_UINT_FAST32,
+ 'int_fast64_t': PRIM_INT_FAST64,
+ 'uint_fast64_t': PRIM_UINT_FAST64,
+ 'intmax_t': PRIM_INTMAX,
+ 'uintmax_t': PRIM_UINTMAX,
+ }
+
+F_UNION = 0x01
+F_CHECK_FIELDS = 0x02
+F_PACKED = 0x04
+F_EXTERNAL = 0x08
+F_OPAQUE = 0x10
+
+G_FLAGS = dict([('_CFFI_' + _key, globals()[_key])
+ for _key in ['F_UNION', 'F_CHECK_FIELDS', 'F_PACKED',
+ 'F_EXTERNAL', 'F_OPAQUE']])
+
+CLASS_NAME = {}
+for _name, _value in list(globals().items()):
+ if _name.startswith('OP_') and isinstance(_value, int):
+ CLASS_NAME[_value] = _name[3:]
diff --git a/venv/Lib/site-packages/cffi/commontypes.py b/venv/Lib/site-packages/cffi/commontypes.py
new file mode 100644
index 000000000..8ec97c756
--- /dev/null
+++ b/venv/Lib/site-packages/cffi/commontypes.py
@@ -0,0 +1,80 @@
+import sys
+from . import model
+from .error import FFIError
+
+
+COMMON_TYPES = {}
+
+try:
+ # fetch "bool" and all simple Windows types
+ from _cffi_backend import _get_common_types
+ _get_common_types(COMMON_TYPES)
+except ImportError:
+ pass
+
+COMMON_TYPES['FILE'] = model.unknown_type('FILE', '_IO_FILE')
+COMMON_TYPES['bool'] = '_Bool' # in case we got ImportError above
+
+for _type in model.PrimitiveType.ALL_PRIMITIVE_TYPES:
+ if _type.endswith('_t'):
+ COMMON_TYPES[_type] = _type
+del _type
+
+_CACHE = {}
+
+def resolve_common_type(parser, commontype):
+ try:
+ return _CACHE[commontype]
+ except KeyError:
+ cdecl = COMMON_TYPES.get(commontype, commontype)
+ if not isinstance(cdecl, str):
+ result, quals = cdecl, 0 # cdecl is already a BaseType
+ elif cdecl in model.PrimitiveType.ALL_PRIMITIVE_TYPES:
+ result, quals = model.PrimitiveType(cdecl), 0
+ elif cdecl == 'set-unicode-needed':
+ raise FFIError("The Windows type %r is only available after "
+ "you call ffi.set_unicode()" % (commontype,))
+ else:
+ if commontype == cdecl:
+ raise FFIError(
+ "Unsupported type: %r. Please look at "
+ "http://cffi.readthedocs.io/en/latest/cdef.html#ffi-cdef-limitations "
+ "and file an issue if you think this type should really "
+ "be supported." % (commontype,))
+ result, quals = parser.parse_type_and_quals(cdecl) # recursive
+
+ assert isinstance(result, model.BaseTypeByIdentity)
+ _CACHE[commontype] = result, quals
+ return result, quals
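+ # Sketch: resolve_common_type(parser, "uint8_t") yields
+ # (model.PrimitiveType('uint8_t'), 0) and caches it in _CACHE.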
+
+
+# ____________________________________________________________
+# extra types for Windows (most of them are in commontypes.c)
+
+
+def win_common_types():
+ return {
+ "UNICODE_STRING": model.StructType(
+ "_UNICODE_STRING",
+ ["Length",
+ "MaximumLength",
+ "Buffer"],
+ [model.PrimitiveType("unsigned short"),
+ model.PrimitiveType("unsigned short"),
+ model.PointerType(model.PrimitiveType("wchar_t"))],
+ [-1, -1, -1]),
+ "PUNICODE_STRING": "UNICODE_STRING *",
+ "PCUNICODE_STRING": "const UNICODE_STRING *",
+
+ "TBYTE": "set-unicode-needed",
+ "TCHAR": "set-unicode-needed",
+ "LPCTSTR": "set-unicode-needed",
+ "PCTSTR": "set-unicode-needed",
+ "LPTSTR": "set-unicode-needed",
+ "PTSTR": "set-unicode-needed",
+ "PTBYTE": "set-unicode-needed",
+ "PTCHAR": "set-unicode-needed",
+ }
+
+if sys.platform == 'win32':
+ COMMON_TYPES.update(win_common_types())
diff --git a/venv/Lib/site-packages/cffi/cparser.py b/venv/Lib/site-packages/cffi/cparser.py
new file mode 100644
index 000000000..74830e913
--- /dev/null
+++ b/venv/Lib/site-packages/cffi/cparser.py
@@ -0,0 +1,1006 @@
+from . import model
+from .commontypes import COMMON_TYPES, resolve_common_type
+from .error import FFIError, CDefError
+try:
+ from . import _pycparser as pycparser
+except ImportError:
+ import pycparser
+import weakref, re, sys
+
+try:
+ if sys.version_info < (3,):
+ import thread as _thread
+ else:
+ import _thread
+ lock = _thread.allocate_lock()
+except ImportError:
+ lock = None
+
+def _workaround_for_static_import_finders():
+ # Issue #392: packaging tools like cx_Freeze can not find these
+ # because pycparser uses exec dynamic import. This is an obscure
+ # workaround. This function is never called.
+ import pycparser.yacctab
+ import pycparser.lextab
+
+CDEF_SOURCE_STRING = ""
+_r_comment = re.compile(r"/\*.*?\*/|//([^\n\\]|\\.)*?$",
+ re.DOTALL | re.MULTILINE)
+_r_define = re.compile(r"^\s*#\s*define\s+([A-Za-z_][A-Za-z_0-9]*)"
+ r"\b((?:[^\n\\]|\\.)*?)$",
+ re.DOTALL | re.MULTILINE)
+_r_line_directive = re.compile(r"^[ \t]*#[ \t]*(?:line|\d+)\b.*$", re.MULTILINE)
+_r_partial_enum = re.compile(r"=\s*\.\.\.\s*[,}]|\.\.\.\s*\}")
+_r_enum_dotdotdot = re.compile(r"__dotdotdot\d+__$")
+_r_partial_array = re.compile(r"\[\s*\.\.\.\s*\]")
+_r_words = re.compile(r"\w+|\S")
+_parser_cache = None
+_r_int_literal = re.compile(r"-?0?x?[0-9a-f]+[lu]*$", re.IGNORECASE)
+_r_stdcall1 = re.compile(r"\b(__stdcall|WINAPI)\b")
+_r_stdcall2 = re.compile(r"[(]\s*(__stdcall|WINAPI)\b")
+_r_cdecl = re.compile(r"\b__cdecl\b")
+_r_extern_python = re.compile(r'\bextern\s*"'
+ r'(Python|Python\s*\+\s*C|C\s*\+\s*Python)"\s*.')
+_r_star_const_space = re.compile( # matches "* const "
+ r"[*]\s*((const|volatile|restrict)\b\s*)+")
+_r_int_dotdotdot = re.compile(r"(\b(int|long|short|signed|unsigned|char)\s*)+"
+ r"\.\.\.")
+_r_float_dotdotdot = re.compile(r"\b(double|float)\s*\.\.\.")
+
+def _get_parser():
+ global _parser_cache
+ if _parser_cache is None:
+ _parser_cache = pycparser.CParser()
+ return _parser_cache
+
+def _workaround_for_old_pycparser(csource):
+ # Workaround for a pycparser issue (fixed between pycparser 2.10 and
+ # 2.14): "char*const***" gives us a wrong syntax tree, the same as
+ # for "char***(*const)". This means we can't tell the difference
+ # afterwards. But "char(*const(***))" gives us the right syntax
+ # tree. The issue only occurs if there are several stars in
+ # sequence with no parenthesis inbetween, just possibly qualifiers.
+ # Attempt to fix it by adding some parentheses in the source: each
+ # time we see "* const" or "* const *", we add an opening
+ # parenthesis before each star; the hard part is figuring out where
+ # to close them.
+ parts = []
+ while True:
+ match = _r_star_const_space.search(csource)
+ if not match:
+ break
+ #print repr(''.join(parts)+csource), '=>',
+ parts.append(csource[:match.start()])
+ parts.append('('); closing = ')'
+ parts.append(match.group()) # e.g. "* const "
+ endpos = match.end()
+ if csource.startswith('*', endpos):
+ parts.append('('); closing += ')'
+ level = 0
+ i = endpos
+ while i < len(csource):
+ c = csource[i]
+ if c == '(':
+ level += 1
+ elif c == ')':
+ if level == 0:
+ break
+ level -= 1
+ elif c in ',;=':
+ if level == 0:
+ break
+ i += 1
+ csource = csource[endpos:i] + closing + csource[i:]
+ #print repr(''.join(parts)+csource)
+ parts.append(csource)
+ return ''.join(parts)
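+
+# For example, the loop above rewrites "char * const ***x;" into
+# "char (* const (***x));", the form that old pycparser turns into
+# the correct syntax tree.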
+
+def _preprocess_extern_python(csource):
+ # input: `extern "Python" int foo(int);` or
+ # `extern "Python" { int foo(int); }`
+ # output:
+ # void __cffi_extern_python_start;
+ # int foo(int);
+ # void __cffi_extern_python_stop;
+ #
+ # input: `extern "Python+C" int foo(int);`
+ # output:
+ # void __cffi_extern_python_plus_c_start;
+ # int foo(int);
+ # void __cffi_extern_python_stop;
+ parts = []
+ while True:
+ match = _r_extern_python.search(csource)
+ if not match:
+ break
+ endpos = match.end() - 1
+ #print
+ #print ''.join(parts)+csource
+ #print '=>'
+ parts.append(csource[:match.start()])
+ if 'C' in match.group(1):
+ parts.append('void __cffi_extern_python_plus_c_start; ')
+ else:
+ parts.append('void __cffi_extern_python_start; ')
+ if csource[endpos] == '{':
+ # grouping variant
+ closing = csource.find('}', endpos)
+ if closing < 0:
+ raise CDefError("'extern \"Python\" {': no '}' found")
+ if csource.find('{', endpos + 1, closing) >= 0:
+ raise NotImplementedError("cannot use { } inside a block "
+ "'extern \"Python\" { ... }'")
+ parts.append(csource[endpos+1:closing])
+ csource = csource[closing+1:]
+ else:
+ # non-grouping variant
+ semicolon = csource.find(';', endpos)
+ if semicolon < 0:
+                raise CDefError("'extern \"Python\"': no ';' found")
+ parts.append(csource[endpos:semicolon+1])
+ csource = csource[semicolon+1:]
+ parts.append(' void __cffi_extern_python_stop;')
+ #print ''.join(parts)+csource
+ #print
+ parts.append(csource)
+ return ''.join(parts)
+
+def _warn_for_string_literal(csource):
+ if '"' not in csource:
+ return
+ for line in csource.splitlines():
+ if '"' in line and not line.lstrip().startswith('#'):
+ import warnings
+ warnings.warn("String literal found in cdef() or type source. "
+ "String literals are ignored here, but you should "
+ "remove them anyway because some character sequences "
+ "confuse pre-parsing.")
+ break
+
+def _warn_for_non_extern_non_static_global_variable(decl):
+ if not decl.storage:
+ import warnings
+ warnings.warn("Global variable '%s' in cdef(): for consistency "
+ "with C it should have a storage class specifier "
+ "(usually 'extern')" % (decl.name,))
+
+def _remove_line_directives(csource):
+ # _r_line_directive matches whole lines, without the final \n, if they
+ # start with '#line' with some spacing allowed, or '#NUMBER'. This
+ # function stores them away and replaces them with exactly the string
+ # '#line@N', where N is the index in the list 'line_directives'.
+ line_directives = []
+ def replace(m):
+ i = len(line_directives)
+ line_directives.append(m.group())
+ return '#line@%d' % i
+ csource = _r_line_directive.sub(replace, csource)
+ return csource, line_directives
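+
+# For example, '#line 5 "foo.h"' is stored away and replaced by the
+# marker '#line@0'; _put_back_line_directives() restores it afterwards.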
+
+def _put_back_line_directives(csource, line_directives):
+ def replace(m):
+ s = m.group()
+ if not s.startswith('#line@'):
+            raise AssertionError("unexpected #line directive "
+                                 "(should have been processed and removed)")
+ return line_directives[int(s[6:])]
+ return _r_line_directive.sub(replace, csource)
+
+def _preprocess(csource):
+ # First, remove the lines of the form '#line N "filename"' because
+ # the "filename" part could confuse the rest
+ csource, line_directives = _remove_line_directives(csource)
+    # Remove comments. NOTE: this only works because the cdef() section
+ # should not contain any string literals (except in line directives)!
+ def replace_keeping_newlines(m):
+ return ' ' + m.group().count('\n') * '\n'
+ csource = _r_comment.sub(replace_keeping_newlines, csource)
+ # Remove the "#define FOO x" lines
+ macros = {}
+ for match in _r_define.finditer(csource):
+ macroname, macrovalue = match.groups()
+ macrovalue = macrovalue.replace('\\\n', '').strip()
+ macros[macroname] = macrovalue
+ csource = _r_define.sub('', csource)
+ #
+ if pycparser.__version__ < '2.14':
+ csource = _workaround_for_old_pycparser(csource)
+ #
+ # BIG HACK: replace WINAPI or __stdcall with "volatile const".
+ # It doesn't make sense for the return type of a function to be
+ # "volatile volatile const", so we abuse it to detect __stdcall...
+ # Hack number 2 is that "int(volatile *fptr)();" is not valid C
+ # syntax, so we place the "volatile" before the opening parenthesis.
+ csource = _r_stdcall2.sub(' volatile volatile const(', csource)
+ csource = _r_stdcall1.sub(' volatile volatile const ', csource)
+ csource = _r_cdecl.sub(' ', csource)
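+    # e.g. "int __stdcall f(int);" reads "int  volatile volatile const  f(int);"
+    # at this point; _parse_function_type() later recognizes the marker and
+    # records abi='__stdcall'.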
+ #
+ # Replace `extern "Python"` with start/end markers
+ csource = _preprocess_extern_python(csource)
+ #
+ # Now there should not be any string literal left; warn if we get one
+ _warn_for_string_literal(csource)
+ #
+ # Replace "[...]" with "[__dotdotdotarray__]"
+ csource = _r_partial_array.sub('[__dotdotdotarray__]', csource)
+ #
+ # Replace "...}" with "__dotdotdotNUM__}". This construction should
+ # occur only at the end of enums; at the end of structs we have "...;}"
+ # and at the end of vararg functions "...);". Also replace "=...[,}]"
+ # with ",__dotdotdotNUM__[,}]": this occurs in the enums too, when
+ # giving an unknown value.
+ matches = list(_r_partial_enum.finditer(csource))
+ for number, match in enumerate(reversed(matches)):
+ p = match.start()
+ if csource[p] == '=':
+ p2 = csource.find('...', p, match.end())
+ assert p2 > p
+ csource = '%s,__dotdotdot%d__ %s' % (csource[:p], number,
+ csource[p2+3:])
+ else:
+ assert csource[p:p+3] == '...'
+ csource = '%s __dotdotdot%d__ %s' % (csource[:p], number,
+ csource[p+3:])
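+    # e.g. "enum e { A=..., B };" has become "enum e { A,__dotdotdot0__ , B };"
+    # by this point.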
+ # Replace "int ..." or "unsigned long int..." with "__dotdotdotint__"
+ csource = _r_int_dotdotdot.sub(' __dotdotdotint__ ', csource)
+ # Replace "float ..." or "double..." with "__dotdotdotfloat__"
+ csource = _r_float_dotdotdot.sub(' __dotdotdotfloat__ ', csource)
+ # Replace all remaining "..." with the same name, "__dotdotdot__",
+ # which is declared with a typedef for the purpose of C parsing.
+ csource = csource.replace('...', ' __dotdotdot__ ')
+ # Finally, put back the line directives
+ csource = _put_back_line_directives(csource, line_directives)
+ return csource, macros
+
+def _common_type_names(csource):
+ # Look in the source for what looks like usages of types from the
+ # list of common types. A "usage" is approximated here as the
+ # appearance of the word, minus a "definition" of the type, which
+    # is the last word in a "typedef" statement.  Approximate only,
+    # but should be fine for all the common types.
+ look_for_words = set(COMMON_TYPES)
+ look_for_words.add(';')
+ look_for_words.add(',')
+ look_for_words.add('(')
+ look_for_words.add(')')
+ look_for_words.add('typedef')
+ words_used = set()
+ is_typedef = False
+ paren = 0
+ previous_word = ''
+ for word in _r_words.findall(csource):
+ if word in look_for_words:
+ if word == ';':
+ if is_typedef:
+ words_used.discard(previous_word)
+ look_for_words.discard(previous_word)
+ is_typedef = False
+ elif word == 'typedef':
+ is_typedef = True
+ paren = 0
+ elif word == '(':
+ paren += 1
+ elif word == ')':
+ paren -= 1
+ elif word == ',':
+ if is_typedef and paren == 0:
+ words_used.discard(previous_word)
+ look_for_words.discard(previous_word)
+ else: # word in COMMON_TYPES
+ words_used.add(word)
+ previous_word = word
+ return words_used
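+
+# For example, given "typedef size_t my_size; FILE *f;", this should
+# return {'size_t', 'FILE'} (assuming both are listed in COMMON_TYPES):
+# each appears as a usage, while the typedef'ed name 'my_size' is not a
+# common type and would be discarded anyway.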
+
+
+class Parser(object):
+
+ def __init__(self):
+ self._declarations = {}
+ self._included_declarations = set()
+ self._anonymous_counter = 0
+ self._structnode2type = weakref.WeakKeyDictionary()
+ self._options = {}
+ self._int_constants = {}
+ self._recomplete = []
+ self._uses_new_feature = None
+
+ def _parse(self, csource):
+ csource, macros = _preprocess(csource)
+ # XXX: for more efficiency we would need to poke into the
+ # internals of CParser... the following registers the
+ # typedefs, because their presence or absence influences the
+ # parsing itself (but what they are typedef'ed to plays no role)
+ ctn = _common_type_names(csource)
+ typenames = []
+ for name in sorted(self._declarations):
+ if name.startswith('typedef '):
+ name = name[8:]
+ typenames.append(name)
+ ctn.discard(name)
+ typenames += sorted(ctn)
+ #
+ csourcelines = []
+        csourcelines.append('# 1 "<cdef automatic initialization code>"')
+ for typename in typenames:
+ csourcelines.append('typedef int %s;' % typename)
+ csourcelines.append('typedef int __dotdotdotint__, __dotdotdotfloat__,'
+ ' __dotdotdot__;')
+        # this forces pycparser to consider the following in the file
+        # called <cdef source string> from line 1
+ csourcelines.append('# 1 "%s"' % (CDEF_SOURCE_STRING,))
+ csourcelines.append(csource)
+ fullcsource = '\n'.join(csourcelines)
+ if lock is not None:
+ lock.acquire() # pycparser is not thread-safe...
+ try:
+ ast = _get_parser().parse(fullcsource)
+ except pycparser.c_parser.ParseError as e:
+ self.convert_pycparser_error(e, csource)
+ finally:
+ if lock is not None:
+ lock.release()
+ # csource will be used to find buggy source text
+ return ast, macros, csource
+
+ def _convert_pycparser_error(self, e, csource):
+ # xxx look for ":NUM:" at the start of str(e)
+ # and interpret that as a line number. This will not work if
+ # the user gives explicit ``# NUM "FILE"`` directives.
+ line = None
+ msg = str(e)
+ match = re.match(r"%s:(\d+):" % (CDEF_SOURCE_STRING,), msg)
+ if match:
+ linenum = int(match.group(1), 10)
+ csourcelines = csource.splitlines()
+ if 1 <= linenum <= len(csourcelines):
+ line = csourcelines[linenum-1]
+ return line
+
+ def convert_pycparser_error(self, e, csource):
+ line = self._convert_pycparser_error(e, csource)
+
+ msg = str(e)
+ if line:
+ msg = 'cannot parse "%s"\n%s' % (line.strip(), msg)
+ else:
+ msg = 'parse error\n%s' % (msg,)
+ raise CDefError(msg)
+
+ def parse(self, csource, override=False, packed=False, pack=None,
+ dllexport=False):
+ if packed:
+ if packed != True:
+ raise ValueError("'packed' should be False or True; use "
+ "'pack' to give another value")
+ if pack:
+ raise ValueError("cannot give both 'pack' and 'packed'")
+ pack = 1
+ elif pack:
+ if pack & (pack - 1):
+ raise ValueError("'pack' must be a power of two, not %r" %
+ (pack,))
+ else:
+ pack = 0
+ prev_options = self._options
+ try:
+ self._options = {'override': override,
+ 'packed': pack,
+ 'dllexport': dllexport}
+ self._internal_parse(csource)
+ finally:
+ self._options = prev_options
+
+ def _internal_parse(self, csource):
+ ast, macros, csource = self._parse(csource)
+ # add the macros
+ self._process_macros(macros)
+ # find the first "__dotdotdot__" and use that as a separator
+ # between the repeated typedefs and the real csource
+ iterator = iter(ast.ext)
+ for decl in iterator:
+ if decl.name == '__dotdotdot__':
+ break
+ else:
+ assert 0
+ current_decl = None
+ #
+ try:
+ self._inside_extern_python = '__cffi_extern_python_stop'
+ for decl in iterator:
+ current_decl = decl
+ if isinstance(decl, pycparser.c_ast.Decl):
+ self._parse_decl(decl)
+ elif isinstance(decl, pycparser.c_ast.Typedef):
+ if not decl.name:
+ raise CDefError("typedef does not declare any name",
+ decl)
+ quals = 0
+ if (isinstance(decl.type.type, pycparser.c_ast.IdentifierType) and
+ decl.type.type.names[-1].startswith('__dotdotdot')):
+ realtype = self._get_unknown_type(decl)
+ elif (isinstance(decl.type, pycparser.c_ast.PtrDecl) and
+ isinstance(decl.type.type, pycparser.c_ast.TypeDecl) and
+ isinstance(decl.type.type.type,
+ pycparser.c_ast.IdentifierType) and
+ decl.type.type.type.names[-1].startswith('__dotdotdot')):
+ realtype = self._get_unknown_ptr_type(decl)
+ else:
+ realtype, quals = self._get_type_and_quals(
+ decl.type, name=decl.name, partial_length_ok=True,
+ typedef_example="*(%s *)0" % (decl.name,))
+ self._declare('typedef ' + decl.name, realtype, quals=quals)
+ elif decl.__class__.__name__ == 'Pragma':
+ pass # skip pragma, only in pycparser 2.15
+ else:
+ raise CDefError("unexpected <%s>: this construct is valid "
+ "C but not valid in cdef()" %
+ decl.__class__.__name__, decl)
+ except CDefError as e:
+ if len(e.args) == 1:
+ e.args = e.args + (current_decl,)
+ raise
+ except FFIError as e:
+ msg = self._convert_pycparser_error(e, csource)
+ if msg:
+ e.args = (e.args[0] + "\n *** Err: %s" % msg,)
+ raise
+
+ def _add_constants(self, key, val):
+ if key in self._int_constants:
+ if self._int_constants[key] == val:
+ return # ignore identical double declarations
+ raise FFIError(
+ "multiple declarations of constant: %s" % (key,))
+ self._int_constants[key] = val
+
+ def _add_integer_constant(self, name, int_str):
+ int_str = int_str.lower().rstrip("ul")
+ neg = int_str.startswith('-')
+ if neg:
+ int_str = int_str[1:]
+ # "010" is not valid oct in py3
+ if (int_str.startswith("0") and int_str != '0'
+ and not int_str.startswith("0x")):
+ int_str = "0o" + int_str[1:]
+ pyvalue = int(int_str, 0)
+ if neg:
+ pyvalue = -pyvalue
+ self._add_constants(name, pyvalue)
+ self._declare('macro ' + name, pyvalue)
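+        # e.g. _add_integer_constant('FOO', '-010') registers FOO = -8: the
+        # leading '0' marks octal, rewritten to '0o10' for Python 3's int().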
+
+ def _process_macros(self, macros):
+ for key, value in macros.items():
+ value = value.strip()
+ if _r_int_literal.match(value):
+ self._add_integer_constant(key, value)
+ elif value == '...':
+ self._declare('macro ' + key, value)
+ else:
+ raise CDefError(
+ 'only supports one of the following syntax:\n'
+ ' #define %s ... (literally dot-dot-dot)\n'
+ ' #define %s NUMBER (with NUMBER an integer'
+ ' constant, decimal/hex/octal)\n'
+ 'got:\n'
+ ' #define %s %s'
+ % (key, key, key, value))
+
+ def _declare_function(self, tp, quals, decl):
+ tp = self._get_type_pointer(tp, quals)
+ if self._options.get('dllexport'):
+ tag = 'dllexport_python '
+ elif self._inside_extern_python == '__cffi_extern_python_start':
+ tag = 'extern_python '
+ elif self._inside_extern_python == '__cffi_extern_python_plus_c_start':
+ tag = 'extern_python_plus_c '
+ else:
+ tag = 'function '
+ self._declare(tag + decl.name, tp)
+
+ def _parse_decl(self, decl):
+ node = decl.type
+ if isinstance(node, pycparser.c_ast.FuncDecl):
+ tp, quals = self._get_type_and_quals(node, name=decl.name)
+ assert isinstance(tp, model.RawFunctionType)
+ self._declare_function(tp, quals, decl)
+ else:
+ if isinstance(node, pycparser.c_ast.Struct):
+ self._get_struct_union_enum_type('struct', node)
+ elif isinstance(node, pycparser.c_ast.Union):
+ self._get_struct_union_enum_type('union', node)
+ elif isinstance(node, pycparser.c_ast.Enum):
+ self._get_struct_union_enum_type('enum', node)
+ elif not decl.name:
+ raise CDefError("construct does not declare any variable",
+ decl)
+ #
+ if decl.name:
+ tp, quals = self._get_type_and_quals(node,
+ partial_length_ok=True)
+ if tp.is_raw_function:
+ self._declare_function(tp, quals, decl)
+ elif (tp.is_integer_type() and
+ hasattr(decl, 'init') and
+ hasattr(decl.init, 'value') and
+ _r_int_literal.match(decl.init.value)):
+ self._add_integer_constant(decl.name, decl.init.value)
+ elif (tp.is_integer_type() and
+ isinstance(decl.init, pycparser.c_ast.UnaryOp) and
+ decl.init.op == '-' and
+ hasattr(decl.init.expr, 'value') and
+ _r_int_literal.match(decl.init.expr.value)):
+ self._add_integer_constant(decl.name,
+ '-' + decl.init.expr.value)
+ elif (tp is model.void_type and
+ decl.name.startswith('__cffi_extern_python_')):
+ # hack: `extern "Python"` in the C source is replaced
+ # with "void __cffi_extern_python_start;" and
+ # "void __cffi_extern_python_stop;"
+ self._inside_extern_python = decl.name
+ else:
+                if self._inside_extern_python != '__cffi_extern_python_stop':
+ raise CDefError(
+ "cannot declare constants or "
+ "variables with 'extern \"Python\"'")
+ if (quals & model.Q_CONST) and not tp.is_array_type:
+ self._declare('constant ' + decl.name, tp, quals=quals)
+ else:
+ _warn_for_non_extern_non_static_global_variable(decl)
+ self._declare('variable ' + decl.name, tp, quals=quals)
+
+ def parse_type(self, cdecl):
+ return self.parse_type_and_quals(cdecl)[0]
+
+ def parse_type_and_quals(self, cdecl):
+ ast, macros = self._parse('void __dummy(\n%s\n);' % cdecl)[:2]
+ assert not macros
+ exprnode = ast.ext[-1].type.args.params[0]
+ if isinstance(exprnode, pycparser.c_ast.ID):
+ raise CDefError("unknown identifier '%s'" % (exprnode.name,))
+ return self._get_type_and_quals(exprnode.type)
+
+ def _declare(self, name, obj, included=False, quals=0):
+ if name in self._declarations:
+ prevobj, prevquals = self._declarations[name]
+ if prevobj is obj and prevquals == quals:
+ return
+ if not self._options.get('override'):
+ raise FFIError(
+ "multiple declarations of %s (for interactive usage, "
+ "try cdef(xx, override=True))" % (name,))
+ assert '__dotdotdot__' not in name.split()
+ self._declarations[name] = (obj, quals)
+ if included:
+ self._included_declarations.add(obj)
+
+ def _extract_quals(self, type):
+ quals = 0
+ if isinstance(type, (pycparser.c_ast.TypeDecl,
+ pycparser.c_ast.PtrDecl)):
+ if 'const' in type.quals:
+ quals |= model.Q_CONST
+ if 'volatile' in type.quals:
+ quals |= model.Q_VOLATILE
+ if 'restrict' in type.quals:
+ quals |= model.Q_RESTRICT
+ return quals
+
+ def _get_type_pointer(self, type, quals, declname=None):
+ if isinstance(type, model.RawFunctionType):
+ return type.as_function_pointer()
+ if (isinstance(type, model.StructOrUnionOrEnum) and
+ type.name.startswith('$') and type.name[1:].isdigit() and
+ type.forcename is None and declname is not None):
+ return model.NamedPointerType(type, declname, quals)
+ return model.PointerType(type, quals)
+
+ def _get_type_and_quals(self, typenode, name=None, partial_length_ok=False,
+ typedef_example=None):
+        # first, dereference typedefs; if we have it already parsed, we're good
+ if (isinstance(typenode, pycparser.c_ast.TypeDecl) and
+ isinstance(typenode.type, pycparser.c_ast.IdentifierType) and
+ len(typenode.type.names) == 1 and
+ ('typedef ' + typenode.type.names[0]) in self._declarations):
+ tp, quals = self._declarations['typedef ' + typenode.type.names[0]]
+ quals |= self._extract_quals(typenode)
+ return tp, quals
+ #
+ if isinstance(typenode, pycparser.c_ast.ArrayDecl):
+ # array type
+ if typenode.dim is None:
+ length = None
+ else:
+ length = self._parse_constant(
+ typenode.dim, partial_length_ok=partial_length_ok)
+ # a hack: in 'typedef int foo_t[...][...];', don't use '...' as
+ # the length but use directly the C expression that would be
+ # generated by recompiler.py. This lets the typedef be used in
+ # many more places within recompiler.py
+ if typedef_example is not None:
+ if length == '...':
+ length = '_cffi_array_len(%s)' % (typedef_example,)
+ typedef_example = "*" + typedef_example
+ #
+ tp, quals = self._get_type_and_quals(typenode.type,
+ partial_length_ok=partial_length_ok,
+ typedef_example=typedef_example)
+ return model.ArrayType(tp, length), quals
+ #
+ if isinstance(typenode, pycparser.c_ast.PtrDecl):
+ # pointer type
+ itemtype, itemquals = self._get_type_and_quals(typenode.type)
+ tp = self._get_type_pointer(itemtype, itemquals, declname=name)
+ quals = self._extract_quals(typenode)
+ return tp, quals
+ #
+ if isinstance(typenode, pycparser.c_ast.TypeDecl):
+ quals = self._extract_quals(typenode)
+ type = typenode.type
+ if isinstance(type, pycparser.c_ast.IdentifierType):
+ # assume a primitive type. get it from .names, but reduce
+ # synonyms to a single chosen combination
+ names = list(type.names)
+ if names != ['signed', 'char']: # keep this unmodified
+ prefixes = {}
+ while names:
+ name = names[0]
+ if name in ('short', 'long', 'signed', 'unsigned'):
+ prefixes[name] = prefixes.get(name, 0) + 1
+ del names[0]
+ else:
+ break
+ # ignore the 'signed' prefix below, and reorder the others
+ newnames = []
+ for prefix in ('unsigned', 'short', 'long'):
+ for i in range(prefixes.get(prefix, 0)):
+ newnames.append(prefix)
+ if not names:
+ names = ['int'] # implicitly
+ if names == ['int']: # but kill it if 'short' or 'long'
+ if 'short' in prefixes or 'long' in prefixes:
+ names = []
+ names = newnames + names
+ ident = ' '.join(names)
+ if ident == 'void':
+ return model.void_type, quals
+ if ident == '__dotdotdot__':
+ raise FFIError(':%d: bad usage of "..."' %
+ typenode.coord.line)
+ tp0, quals0 = resolve_common_type(self, ident)
+ return tp0, (quals | quals0)
+ #
+ if isinstance(type, pycparser.c_ast.Struct):
+ # 'struct foobar'
+ tp = self._get_struct_union_enum_type('struct', type, name)
+ return tp, quals
+ #
+ if isinstance(type, pycparser.c_ast.Union):
+ # 'union foobar'
+ tp = self._get_struct_union_enum_type('union', type, name)
+ return tp, quals
+ #
+ if isinstance(type, pycparser.c_ast.Enum):
+ # 'enum foobar'
+ tp = self._get_struct_union_enum_type('enum', type, name)
+ return tp, quals
+ #
+ if isinstance(typenode, pycparser.c_ast.FuncDecl):
+ # a function type
+ return self._parse_function_type(typenode, name), 0
+ #
+ # nested anonymous structs or unions end up here
+ if isinstance(typenode, pycparser.c_ast.Struct):
+ return self._get_struct_union_enum_type('struct', typenode, name,
+ nested=True), 0
+ if isinstance(typenode, pycparser.c_ast.Union):
+ return self._get_struct_union_enum_type('union', typenode, name,
+ nested=True), 0
+ #
+ raise FFIError(":%d: bad or unsupported type declaration" %
+ typenode.coord.line)
+
+ def _parse_function_type(self, typenode, funcname=None):
+ params = list(getattr(typenode.args, 'params', []))
+ for i, arg in enumerate(params):
+ if not hasattr(arg, 'type'):
+ raise CDefError("%s arg %d: unknown type '%s'"
+ " (if you meant to use the old C syntax of giving"
+ " untyped arguments, it is not supported)"
+ % (funcname or 'in expression', i + 1,
+ getattr(arg, 'name', '?')))
+ ellipsis = (
+ len(params) > 0 and
+ isinstance(params[-1].type, pycparser.c_ast.TypeDecl) and
+ isinstance(params[-1].type.type,
+ pycparser.c_ast.IdentifierType) and
+ params[-1].type.type.names == ['__dotdotdot__'])
+ if ellipsis:
+ params.pop()
+ if not params:
+ raise CDefError(
+ "%s: a function with only '(...)' as argument"
+ " is not correct C" % (funcname or 'in expression'))
+ args = [self._as_func_arg(*self._get_type_and_quals(argdeclnode.type))
+ for argdeclnode in params]
+ if not ellipsis and args == [model.void_type]:
+ args = []
+ result, quals = self._get_type_and_quals(typenode.type)
+        # the 'quals' on the result type are ignored. HACK: we abuse them
+ # to detect __stdcall functions: we textually replace "__stdcall"
+ # with "volatile volatile const" above.
+ abi = None
+ if hasattr(typenode.type, 'quals'): # else, probable syntax error anyway
+ if typenode.type.quals[-3:] == ['volatile', 'volatile', 'const']:
+ abi = '__stdcall'
+ return model.RawFunctionType(tuple(args), result, ellipsis, abi)
+
+ def _as_func_arg(self, type, quals):
+ if isinstance(type, model.ArrayType):
+ return model.PointerType(type.item, quals)
+ elif isinstance(type, model.RawFunctionType):
+ return type.as_function_pointer()
+ else:
+ return type
+
+ def _get_struct_union_enum_type(self, kind, type, name=None, nested=False):
+ # First, a level of caching on the exact 'type' node of the AST.
+ # This is obscure, but needed because pycparser "unrolls" declarations
+ # such as "typedef struct { } foo_t, *foo_p" and we end up with
+ # an AST that is not a tree, but a DAG, with the "type" node of the
+ # two branches foo_t and foo_p of the trees being the same node.
+ # It's a bit silly but detecting "DAG-ness" in the AST tree seems
+ # to be the only way to distinguish this case from two independent
+ # structs. See test_struct_with_two_usages.
+ try:
+ return self._structnode2type[type]
+ except KeyError:
+ pass
+ #
+ # Note that this must handle parsing "struct foo" any number of
+ # times and always return the same StructType object. Additionally,
+ # one of these times (not necessarily the first), the fields of
+ # the struct can be specified with "struct foo { ...fields... }".
+ # If no name is given, then we have to create a new anonymous struct
+ # with no caching; in this case, the fields are either specified
+ # right now or never.
+ #
+ force_name = name
+ name = type.name
+ #
+ # get the type or create it if needed
+ if name is None:
+ # 'force_name' is used to guess a more readable name for
+ # anonymous structs, for the common case "typedef struct { } foo".
+ if force_name is not None:
+ explicit_name = '$%s' % force_name
+ else:
+ self._anonymous_counter += 1
+ explicit_name = '$%d' % self._anonymous_counter
+ tp = None
+ else:
+ explicit_name = name
+ key = '%s %s' % (kind, name)
+ tp, _ = self._declarations.get(key, (None, None))
+ #
+ if tp is None:
+ if kind == 'struct':
+ tp = model.StructType(explicit_name, None, None, None)
+ elif kind == 'union':
+ tp = model.UnionType(explicit_name, None, None, None)
+ elif kind == 'enum':
+ if explicit_name == '__dotdotdot__':
+ raise CDefError("Enums cannot be declared with ...")
+ tp = self._build_enum_type(explicit_name, type.values)
+ else:
+ raise AssertionError("kind = %r" % (kind,))
+ if name is not None:
+ self._declare(key, tp)
+ else:
+ if kind == 'enum' and type.values is not None:
+ raise NotImplementedError(
+ "enum %s: the '{}' declaration should appear on the first "
+ "time the enum is mentioned, not later" % explicit_name)
+ if not tp.forcename:
+ tp.force_the_name(force_name)
+ if tp.forcename and '$' in tp.name:
+ self._declare('anonymous %s' % tp.forcename, tp)
+ #
+ self._structnode2type[type] = tp
+ #
+ # enums: done here
+ if kind == 'enum':
+ return tp
+ #
+ # is there a 'type.decls'? If yes, then this is the place in the
+ # C sources that declare the fields. If no, then just return the
+ # existing type, possibly still incomplete.
+ if type.decls is None:
+ return tp
+ #
+ if tp.fldnames is not None:
+ raise CDefError("duplicate declaration of struct %s" % name)
+ fldnames = []
+ fldtypes = []
+ fldbitsize = []
+ fldquals = []
+ for decl in type.decls:
+ if (isinstance(decl.type, pycparser.c_ast.IdentifierType) and
+ ''.join(decl.type.names) == '__dotdotdot__'):
+ # XXX pycparser is inconsistent: 'names' should be a list
+ # of strings, but is sometimes just one string. Use
+ # str.join() as a way to cope with both.
+ self._make_partial(tp, nested)
+ continue
+ if decl.bitsize is None:
+ bitsize = -1
+ else:
+ bitsize = self._parse_constant(decl.bitsize)
+ self._partial_length = False
+ type, fqual = self._get_type_and_quals(decl.type,
+ partial_length_ok=True)
+ if self._partial_length:
+ self._make_partial(tp, nested)
+ if isinstance(type, model.StructType) and type.partial:
+ self._make_partial(tp, nested)
+ fldnames.append(decl.name or '')
+ fldtypes.append(type)
+ fldbitsize.append(bitsize)
+ fldquals.append(fqual)
+ tp.fldnames = tuple(fldnames)
+ tp.fldtypes = tuple(fldtypes)
+ tp.fldbitsize = tuple(fldbitsize)
+ tp.fldquals = tuple(fldquals)
+ if fldbitsize != [-1] * len(fldbitsize):
+ if isinstance(tp, model.StructType) and tp.partial:
+ raise NotImplementedError("%s: using both bitfields and '...;'"
+ % (tp,))
+ tp.packed = self._options.get('packed')
+ if tp.completed: # must be re-completed: it is not opaque any more
+ tp.completed = 0
+ self._recomplete.append(tp)
+ return tp
+
+ def _make_partial(self, tp, nested):
+ if not isinstance(tp, model.StructOrUnion):
+ raise CDefError("%s cannot be partial" % (tp,))
+ if not tp.has_c_name() and not nested:
+            raise NotImplementedError("%s is partial but has no C name" % (tp,))
+ tp.partial = True
+
+ def _parse_constant(self, exprnode, partial_length_ok=False):
+ # for now, limited to expressions that are an immediate number
+ # or positive/negative number
+ if isinstance(exprnode, pycparser.c_ast.Constant):
+ s = exprnode.value
+ if '0' <= s[0] <= '9':
+ s = s.rstrip('uUlL')
+ try:
+ if s.startswith('0'):
+ return int(s, 8)
+ else:
+ return int(s, 10)
+ except ValueError:
+ if len(s) > 1:
+ if s.lower()[0:2] == '0x':
+ return int(s, 16)
+ elif s.lower()[0:2] == '0b':
+ return int(s, 2)
+ raise CDefError("invalid constant %r" % (s,))
+ elif s[0] == "'" and s[-1] == "'" and (
+ len(s) == 3 or (len(s) == 4 and s[1] == "\\")):
+ return ord(s[-2])
+ else:
+ raise CDefError("invalid constant %r" % (s,))
+ #
+ if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and
+ exprnode.op == '+'):
+ return self._parse_constant(exprnode.expr)
+ #
+ if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and
+ exprnode.op == '-'):
+ return -self._parse_constant(exprnode.expr)
+ # load previously defined int constant
+ if (isinstance(exprnode, pycparser.c_ast.ID) and
+ exprnode.name in self._int_constants):
+ return self._int_constants[exprnode.name]
+ #
+ if (isinstance(exprnode, pycparser.c_ast.ID) and
+ exprnode.name == '__dotdotdotarray__'):
+ if partial_length_ok:
+ self._partial_length = True
+ return '...'
+ raise FFIError(":%d: unsupported '[...]' here, cannot derive "
+ "the actual array length in this context"
+ % exprnode.coord.line)
+ #
+ if isinstance(exprnode, pycparser.c_ast.BinaryOp):
+ left = self._parse_constant(exprnode.left)
+ right = self._parse_constant(exprnode.right)
+ if exprnode.op == '+':
+ return left + right
+ elif exprnode.op == '-':
+ return left - right
+ elif exprnode.op == '*':
+ return left * right
+ elif exprnode.op == '/':
+ return self._c_div(left, right)
+ elif exprnode.op == '%':
+ return left - self._c_div(left, right) * right
+ elif exprnode.op == '<<':
+ return left << right
+ elif exprnode.op == '>>':
+ return left >> right
+ elif exprnode.op == '&':
+ return left & right
+ elif exprnode.op == '|':
+ return left | right
+ elif exprnode.op == '^':
+ return left ^ right
+ #
+ raise FFIError(":%d: unsupported expression: expected a "
+ "simple numeric constant" % exprnode.coord.line)
+
+ def _c_div(self, a, b):
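+        # C integer division truncates toward zero, unlike Python's floor
+        # division: e.g. _c_div(-7, 2) == -3, whereas -7 // 2 == -4.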
+ result = a // b
+ if ((a < 0) ^ (b < 0)) and (a % b) != 0:
+ result += 1
+ return result
+
+ def _build_enum_type(self, explicit_name, decls):
+ if decls is not None:
+ partial = False
+ enumerators = []
+ enumvalues = []
+ nextenumvalue = 0
+ for enum in decls.enumerators:
+ if _r_enum_dotdotdot.match(enum.name):
+ partial = True
+ continue
+ if enum.value is not None:
+ nextenumvalue = self._parse_constant(enum.value)
+ enumerators.append(enum.name)
+ enumvalues.append(nextenumvalue)
+ self._add_constants(enum.name, nextenumvalue)
+ nextenumvalue += 1
+ enumerators = tuple(enumerators)
+ enumvalues = tuple(enumvalues)
+ tp = model.EnumType(explicit_name, enumerators, enumvalues)
+ tp.partial = partial
+ else: # opaque enum
+ tp = model.EnumType(explicit_name, (), ())
+ return tp
+
+ def include(self, other):
+ for name, (tp, quals) in other._declarations.items():
+ if name.startswith('anonymous $enum_$'):
+ continue # fix for test_anonymous_enum_include
+ kind = name.split(' ', 1)[0]
+ if kind in ('struct', 'union', 'enum', 'anonymous', 'typedef'):
+ self._declare(name, tp, included=True, quals=quals)
+ for k, v in other._int_constants.items():
+ self._add_constants(k, v)
+
+ def _get_unknown_type(self, decl):
+ typenames = decl.type.type.names
+ if typenames == ['__dotdotdot__']:
+ return model.unknown_type(decl.name)
+
+ if typenames == ['__dotdotdotint__']:
+ if self._uses_new_feature is None:
+ self._uses_new_feature = "'typedef int... %s'" % decl.name
+ return model.UnknownIntegerType(decl.name)
+
+ if typenames == ['__dotdotdotfloat__']:
+ # note: not for 'long double' so far
+ if self._uses_new_feature is None:
+ self._uses_new_feature = "'typedef float... %s'" % decl.name
+ return model.UnknownFloatType(decl.name)
+
+ raise FFIError(':%d: unsupported usage of "..." in typedef'
+ % decl.coord.line)
+
+ def _get_unknown_ptr_type(self, decl):
+ if decl.type.type.type.names == ['__dotdotdot__']:
+ return model.unknown_ptr_type(decl.name)
+ raise FFIError(':%d: unsupported usage of "..." in typedef'
+ % decl.coord.line)
diff --git a/venv/Lib/site-packages/cffi/error.py b/venv/Lib/site-packages/cffi/error.py
new file mode 100644
index 000000000..0a27247c3
--- /dev/null
+++ b/venv/Lib/site-packages/cffi/error.py
@@ -0,0 +1,31 @@
+
+class FFIError(Exception):
+ __module__ = 'cffi'
+
+class CDefError(Exception):
+ __module__ = 'cffi'
+ def __str__(self):
+ try:
+ current_decl = self.args[1]
+ filename = current_decl.coord.file
+ linenum = current_decl.coord.line
+ prefix = '%s:%d: ' % (filename, linenum)
+ except (AttributeError, TypeError, IndexError):
+ prefix = ''
+ return '%s%s' % (prefix, self.args[0])
+
+class VerificationError(Exception):
+ """ An error raised when verification fails
+ """
+ __module__ = 'cffi'
+
+class VerificationMissing(Exception):
+ """ An error raised when incomplete structures are passed into
+ cdef, but no verification has been done
+ """
+ __module__ = 'cffi'
+
+class PkgConfigError(Exception):
+ """ An error raised for missing modules in pkg-config
+ """
+ __module__ = 'cffi'
diff --git a/venv/Lib/site-packages/cffi/ffiplatform.py b/venv/Lib/site-packages/cffi/ffiplatform.py
new file mode 100644
index 000000000..85313460a
--- /dev/null
+++ b/venv/Lib/site-packages/cffi/ffiplatform.py
@@ -0,0 +1,127 @@
+import sys, os
+from .error import VerificationError
+
+
+LIST_OF_FILE_NAMES = ['sources', 'include_dirs', 'library_dirs',
+ 'extra_objects', 'depends']
+
+def get_extension(srcfilename, modname, sources=(), **kwds):
+ _hack_at_distutils()
+ from distutils.core import Extension
+ allsources = [srcfilename]
+ for src in sources:
+ allsources.append(os.path.normpath(src))
+ return Extension(name=modname, sources=allsources, **kwds)
+
+def compile(tmpdir, ext, compiler_verbose=0, debug=None):
+ """Compile a C extension module using distutils."""
+
+ _hack_at_distutils()
+ saved_environ = os.environ.copy()
+ try:
+ outputfilename = _build(tmpdir, ext, compiler_verbose, debug)
+ outputfilename = os.path.abspath(outputfilename)
+ finally:
+        # workaround for a distutils bug where some env vars can
+        # become longer and longer every time distutils is used
+ for key, value in saved_environ.items():
+ if os.environ.get(key) != value:
+ os.environ[key] = value
+ return outputfilename
+
+def _build(tmpdir, ext, compiler_verbose=0, debug=None):
+ # XXX compact but horrible :-(
+ from distutils.core import Distribution
+ import distutils.errors, distutils.log
+ #
+ dist = Distribution({'ext_modules': [ext]})
+ dist.parse_config_files()
+ options = dist.get_option_dict('build_ext')
+ if debug is None:
+ debug = sys.flags.debug
+ options['debug'] = ('ffiplatform', debug)
+ options['force'] = ('ffiplatform', True)
+ options['build_lib'] = ('ffiplatform', tmpdir)
+ options['build_temp'] = ('ffiplatform', tmpdir)
+ #
+ try:
+ old_level = distutils.log.set_threshold(0) or 0
+ try:
+ distutils.log.set_verbosity(compiler_verbose)
+ dist.run_command('build_ext')
+ cmd_obj = dist.get_command_obj('build_ext')
+ [soname] = cmd_obj.get_outputs()
+ finally:
+ distutils.log.set_threshold(old_level)
+ except (distutils.errors.CompileError,
+ distutils.errors.LinkError) as e:
+ raise VerificationError('%s: %s' % (e.__class__.__name__, e))
+ #
+ return soname
+
+try:
+ from os.path import samefile
+except ImportError:
+ def samefile(f1, f2):
+ return os.path.abspath(f1) == os.path.abspath(f2)
+
+def maybe_relative_path(path):
+ if not os.path.isabs(path):
+ return path # already relative
+ dir = path
+ names = []
+ while True:
+ prevdir = dir
+ dir, name = os.path.split(prevdir)
+ if dir == prevdir or not dir:
+ return path # failed to make it relative
+ names.append(name)
+ try:
+ if samefile(dir, os.curdir):
+ names.reverse()
+ return os.path.join(*names)
+ except OSError:
+ pass
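+
+# For example, with the current directory at /home/u/proj (POSIX-style,
+# for illustration), maybe_relative_path('/home/u/proj/src/x.c') should
+# return 'src/x.c'.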
+
+# ____________________________________________________________
+
+try:
+ int_or_long = (int, long)
+ import cStringIO
+except NameError:
+ int_or_long = int # Python 3
+ import io as cStringIO
+
+def _flatten(x, f):
+ if isinstance(x, str):
+ f.write('%ds%s' % (len(x), x))
+ elif isinstance(x, dict):
+ keys = sorted(x.keys())
+ f.write('%dd' % len(keys))
+ for key in keys:
+ _flatten(key, f)
+ _flatten(x[key], f)
+ elif isinstance(x, (list, tuple)):
+ f.write('%dl' % len(x))
+ for value in x:
+ _flatten(value, f)
+ elif isinstance(x, int_or_long):
+ f.write('%di' % (x,))
+ else:
+ raise TypeError(
+ "the keywords to verify() contains unsupported object %r" % (x,))
+
+def flatten(x):
+ f = cStringIO.StringIO()
+ _flatten(x, f)
+ return f.getvalue()
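+
+# flatten() builds a compact, deterministic fingerprint of the verify()
+# keywords; e.g. flatten({'a': [1, 2]}) == '1d1sa2l1i2i' (a dict of one
+# key, the 1-char string 'a', then a 2-item list of the ints 1 and 2).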
+
+def _hack_at_distutils():
+ # Windows-only workaround for some configurations: see
+ # https://bugs.python.org/issue23246 (Python 2.7 with
+ # a specific MS compiler suite download)
+ if sys.platform == "win32":
+ try:
+ import setuptools # for side-effects, patches distutils
+ except ImportError:
+ pass
diff --git a/venv/Lib/site-packages/cffi/lock.py b/venv/Lib/site-packages/cffi/lock.py
new file mode 100644
index 000000000..db91b7158
--- /dev/null
+++ b/venv/Lib/site-packages/cffi/lock.py
@@ -0,0 +1,30 @@
+import sys
+
+if sys.version_info < (3,):
+ try:
+ from thread import allocate_lock
+ except ImportError:
+ from dummy_thread import allocate_lock
+else:
+ try:
+ from _thread import allocate_lock
+ except ImportError:
+ from _dummy_thread import allocate_lock
+
+
+##import sys
+##l1 = allocate_lock
+
+##class allocate_lock(object):
+## def __init__(self):
+## self._real = l1()
+## def __enter__(self):
+## for i in range(4, 0, -1):
+## print sys._getframe(i).f_code
+## print
+## return self._real.__enter__()
+## def __exit__(self, *args):
+## return self._real.__exit__(*args)
+## def acquire(self, f):
+## assert f is False
+## return self._real.acquire(f)
diff --git a/venv/Lib/site-packages/cffi/model.py b/venv/Lib/site-packages/cffi/model.py
new file mode 100644
index 000000000..ad1c17648
--- /dev/null
+++ b/venv/Lib/site-packages/cffi/model.py
@@ -0,0 +1,617 @@
+import types
+import weakref
+
+from .lock import allocate_lock
+from .error import CDefError, VerificationError, VerificationMissing
+
+# type qualifiers
+Q_CONST = 0x01
+Q_RESTRICT = 0x02
+Q_VOLATILE = 0x04
+
+def qualify(quals, replace_with):
+ if quals & Q_CONST:
+ replace_with = ' const ' + replace_with.lstrip()
+ if quals & Q_VOLATILE:
+ replace_with = ' volatile ' + replace_with.lstrip()
+ if quals & Q_RESTRICT:
+ # It seems that __restrict is supported by gcc and msvc.
+ # If you hit some different compiler, add a #define in
+ # _cffi_include.h for it (and in its copies, documented there)
+ replace_with = ' __restrict ' + replace_with.lstrip()
+ return replace_with
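+
+# For example, qualify(Q_CONST, '*p') returns ' const *p', and
+# qualify(Q_CONST | Q_VOLATILE, 'x') returns ' volatile const x'.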
+
+
+class BaseTypeByIdentity(object):
+ is_array_type = False
+ is_raw_function = False
+
+ def get_c_name(self, replace_with='', context='a C file', quals=0):
+ result = self.c_name_with_marker
+ assert result.count('&') == 1
+ # some logic duplication with ffi.getctype()... :-(
+ replace_with = replace_with.strip()
+ if replace_with:
+ if replace_with.startswith('*') and '&[' in result:
+ replace_with = '(%s)' % replace_with
+ elif not replace_with[0] in '[(':
+ replace_with = ' ' + replace_with
+ replace_with = qualify(quals, replace_with)
+ result = result.replace('&', replace_with)
+ if '$' in result:
+ raise VerificationError(
+ "cannot generate '%s' in %s: unknown type name"
+ % (self._get_c_name(), context))
+ return result
+
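+    # For example, PrimitiveType('int') has c_name_with_marker 'int&', so
+    # get_c_name('x') gives 'int x'; an ArrayType of 5 ints has the marker
+    # 'int&[5]', so get_c_name('*p') gives 'int(*p)[5]'.
+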
+ def _get_c_name(self):
+ return self.c_name_with_marker.replace('&', '')
+
+ def has_c_name(self):
+ return '$' not in self._get_c_name()
+
+ def is_integer_type(self):
+ return False
+
+ def get_cached_btype(self, ffi, finishlist, can_delay=False):
+ try:
+ BType = ffi._cached_btypes[self]
+ except KeyError:
+ BType = self.build_backend_type(ffi, finishlist)
+ BType2 = ffi._cached_btypes.setdefault(self, BType)
+ assert BType2 is BType
+ return BType
+
+ def __repr__(self):
+ return '<%s>' % (self._get_c_name(),)
+
+ def _get_items(self):
+ return [(name, getattr(self, name)) for name in self._attrs_]
+
+
+class BaseType(BaseTypeByIdentity):
+
+ def __eq__(self, other):
+ return (self.__class__ == other.__class__ and
+ self._get_items() == other._get_items())
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash((self.__class__, tuple(self._get_items())))
+
+
+class VoidType(BaseType):
+ _attrs_ = ()
+
+ def __init__(self):
+ self.c_name_with_marker = 'void&'
+
+ def build_backend_type(self, ffi, finishlist):
+ return global_cache(self, ffi, 'new_void_type')
+
+void_type = VoidType()
+
+
+class BasePrimitiveType(BaseType):
+ def is_complex_type(self):
+ return False
+
+
+class PrimitiveType(BasePrimitiveType):
+ _attrs_ = ('name',)
+
+ ALL_PRIMITIVE_TYPES = {
+ 'char': 'c',
+ 'short': 'i',
+ 'int': 'i',
+ 'long': 'i',
+ 'long long': 'i',
+ 'signed char': 'i',
+ 'unsigned char': 'i',
+ 'unsigned short': 'i',
+ 'unsigned int': 'i',
+ 'unsigned long': 'i',
+ 'unsigned long long': 'i',
+ 'float': 'f',
+ 'double': 'f',
+ 'long double': 'f',
+ 'float _Complex': 'j',
+ 'double _Complex': 'j',
+ '_Bool': 'i',
+ # the following types are not primitive in the C sense
+ 'wchar_t': 'c',
+ 'char16_t': 'c',
+ 'char32_t': 'c',
+ 'int8_t': 'i',
+ 'uint8_t': 'i',
+ 'int16_t': 'i',
+ 'uint16_t': 'i',
+ 'int32_t': 'i',
+ 'uint32_t': 'i',
+ 'int64_t': 'i',
+ 'uint64_t': 'i',
+ 'int_least8_t': 'i',
+ 'uint_least8_t': 'i',
+ 'int_least16_t': 'i',
+ 'uint_least16_t': 'i',
+ 'int_least32_t': 'i',
+ 'uint_least32_t': 'i',
+ 'int_least64_t': 'i',
+ 'uint_least64_t': 'i',
+ 'int_fast8_t': 'i',
+ 'uint_fast8_t': 'i',
+ 'int_fast16_t': 'i',
+ 'uint_fast16_t': 'i',
+ 'int_fast32_t': 'i',
+ 'uint_fast32_t': 'i',
+ 'int_fast64_t': 'i',
+ 'uint_fast64_t': 'i',
+ 'intptr_t': 'i',
+ 'uintptr_t': 'i',
+ 'intmax_t': 'i',
+ 'uintmax_t': 'i',
+ 'ptrdiff_t': 'i',
+ 'size_t': 'i',
+ 'ssize_t': 'i',
+ }
+
+ def __init__(self, name):
+ assert name in self.ALL_PRIMITIVE_TYPES
+ self.name = name
+ self.c_name_with_marker = name + '&'
+
+ def is_char_type(self):
+ return self.ALL_PRIMITIVE_TYPES[self.name] == 'c'
+ def is_integer_type(self):
+ return self.ALL_PRIMITIVE_TYPES[self.name] == 'i'
+ def is_float_type(self):
+ return self.ALL_PRIMITIVE_TYPES[self.name] == 'f'
+ def is_complex_type(self):
+ return self.ALL_PRIMITIVE_TYPES[self.name] == 'j'
+
+ def build_backend_type(self, ffi, finishlist):
+ return global_cache(self, ffi, 'new_primitive_type', self.name)
+
+
+class UnknownIntegerType(BasePrimitiveType):
+ _attrs_ = ('name',)
+
+ def __init__(self, name):
+ self.name = name
+ self.c_name_with_marker = name + '&'
+
+ def is_integer_type(self):
+ return True
+
+ def build_backend_type(self, ffi, finishlist):
+ raise NotImplementedError("integer type '%s' can only be used after "
+ "compilation" % self.name)
+
+class UnknownFloatType(BasePrimitiveType):
+ _attrs_ = ('name', )
+
+ def __init__(self, name):
+ self.name = name
+ self.c_name_with_marker = name + '&'
+
+ def build_backend_type(self, ffi, finishlist):
+ raise NotImplementedError("float type '%s' can only be used after "
+ "compilation" % self.name)
+
+
+class BaseFunctionType(BaseType):
+ _attrs_ = ('args', 'result', 'ellipsis', 'abi')
+
+ def __init__(self, args, result, ellipsis, abi=None):
+ self.args = args
+ self.result = result
+ self.ellipsis = ellipsis
+ self.abi = abi
+ #
+ reprargs = [arg._get_c_name() for arg in self.args]
+ if self.ellipsis:
+ reprargs.append('...')
+ reprargs = reprargs or ['void']
+ replace_with = self._base_pattern % (', '.join(reprargs),)
+ if abi is not None:
+ replace_with = replace_with[:1] + abi + ' ' + replace_with[1:]
+ self.c_name_with_marker = (
+ self.result.c_name_with_marker.replace('&', replace_with))
+
+
+class RawFunctionType(BaseFunctionType):
+ # Corresponds to a C type like 'int(int)', which is the C type of
+ # a function, but not a pointer-to-function. The backend has no
+ # notion of such a type; it's used temporarily by parsing.
+ _base_pattern = '(&)(%s)'
+ is_raw_function = True
+
+ def build_backend_type(self, ffi, finishlist):
+ raise CDefError("cannot render the type %r: it is a function "
+ "type, not a pointer-to-function type" % (self,))
+
+ def as_function_pointer(self):
+ return FunctionPtrType(self.args, self.result, self.ellipsis, self.abi)
+
+
+class FunctionPtrType(BaseFunctionType):
+ _base_pattern = '(*&)(%s)'
+
+ def build_backend_type(self, ffi, finishlist):
+ result = self.result.get_cached_btype(ffi, finishlist)
+ args = []
+ for tp in self.args:
+ args.append(tp.get_cached_btype(ffi, finishlist))
+ abi_args = ()
+ if self.abi == "__stdcall":
+ if not self.ellipsis: # __stdcall ignored for variadic funcs
+ try:
+ abi_args = (ffi._backend.FFI_STDCALL,)
+ except AttributeError:
+ pass
+ return global_cache(self, ffi, 'new_function_type',
+ tuple(args), result, self.ellipsis, *abi_args)
+
+ def as_raw_function(self):
+ return RawFunctionType(self.args, self.result, self.ellipsis, self.abi)
+
+
+class PointerType(BaseType):
+ _attrs_ = ('totype', 'quals')
+
+ def __init__(self, totype, quals=0):
+ self.totype = totype
+ self.quals = quals
+ extra = qualify(quals, " *&")
+ if totype.is_array_type:
+ extra = "(%s)" % (extra.lstrip(),)
+ self.c_name_with_marker = totype.c_name_with_marker.replace('&', extra)
+
+ def build_backend_type(self, ffi, finishlist):
+ BItem = self.totype.get_cached_btype(ffi, finishlist, can_delay=True)
+ return global_cache(self, ffi, 'new_pointer_type', BItem)
+
+voidp_type = PointerType(void_type)
+
+def ConstPointerType(totype):
+ return PointerType(totype, Q_CONST)
+
+const_voidp_type = ConstPointerType(void_type)
+
+
+class NamedPointerType(PointerType):
+ _attrs_ = ('totype', 'name')
+
+ def __init__(self, totype, name, quals=0):
+ PointerType.__init__(self, totype, quals)
+ self.name = name
+ self.c_name_with_marker = name + '&'
+
+
+class ArrayType(BaseType):
+ _attrs_ = ('item', 'length')
+ is_array_type = True
+
+ def __init__(self, item, length):
+ self.item = item
+ self.length = length
+ #
+ if length is None:
+ brackets = '&[]'
+ elif length == '...':
+ brackets = '&[/*...*/]'
+ else:
+ brackets = '&[%s]' % length
+ self.c_name_with_marker = (
+ self.item.c_name_with_marker.replace('&', brackets))
+
+ def length_is_unknown(self):
+ return isinstance(self.length, str)
+
+ def resolve_length(self, newlength):
+ return ArrayType(self.item, newlength)
+
+ def build_backend_type(self, ffi, finishlist):
+ if self.length_is_unknown():
+ raise CDefError("cannot render the type %r: unknown length" %
+ (self,))
+ self.item.get_cached_btype(ffi, finishlist) # force the item BType
+ BPtrItem = PointerType(self.item).get_cached_btype(ffi, finishlist)
+ return global_cache(self, ffi, 'new_array_type', BPtrItem, self.length)
+
+char_array_type = ArrayType(PrimitiveType('char'), None)
+
+
+class StructOrUnionOrEnum(BaseTypeByIdentity):
+ _attrs_ = ('name',)
+ forcename = None
+
+ def build_c_name_with_marker(self):
+ name = self.forcename or '%s %s' % (self.kind, self.name)
+ self.c_name_with_marker = name + '&'
+
+ def force_the_name(self, forcename):
+ self.forcename = forcename
+ self.build_c_name_with_marker()
+
+ def get_official_name(self):
+ assert self.c_name_with_marker.endswith('&')
+ return self.c_name_with_marker[:-1]
+
+
+class StructOrUnion(StructOrUnionOrEnum):
+ fixedlayout = None
+ completed = 0
+ partial = False
+ packed = 0
+
+ def __init__(self, name, fldnames, fldtypes, fldbitsize, fldquals=None):
+ self.name = name
+ self.fldnames = fldnames
+ self.fldtypes = fldtypes
+ self.fldbitsize = fldbitsize
+ self.fldquals = fldquals
+ self.build_c_name_with_marker()
+
+ def anonymous_struct_fields(self):
+ if self.fldtypes is not None:
+ for name, type in zip(self.fldnames, self.fldtypes):
+ if name == '' and isinstance(type, StructOrUnion):
+ yield type
+
+ def enumfields(self, expand_anonymous_struct_union=True):
+ fldquals = self.fldquals
+ if fldquals is None:
+ fldquals = (0,) * len(self.fldnames)
+ for name, type, bitsize, quals in zip(self.fldnames, self.fldtypes,
+ self.fldbitsize, fldquals):
+ if (name == '' and isinstance(type, StructOrUnion)
+ and expand_anonymous_struct_union):
+ # nested anonymous struct/union
+ for result in type.enumfields():
+ yield result
+ else:
+ yield (name, type, bitsize, quals)
+
+ def force_flatten(self):
+ # force the struct or union to have a declaration that lists
+ # directly all fields returned by enumfields(), flattening
+ # nested anonymous structs/unions.
+ names = []
+ types = []
+ bitsizes = []
+ fldquals = []
+ for name, type, bitsize, quals in self.enumfields():
+ names.append(name)
+ types.append(type)
+ bitsizes.append(bitsize)
+ fldquals.append(quals)
+ self.fldnames = tuple(names)
+ self.fldtypes = tuple(types)
+ self.fldbitsize = tuple(bitsizes)
+ self.fldquals = tuple(fldquals)
+
+ def get_cached_btype(self, ffi, finishlist, can_delay=False):
+ BType = StructOrUnionOrEnum.get_cached_btype(self, ffi, finishlist,
+ can_delay)
+ if not can_delay:
+ self.finish_backend_type(ffi, finishlist)
+ return BType
+
+ def finish_backend_type(self, ffi, finishlist):
+ if self.completed:
+ if self.completed != 2:
+ raise NotImplementedError("recursive structure declaration "
+ "for '%s'" % (self.name,))
+ return
+ BType = ffi._cached_btypes[self]
+ #
+ self.completed = 1
+ #
+ if self.fldtypes is None:
+ pass # not completing it: it's an opaque struct
+ #
+ elif self.fixedlayout is None:
+ fldtypes = [tp.get_cached_btype(ffi, finishlist)
+ for tp in self.fldtypes]
+ lst = list(zip(self.fldnames, fldtypes, self.fldbitsize))
+ extra_flags = ()
+ if self.packed:
+ if self.packed == 1:
+ extra_flags = (8,) # SF_PACKED
+ else:
+ extra_flags = (0, self.packed)
+ ffi._backend.complete_struct_or_union(BType, lst, self,
+ -1, -1, *extra_flags)
+ #
+ else:
+ fldtypes = []
+ fieldofs, fieldsize, totalsize, totalalignment = self.fixedlayout
+ for i in range(len(self.fldnames)):
+ fsize = fieldsize[i]
+ ftype = self.fldtypes[i]
+ #
+ if isinstance(ftype, ArrayType) and ftype.length_is_unknown():
+ # fix the length to match the total size
+ BItemType = ftype.item.get_cached_btype(ffi, finishlist)
+ nlen, nrest = divmod(fsize, ffi.sizeof(BItemType))
+ if nrest != 0:
+ self._verification_error(
+ "field '%s.%s' has a bogus size?" % (
+ self.name, self.fldnames[i] or '{}'))
+ ftype = ftype.resolve_length(nlen)
+ self.fldtypes = (self.fldtypes[:i] + (ftype,) +
+ self.fldtypes[i+1:])
+ #
+ BFieldType = ftype.get_cached_btype(ffi, finishlist)
+ if isinstance(ftype, ArrayType) and ftype.length is None:
+ assert fsize == 0
+ else:
+ bitemsize = ffi.sizeof(BFieldType)
+ if bitemsize != fsize:
+ self._verification_error(
+ "field '%s.%s' is declared as %d bytes, but is "
+ "really %d bytes" % (self.name,
+ self.fldnames[i] or '{}',
+ bitemsize, fsize))
+ fldtypes.append(BFieldType)
+ #
+ lst = list(zip(self.fldnames, fldtypes, self.fldbitsize, fieldofs))
+ ffi._backend.complete_struct_or_union(BType, lst, self,
+ totalsize, totalalignment)
+ self.completed = 2
+
+ def _verification_error(self, msg):
+ raise VerificationError(msg)
+
+ def check_not_partial(self):
+ if self.partial and self.fixedlayout is None:
+ raise VerificationMissing(self._get_c_name())
+
+ def build_backend_type(self, ffi, finishlist):
+ self.check_not_partial()
+ finishlist.append(self)
+ #
+ return global_cache(self, ffi, 'new_%s_type' % self.kind,
+ self.get_official_name(), key=self)
+
+
+class StructType(StructOrUnion):
+ kind = 'struct'
+
+
+class UnionType(StructOrUnion):
+ kind = 'union'
+
+
+class EnumType(StructOrUnionOrEnum):
+ kind = 'enum'
+ partial = False
+ partial_resolved = False
+
+ def __init__(self, name, enumerators, enumvalues, baseinttype=None):
+ self.name = name
+ self.enumerators = enumerators
+ self.enumvalues = enumvalues
+ self.baseinttype = baseinttype
+ self.build_c_name_with_marker()
+
+ def force_the_name(self, forcename):
+ StructOrUnionOrEnum.force_the_name(self, forcename)
+ if self.forcename is None:
+ name = self.get_official_name()
+ self.forcename = '$' + name.replace(' ', '_')
+
+ def check_not_partial(self):
+ if self.partial and not self.partial_resolved:
+ raise VerificationMissing(self._get_c_name())
+
+ def build_backend_type(self, ffi, finishlist):
+ self.check_not_partial()
+ base_btype = self.build_baseinttype(ffi, finishlist)
+ return global_cache(self, ffi, 'new_enum_type',
+ self.get_official_name(),
+ self.enumerators, self.enumvalues,
+ base_btype, key=self)
+
+ def build_baseinttype(self, ffi, finishlist):
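+        # Pick the smallest of int/long (or their unsigned variants) that
+        # can represent every enumerator; e.g. values {-5, 10} fit a plain
+        # 'int' on the usual platforms where int is 32 bits.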
+ if self.baseinttype is not None:
+ return self.baseinttype.get_cached_btype(ffi, finishlist)
+ #
+ if self.enumvalues:
+ smallest_value = min(self.enumvalues)
+ largest_value = max(self.enumvalues)
+ else:
+ import warnings
+ try:
+ # XXX! The goal is to ensure that the warnings.warn()
+ # will not suppress the warning. We want to get it
+ # several times if we reach this point several times.
+ __warningregistry__.clear()
+ except NameError:
+ pass
+ warnings.warn("%r has no values explicitly defined; "
+ "guessing that it is equivalent to 'unsigned int'"
+ % self._get_c_name())
+ smallest_value = largest_value = 0
+ if smallest_value < 0: # needs a signed type
+ sign = 1
+ candidate1 = PrimitiveType("int")
+ candidate2 = PrimitiveType("long")
+ else:
+ sign = 0
+ candidate1 = PrimitiveType("unsigned int")
+ candidate2 = PrimitiveType("unsigned long")
+ btype1 = candidate1.get_cached_btype(ffi, finishlist)
+ btype2 = candidate2.get_cached_btype(ffi, finishlist)
+ size1 = ffi.sizeof(btype1)
+ size2 = ffi.sizeof(btype2)
+ if (smallest_value >= ((-1) << (8*size1-1)) and
+ largest_value < (1 << (8*size1-sign))):
+ return btype1
+ if (smallest_value >= ((-1) << (8*size2-1)) and
+ largest_value < (1 << (8*size2-sign))):
+ return btype2
+ raise CDefError("%s values don't all fit into either 'long' "
+ "or 'unsigned long'" % self._get_c_name())
+
+def unknown_type(name, structname=None):
+ if structname is None:
+ structname = '$%s' % name
+ tp = StructType(structname, None, None, None)
+ tp.force_the_name(name)
+ tp.origin = "unknown_type"
+ return tp
+
+def unknown_ptr_type(name, structname=None):
+ if structname is None:
+ structname = '$$%s' % name
+ tp = StructType(structname, None, None, None)
+ return NamedPointerType(tp, name)
+
+
+global_lock = allocate_lock()
+_typecache_cffi_backend = weakref.WeakValueDictionary()
+
+def get_typecache(backend):
+ # returns _typecache_cffi_backend if backend is the _cffi_backend
+ # module, or type(backend).__typecache if backend is an instance of
+ # CTypesBackend (or some FakeBackend class during tests)
+ if isinstance(backend, types.ModuleType):
+ return _typecache_cffi_backend
+ with global_lock:
+ if not hasattr(type(backend), '__typecache'):
+ type(backend).__typecache = weakref.WeakValueDictionary()
+ return type(backend).__typecache
+
+def global_cache(srctype, ffi, funcname, *args, **kwds):
+ key = kwds.pop('key', (funcname, args))
+ assert not kwds
+ try:
+ return ffi._typecache[key]
+ except KeyError:
+ pass
+ try:
+ res = getattr(ffi._backend, funcname)(*args)
+ except NotImplementedError as e:
+ raise NotImplementedError("%s: %r: %s" % (funcname, srctype, e))
+ # note that setdefault() on WeakValueDictionary is not atomic
+ # and contains a rare bug (http://bugs.python.org/issue19542);
+ # we have to use a lock and do it ourselves
+ cache = ffi._typecache
+ with global_lock:
+ res1 = cache.get(key)
+ if res1 is None:
+ cache[key] = res
+ return res
+ else:
+ return res1
+
+def pointer_cache(ffi, BType):
+ return global_cache('?', ffi, 'new_pointer_type', BType)
+
+def attach_exception_info(e, name):
+ if e.args and type(e.args[0]) is str:
+ e.args = ('%s: %s' % (name, e.args[0]),) + e.args[1:]
diff --git a/venv/Lib/site-packages/cffi/parse_c_type.h b/venv/Lib/site-packages/cffi/parse_c_type.h
new file mode 100644
index 000000000..84e4ef856
--- /dev/null
+++ b/venv/Lib/site-packages/cffi/parse_c_type.h
@@ -0,0 +1,181 @@
+
+/* This part is from file 'cffi/parse_c_type.h'. It is copied at the
+ beginning of C sources generated by CFFI's ffi.set_source(). */
+
+typedef void *_cffi_opcode_t;
+
+#define _CFFI_OP(opcode, arg) (_cffi_opcode_t)(opcode | (((uintptr_t)(arg)) << 8))
+#define _CFFI_GETOP(cffi_opcode) ((unsigned char)(uintptr_t)cffi_opcode)
+#define _CFFI_GETARG(cffi_opcode) (((intptr_t)cffi_opcode) >> 8)
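+
+/* For example, _CFFI_OP(_CFFI_OP_PRIMITIVE, _CFFI_PRIM_INT) encodes to
+   (_cffi_opcode_t)0x701: opcode 1 in the low byte, argument 7 shifted
+   left by 8; _CFFI_GETOP() and _CFFI_GETARG() undo the packing. */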
+
+#define _CFFI_OP_PRIMITIVE 1
+#define _CFFI_OP_POINTER 3
+#define _CFFI_OP_ARRAY 5
+#define _CFFI_OP_OPEN_ARRAY 7
+#define _CFFI_OP_STRUCT_UNION 9
+#define _CFFI_OP_ENUM 11
+#define _CFFI_OP_FUNCTION 13
+#define _CFFI_OP_FUNCTION_END 15
+#define _CFFI_OP_NOOP 17
+#define _CFFI_OP_BITFIELD 19
+#define _CFFI_OP_TYPENAME 21
+#define _CFFI_OP_CPYTHON_BLTN_V 23 // varargs
+#define _CFFI_OP_CPYTHON_BLTN_N 25 // noargs
+#define _CFFI_OP_CPYTHON_BLTN_O 27 // O (i.e. a single arg)
+#define _CFFI_OP_CONSTANT 29
+#define _CFFI_OP_CONSTANT_INT 31
+#define _CFFI_OP_GLOBAL_VAR 33
+#define _CFFI_OP_DLOPEN_FUNC 35
+#define _CFFI_OP_DLOPEN_CONST 37
+#define _CFFI_OP_GLOBAL_VAR_F 39
+#define _CFFI_OP_EXTERN_PYTHON 41
+
+#define _CFFI_PRIM_VOID 0
+#define _CFFI_PRIM_BOOL 1
+#define _CFFI_PRIM_CHAR 2
+#define _CFFI_PRIM_SCHAR 3
+#define _CFFI_PRIM_UCHAR 4
+#define _CFFI_PRIM_SHORT 5
+#define _CFFI_PRIM_USHORT 6
+#define _CFFI_PRIM_INT 7
+#define _CFFI_PRIM_UINT 8
+#define _CFFI_PRIM_LONG 9
+#define _CFFI_PRIM_ULONG 10
+#define _CFFI_PRIM_LONGLONG 11
+#define _CFFI_PRIM_ULONGLONG 12
+#define _CFFI_PRIM_FLOAT 13
+#define _CFFI_PRIM_DOUBLE 14
+#define _CFFI_PRIM_LONGDOUBLE 15
+
+#define _CFFI_PRIM_WCHAR 16
+#define _CFFI_PRIM_INT8 17
+#define _CFFI_PRIM_UINT8 18
+#define _CFFI_PRIM_INT16 19
+#define _CFFI_PRIM_UINT16 20
+#define _CFFI_PRIM_INT32 21
+#define _CFFI_PRIM_UINT32 22
+#define _CFFI_PRIM_INT64 23
+#define _CFFI_PRIM_UINT64 24
+#define _CFFI_PRIM_INTPTR 25
+#define _CFFI_PRIM_UINTPTR 26
+#define _CFFI_PRIM_PTRDIFF 27
+#define _CFFI_PRIM_SIZE 28
+#define _CFFI_PRIM_SSIZE 29
+#define _CFFI_PRIM_INT_LEAST8 30
+#define _CFFI_PRIM_UINT_LEAST8 31
+#define _CFFI_PRIM_INT_LEAST16 32
+#define _CFFI_PRIM_UINT_LEAST16 33
+#define _CFFI_PRIM_INT_LEAST32 34
+#define _CFFI_PRIM_UINT_LEAST32 35
+#define _CFFI_PRIM_INT_LEAST64 36
+#define _CFFI_PRIM_UINT_LEAST64 37
+#define _CFFI_PRIM_INT_FAST8 38
+#define _CFFI_PRIM_UINT_FAST8 39
+#define _CFFI_PRIM_INT_FAST16 40
+#define _CFFI_PRIM_UINT_FAST16 41
+#define _CFFI_PRIM_INT_FAST32 42
+#define _CFFI_PRIM_UINT_FAST32 43
+#define _CFFI_PRIM_INT_FAST64 44
+#define _CFFI_PRIM_UINT_FAST64 45
+#define _CFFI_PRIM_INTMAX 46
+#define _CFFI_PRIM_UINTMAX 47
+#define _CFFI_PRIM_FLOATCOMPLEX 48
+#define _CFFI_PRIM_DOUBLECOMPLEX 49
+#define _CFFI_PRIM_CHAR16 50
+#define _CFFI_PRIM_CHAR32 51
+
+#define _CFFI__NUM_PRIM 52
+#define _CFFI__UNKNOWN_PRIM (-1)
+#define _CFFI__UNKNOWN_FLOAT_PRIM (-2)
+#define _CFFI__UNKNOWN_LONG_DOUBLE (-3)
+
+#define _CFFI__IO_FILE_STRUCT (-1)
+
+
+struct _cffi_global_s {
+ const char *name;
+ void *address;
+ _cffi_opcode_t type_op;
+ void *size_or_direct_fn; // OP_GLOBAL_VAR: size, or 0 if unknown
+ // OP_CPYTHON_BLTN_*: addr of direct function
+};
+
+struct _cffi_getconst_s {
+ unsigned long long value;
+ const struct _cffi_type_context_s *ctx;
+ int gindex;
+};
+
+struct _cffi_struct_union_s {
+ const char *name;
+ int type_index; // -> _cffi_types, on a OP_STRUCT_UNION
+ int flags; // _CFFI_F_* flags below
+ size_t size;
+ int alignment;
+ int first_field_index; // -> _cffi_fields array
+ int num_fields;
+};
+#define _CFFI_F_UNION 0x01 // is a union, not a struct
+#define _CFFI_F_CHECK_FIELDS 0x02 // complain if fields are not in the
+ // "standard layout" or if some are missing
+#define _CFFI_F_PACKED 0x04 // for CHECK_FIELDS, assume a packed struct
+#define _CFFI_F_EXTERNAL 0x08 // in some other ffi.include()
+#define _CFFI_F_OPAQUE 0x10 // opaque
+
+struct _cffi_field_s {
+ const char *name;
+ size_t field_offset;
+ size_t field_size;
+ _cffi_opcode_t field_type_op;
+};
+
+struct _cffi_enum_s {
+ const char *name;
+ int type_index; // -> _cffi_types, on a OP_ENUM
+ int type_prim; // _CFFI_PRIM_xxx
+ const char *enumerators; // comma-delimited string
+};
+
+struct _cffi_typename_s {
+ const char *name;
+ int type_index; /* if opaque, points to a possibly artificial
+ OP_STRUCT which is itself opaque */
+};
+
+struct _cffi_type_context_s {
+ _cffi_opcode_t *types;
+ const struct _cffi_global_s *globals;
+ const struct _cffi_field_s *fields;
+ const struct _cffi_struct_union_s *struct_unions;
+ const struct _cffi_enum_s *enums;
+ const struct _cffi_typename_s *typenames;
+ int num_globals;
+ int num_struct_unions;
+ int num_enums;
+ int num_typenames;
+ const char *const *includes;
+ int num_types;
+ int flags; /* future extension */
+};
+
+struct _cffi_parse_info_s {
+ const struct _cffi_type_context_s *ctx;
+ _cffi_opcode_t *output;
+ unsigned int output_size;
+ size_t error_location;
+ const char *error_message;
+};
+
+struct _cffi_externpy_s {
+ const char *name;
+ size_t size_of_result;
+ void *reserved1, *reserved2;
+};
+
+#ifdef _CFFI_INTERNAL
+static int parse_c_type(struct _cffi_parse_info_s *info, const char *input);
+static int search_in_globals(const struct _cffi_type_context_s *ctx,
+ const char *search, size_t search_len);
+static int search_in_struct_unions(const struct _cffi_type_context_s *ctx,
+ const char *search, size_t search_len);
+#endif
diff --git a/venv/Lib/site-packages/cffi/pkgconfig.py b/venv/Lib/site-packages/cffi/pkgconfig.py
new file mode 100644
index 000000000..5c93f15a6
--- /dev/null
+++ b/venv/Lib/site-packages/cffi/pkgconfig.py
@@ -0,0 +1,121 @@
+# pkg-config (https://www.freedesktop.org/wiki/Software/pkg-config/) integration for cffi
+import sys, os, subprocess
+
+from .error import PkgConfigError
+
+
+def merge_flags(cfg1, cfg2):
+ """Merge values from cffi config flags cfg2 to cf1
+
+ Example:
+ merge_flags({"libraries": ["one"]}, {"libraries": ["two"]})
+ {"libraries": ["one", "two"]}
+ """
+ for key, value in cfg2.items():
+ if key not in cfg1:
+ cfg1[key] = value
+ else:
+ if not isinstance(cfg1[key], list):
+ raise TypeError("cfg1[%r] should be a list of strings" % (key,))
+ if not isinstance(value, list):
+ raise TypeError("cfg2[%r] should be a list of strings" % (key,))
+ cfg1[key].extend(value)
+ return cfg1
+
+
+def call(libname, flag, encoding=sys.getfilesystemencoding()):
+ """Calls pkg-config and returns the output if found
+ """
+ a = ["pkg-config", "--print-errors"]
+ a.append(flag)
+ a.append(libname)
+ try:
+ pc = subprocess.Popen(a, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ except EnvironmentError as e:
+ raise PkgConfigError("cannot run pkg-config: %s" % (str(e).strip(),))
+
+ bout, berr = pc.communicate()
+ if pc.returncode != 0:
+ try:
+ berr = berr.decode(encoding)
+ except Exception:
+ pass
+ raise PkgConfigError(berr.strip())
+
+ if sys.version_info >= (3,) and not isinstance(bout, str): # Python 3.x
+ try:
+ bout = bout.decode(encoding)
+ except UnicodeDecodeError:
+ raise PkgConfigError("pkg-config %s %s returned bytes that cannot "
+ "be decoded with encoding %r:\n%r" %
+ (flag, libname, encoding, bout))
+
+ if os.altsep != '\\' and '\\' in bout:
+ raise PkgConfigError("pkg-config %s %s returned an unsupported "
+ "backslash-escaped output:\n%r" %
+ (flag, libname, bout))
+ return bout
+
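+# Example (sketch, assuming pkg-config and a "libfoo" .pc file are both
+# installed): call("libfoo", "--cflags") returns the raw output string,
+# e.g. "-I/usr/include/foo\n"; a missing package raises PkgConfigError.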
+
+def flags_from_pkgconfig(libs):
+ r"""Return compiler line flags for FFI.set_source based on pkg-config output
+
+ Usage
+ ...
+ ffibuilder.set_source("_foo", pkgconfig = ["libfoo", "libbar >= 1.8.3"])
+
+    If pkg-config is installed on the build machine, then the arguments
+    include_dirs, library_dirs, libraries, define_macros,
+    extra_compile_args and extra_link_args are extended with the output
+    of pkg-config for libfoo and libbar.
+
+ Raises PkgConfigError in case the pkg-config call fails.
+ """
+
+ def get_include_dirs(string):
+ return [x[2:] for x in string.split() if x.startswith("-I")]
+
+ def get_library_dirs(string):
+ return [x[2:] for x in string.split() if x.startswith("-L")]
+
+ def get_libraries(string):
+ return [x[2:] for x in string.split() if x.startswith("-l")]
+
+ # convert -Dfoo=bar to list of tuples [("foo", "bar")] expected by distutils
+ def get_macros(string):
+ def _macro(x):
+ x = x[2:] # drop "-D"
+ if '=' in x:
+ return tuple(x.split("=", 1)) # "-Dfoo=bar" => ("foo", "bar")
+ else:
+ return (x, None) # "-Dfoo" => ("foo", None)
+ return [_macro(x) for x in string.split() if x.startswith("-D")]
+
+ def get_other_cflags(string):
+ return [x for x in string.split() if not x.startswith("-I") and
+ not x.startswith("-D")]
+
+ def get_other_libs(string):
+ return [x for x in string.split() if not x.startswith("-L") and
+ not x.startswith("-l")]
+
+ # return kwargs for given libname
+ def kwargs(libname):
+ all_cflags = call(libname, "--cflags")
+ all_libs = call(libname, "--libs")
+ return {
+ "include_dirs": get_include_dirs(all_cflags),
+ "library_dirs": get_library_dirs(all_libs),
+ "libraries": get_libraries(all_libs),
+ "define_macros": get_macros(all_cflags),
+ "extra_compile_args": get_other_cflags(all_cflags),
+ "extra_link_args": get_other_libs(all_libs),
+ }
+
+ # merge all arguments together
+ ret = {}
+ for libname in libs:
+ lib_flags = kwargs(libname)
+ merge_flags(ret, lib_flags)
+ return ret
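+
+
+# Example (sketch, assuming a hypothetical "libfoo" whose pkg-config
+# entries report "-I/usr/include/foo -DFOO=1" for --cflags and
+# "-L/usr/lib -lfoo" for --libs):
+#
+#   flags_from_pkgconfig(["libfoo"]) == {
+#       "include_dirs": ["/usr/include/foo"],
+#       "library_dirs": ["/usr/lib"],
+#       "libraries": ["foo"],
+#       "define_macros": [("FOO", "1")],
+#       "extra_compile_args": [],
+#       "extra_link_args": [],
+#   }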
diff --git a/venv/Lib/site-packages/cffi/recompiler.py b/venv/Lib/site-packages/cffi/recompiler.py
new file mode 100644
index 000000000..1aeae5b92
--- /dev/null
+++ b/venv/Lib/site-packages/cffi/recompiler.py
@@ -0,0 +1,1571 @@
+import os, sys, io
+from . import ffiplatform, model
+from .error import VerificationError
+from .cffi_opcode import *
+
+VERSION_BASE = 0x2601
+VERSION_EMBEDDED = 0x2701
+VERSION_CHAR16CHAR32 = 0x2801
+
+USE_LIMITED_API = (sys.platform != 'win32' or sys.version_info < (3, 0) or
+ sys.version_info >= (3, 5))
+
+
+class GlobalExpr:
+ def __init__(self, name, address, type_op, size=0, check_value=0):
+ self.name = name
+ self.address = address
+ self.type_op = type_op
+ self.size = size
+ self.check_value = check_value
+
+ def as_c_expr(self):
+ return ' { "%s", (void *)%s, %s, (void *)%s },' % (
+ self.name, self.address, self.type_op.as_c_expr(), self.size)
+
+ def as_python_expr(self):
+ return "b'%s%s',%d" % (self.type_op.as_python_bytes(), self.name,
+ self.check_value)
+
+class FieldExpr:
+ def __init__(self, name, field_offset, field_size, fbitsize, field_type_op):
+ self.name = name
+ self.field_offset = field_offset
+ self.field_size = field_size
+ self.fbitsize = fbitsize
+ self.field_type_op = field_type_op
+
+ def as_c_expr(self):
+ spaces = " " * len(self.name)
+ return (' { "%s", %s,\n' % (self.name, self.field_offset) +
+ ' %s %s,\n' % (spaces, self.field_size) +
+ ' %s %s },' % (spaces, self.field_type_op.as_c_expr()))
+
+ def as_python_expr(self):
+ raise NotImplementedError
+
+ def as_field_python_expr(self):
+ if self.field_type_op.op == OP_NOOP:
+ size_expr = ''
+ elif self.field_type_op.op == OP_BITFIELD:
+ size_expr = format_four_bytes(self.fbitsize)
+ else:
+ raise NotImplementedError
+ return "b'%s%s%s'" % (self.field_type_op.as_python_bytes(),
+ size_expr,
+ self.name)
+
+class StructUnionExpr:
+ def __init__(self, name, type_index, flags, size, alignment, comment,
+ first_field_index, c_fields):
+ self.name = name
+ self.type_index = type_index
+ self.flags = flags
+ self.size = size
+ self.alignment = alignment
+ self.comment = comment
+ self.first_field_index = first_field_index
+ self.c_fields = c_fields
+
+ def as_c_expr(self):
+ return (' { "%s", %d, %s,' % (self.name, self.type_index, self.flags)
+ + '\n %s, %s, ' % (self.size, self.alignment)
+ + '%d, %d ' % (self.first_field_index, len(self.c_fields))
+ + ('/* %s */ ' % self.comment if self.comment else '')
+ + '},')
+
+ def as_python_expr(self):
+ flags = eval(self.flags, G_FLAGS)
+ fields_expr = [c_field.as_field_python_expr()
+ for c_field in self.c_fields]
+ return "(b'%s%s%s',%s)" % (
+ format_four_bytes(self.type_index),
+ format_four_bytes(flags),
+ self.name,
+ ','.join(fields_expr))
+
+class EnumExpr:
+ def __init__(self, name, type_index, size, signed, allenums):
+ self.name = name
+ self.type_index = type_index
+ self.size = size
+ self.signed = signed
+ self.allenums = allenums
+
+ def as_c_expr(self):
+ return (' { "%s", %d, _cffi_prim_int(%s, %s),\n'
+ ' "%s" },' % (self.name, self.type_index,
+ self.size, self.signed, self.allenums))
+
+ def as_python_expr(self):
+ prim_index = {
+ (1, 0): PRIM_UINT8, (1, 1): PRIM_INT8,
+ (2, 0): PRIM_UINT16, (2, 1): PRIM_INT16,
+ (4, 0): PRIM_UINT32, (4, 1): PRIM_INT32,
+ (8, 0): PRIM_UINT64, (8, 1): PRIM_INT64,
+ }[self.size, self.signed]
+ return "b'%s%s%s\\x00%s'" % (format_four_bytes(self.type_index),
+ format_four_bytes(prim_index),
+ self.name, self.allenums)
+
+class TypenameExpr:
+ def __init__(self, name, type_index):
+ self.name = name
+ self.type_index = type_index
+
+ def as_c_expr(self):
+ return ' { "%s", %d },' % (self.name, self.type_index)
+
+ def as_python_expr(self):
+ return "b'%s%s'" % (format_four_bytes(self.type_index), self.name)
+
+
+# ____________________________________________________________
+
+
+class Recompiler:
+ _num_externpy = 0
+
+ def __init__(self, ffi, module_name, target_is_python=False):
+ self.ffi = ffi
+ self.module_name = module_name
+ self.target_is_python = target_is_python
+ self._version = VERSION_BASE
+
+ def needs_version(self, ver):
+ self._version = max(self._version, ver)
+
+ def collect_type_table(self):
+ self._typesdict = {}
+ self._generate("collecttype")
+ #
+ all_decls = sorted(self._typesdict, key=str)
+ #
+ # prepare all FUNCTION bytecode sequences first
+ self.cffi_types = []
+ for tp in all_decls:
+ if tp.is_raw_function:
+ assert self._typesdict[tp] is None
+ self._typesdict[tp] = len(self.cffi_types)
+ self.cffi_types.append(tp) # placeholder
+ for tp1 in tp.args:
+ assert isinstance(tp1, (model.VoidType,
+ model.BasePrimitiveType,
+ model.PointerType,
+ model.StructOrUnionOrEnum,
+ model.FunctionPtrType))
+ if self._typesdict[tp1] is None:
+ self._typesdict[tp1] = len(self.cffi_types)
+ self.cffi_types.append(tp1) # placeholder
+ self.cffi_types.append('END') # placeholder
+ #
+ # prepare all OTHER bytecode sequences
+ for tp in all_decls:
+ if not tp.is_raw_function and self._typesdict[tp] is None:
+ self._typesdict[tp] = len(self.cffi_types)
+ self.cffi_types.append(tp) # placeholder
+ if tp.is_array_type and tp.length is not None:
+ self.cffi_types.append('LEN') # placeholder
+ assert None not in self._typesdict.values()
+ #
+ # collect all structs and unions and enums
+ self._struct_unions = {}
+ self._enums = {}
+ for tp in all_decls:
+ if isinstance(tp, model.StructOrUnion):
+ self._struct_unions[tp] = None
+ elif isinstance(tp, model.EnumType):
+ self._enums[tp] = None
+ for i, tp in enumerate(sorted(self._struct_unions,
+ key=lambda tp: tp.name)):
+ self._struct_unions[tp] = i
+ for i, tp in enumerate(sorted(self._enums,
+ key=lambda tp: tp.name)):
+ self._enums[tp] = i
+ #
+ # emit all bytecode sequences now
+ for tp in all_decls:
+ method = getattr(self, '_emit_bytecode_' + tp.__class__.__name__)
+ method(tp, self._typesdict[tp])
+ #
+ # consistency check
+ for op in self.cffi_types:
+ assert isinstance(op, CffiOp)
+ self.cffi_types = tuple(self.cffi_types) # don't change any more
+
+ def _do_collect_type(self, tp):
+ if not isinstance(tp, model.BaseTypeByIdentity):
+ if isinstance(tp, tuple):
+ for x in tp:
+ self._do_collect_type(x)
+ return
+ if tp not in self._typesdict:
+ self._typesdict[tp] = None
+ if isinstance(tp, model.FunctionPtrType):
+ self._do_collect_type(tp.as_raw_function())
+ elif isinstance(tp, model.StructOrUnion):
+ if tp.fldtypes is not None and (
+ tp not in self.ffi._parser._included_declarations):
+ for name1, tp1, _, _ in tp.enumfields():
+ self._do_collect_type(self._field_type(tp, name1, tp1))
+ else:
+ for _, x in tp._get_items():
+ self._do_collect_type(x)
+
+ def _generate(self, step_name):
+ lst = self.ffi._parser._declarations.items()
+ for name, (tp, quals) in sorted(lst):
+ kind, realname = name.split(' ', 1)
+ try:
+ method = getattr(self, '_generate_cpy_%s_%s' % (kind,
+ step_name))
+ except AttributeError:
+ raise VerificationError(
+ "not implemented in recompile(): %r" % name)
+ try:
+ self._current_quals = quals
+ method(tp, realname)
+ except Exception as e:
+ model.attach_exception_info(e, name)
+ raise
+
+ # ----------
+
+ ALL_STEPS = ["global", "field", "struct_union", "enum", "typename"]
+
+ def collect_step_tables(self):
+ # collect the declarations for '_cffi_globals', '_cffi_typenames', etc.
+ self._lsts = {}
+ for step_name in self.ALL_STEPS:
+ self._lsts[step_name] = []
+ self._seen_struct_unions = set()
+ self._generate("ctx")
+ self._add_missing_struct_unions()
+ #
+ for step_name in self.ALL_STEPS:
+ lst = self._lsts[step_name]
+ if step_name != "field":
+ lst.sort(key=lambda entry: entry.name)
+ self._lsts[step_name] = tuple(lst) # don't change any more
+ #
+ # check for a possible internal inconsistency: _cffi_struct_unions
+ # should have been generated with exactly self._struct_unions
+ lst = self._lsts["struct_union"]
+ for tp, i in self._struct_unions.items():
+ assert i < len(lst)
+ assert lst[i].name == tp.name
+ assert len(lst) == len(self._struct_unions)
+ # same with enums
+ lst = self._lsts["enum"]
+ for tp, i in self._enums.items():
+ assert i < len(lst)
+ assert lst[i].name == tp.name
+ assert len(lst) == len(self._enums)
+
+ # ----------
+
+ def _prnt(self, what=''):
+ self._f.write(what + '\n')
+
+ def write_source_to_f(self, f, preamble):
+ if self.target_is_python:
+ assert preamble is None
+ self.write_py_source_to_f(f)
+ else:
+ assert preamble is not None
+ self.write_c_source_to_f(f, preamble)
+
+ def _rel_readlines(self, filename):
+ g = open(os.path.join(os.path.dirname(__file__), filename), 'r')
+ lines = g.readlines()
+ g.close()
+ return lines
+
+ def write_c_source_to_f(self, f, preamble):
+ self._f = f
+ prnt = self._prnt
+ if self.ffi._embedding is not None:
+ prnt('#define _CFFI_USE_EMBEDDING')
+ if not USE_LIMITED_API:
+ prnt('#define _CFFI_NO_LIMITED_API')
+ #
+ # first the '#include' (actually done by inlining the file's content)
+ lines = self._rel_readlines('_cffi_include.h')
+ i = lines.index('#include "parse_c_type.h"\n')
+ lines[i:i+1] = self._rel_readlines('parse_c_type.h')
+ prnt(''.join(lines))
+ #
+ # if we have ffi._embedding != None, we give it here as a macro
+ # and include an extra file
+ base_module_name = self.module_name.split('.')[-1]
+ if self.ffi._embedding is not None:
+ prnt('#define _CFFI_MODULE_NAME "%s"' % (self.module_name,))
+ prnt('static const char _CFFI_PYTHON_STARTUP_CODE[] = {')
+ self._print_string_literal_in_array(self.ffi._embedding)
+ prnt('0 };')
+ prnt('#ifdef PYPY_VERSION')
+ prnt('# define _CFFI_PYTHON_STARTUP_FUNC _cffi_pypyinit_%s' % (
+ base_module_name,))
+ prnt('#elif PY_MAJOR_VERSION >= 3')
+ prnt('# define _CFFI_PYTHON_STARTUP_FUNC PyInit_%s' % (
+ base_module_name,))
+ prnt('#else')
+ prnt('# define _CFFI_PYTHON_STARTUP_FUNC init%s' % (
+ base_module_name,))
+ prnt('#endif')
+ lines = self._rel_readlines('_embedding.h')
+ i = lines.index('#include "_cffi_errors.h"\n')
+ lines[i:i+1] = self._rel_readlines('_cffi_errors.h')
+ prnt(''.join(lines))
+ self.needs_version(VERSION_EMBEDDED)
+ #
+ # then paste the C source given by the user, verbatim.
+ prnt('/************************************************************/')
+ prnt()
+ prnt(preamble)
+ prnt()
+ prnt('/************************************************************/')
+ prnt()
+ #
+ # the declaration of '_cffi_types'
+ prnt('static void *_cffi_types[] = {')
+ typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()])
+ for i, op in enumerate(self.cffi_types):
+ comment = ''
+ if i in typeindex2type:
+ comment = ' // ' + typeindex2type[i]._get_c_name()
+ prnt('/* %2d */ %s,%s' % (i, op.as_c_expr(), comment))
+ if not self.cffi_types:
+ prnt(' 0')
+ prnt('};')
+ prnt()
+ #
+ # call generate_cpy_xxx_decl(), for every xxx found from
+ # ffi._parser._declarations. This generates all the functions.
+ self._seen_constants = set()
+ self._generate("decl")
+ #
+ # the declaration of '_cffi_globals' and '_cffi_typenames'
+ nums = {}
+ for step_name in self.ALL_STEPS:
+ lst = self._lsts[step_name]
+ nums[step_name] = len(lst)
+ if nums[step_name] > 0:
+ prnt('static const struct _cffi_%s_s _cffi_%ss[] = {' % (
+ step_name, step_name))
+ for entry in lst:
+ prnt(entry.as_c_expr())
+ prnt('};')
+ prnt()
+ #
+ # the declaration of '_cffi_includes'
+ if self.ffi._included_ffis:
+ prnt('static const char * const _cffi_includes[] = {')
+ for ffi_to_include in self.ffi._included_ffis:
+ try:
+ included_module_name, included_source = (
+ ffi_to_include._assigned_source[:2])
+ except AttributeError:
+ raise VerificationError(
+ "ffi object %r includes %r, but the latter has not "
+ "been prepared with set_source()" % (
+ self.ffi, ffi_to_include,))
+ if included_source is None:
+ raise VerificationError(
+ "not implemented yet: ffi.include() of a Python-based "
+ "ffi inside a C-based ffi")
+ prnt(' "%s",' % (included_module_name,))
+ prnt(' NULL')
+ prnt('};')
+ prnt()
+ #
+ # the declaration of '_cffi_type_context'
+ prnt('static const struct _cffi_type_context_s _cffi_type_context = {')
+ prnt(' _cffi_types,')
+ for step_name in self.ALL_STEPS:
+ if nums[step_name] > 0:
+ prnt(' _cffi_%ss,' % step_name)
+ else:
+ prnt(' NULL, /* no %ss */' % step_name)
+ for step_name in self.ALL_STEPS:
+ if step_name != "field":
+ prnt(' %d, /* num_%ss */' % (nums[step_name], step_name))
+ if self.ffi._included_ffis:
+ prnt(' _cffi_includes,')
+ else:
+ prnt(' NULL, /* no includes */')
+ prnt(' %d, /* num_types */' % (len(self.cffi_types),))
+ flags = 0
+ if self._num_externpy:
+ flags |= 1 # set to mean that we use extern "Python"
+ prnt(' %d, /* flags */' % flags)
+ prnt('};')
+ prnt()
+ #
+ # the init function
+ prnt('#ifdef __GNUC__')
+ prnt('# pragma GCC visibility push(default) /* for -fvisibility= */')
+ prnt('#endif')
+ prnt()
+ prnt('#ifdef PYPY_VERSION')
+ prnt('PyMODINIT_FUNC')
+ prnt('_cffi_pypyinit_%s(const void *p[])' % (base_module_name,))
+ prnt('{')
+ if self._num_externpy:
+ prnt(' if (((intptr_t)p[0]) >= 0x0A03) {')
+ prnt(' _cffi_call_python_org = '
+ '(void(*)(struct _cffi_externpy_s *, char *))p[1];')
+ prnt(' }')
+ prnt(' p[0] = (const void *)0x%x;' % self._version)
+ prnt(' p[1] = &_cffi_type_context;')
+ prnt('#if PY_MAJOR_VERSION >= 3')
+ prnt(' return NULL;')
+ prnt('#endif')
+ prnt('}')
+ # on Windows, distutils insists on putting init_cffi_xyz in
+ # 'export_symbols', so instead of fighting it, just give up and
+ # give it one
+ prnt('# ifdef _MSC_VER')
+ prnt(' PyMODINIT_FUNC')
+ prnt('# if PY_MAJOR_VERSION >= 3')
+ prnt(' PyInit_%s(void) { return NULL; }' % (base_module_name,))
+ prnt('# else')
+ prnt(' init%s(void) { }' % (base_module_name,))
+ prnt('# endif')
+ prnt('# endif')
+ prnt('#elif PY_MAJOR_VERSION >= 3')
+ prnt('PyMODINIT_FUNC')
+ prnt('PyInit_%s(void)' % (base_module_name,))
+ prnt('{')
+ prnt(' return _cffi_init("%s", 0x%x, &_cffi_type_context);' % (
+ self.module_name, self._version))
+ prnt('}')
+ prnt('#else')
+ prnt('PyMODINIT_FUNC')
+ prnt('init%s(void)' % (base_module_name,))
+ prnt('{')
+ prnt(' _cffi_init("%s", 0x%x, &_cffi_type_context);' % (
+ self.module_name, self._version))
+ prnt('}')
+ prnt('#endif')
+ prnt()
+ prnt('#ifdef __GNUC__')
+ prnt('# pragma GCC visibility pop')
+ prnt('#endif')
+ self._version = None
+
+ def _to_py(self, x):
+ if isinstance(x, str):
+ return "b'%s'" % (x,)
+ if isinstance(x, (list, tuple)):
+ rep = [self._to_py(item) for item in x]
+ if len(rep) == 1:
+ rep.append('')
+ return "(%s)" % (','.join(rep),)
+ return x.as_python_expr() # Py2: unicode unexpected; Py3: bytes unexp.
+
+ def write_py_source_to_f(self, f):
+ self._f = f
+ prnt = self._prnt
+ #
+ # header
+ prnt("# auto-generated file")
+ prnt("import _cffi_backend")
+ #
+ # the 'import' of the included ffis
+ num_includes = len(self.ffi._included_ffis or ())
+ for i in range(num_includes):
+ ffi_to_include = self.ffi._included_ffis[i]
+ try:
+ included_module_name, included_source = (
+ ffi_to_include._assigned_source[:2])
+ except AttributeError:
+ raise VerificationError(
+ "ffi object %r includes %r, but the latter has not "
+ "been prepared with set_source()" % (
+ self.ffi, ffi_to_include,))
+ if included_source is not None:
+ raise VerificationError(
+ "not implemented yet: ffi.include() of a C-based "
+ "ffi inside a Python-based ffi")
+ prnt('from %s import ffi as _ffi%d' % (included_module_name, i))
+ prnt()
+ prnt("ffi = _cffi_backend.FFI('%s'," % (self.module_name,))
+ prnt(" _version = 0x%x," % (self._version,))
+ self._version = None
+ #
+ # the '_types' keyword argument
+ self.cffi_types = tuple(self.cffi_types) # don't change any more
+ types_lst = [op.as_python_bytes() for op in self.cffi_types]
+ prnt(' _types = %s,' % (self._to_py(''.join(types_lst)),))
+ typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()])
+ #
+ # the keyword arguments from ALL_STEPS
+ for step_name in self.ALL_STEPS:
+ lst = self._lsts[step_name]
+ if len(lst) > 0 and step_name != "field":
+ prnt(' _%ss = %s,' % (step_name, self._to_py(lst)))
+ #
+ # the '_includes' keyword argument
+ if num_includes > 0:
+ prnt(' _includes = (%s,),' % (
+ ', '.join(['_ffi%d' % i for i in range(num_includes)]),))
+ #
+ # the footer
+ prnt(')')
+
+ # ----------
+
+ def _gettypenum(self, type):
+        # a KeyError here is a bug. Please report it! :-)
+ return self._typesdict[type]
+
+ def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode):
+ extraarg = ''
+ if isinstance(tp, model.BasePrimitiveType) and not tp.is_complex_type():
+ if tp.is_integer_type() and tp.name != '_Bool':
+ converter = '_cffi_to_c_int'
+ extraarg = ', %s' % tp.name
+ elif isinstance(tp, model.UnknownFloatType):
+ # don't check with is_float_type(): it may be a 'long
+            # double' here, and _cffi_to_c_double would lose precision
+ converter = '(%s)_cffi_to_c_double' % (tp.get_c_name(''),)
+ else:
+ cname = tp.get_c_name('')
+ converter = '(%s)_cffi_to_c_%s' % (cname,
+ tp.name.replace(' ', '_'))
+ if cname in ('char16_t', 'char32_t'):
+ self.needs_version(VERSION_CHAR16CHAR32)
+ errvalue = '-1'
+ #
+ elif isinstance(tp, model.PointerType):
+ self._convert_funcarg_to_c_ptr_or_array(tp, fromvar,
+ tovar, errcode)
+ return
+ #
+ elif (isinstance(tp, model.StructOrUnionOrEnum) or
+ isinstance(tp, model.BasePrimitiveType)):
+ # a struct (not a struct pointer) as a function argument;
+ # or, a complex (the same code works)
+ self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)'
+ % (tovar, self._gettypenum(tp), fromvar))
+ self._prnt(' %s;' % errcode)
+ return
+ #
+ elif isinstance(tp, model.FunctionPtrType):
+ converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('')
+ extraarg = ', _cffi_type(%d)' % self._gettypenum(tp)
+ errvalue = 'NULL'
+ #
+ else:
+ raise NotImplementedError(tp)
+ #
+ self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg))
+ self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % (
+ tovar, tp.get_c_name(''), errvalue))
+ self._prnt(' %s;' % errcode)
+
+ def _extra_local_variables(self, tp, localvars, freelines):
+ if isinstance(tp, model.PointerType):
+ localvars.add('Py_ssize_t datasize')
+ localvars.add('struct _cffi_freeme_s *large_args_free = NULL')
+ freelines.add('if (large_args_free != NULL)'
+ ' _cffi_free_array_arguments(large_args_free);')
+
+ def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode):
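+        # Arguments of at most 640 bytes are copied into an alloca()'d
+        # stack buffer; for larger ones the buffer is left NULL and
+        # _cffi_convert_array_argument() allocates it instead, chaining
+        # it into 'large_args_free' to be released after the call
+        # (see _extra_local_variables() above).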
+ self._prnt(' datasize = _cffi_prepare_pointer_call_argument(')
+ self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % (
+ self._gettypenum(tp), fromvar, tovar))
+ self._prnt(' if (datasize != 0) {')
+ self._prnt(' %s = ((size_t)datasize) <= 640 ? '
+ '(%s)alloca((size_t)datasize) : NULL;' % (
+ tovar, tp.get_c_name('')))
+ self._prnt(' if (_cffi_convert_array_argument(_cffi_type(%d), %s, '
+ '(char **)&%s,' % (self._gettypenum(tp), fromvar, tovar))
+ self._prnt(' datasize, &large_args_free) < 0)')
+ self._prnt(' %s;' % errcode)
+ self._prnt(' }')
+
+ def _convert_expr_from_c(self, tp, var, context):
+ if isinstance(tp, model.BasePrimitiveType):
+ if tp.is_integer_type() and tp.name != '_Bool':
+ return '_cffi_from_c_int(%s, %s)' % (var, tp.name)
+ elif isinstance(tp, model.UnknownFloatType):
+ return '_cffi_from_c_double(%s)' % (var,)
+ elif tp.name != 'long double' and not tp.is_complex_type():
+ cname = tp.name.replace(' ', '_')
+ if cname in ('char16_t', 'char32_t'):
+ self.needs_version(VERSION_CHAR16CHAR32)
+ return '_cffi_from_c_%s(%s)' % (cname, var)
+ else:
+ return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % (
+ var, self._gettypenum(tp))
+ elif isinstance(tp, (model.PointerType, model.FunctionPtrType)):
+ return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % (
+ var, self._gettypenum(tp))
+ elif isinstance(tp, model.ArrayType):
+ return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % (
+ var, self._gettypenum(model.PointerType(tp.item)))
+ elif isinstance(tp, model.StructOrUnion):
+ if tp.fldnames is None:
+ raise TypeError("'%s' is used as %s, but is opaque" % (
+ tp._get_c_name(), context))
+ return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % (
+ var, self._gettypenum(tp))
+ elif isinstance(tp, model.EnumType):
+ return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % (
+ var, self._gettypenum(tp))
+ else:
+ raise NotImplementedError(tp)
+
+ # ----------
+ # typedefs
+
+ def _typedef_type(self, tp, name):
+ return self._global_type(tp, "(*(%s *)0)" % (name,))
+
+ def _generate_cpy_typedef_collecttype(self, tp, name):
+ self._do_collect_type(self._typedef_type(tp, name))
+
+ def _generate_cpy_typedef_decl(self, tp, name):
+ pass
+
+ def _typedef_ctx(self, tp, name):
+ type_index = self._typesdict[tp]
+ self._lsts["typename"].append(TypenameExpr(name, type_index))
+
+ def _generate_cpy_typedef_ctx(self, tp, name):
+ tp = self._typedef_type(tp, name)
+ self._typedef_ctx(tp, name)
+ if getattr(tp, "origin", None) == "unknown_type":
+ self._struct_ctx(tp, tp.name, approxname=None)
+ elif isinstance(tp, model.NamedPointerType):
+ self._struct_ctx(tp.totype, tp.totype.name, approxname=tp.name,
+ named_ptr=tp)
+
+ # ----------
+ # function declarations
+
+ def _generate_cpy_function_collecttype(self, tp, name):
+ self._do_collect_type(tp.as_raw_function())
+ if tp.ellipsis and not self.target_is_python:
+ self._do_collect_type(tp)
+
+ def _generate_cpy_function_decl(self, tp, name):
+ assert not self.target_is_python
+ assert isinstance(tp, model.FunctionPtrType)
+ if tp.ellipsis:
+ # cannot support vararg functions better than this: check for its
+ # exact type (including the fixed arguments), and build it as a
+ # constant function pointer (no CPython wrapper)
+ self._generate_cpy_constant_decl(tp, name)
+ return
+ prnt = self._prnt
+ numargs = len(tp.args)
+ if numargs == 0:
+ argname = 'noarg'
+ elif numargs == 1:
+ argname = 'arg0'
+ else:
+ argname = 'args'
+ #
+ # ------------------------------
+ # the 'd' version of the function, only for addressof(lib, 'func')
+ arguments = []
+ call_arguments = []
+ context = 'argument of %s' % name
+ for i, type in enumerate(tp.args):
+ arguments.append(type.get_c_name(' x%d' % i, context))
+ call_arguments.append('x%d' % i)
+ repr_arguments = ', '.join(arguments)
+ repr_arguments = repr_arguments or 'void'
+ if tp.abi:
+ abi = tp.abi + ' '
+ else:
+ abi = ''
+ name_and_arguments = '%s_cffi_d_%s(%s)' % (abi, name, repr_arguments)
+ prnt('static %s' % (tp.result.get_c_name(name_and_arguments),))
+ prnt('{')
+ call_arguments = ', '.join(call_arguments)
+ result_code = 'return '
+ if isinstance(tp.result, model.VoidType):
+ result_code = ''
+ prnt(' %s%s(%s);' % (result_code, name, call_arguments))
+ prnt('}')
+ #
+ prnt('#ifndef PYPY_VERSION') # ------------------------------
+ #
+ prnt('static PyObject *')
+ prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname))
+ prnt('{')
+ #
+ context = 'argument of %s' % name
+ for i, type in enumerate(tp.args):
+ arg = type.get_c_name(' x%d' % i, context)
+ prnt(' %s;' % arg)
+ #
+ localvars = set()
+ freelines = set()
+ for type in tp.args:
+ self._extra_local_variables(type, localvars, freelines)
+ for decl in sorted(localvars):
+ prnt(' %s;' % (decl,))
+ #
+ if not isinstance(tp.result, model.VoidType):
+ result_code = 'result = '
+ context = 'result of %s' % name
+ result_decl = ' %s;' % tp.result.get_c_name(' result', context)
+ prnt(result_decl)
+ prnt(' PyObject *pyresult;')
+ else:
+ result_decl = None
+ result_code = ''
+ #
+ if len(tp.args) > 1:
+ rng = range(len(tp.args))
+ for i in rng:
+ prnt(' PyObject *arg%d;' % i)
+ prnt()
+ prnt(' if (!PyArg_UnpackTuple(args, "%s", %d, %d, %s))' % (
+ name, len(rng), len(rng),
+ ', '.join(['&arg%d' % i for i in rng])))
+ prnt(' return NULL;')
+ prnt()
+ #
+ for i, type in enumerate(tp.args):
+ self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i,
+ 'return NULL')
+ prnt()
+ #
+ prnt(' Py_BEGIN_ALLOW_THREADS')
+ prnt(' _cffi_restore_errno();')
+ call_arguments = ['x%d' % i for i in range(len(tp.args))]
+ call_arguments = ', '.join(call_arguments)
+ prnt(' { %s%s(%s); }' % (result_code, name, call_arguments))
+ prnt(' _cffi_save_errno();')
+ prnt(' Py_END_ALLOW_THREADS')
+ prnt()
+ #
+ prnt(' (void)self; /* unused */')
+ if numargs == 0:
+ prnt(' (void)noarg; /* unused */')
+ if result_code:
+ prnt(' pyresult = %s;' %
+ self._convert_expr_from_c(tp.result, 'result', 'result type'))
+ for freeline in freelines:
+ prnt(' ' + freeline)
+ prnt(' return pyresult;')
+ else:
+ for freeline in freelines:
+ prnt(' ' + freeline)
+ prnt(' Py_INCREF(Py_None);')
+ prnt(' return Py_None;')
+ prnt('}')
+ #
+ prnt('#else') # ------------------------------
+ #
+ # the PyPy version: need to replace struct/union arguments with
+ # pointers, and if the result is a struct/union, insert a first
+ # arg that is a pointer to the result. We also do that for
+ # complex args and return type.
+ def need_indirection(type):
+ return (isinstance(type, model.StructOrUnion) or
+ (isinstance(type, model.PrimitiveType) and
+ type.is_complex_type()))
+ difference = False
+ arguments = []
+ call_arguments = []
+ context = 'argument of %s' % name
+ for i, type in enumerate(tp.args):
+ indirection = ''
+ if need_indirection(type):
+ indirection = '*'
+ difference = True
+ arg = type.get_c_name(' %sx%d' % (indirection, i), context)
+ arguments.append(arg)
+ call_arguments.append('%sx%d' % (indirection, i))
+ tp_result = tp.result
+ if need_indirection(tp_result):
+ context = 'result of %s' % name
+ arg = tp_result.get_c_name(' *result', context)
+ arguments.insert(0, arg)
+ tp_result = model.void_type
+ result_decl = None
+ result_code = '*result = '
+ difference = True
+ if difference:
+ repr_arguments = ', '.join(arguments)
+ repr_arguments = repr_arguments or 'void'
+ name_and_arguments = '%s_cffi_f_%s(%s)' % (abi, name,
+ repr_arguments)
+ prnt('static %s' % (tp_result.get_c_name(name_and_arguments),))
+ prnt('{')
+ if result_decl:
+ prnt(result_decl)
+ call_arguments = ', '.join(call_arguments)
+ prnt(' { %s%s(%s); }' % (result_code, name, call_arguments))
+ if result_decl:
+ prnt(' return result;')
+ prnt('}')
+ else:
+ prnt('# define _cffi_f_%s _cffi_d_%s' % (name, name))
+ #
+ prnt('#endif') # ------------------------------
+ prnt()
+
+ def _generate_cpy_function_ctx(self, tp, name):
+ if tp.ellipsis and not self.target_is_python:
+ self._generate_cpy_constant_ctx(tp, name)
+ return
+ type_index = self._typesdict[tp.as_raw_function()]
+ numargs = len(tp.args)
+ if self.target_is_python:
+ meth_kind = OP_DLOPEN_FUNC
+ elif numargs == 0:
+ meth_kind = OP_CPYTHON_BLTN_N # 'METH_NOARGS'
+ elif numargs == 1:
+ meth_kind = OP_CPYTHON_BLTN_O # 'METH_O'
+ else:
+ meth_kind = OP_CPYTHON_BLTN_V # 'METH_VARARGS'
+ self._lsts["global"].append(
+ GlobalExpr(name, '_cffi_f_%s' % name,
+ CffiOp(meth_kind, type_index),
+ size='_cffi_d_%s' % name))
+
+ # ----------
+ # named structs or unions
+
+ def _field_type(self, tp_struct, field_name, tp_field):
+ if isinstance(tp_field, model.ArrayType):
+ actual_length = tp_field.length
+ if actual_length == '...':
+ ptr_struct_name = tp_struct.get_c_name('*')
+ actual_length = '_cffi_array_len(((%s)0)->%s)' % (
+ ptr_struct_name, field_name)
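+                # For example (illustrative): a cdef field "int a[...];"
+                # inside "struct foo" turns actual_length into the C
+                # expression "_cffi_array_len(((struct foo *)0)->a)", so
+                # the real length is computed by the C compiler at
+                # build time.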
+ tp_item = self._field_type(tp_struct, '%s[0]' % field_name,
+ tp_field.item)
+ tp_field = model.ArrayType(tp_item, actual_length)
+ return tp_field
+
+ def _struct_collecttype(self, tp):
+ self._do_collect_type(tp)
+ if self.target_is_python:
+ # also requires nested anon struct/unions in ABI mode, recursively
+ for fldtype in tp.anonymous_struct_fields():
+ self._struct_collecttype(fldtype)
+
+ def _struct_decl(self, tp, cname, approxname):
+ if tp.fldtypes is None:
+ return
+ prnt = self._prnt
+ checkfuncname = '_cffi_checkfld_%s' % (approxname,)
+ prnt('_CFFI_UNUSED_FN')
+ prnt('static void %s(%s *p)' % (checkfuncname, cname))
+ prnt('{')
+ prnt(' /* only to generate compile-time warnings or errors */')
+ prnt(' (void)p;')
+ for fname, ftype, fbitsize, fqual in tp.enumfields():
+ try:
+ if ftype.is_integer_type() or fbitsize >= 0:
+ # accept all integers, but complain on float or double
+ if fname != '':
+ prnt(" (void)((p->%s) | 0); /* check that '%s.%s' is "
+ "an integer */" % (fname, cname, fname))
+ continue
+ # only accept exactly the type declared, except that '[]'
+ # is interpreted as a '*' and so will match any array length.
+ # (It would also match '*', but that's harder to detect...)
+ while (isinstance(ftype, model.ArrayType)
+ and (ftype.length is None or ftype.length == '...')):
+ ftype = ftype.item
+ fname = fname + '[0]'
+ prnt(' { %s = &p->%s; (void)tmp; }' % (
+ ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual),
+ fname))
+ except VerificationError as e:
+ prnt(' /* %s */' % str(e)) # cannot verify it, ignore
+ prnt('}')
+ prnt('struct _cffi_align_%s { char x; %s y; };' % (approxname, cname))
+ prnt()
+
+ def _struct_ctx(self, tp, cname, approxname, named_ptr=None):
+ type_index = self._typesdict[tp]
+ reason_for_not_expanding = None
+ flags = []
+ if isinstance(tp, model.UnionType):
+ flags.append("_CFFI_F_UNION")
+ if tp.fldtypes is None:
+ flags.append("_CFFI_F_OPAQUE")
+ reason_for_not_expanding = "opaque"
+ if (tp not in self.ffi._parser._included_declarations and
+ (named_ptr is None or
+ named_ptr not in self.ffi._parser._included_declarations)):
+ if tp.fldtypes is None:
+ pass # opaque
+ elif tp.partial or any(tp.anonymous_struct_fields()):
+ pass # field layout obtained silently from the C compiler
+ else:
+ flags.append("_CFFI_F_CHECK_FIELDS")
+ if tp.packed:
+ if tp.packed > 1:
+ raise NotImplementedError(
+ "%r is declared with 'pack=%r'; only 0 or 1 are "
+ "supported in API mode (try to use \"...;\", which "
+ "does not require a 'pack' declaration)" %
+ (tp, tp.packed))
+ flags.append("_CFFI_F_PACKED")
+ else:
+ flags.append("_CFFI_F_EXTERNAL")
+ reason_for_not_expanding = "external"
+ flags = '|'.join(flags) or '0'
+ c_fields = []
+ if reason_for_not_expanding is None:
+ expand_anonymous_struct_union = not self.target_is_python
+ enumfields = list(tp.enumfields(expand_anonymous_struct_union))
+ for fldname, fldtype, fbitsize, fqual in enumfields:
+ fldtype = self._field_type(tp, fldname, fldtype)
+ self._check_not_opaque(fldtype,
+ "field '%s.%s'" % (tp.name, fldname))
+ # cname is None for _add_missing_struct_unions() only
+ op = OP_NOOP
+ if fbitsize >= 0:
+ op = OP_BITFIELD
+ size = '%d /* bits */' % fbitsize
+ elif cname is None or (
+ isinstance(fldtype, model.ArrayType) and
+ fldtype.length is None):
+ size = '(size_t)-1'
+ else:
+ size = 'sizeof(((%s)0)->%s)' % (
+ tp.get_c_name('*') if named_ptr is None
+ else named_ptr.name,
+ fldname)
+ if cname is None or fbitsize >= 0:
+ offset = '(size_t)-1'
+ elif named_ptr is not None:
+ offset = '((char *)&((%s)0)->%s) - (char *)0' % (
+ named_ptr.name, fldname)
+ else:
+ offset = 'offsetof(%s, %s)' % (tp.get_c_name(''), fldname)
+ c_fields.append(
+ FieldExpr(fldname, offset, size, fbitsize,
+ CffiOp(op, self._typesdict[fldtype])))
+ first_field_index = len(self._lsts["field"])
+ self._lsts["field"].extend(c_fields)
+ #
+ if cname is None: # unknown name, for _add_missing_struct_unions
+ size = '(size_t)-2'
+ align = -2
+ comment = "unnamed"
+ else:
+ if named_ptr is not None:
+ size = 'sizeof(*(%s)0)' % (named_ptr.name,)
+ align = '-1 /* unknown alignment */'
+ else:
+ size = 'sizeof(%s)' % (cname,)
+ align = 'offsetof(struct _cffi_align_%s, y)' % (approxname,)
+ comment = None
+ else:
+ size = '(size_t)-1'
+ align = -1
+ first_field_index = -1
+ comment = reason_for_not_expanding
+ self._lsts["struct_union"].append(
+ StructUnionExpr(tp.name, type_index, flags, size, align, comment,
+ first_field_index, c_fields))
+ self._seen_struct_unions.add(tp)
+
+ def _check_not_opaque(self, tp, location):
+ while isinstance(tp, model.ArrayType):
+ tp = tp.item
+ if isinstance(tp, model.StructOrUnion) and tp.fldtypes is None:
+ raise TypeError(
+ "%s is of an opaque type (not declared in cdef())" % location)
+
+ def _add_missing_struct_unions(self):
+ # not very nice, but some struct declarations might be missing
+ # because they don't have any known C name. Check that they are
+ # not partial (we can't complete or verify them!) and emit them
+ # anonymously.
+ lst = list(self._struct_unions.items())
+ lst.sort(key=lambda tp_order: tp_order[1])
+ for tp, order in lst:
+ if tp not in self._seen_struct_unions:
+ if tp.partial:
+ raise NotImplementedError("internal inconsistency: %r is "
+ "partial but was not seen at "
+ "this point" % (tp,))
+ if tp.name.startswith('$') and tp.name[1:].isdigit():
+ approxname = tp.name[1:]
+ elif tp.name == '_IO_FILE' and tp.forcename == 'FILE':
+ approxname = 'FILE'
+ self._typedef_ctx(tp, 'FILE')
+ else:
+ raise NotImplementedError("internal inconsistency: %r" %
+ (tp,))
+ self._struct_ctx(tp, None, approxname)
+
+ def _generate_cpy_struct_collecttype(self, tp, name):
+ self._struct_collecttype(tp)
+ _generate_cpy_union_collecttype = _generate_cpy_struct_collecttype
+
+ def _struct_names(self, tp):
+ cname = tp.get_c_name('')
+ if ' ' in cname:
+ return cname, cname.replace(' ', '_')
+ else:
+ return cname, '_' + cname
+
+ def _generate_cpy_struct_decl(self, tp, name):
+ self._struct_decl(tp, *self._struct_names(tp))
+ _generate_cpy_union_decl = _generate_cpy_struct_decl
+
+ def _generate_cpy_struct_ctx(self, tp, name):
+ self._struct_ctx(tp, *self._struct_names(tp))
+ _generate_cpy_union_ctx = _generate_cpy_struct_ctx
+
+ # ----------
+ # 'anonymous' declarations. These are produced for anonymous structs
+ # or unions; the 'name' is obtained by a typedef.
+
+ def _generate_cpy_anonymous_collecttype(self, tp, name):
+ if isinstance(tp, model.EnumType):
+ self._generate_cpy_enum_collecttype(tp, name)
+ else:
+ self._struct_collecttype(tp)
+
+ def _generate_cpy_anonymous_decl(self, tp, name):
+ if isinstance(tp, model.EnumType):
+ self._generate_cpy_enum_decl(tp)
+ else:
+ self._struct_decl(tp, name, 'typedef_' + name)
+
+ def _generate_cpy_anonymous_ctx(self, tp, name):
+ if isinstance(tp, model.EnumType):
+ self._enum_ctx(tp, name)
+ else:
+ self._struct_ctx(tp, name, 'typedef_' + name)
+
+ # ----------
+ # constants, declared with "static const ..."
+
+ def _generate_cpy_const(self, is_int, name, tp=None, category='const',
+ check_value=None):
+ if (category, name) in self._seen_constants:
+ raise VerificationError(
+ "duplicate declaration of %s '%s'" % (category, name))
+ self._seen_constants.add((category, name))
+ #
+ prnt = self._prnt
+ funcname = '_cffi_%s_%s' % (category, name)
+ if is_int:
+ prnt('static int %s(unsigned long long *o)' % funcname)
+ prnt('{')
+ prnt(' int n = (%s) <= 0;' % (name,))
+ prnt(' *o = (unsigned long long)((%s) | 0);'
+ ' /* check that %s is an integer */' % (name, name))
+ if check_value is not None:
+ if check_value > 0:
+ check_value = '%dU' % (check_value,)
+ prnt(' if (!_cffi_check_int(*o, n, %s))' % (check_value,))
+ prnt(' n |= 2;')
+ prnt(' return n;')
+ prnt('}')
+ else:
+ assert check_value is None
+ prnt('static void %s(char *o)' % funcname)
+ prnt('{')
+ prnt(' *(%s)o = %s;' % (tp.get_c_name('*'), name))
+ prnt('}')
+ prnt()
+
+ def _generate_cpy_constant_collecttype(self, tp, name):
+ is_int = tp.is_integer_type()
+ if not is_int or self.target_is_python:
+ self._do_collect_type(tp)
+
+ def _generate_cpy_constant_decl(self, tp, name):
+ is_int = tp.is_integer_type()
+ self._generate_cpy_const(is_int, name, tp)
+
+ def _generate_cpy_constant_ctx(self, tp, name):
+ if not self.target_is_python and tp.is_integer_type():
+ type_op = CffiOp(OP_CONSTANT_INT, -1)
+ else:
+ if self.target_is_python:
+ const_kind = OP_DLOPEN_CONST
+ else:
+ const_kind = OP_CONSTANT
+ type_index = self._typesdict[tp]
+ type_op = CffiOp(const_kind, type_index)
+ self._lsts["global"].append(
+ GlobalExpr(name, '_cffi_const_%s' % name, type_op))
+
+ # ----------
+ # enums
+
+ def _generate_cpy_enum_collecttype(self, tp, name):
+ self._do_collect_type(tp)
+
+ def _generate_cpy_enum_decl(self, tp, name=None):
+ for enumerator in tp.enumerators:
+ self._generate_cpy_const(True, enumerator)
+
+ def _enum_ctx(self, tp, cname):
+ type_index = self._typesdict[tp]
+ type_op = CffiOp(OP_ENUM, -1)
+ if self.target_is_python:
+ tp.check_not_partial()
+ for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
+ self._lsts["global"].append(
+ GlobalExpr(enumerator, '_cffi_const_%s' % enumerator, type_op,
+ check_value=enumvalue))
+ #
+ if cname is not None and '$' not in cname and not self.target_is_python:
+ size = "sizeof(%s)" % cname
+ signed = "((%s)-1) <= 0" % cname
+ else:
+ basetp = tp.build_baseinttype(self.ffi, [])
+ size = self.ffi.sizeof(basetp)
+ signed = int(int(self.ffi.cast(basetp, -1)) < 0)
+ allenums = ",".join(tp.enumerators)
+ self._lsts["enum"].append(
+ EnumExpr(tp.name, type_index, size, signed, allenums))
+
+ def _generate_cpy_enum_ctx(self, tp, name):
+ self._enum_ctx(tp, tp._get_c_name())
+
+ # ----------
+ # macros: for now only for integers
+
+ def _generate_cpy_macro_collecttype(self, tp, name):
+ pass
+
+ def _generate_cpy_macro_decl(self, tp, name):
+ if tp == '...':
+ check_value = None
+ else:
+ check_value = tp # an integer
+ self._generate_cpy_const(True, name, check_value=check_value)
+
+ def _generate_cpy_macro_ctx(self, tp, name):
+ if tp == '...':
+ if self.target_is_python:
+ raise VerificationError(
+ "cannot use the syntax '...' in '#define %s ...' when "
+ "using the ABI mode" % (name,))
+ check_value = None
+ else:
+ check_value = tp # an integer
+ type_op = CffiOp(OP_CONSTANT_INT, -1)
+ self._lsts["global"].append(
+ GlobalExpr(name, '_cffi_const_%s' % name, type_op,
+ check_value=check_value))
+
+ # ----------
+ # global variables
+
+ def _global_type(self, tp, global_name):
+ if isinstance(tp, model.ArrayType):
+ actual_length = tp.length
+ if actual_length == '...':
+ actual_length = '_cffi_array_len(%s)' % (global_name,)
+ tp_item = self._global_type(tp.item, '%s[0]' % global_name)
+ tp = model.ArrayType(tp_item, actual_length)
+ return tp
+
+ def _generate_cpy_variable_collecttype(self, tp, name):
+ self._do_collect_type(self._global_type(tp, name))
+
+ def _generate_cpy_variable_decl(self, tp, name):
+ prnt = self._prnt
+ tp = self._global_type(tp, name)
+ if isinstance(tp, model.ArrayType) and tp.length is None:
+ tp = tp.item
+ ampersand = ''
+ else:
+ ampersand = '&'
+ # This code assumes that casts from "tp *" to "void *" is a
+ # no-op, i.e. a function that returns a "tp *" can be called
+ # as if it returned a "void *". This should be generally true
+ # on any modern machine. The only exception to that rule (on
+ # uncommon architectures, and as far as I can tell) might be
+ # if 'tp' were a function type, but that is not possible here.
+ # (If 'tp' is a function _pointer_ type, then casts from "fn_t
+ # **" to "void *" are again no-ops, as far as I can tell.)
+ decl = '*_cffi_var_%s(void)' % (name,)
+ prnt('static ' + tp.get_c_name(decl, quals=self._current_quals))
+ prnt('{')
+ prnt(' return %s(%s);' % (ampersand, name))
+ prnt('}')
+ prnt()
+
+ def _generate_cpy_variable_ctx(self, tp, name):
+ tp = self._global_type(tp, name)
+ type_index = self._typesdict[tp]
+ if self.target_is_python:
+ op = OP_GLOBAL_VAR
+ else:
+ op = OP_GLOBAL_VAR_F
+ self._lsts["global"].append(
+ GlobalExpr(name, '_cffi_var_%s' % name, CffiOp(op, type_index)))
+
+ # ----------
+ # extern "Python"
+
+ def _generate_cpy_extern_python_collecttype(self, tp, name):
+ assert isinstance(tp, model.FunctionPtrType)
+ self._do_collect_type(tp)
+ _generate_cpy_dllexport_python_collecttype = \
+ _generate_cpy_extern_python_plus_c_collecttype = \
+ _generate_cpy_extern_python_collecttype
+
+ def _extern_python_decl(self, tp, name, tag_and_space):
+ prnt = self._prnt
+ if isinstance(tp.result, model.VoidType):
+ size_of_result = '0'
+ else:
+ context = 'result of %s' % name
+ size_of_result = '(int)sizeof(%s)' % (
+ tp.result.get_c_name('', context),)
+ prnt('static struct _cffi_externpy_s _cffi_externpy__%s =' % name)
+ prnt(' { "%s.%s", %s, 0, 0 };' % (
+ self.module_name, name, size_of_result))
+ prnt()
+ #
+ arguments = []
+ context = 'argument of %s' % name
+ for i, type in enumerate(tp.args):
+ arg = type.get_c_name(' a%d' % i, context)
+ arguments.append(arg)
+ #
+ repr_arguments = ', '.join(arguments)
+ repr_arguments = repr_arguments or 'void'
+ name_and_arguments = '%s(%s)' % (name, repr_arguments)
+ if tp.abi == "__stdcall":
+ name_and_arguments = '_cffi_stdcall ' + name_and_arguments
+ #
+ def may_need_128_bits(tp):
+ return (isinstance(tp, model.PrimitiveType) and
+ tp.name == 'long double')
+ #
+ size_of_a = max(len(tp.args)*8, 8)
+ if may_need_128_bits(tp.result):
+ size_of_a = max(size_of_a, 16)
+ if isinstance(tp.result, model.StructOrUnion):
+ size_of_a = 'sizeof(%s) > %d ? sizeof(%s) : %d' % (
+ tp.result.get_c_name(''), size_of_a,
+ tp.result.get_c_name(''), size_of_a)
+ prnt('%s%s' % (tag_and_space, tp.result.get_c_name(name_and_arguments)))
+ prnt('{')
+ prnt(' char a[%s];' % size_of_a)
+ prnt(' char *p = a;')
+ for i, type in enumerate(tp.args):
+ arg = 'a%d' % i
+ if (isinstance(type, model.StructOrUnion) or
+ may_need_128_bits(type)):
+ arg = '&' + arg
+ type = model.PointerType(type)
+ prnt(' *(%s)(p + %d) = %s;' % (type.get_c_name('*'), i*8, arg))
+ prnt(' _cffi_call_python(&_cffi_externpy__%s, p);' % name)
+ if not isinstance(tp.result, model.VoidType):
+ prnt(' return *(%s)p;' % (tp.result.get_c_name('*'),))
+ prnt('}')
+ prnt()
+ self._num_externpy += 1
+
+ def _generate_cpy_extern_python_decl(self, tp, name):
+ self._extern_python_decl(tp, name, 'static ')
+
+ def _generate_cpy_dllexport_python_decl(self, tp, name):
+ self._extern_python_decl(tp, name, 'CFFI_DLLEXPORT ')
+
+ def _generate_cpy_extern_python_plus_c_decl(self, tp, name):
+ self._extern_python_decl(tp, name, '')
+
+ def _generate_cpy_extern_python_ctx(self, tp, name):
+ if self.target_is_python:
+ raise VerificationError(
+ "cannot use 'extern \"Python\"' in the ABI mode")
+ if tp.ellipsis:
+ raise NotImplementedError("a vararg function is extern \"Python\"")
+ type_index = self._typesdict[tp]
+ type_op = CffiOp(OP_EXTERN_PYTHON, type_index)
+ self._lsts["global"].append(
+ GlobalExpr(name, '&_cffi_externpy__%s' % name, type_op, name))
+
+ _generate_cpy_dllexport_python_ctx = \
+ _generate_cpy_extern_python_plus_c_ctx = \
+ _generate_cpy_extern_python_ctx
+
+ def _print_string_literal_in_array(self, s):
+ prnt = self._prnt
+ prnt('// # NB. this is not a string because of a size limit in MSVC')
+ if not isinstance(s, bytes): # unicode
+ s = s.encode('utf-8') # -> bytes
+ else:
+ s.decode('utf-8') # got bytes, check for valid utf-8
+ try:
+ s.decode('ascii')
+ except UnicodeDecodeError:
+ s = b'# -*- encoding: utf8 -*-\n' + s
+ for line in s.splitlines(True):
+ comment = line
+ if type('//') is bytes: # python2
+ line = map(ord, line) # make a list of integers
+ else: # python3
+ # type(line) is bytes, which enumerates like a list of integers
+ comment = ascii(comment)[1:-1]
+ prnt(('// ' + comment).rstrip())
+ printed_line = ''
+ for c in line:
+ if len(printed_line) >= 76:
+ prnt(printed_line)
+ printed_line = ''
+ printed_line += '%d,' % (c,)
+ prnt(printed_line)
+
+ # ----------
+ # emitting the opcodes for individual types
+
+ def _emit_bytecode_VoidType(self, tp, index):
+ self.cffi_types[index] = CffiOp(OP_PRIMITIVE, PRIM_VOID)
+
+ def _emit_bytecode_PrimitiveType(self, tp, index):
+ prim_index = PRIMITIVE_TO_INDEX[tp.name]
+ self.cffi_types[index] = CffiOp(OP_PRIMITIVE, prim_index)
+
+ def _emit_bytecode_UnknownIntegerType(self, tp, index):
+ s = ('_cffi_prim_int(sizeof(%s), (\n'
+ ' ((%s)-1) | 0 /* check that %s is an integer type */\n'
+ ' ) <= 0)' % (tp.name, tp.name, tp.name))
+ self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s)
+
+ def _emit_bytecode_UnknownFloatType(self, tp, index):
+ s = ('_cffi_prim_float(sizeof(%s) *\n'
+ ' (((%s)1) / 2) * 2 /* integer => 0, float => 1 */\n'
+ ' )' % (tp.name, tp.name))
+ self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s)
+
+ def _emit_bytecode_RawFunctionType(self, tp, index):
+ self.cffi_types[index] = CffiOp(OP_FUNCTION, self._typesdict[tp.result])
+ index += 1
+ for tp1 in tp.args:
+ realindex = self._typesdict[tp1]
+ if index != realindex:
+ if isinstance(tp1, model.PrimitiveType):
+ self._emit_bytecode_PrimitiveType(tp1, index)
+ else:
+ self.cffi_types[index] = CffiOp(OP_NOOP, realindex)
+ index += 1
+ flags = int(tp.ellipsis)
+ if tp.abi is not None:
+ if tp.abi == '__stdcall':
+ flags |= 2
+ else:
+ raise NotImplementedError("abi=%r" % (tp.abi,))
+ self.cffi_types[index] = CffiOp(OP_FUNCTION_END, flags)
+
+ def _emit_bytecode_PointerType(self, tp, index):
+ self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[tp.totype])
+
+ _emit_bytecode_ConstPointerType = _emit_bytecode_PointerType
+ _emit_bytecode_NamedPointerType = _emit_bytecode_PointerType
+
+ def _emit_bytecode_FunctionPtrType(self, tp, index):
+ raw = tp.as_raw_function()
+ self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[raw])
+
+ def _emit_bytecode_ArrayType(self, tp, index):
+ item_index = self._typesdict[tp.item]
+ if tp.length is None:
+ self.cffi_types[index] = CffiOp(OP_OPEN_ARRAY, item_index)
+ elif tp.length == '...':
+ raise VerificationError(
+ "type %s badly placed: the '...' array length can only be "
+ "used on global arrays or on fields of structures" % (
+ str(tp).replace('/*...*/', '...'),))
+ else:
+ assert self.cffi_types[index + 1] == 'LEN'
+ self.cffi_types[index] = CffiOp(OP_ARRAY, item_index)
+ self.cffi_types[index + 1] = CffiOp(None, str(tp.length))
+
+ def _emit_bytecode_StructType(self, tp, index):
+ struct_index = self._struct_unions[tp]
+ self.cffi_types[index] = CffiOp(OP_STRUCT_UNION, struct_index)
+ _emit_bytecode_UnionType = _emit_bytecode_StructType
+
+ def _emit_bytecode_EnumType(self, tp, index):
+ enum_index = self._enums[tp]
+ self.cffi_types[index] = CffiOp(OP_ENUM, enum_index)
+
+
+if sys.version_info >= (3,):
+ NativeIO = io.StringIO
+else:
+ class NativeIO(io.BytesIO):
+ def write(self, s):
+ if isinstance(s, unicode):
+ s = s.encode('ascii')
+ super(NativeIO, self).write(s)
+
+def _make_c_or_py_source(ffi, module_name, preamble, target_file, verbose):
+ if verbose:
+ print("generating %s" % (target_file,))
+ recompiler = Recompiler(ffi, module_name,
+ target_is_python=(preamble is None))
+ recompiler.collect_type_table()
+ recompiler.collect_step_tables()
+ f = NativeIO()
+ recompiler.write_source_to_f(f, preamble)
+ output = f.getvalue()
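+    # Rewrite the target file only if its content actually changed, so
+    # that timestamp-based build tools are not invalidated needlessly;
+    # the write goes through a temporary file and os.rename(), with the
+    # unlink-then-rename fallback for platforms (e.g. Windows) where
+    # renaming over an existing file fails.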
+ try:
+ with open(target_file, 'r') as f1:
+ if f1.read(len(output) + 1) != output:
+ raise IOError
+ if verbose:
+ print("(already up-to-date)")
+ return False # already up-to-date
+ except IOError:
+ tmp_file = '%s.~%d' % (target_file, os.getpid())
+ with open(tmp_file, 'w') as f1:
+ f1.write(output)
+ try:
+ os.rename(tmp_file, target_file)
+ except OSError:
+ os.unlink(target_file)
+ os.rename(tmp_file, target_file)
+ return True
+
+def make_c_source(ffi, module_name, preamble, target_c_file, verbose=False):
+ assert preamble is not None
+ return _make_c_or_py_source(ffi, module_name, preamble, target_c_file,
+ verbose)
+
+def make_py_source(ffi, module_name, target_py_file, verbose=False):
+ return _make_c_or_py_source(ffi, module_name, None, target_py_file,
+ verbose)
+
+def _modname_to_file(outputdir, modname, extension):
+ parts = modname.split('.')
+ try:
+ os.makedirs(os.path.join(outputdir, *parts[:-1]))
+ except OSError:
+ pass
+ parts[-1] += extension
+ return os.path.join(outputdir, *parts), parts
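+
+# Example (illustrative, POSIX path separators):
+#   _modname_to_file('build', 'pkg.mod', '.c')
+#   => ('build/pkg/mod.c', ['pkg', 'mod.c'])   # also creates build/pkg/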
+
+
+# Aaargh. Distutils is not tested at all for the purpose of compiling
+# DLLs that are not extension modules. Here are some hacks to work
+# around that, in the _patch_for_*() functions...
+
+def _patch_meth(patchlist, cls, name, new_meth):
+ old = getattr(cls, name)
+ patchlist.append((cls, name, old))
+ setattr(cls, name, new_meth)
+ return old
+
+def _unpatch_meths(patchlist):
+ for cls, name, old_meth in reversed(patchlist):
+ setattr(cls, name, old_meth)
+
+def _patch_for_embedding(patchlist):
+ if sys.platform == 'win32':
+ # we must not remove the manifest when building for embedding!
+ from distutils.msvc9compiler import MSVCCompiler
+ _patch_meth(patchlist, MSVCCompiler, '_remove_visual_c_ref',
+ lambda self, manifest_file: manifest_file)
+
+ if sys.platform == 'darwin':
+ # we must not make a '-bundle', but a '-dynamiclib' instead
+ from distutils.ccompiler import CCompiler
+ def my_link_shared_object(self, *args, **kwds):
+ if '-bundle' in self.linker_so:
+ self.linker_so = list(self.linker_so)
+ i = self.linker_so.index('-bundle')
+ self.linker_so[i] = '-dynamiclib'
+ return old_link_shared_object(self, *args, **kwds)
+ old_link_shared_object = _patch_meth(patchlist, CCompiler,
+ 'link_shared_object',
+ my_link_shared_object)
+
+def _patch_for_target(patchlist, target):
+ from distutils.command.build_ext import build_ext
+ # if 'target' is different from '*', we need to patch some internal
+ # method to just return this 'target' value, instead of having it
+ # built from module_name
+ if target.endswith('.*'):
+ target = target[:-2]
+ if sys.platform == 'win32':
+ target += '.dll'
+ elif sys.platform == 'darwin':
+ target += '.dylib'
+ else:
+ target += '.so'
+ _patch_meth(patchlist, build_ext, 'get_ext_filename',
+ lambda self, ext_name: target)
+
+
+def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True,
+ c_file=None, source_extension='.c', extradir=None,
+ compiler_verbose=1, target=None, debug=None, **kwds):
+ if not isinstance(module_name, str):
+ module_name = module_name.encode('ascii')
+ if ffi._windows_unicode:
+ ffi._apply_windows_unicode(kwds)
+ if preamble is not None:
+ embedding = (ffi._embedding is not None)
+ if embedding:
+ ffi._apply_embedding_fix(kwds)
+ if c_file is None:
+ c_file, parts = _modname_to_file(tmpdir, module_name,
+ source_extension)
+ if extradir:
+ parts = [extradir] + parts
+ ext_c_file = os.path.join(*parts)
+ else:
+ ext_c_file = c_file
+ #
+ if target is None:
+ if embedding:
+ target = '%s.*' % module_name
+ else:
+ target = '*'
+ #
+ ext = ffiplatform.get_extension(ext_c_file, module_name, **kwds)
+ updated = make_c_source(ffi, module_name, preamble, c_file,
+ verbose=compiler_verbose)
+ if call_c_compiler:
+ patchlist = []
+ cwd = os.getcwd()
+ try:
+ if embedding:
+ _patch_for_embedding(patchlist)
+ if target != '*':
+ _patch_for_target(patchlist, target)
+ if compiler_verbose:
+ if tmpdir == '.':
+ msg = 'the current directory is'
+ else:
+ msg = 'setting the current directory to'
+ print('%s %r' % (msg, os.path.abspath(tmpdir)))
+ os.chdir(tmpdir)
+ outputfilename = ffiplatform.compile('.', ext,
+ compiler_verbose, debug)
+ finally:
+ os.chdir(cwd)
+ _unpatch_meths(patchlist)
+ return outputfilename
+ else:
+ return ext, updated
+ else:
+ if c_file is None:
+ c_file, _ = _modname_to_file(tmpdir, module_name, '.py')
+ updated = make_py_source(ffi, module_name, c_file,
+ verbose=compiler_verbose)
+ if call_c_compiler:
+ return c_file
+ else:
+ return None, updated
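+
+# Illustrative calls (hypothetical names) matching the branches above:
+#
+#   outputfilename = recompile(ffi, "_example", csource, tmpdir="build")
+#   ext, updated = recompile(ffi, "_example", csource,
+#                            call_c_compiler=False)   # emit the .c only
+#   pyfile = recompile(ffi, "_example", None)          # ABI mode, no compiler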
+
diff --git a/venv/Lib/site-packages/cffi/setuptools_ext.py b/venv/Lib/site-packages/cffi/setuptools_ext.py
new file mode 100644
index 000000000..8fe361487
--- /dev/null
+++ b/venv/Lib/site-packages/cffi/setuptools_ext.py
@@ -0,0 +1,219 @@
+import os
+import sys
+
+try:
+ basestring
+except NameError:
+ # Python 3.x
+ basestring = str
+
+def error(msg):
+ from distutils.errors import DistutilsSetupError
+ raise DistutilsSetupError(msg)
+
+
+def execfile(filename, glob):
+ # We use execfile() (here rewritten for Python 3) instead of
+ # __import__() to load the build script. The problem with
+ # a normal import is that in some packages, the intermediate
+ # __init__.py files may already try to import the file that
+ # we are generating.
+ with open(filename) as f:
+ src = f.read()
+ src += '\n' # Python 2.6 compatibility
+ code = compile(src, filename, 'exec')
+ exec(code, glob, glob)
+
+
+def add_cffi_module(dist, mod_spec):
+ from cffi.api import FFI
+
+ if not isinstance(mod_spec, basestring):
+ error("argument to 'cffi_modules=...' must be a str or a list of str,"
+ " not %r" % (type(mod_spec).__name__,))
+ mod_spec = str(mod_spec)
+ try:
+ build_file_name, ffi_var_name = mod_spec.split(':')
+ except ValueError:
+ error("%r must be of the form 'path/build.py:ffi_variable'" %
+ (mod_spec,))
+ if not os.path.exists(build_file_name):
+ ext = ''
+ rewritten = build_file_name.replace('.', '/') + '.py'
+ if os.path.exists(rewritten):
+ ext = ' (rewrite cffi_modules to [%r])' % (
+ rewritten + ':' + ffi_var_name,)
+ error("%r does not name an existing file%s" % (build_file_name, ext))
+
+ mod_vars = {'__name__': '__cffi__', '__file__': build_file_name}
+ execfile(build_file_name, mod_vars)
+
+ try:
+ ffi = mod_vars[ffi_var_name]
+ except KeyError:
+ error("%r: object %r not found in module" % (mod_spec,
+ ffi_var_name))
+ if not isinstance(ffi, FFI):
+ ffi = ffi() # maybe it's a function instead of directly an ffi
+ if not isinstance(ffi, FFI):
+ error("%r is not an FFI instance (got %r)" % (mod_spec,
+ type(ffi).__name__))
+ if not hasattr(ffi, '_assigned_source'):
+ error("%r: the set_source() method was not called" % (mod_spec,))
+ module_name, source, source_extension, kwds = ffi._assigned_source
+ if ffi._windows_unicode:
+ kwds = kwds.copy()
+ ffi._apply_windows_unicode(kwds)
+
+ if source is None:
+ _add_py_module(dist, ffi, module_name)
+ else:
+ _add_c_module(dist, ffi, module_name, source, source_extension, kwds)
+
+def _set_py_limited_api(Extension, kwds):
+ """
+ Add py_limited_api to kwds if setuptools >= 26 is in use.
+ Do not alter the setting if it already exists.
+ Setuptools takes care of ignoring the flag on Python 2 and PyPy.
+
+ CPython itself should ignore the flag in a debugging version
+ (by not listing .abi3.so in the extensions it supports), but
+ it doesn't so far, creating troubles. That's why we check
+ for "not hasattr(sys, 'gettotalrefcount')" (the 2.7 compatible equivalent
+ of 'd' not in sys.abiflags). (http://bugs.python.org/issue28401)
+
+ On Windows, with CPython <= 3.4, it's better not to use py_limited_api
+ because virtualenv *still* doesn't copy PYTHON3.DLL on these versions.
+ Recently (2020) we started shipping only >= 3.5 wheels, though. So
+ we'll give it another try and set py_limited_api on Windows >= 3.5.
+ """
+ from cffi import recompiler
+
+ if ('py_limited_api' not in kwds and not hasattr(sys, 'gettotalrefcount')
+ and recompiler.USE_LIMITED_API):
+ import setuptools
+ try:
+ setuptools_major_version = int(setuptools.__version__.partition('.')[0])
+ if setuptools_major_version >= 26:
+ kwds['py_limited_api'] = True
+ except ValueError: # certain development versions of setuptools
+ # If we don't know the version number of setuptools, we
+ # try to set 'py_limited_api' anyway. At worst, we get a
+ # warning.
+ kwds['py_limited_api'] = True
+ return kwds
+
+def _add_c_module(dist, ffi, module_name, source, source_extension, kwds):
+ from distutils.core import Extension
+ # We are a setuptools extension. Need this build_ext for py_limited_api.
+ from setuptools.command.build_ext import build_ext
+ from distutils.dir_util import mkpath
+ from distutils import log
+ from cffi import recompiler
+
+ allsources = ['$PLACEHOLDER']
+ allsources.extend(kwds.pop('sources', []))
+ kwds = _set_py_limited_api(Extension, kwds)
+ ext = Extension(name=module_name, sources=allsources, **kwds)
+
+ def make_mod(tmpdir, pre_run=None):
+ c_file = os.path.join(tmpdir, module_name + source_extension)
+ log.info("generating cffi module %r" % c_file)
+ mkpath(tmpdir)
+ # a setuptools-only, API-only hook: called with the "ext" and "ffi"
+ # arguments just before we turn the ffi into C code. To use it,
+ # subclass the 'distutils.command.build_ext.build_ext' class and
+ # add a method 'def pre_run(self, ext, ffi)'.
+ if pre_run is not None:
+ pre_run(ext, ffi)
+ updated = recompiler.make_c_source(ffi, module_name, source, c_file)
+ if not updated:
+ log.info("already up-to-date")
+ return c_file
+
+ if dist.ext_modules is None:
+ dist.ext_modules = []
+ dist.ext_modules.append(ext)
+
+ base_class = dist.cmdclass.get('build_ext', build_ext)
+ class build_ext_make_mod(base_class):
+ def run(self):
+ if ext.sources[0] == '$PLACEHOLDER':
+ pre_run = getattr(self, 'pre_run', None)
+ ext.sources[0] = make_mod(self.build_temp, pre_run)
+ base_class.run(self)
+ dist.cmdclass['build_ext'] = build_ext_make_mod
+ # NB. multiple runs here will create multiple 'build_ext_make_mod'
+ # classes. Even in this case the 'build_ext' command should be
+ # run once; but just in case, the logic above does nothing if
+ # called again.
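+ # Note: the '$PLACEHOLDER' entry in ext.sources is swapped for the real
+ # generated .c file the first time build_ext runs; the check above is what
+ # makes any later run a no-op.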
+
+
+def _add_py_module(dist, ffi, module_name):
+ from distutils.dir_util import mkpath
+ from setuptools.command.build_py import build_py
+ from setuptools.command.build_ext import build_ext
+ from distutils import log
+ from cffi import recompiler
+
+ def generate_mod(py_file):
+ log.info("generating cffi module %r" % py_file)
+ mkpath(os.path.dirname(py_file))
+ updated = recompiler.make_py_source(ffi, module_name, py_file)
+ if not updated:
+ log.info("already up-to-date")
+
+ base_class = dist.cmdclass.get('build_py', build_py)
+ class build_py_make_mod(base_class):
+ def run(self):
+ base_class.run(self)
+ module_path = module_name.split('.')
+ module_path[-1] += '.py'
+ generate_mod(os.path.join(self.build_lib, *module_path))
+ def get_source_files(self):
+ # This is called from 'setup.py sdist' only. Exclude
+ # the generated .py module in this case.
+ saved_py_modules = self.py_modules
+ try:
+ if saved_py_modules:
+ self.py_modules = [m for m in saved_py_modules
+ if m != module_name]
+ return base_class.get_source_files(self)
+ finally:
+ self.py_modules = saved_py_modules
+ dist.cmdclass['build_py'] = build_py_make_mod
+
+ # distutils and setuptools have no notion, as far as I could find, of a
+ # generated python module. If we don't add module_name to
+ # dist.py_modules, then things mostly work but there are some
+ # combination of options (--root and --record) that will miss
+ # the module. So we add it here, which gives a few apparently
+ # harmless warnings about not finding the file outside the
+ # build directory.
+ # Then we need to hack more in get_source_files(); see above.
+ if dist.py_modules is None:
+ dist.py_modules = []
+ dist.py_modules.append(module_name)
+
+ # the following is only for "build_ext -i"
+ base_class_2 = dist.cmdclass.get('build_ext', build_ext)
+ class build_ext_make_mod(base_class_2):
+ def run(self):
+ base_class_2.run(self)
+ if self.inplace:
+ # from get_ext_fullpath() in distutils/command/build_ext.py
+ module_path = module_name.split('.')
+ package = '.'.join(module_path[:-1])
+ build_py = self.get_finalized_command('build_py')
+ package_dir = build_py.get_package_dir(package)
+ file_name = module_path[-1] + '.py'
+ generate_mod(os.path.join(package_dir, file_name))
+ dist.cmdclass['build_ext'] = build_ext_make_mod
+
+def cffi_modules(dist, attr, value):
+ assert attr == 'cffi_modules'
+ if isinstance(value, basestring):
+ value = [value]
+
+ for cffi_module in value:
+ add_cffi_module(dist, cffi_module)
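+
+# Hedged example of wiring the 'cffi_modules' hook above into a project's
+# setup.py (the package path and variable name are hypothetical):
+#
+#   from setuptools import setup
+#   setup(
+#       name="example",
+#       setup_requires=["cffi"],
+#       cffi_modules=["pkg/_build_ffi.py:ffi"],
+#       install_requires=["cffi"],
+#   )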
diff --git a/venv/Lib/site-packages/cffi/vengine_cpy.py b/venv/Lib/site-packages/cffi/vengine_cpy.py
new file mode 100644
index 000000000..6de0df0ea
--- /dev/null
+++ b/venv/Lib/site-packages/cffi/vengine_cpy.py
@@ -0,0 +1,1076 @@
+#
+# DEPRECATED: implementation for ffi.verify()
+#
+import sys, imp
+from . import model
+from .error import VerificationError
+
+
+class VCPythonEngine(object):
+ _class_key = 'x'
+ _gen_python_module = True
+
+ def __init__(self, verifier):
+ self.verifier = verifier
+ self.ffi = verifier.ffi
+ self._struct_pending_verification = {}
+ self._types_of_builtin_functions = {}
+
+ def patch_extension_kwds(self, kwds):
+ pass
+
+ def find_module(self, module_name, path, so_suffixes):
+ try:
+ f, filename, descr = imp.find_module(module_name, path)
+ except ImportError:
+ return None
+ if f is not None:
+ f.close()
+ # Note that after a setuptools installation, there are both .py
+ # and .so files with the same basename. The code here relies on
+ # imp.find_module() locating the .so in priority.
+ if descr[0] not in so_suffixes:
+ return None
+ return filename
+
+ def collect_types(self):
+ self._typesdict = {}
+ self._generate("collecttype")
+
+ def _prnt(self, what=''):
+ self._f.write(what + '\n')
+
+ def _gettypenum(self, type):
+ # a KeyError here is a bug. please report it! :-)
+ return self._typesdict[type]
+
+ def _do_collect_type(self, tp):
+ if ((not isinstance(tp, model.PrimitiveType)
+ or tp.name == 'long double')
+ and tp not in self._typesdict):
+ num = len(self._typesdict)
+ self._typesdict[tp] = num
+
+ def write_source_to_f(self):
+ self.collect_types()
+ #
+ # The new module will have a _cffi_setup() function that receives
+ # objects from the ffi world, and that calls some setup code in
+ # the module. This setup code is split in several independent
+ # functions, e.g. one per constant. The functions are "chained"
+ # by ending in a tail call to each other.
+ #
+ # This is further split in two chained lists, depending on if we
+ # can do it at import-time or if we must wait for _cffi_setup() to
+ # provide us with the <ctype> objects. This is needed because we
+ # need the values of the enum constants in order to build the
+ # <ctype 'enum xxx'> that we may have to pass to _cffi_setup().
+ #
+ # The following two 'chained_list_constants' items contain
+ # the head of these two chained lists, as a string that gives the
+ # call to do, if any.
+ self._chained_list_constants = ['((void)lib,0)', '((void)lib,0)']
+ #
+ prnt = self._prnt
+ # first paste some standard set of lines that are mostly '#define'
+ prnt(cffimod_header)
+ prnt()
+ # then paste the C source given by the user, verbatim.
+ prnt(self.verifier.preamble)
+ prnt()
+ #
+ # call generate_cpy_xxx_decl(), for every xxx found from
+ # ffi._parser._declarations. This generates all the functions.
+ self._generate("decl")
+ #
+ # implement the function _cffi_setup_custom() as calling the
+ # head of the chained list.
+ self._generate_setup_custom()
+ prnt()
+ #
+ # produce the method table, including the entries for the
+ # generated Python->C function wrappers, which are done
+ # by generate_cpy_function_method().
+ prnt('static PyMethodDef _cffi_methods[] = {')
+ self._generate("method")
+ prnt(' {"_cffi_setup", _cffi_setup, METH_VARARGS, NULL},')
+ prnt(' {NULL, NULL, 0, NULL} /* Sentinel */')
+ prnt('};')
+ prnt()
+ #
+ # standard init.
+ modname = self.verifier.get_module_name()
+ constants = self._chained_list_constants[False]
+ prnt('#if PY_MAJOR_VERSION >= 3')
+ prnt()
+ prnt('static struct PyModuleDef _cffi_module_def = {')
+ prnt(' PyModuleDef_HEAD_INIT,')
+ prnt(' "%s",' % modname)
+ prnt(' NULL,')
+ prnt(' -1,')
+ prnt(' _cffi_methods,')
+ prnt(' NULL, NULL, NULL, NULL')
+ prnt('};')
+ prnt()
+ prnt('PyMODINIT_FUNC')
+ prnt('PyInit_%s(void)' % modname)
+ prnt('{')
+ prnt(' PyObject *lib;')
+ prnt(' lib = PyModule_Create(&_cffi_module_def);')
+ prnt(' if (lib == NULL)')
+ prnt(' return NULL;')
+ prnt(' if (%s < 0 || _cffi_init() < 0) {' % (constants,))
+ prnt(' Py_DECREF(lib);')
+ prnt(' return NULL;')
+ prnt(' }')
+ prnt(' return lib;')
+ prnt('}')
+ prnt()
+ prnt('#else')
+ prnt()
+ prnt('PyMODINIT_FUNC')
+ prnt('init%s(void)' % modname)
+ prnt('{')
+ prnt(' PyObject *lib;')
+ prnt(' lib = Py_InitModule("%s", _cffi_methods);' % modname)
+ prnt(' if (lib == NULL)')
+ prnt(' return;')
+ prnt(' if (%s < 0 || _cffi_init() < 0)' % (constants,))
+ prnt(' return;')
+ prnt(' return;')
+ prnt('}')
+ prnt()
+ prnt('#endif')
+
+ def load_library(self, flags=None):
+ # XXX review all usages of 'self' here!
+ # import it as a new extension module
+ imp.acquire_lock()
+ try:
+ if hasattr(sys, "getdlopenflags"):
+ previous_flags = sys.getdlopenflags()
+ try:
+ if hasattr(sys, "setdlopenflags") and flags is not None:
+ sys.setdlopenflags(flags)
+ module = imp.load_dynamic(self.verifier.get_module_name(),
+ self.verifier.modulefilename)
+ except ImportError as e:
+ error = "importing %r: %s" % (self.verifier.modulefilename, e)
+ raise VerificationError(error)
+ finally:
+ if hasattr(sys, "setdlopenflags"):
+ sys.setdlopenflags(previous_flags)
+ finally:
+ imp.release_lock()
+ #
+ # call loading_cpy_struct() to get the struct layout inferred by
+ # the C compiler
+ self._load(module, 'loading')
+ #
+ # the C code will need the <ctype> objects. Collect them in
+ # order in a list.
+ revmapping = dict([(value, key)
+ for (key, value) in self._typesdict.items()])
+ lst = [revmapping[i] for i in range(len(revmapping))]
+ lst = list(map(self.ffi._get_cached_btype, lst))
+ #
+ # build the FFILibrary class and instance and call _cffi_setup().
+ # this will set up some fields like '_cffi_types', and only then
+ # it will invoke the chained list of functions that will really
+ # build (notably) the constant objects, as <cdata> if they are
+ # pointers, and store them as attributes on the 'library' object.
+ class FFILibrary(object):
+ _cffi_python_module = module
+ _cffi_ffi = self.ffi
+ _cffi_dir = []
+ def __dir__(self):
+ return FFILibrary._cffi_dir + list(self.__dict__)
+ library = FFILibrary()
+ if module._cffi_setup(lst, VerificationError, library):
+ import warnings
+ warnings.warn("reimporting %r might overwrite older definitions"
+ % (self.verifier.get_module_name()))
+ #
+ # finally, call the loaded_cpy_xxx() functions. This will perform
+ # the final adjustments, like copying the Python->C wrapper
+ # functions from the module to the 'library' object, and setting
+ # up the FFILibrary class with properties for the global C variables.
+ self._load(module, 'loaded', library=library)
+ module._cffi_original_ffi = self.ffi
+ module._cffi_types_of_builtin_funcs = self._types_of_builtin_functions
+ return library
+
+ def _get_declarations(self):
+ lst = [(key, tp) for (key, (tp, qual)) in
+ self.ffi._parser._declarations.items()]
+ lst.sort()
+ return lst
+
+ def _generate(self, step_name):
+ for name, tp in self._get_declarations():
+ kind, realname = name.split(' ', 1)
+ try:
+ method = getattr(self, '_generate_cpy_%s_%s' % (kind,
+ step_name))
+ except AttributeError:
+ raise VerificationError(
+ "not implemented in verify(): %r" % name)
+ try:
+ method(tp, realname)
+ except Exception as e:
+ model.attach_exception_info(e, name)
+ raise
+
+ def _load(self, module, step_name, **kwds):
+ for name, tp in self._get_declarations():
+ kind, realname = name.split(' ', 1)
+ method = getattr(self, '_%s_cpy_%s' % (step_name, kind))
+ try:
+ method(tp, realname, module, **kwds)
+ except Exception as e:
+ model.attach_exception_info(e, name)
+ raise
+
+ def _generate_nothing(self, tp, name):
+ pass
+
+ def _loaded_noop(self, tp, name, module, **kwds):
+ pass
+
+ # ----------
+
+ def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode):
+ extraarg = ''
+ if isinstance(tp, model.PrimitiveType):
+ if tp.is_integer_type() and tp.name != '_Bool':
+ converter = '_cffi_to_c_int'
+ extraarg = ', %s' % tp.name
+ else:
+ converter = '(%s)_cffi_to_c_%s' % (tp.get_c_name(''),
+ tp.name.replace(' ', '_'))
+ errvalue = '-1'
+ #
+ elif isinstance(tp, model.PointerType):
+ self._convert_funcarg_to_c_ptr_or_array(tp, fromvar,
+ tovar, errcode)
+ return
+ #
+ elif isinstance(tp, (model.StructOrUnion, model.EnumType)):
+ # a struct (not a struct pointer) as a function argument
+ self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)'
+ % (tovar, self._gettypenum(tp), fromvar))
+ self._prnt(' %s;' % errcode)
+ return
+ #
+ elif isinstance(tp, model.FunctionPtrType):
+ converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('')
+ extraarg = ', _cffi_type(%d)' % self._gettypenum(tp)
+ errvalue = 'NULL'
+ #
+ else:
+ raise NotImplementedError(tp)
+ #
+ self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg))
+ self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % (
+ tovar, tp.get_c_name(''), errvalue))
+ self._prnt(' %s;' % errcode)
+
+ def _extra_local_variables(self, tp, localvars, freelines):
+ if isinstance(tp, model.PointerType):
+ localvars.add('Py_ssize_t datasize')
+ localvars.add('struct _cffi_freeme_s *large_args_free = NULL')
+ freelines.add('if (large_args_free != NULL)'
+ ' _cffi_free_array_arguments(large_args_free);')
+
+ def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode):
+ self._prnt(' datasize = _cffi_prepare_pointer_call_argument(')
+ self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % (
+ self._gettypenum(tp), fromvar, tovar))
+ self._prnt(' if (datasize != 0) {')
+ self._prnt(' %s = ((size_t)datasize) <= 640 ? '
+ 'alloca((size_t)datasize) : NULL;' % (tovar,))
+ self._prnt(' if (_cffi_convert_array_argument(_cffi_type(%d), %s, '
+ '(char **)&%s,' % (self._gettypenum(tp), fromvar, tovar))
+ self._prnt(' datasize, &large_args_free) < 0)')
+ self._prnt(' %s;' % errcode)
+ self._prnt(' }')
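+ # Note on the C emitted above: pointer arguments of at most 640 bytes are
+ # stack-allocated with alloca(); larger ones fall back to the heap via
+ # _cffi_convert_array_argument(), which chains them onto 'large_args_free'
+ # so the free line added by _extra_local_variables() releases them on exit.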
+
+ def _convert_expr_from_c(self, tp, var, context):
+ if isinstance(tp, model.PrimitiveType):
+ if tp.is_integer_type() and tp.name != '_Bool':
+ return '_cffi_from_c_int(%s, %s)' % (var, tp.name)
+ elif tp.name != 'long double':
+ return '_cffi_from_c_%s(%s)' % (tp.name.replace(' ', '_'), var)
+ else:
+ return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % (
+ var, self._gettypenum(tp))
+ elif isinstance(tp, (model.PointerType, model.FunctionPtrType)):
+ return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % (
+ var, self._gettypenum(tp))
+ elif isinstance(tp, model.ArrayType):
+ return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % (
+ var, self._gettypenum(model.PointerType(tp.item)))
+ elif isinstance(tp, model.StructOrUnion):
+ if tp.fldnames is None:
+ raise TypeError("'%s' is used as %s, but is opaque" % (
+ tp._get_c_name(), context))
+ return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % (
+ var, self._gettypenum(tp))
+ elif isinstance(tp, model.EnumType):
+ return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % (
+ var, self._gettypenum(tp))
+ else:
+ raise NotImplementedError(tp)
+
+ # ----------
+ # typedefs: generates no code so far
+
+ _generate_cpy_typedef_collecttype = _generate_nothing
+ _generate_cpy_typedef_decl = _generate_nothing
+ _generate_cpy_typedef_method = _generate_nothing
+ _loading_cpy_typedef = _loaded_noop
+ _loaded_cpy_typedef = _loaded_noop
+
+ # ----------
+ # function declarations
+
+ def _generate_cpy_function_collecttype(self, tp, name):
+ assert isinstance(tp, model.FunctionPtrType)
+ if tp.ellipsis:
+ self._do_collect_type(tp)
+ else:
+ # don't call _do_collect_type(tp) in this common case,
+ # otherwise test_autofilled_struct_as_argument fails
+ for type in tp.args:
+ self._do_collect_type(type)
+ self._do_collect_type(tp.result)
+
+ def _generate_cpy_function_decl(self, tp, name):
+ assert isinstance(tp, model.FunctionPtrType)
+ if tp.ellipsis:
+ # cannot support vararg functions better than this: check for its
+ # exact type (including the fixed arguments), and build it as a
+ # constant function pointer (no CPython wrapper)
+ self._generate_cpy_const(False, name, tp)
+ return
+ prnt = self._prnt
+ numargs = len(tp.args)
+ if numargs == 0:
+ argname = 'noarg'
+ elif numargs == 1:
+ argname = 'arg0'
+ else:
+ argname = 'args'
+ prnt('static PyObject *')
+ prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname))
+ prnt('{')
+ #
+ context = 'argument of %s' % name
+ for i, type in enumerate(tp.args):
+ prnt(' %s;' % type.get_c_name(' x%d' % i, context))
+ #
+ localvars = set()
+ freelines = set()
+ for type in tp.args:
+ self._extra_local_variables(type, localvars, freelines)
+ for decl in sorted(localvars):
+ prnt(' %s;' % (decl,))
+ #
+ if not isinstance(tp.result, model.VoidType):
+ result_code = 'result = '
+ context = 'result of %s' % name
+ prnt(' %s;' % tp.result.get_c_name(' result', context))
+ prnt(' PyObject *pyresult;')
+ else:
+ result_code = ''
+ #
+ if len(tp.args) > 1:
+ rng = range(len(tp.args))
+ for i in rng:
+ prnt(' PyObject *arg%d;' % i)
+ prnt()
+ prnt(' if (!PyArg_ParseTuple(args, "%s:%s", %s))' % (
+ 'O' * numargs, name, ', '.join(['&arg%d' % i for i in rng])))
+ prnt(' return NULL;')
+ prnt()
+ #
+ for i, type in enumerate(tp.args):
+ self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i,
+ 'return NULL')
+ prnt()
+ #
+ prnt(' Py_BEGIN_ALLOW_THREADS')
+ prnt(' _cffi_restore_errno();')
+ prnt(' { %s%s(%s); }' % (
+ result_code, name,
+ ', '.join(['x%d' % i for i in range(len(tp.args))])))
+ prnt(' _cffi_save_errno();')
+ prnt(' Py_END_ALLOW_THREADS')
+ prnt()
+ #
+ prnt(' (void)self; /* unused */')
+ if numargs == 0:
+ prnt(' (void)noarg; /* unused */')
+ if result_code:
+ prnt(' pyresult = %s;' %
+ self._convert_expr_from_c(tp.result, 'result', 'result type'))
+ for freeline in freelines:
+ prnt(' ' + freeline)
+ prnt(' return pyresult;')
+ else:
+ for freeline in freelines:
+ prnt(' ' + freeline)
+ prnt(' Py_INCREF(Py_None);')
+ prnt(' return Py_None;')
+ prnt('}')
+ prnt()
+
+ def _generate_cpy_function_method(self, tp, name):
+ if tp.ellipsis:
+ return
+ numargs = len(tp.args)
+ if numargs == 0:
+ meth = 'METH_NOARGS'
+ elif numargs == 1:
+ meth = 'METH_O'
+ else:
+ meth = 'METH_VARARGS'
+ self._prnt(' {"%s", _cffi_f_%s, %s, NULL},' % (name, name, meth))
+
+ _loading_cpy_function = _loaded_noop
+
+ def _loaded_cpy_function(self, tp, name, module, library):
+ if tp.ellipsis:
+ return
+ func = getattr(module, name)
+ setattr(library, name, func)
+ self._types_of_builtin_functions[func] = tp
+
+ # ----------
+ # named structs
+
+ _generate_cpy_struct_collecttype = _generate_nothing
+ def _generate_cpy_struct_decl(self, tp, name):
+ assert name == tp.name
+ self._generate_struct_or_union_decl(tp, 'struct', name)
+ def _generate_cpy_struct_method(self, tp, name):
+ self._generate_struct_or_union_method(tp, 'struct', name)
+ def _loading_cpy_struct(self, tp, name, module):
+ self._loading_struct_or_union(tp, 'struct', name, module)
+ def _loaded_cpy_struct(self, tp, name, module, **kwds):
+ self._loaded_struct_or_union(tp)
+
+ _generate_cpy_union_collecttype = _generate_nothing
+ def _generate_cpy_union_decl(self, tp, name):
+ assert name == tp.name
+ self._generate_struct_or_union_decl(tp, 'union', name)
+ def _generate_cpy_union_method(self, tp, name):
+ self._generate_struct_or_union_method(tp, 'union', name)
+ def _loading_cpy_union(self, tp, name, module):
+ self._loading_struct_or_union(tp, 'union', name, module)
+ def _loaded_cpy_union(self, tp, name, module, **kwds):
+ self._loaded_struct_or_union(tp)
+
+ def _generate_struct_or_union_decl(self, tp, prefix, name):
+ if tp.fldnames is None:
+ return # nothing to do with opaque structs
+ checkfuncname = '_cffi_check_%s_%s' % (prefix, name)
+ layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
+ cname = ('%s %s' % (prefix, name)).strip()
+ #
+ prnt = self._prnt
+ prnt('static void %s(%s *p)' % (checkfuncname, cname))
+ prnt('{')
+ prnt(' /* only to generate compile-time warnings or errors */')
+ prnt(' (void)p;')
+ for fname, ftype, fbitsize, fqual in tp.enumfields():
+ if (isinstance(ftype, model.PrimitiveType)
+ and ftype.is_integer_type()) or fbitsize >= 0:
+ # accept all integers, but complain on float or double
+ prnt(' (void)((p->%s) << 1);' % fname)
+ else:
+ # only accept exactly the type declared.
+ try:
+ prnt(' { %s = &p->%s; (void)tmp; }' % (
+ ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual),
+ fname))
+ except VerificationError as e:
+ prnt(' /* %s */' % str(e)) # cannot verify it, ignore
+ prnt('}')
+ prnt('static PyObject *')
+ prnt('%s(PyObject *self, PyObject *noarg)' % (layoutfuncname,))
+ prnt('{')
+ prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname)
+ prnt(' static Py_ssize_t nums[] = {')
+ prnt(' sizeof(%s),' % cname)
+ prnt(' offsetof(struct _cffi_aligncheck, y),')
+ for fname, ftype, fbitsize, fqual in tp.enumfields():
+ if fbitsize >= 0:
+ continue # xxx ignore fbitsize for now
+ prnt(' offsetof(%s, %s),' % (cname, fname))
+ if isinstance(ftype, model.ArrayType) and ftype.length is None:
+ prnt(' 0, /* %s */' % ftype._get_c_name())
+ else:
+ prnt(' sizeof(((%s *)0)->%s),' % (cname, fname))
+ prnt(' -1')
+ prnt(' };')
+ prnt(' (void)self; /* unused */')
+ prnt(' (void)noarg; /* unused */')
+ prnt(' return _cffi_get_struct_layout(nums);')
+ prnt(' /* the next line is not executed, but compiled */')
+ prnt(' %s(0);' % (checkfuncname,))
+ prnt('}')
+ prnt()
+
+ def _generate_struct_or_union_method(self, tp, prefix, name):
+ if tp.fldnames is None:
+ return # nothing to do with opaque structs
+ layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
+ self._prnt(' {"%s", %s, METH_NOARGS, NULL},' % (layoutfuncname,
+ layoutfuncname))
+
+ def _loading_struct_or_union(self, tp, prefix, name, module):
+ if tp.fldnames is None:
+ return # nothing to do with opaque structs
+ layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
+ #
+ function = getattr(module, layoutfuncname)
+ layout = function()
+ if isinstance(tp, model.StructOrUnion) and tp.partial:
+ # use the function()'s sizes and offsets to guide the
+ # layout of the struct
+ totalsize = layout[0]
+ totalalignment = layout[1]
+ fieldofs = layout[2::2]
+ fieldsize = layout[3::2]
+ tp.force_flatten()
+ assert len(fieldofs) == len(fieldsize) == len(tp.fldnames)
+ tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment
+ else:
+ cname = ('%s %s' % (prefix, name)).strip()
+ self._struct_pending_verification[tp] = layout, cname
+
+ def _loaded_struct_or_union(self, tp):
+ if tp.fldnames is None:
+ return # nothing to do with opaque structs
+ self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered
+
+ if tp in self._struct_pending_verification:
+ # check that the layout sizes and offsets match the real ones
+ def check(realvalue, expectedvalue, msg):
+ if realvalue != expectedvalue:
+ raise VerificationError(
+ "%s (we have %d, but C compiler says %d)"
+ % (msg, expectedvalue, realvalue))
+ ffi = self.ffi
+ BStruct = ffi._get_cached_btype(tp)
+ layout, cname = self._struct_pending_verification.pop(tp)
+ check(layout[0], ffi.sizeof(BStruct), "wrong total size")
+ check(layout[1], ffi.alignof(BStruct), "wrong total alignment")
+ i = 2
+ for fname, ftype, fbitsize, fqual in tp.enumfields():
+ if fbitsize >= 0:
+ continue # xxx ignore fbitsize for now
+ check(layout[i], ffi.offsetof(BStruct, fname),
+ "wrong offset for field %r" % (fname,))
+ if layout[i+1] != 0:
+ BField = ffi._get_cached_btype(ftype)
+ check(layout[i+1], ffi.sizeof(BField),
+ "wrong size for field %r" % (fname,))
+ i += 2
+ assert i == len(layout)
+
+ # ----------
+ # 'anonymous' declarations. These are produced for anonymous structs
+ # or unions; the 'name' is obtained by a typedef.
+
+ _generate_cpy_anonymous_collecttype = _generate_nothing
+
+ def _generate_cpy_anonymous_decl(self, tp, name):
+ if isinstance(tp, model.EnumType):
+ self._generate_cpy_enum_decl(tp, name, '')
+ else:
+ self._generate_struct_or_union_decl(tp, '', name)
+
+ def _generate_cpy_anonymous_method(self, tp, name):
+ if not isinstance(tp, model.EnumType):
+ self._generate_struct_or_union_method(tp, '', name)
+
+ def _loading_cpy_anonymous(self, tp, name, module):
+ if isinstance(tp, model.EnumType):
+ self._loading_cpy_enum(tp, name, module)
+ else:
+ self._loading_struct_or_union(tp, '', name, module)
+
+ def _loaded_cpy_anonymous(self, tp, name, module, **kwds):
+ if isinstance(tp, model.EnumType):
+ self._loaded_cpy_enum(tp, name, module, **kwds)
+ else:
+ self._loaded_struct_or_union(tp)
+
+ # ----------
+ # constants, likely declared with '#define'
+
+ def _generate_cpy_const(self, is_int, name, tp=None, category='const',
+ vartp=None, delayed=True, size_too=False,
+ check_value=None):
+ prnt = self._prnt
+ funcname = '_cffi_%s_%s' % (category, name)
+ prnt('static int %s(PyObject *lib)' % funcname)
+ prnt('{')
+ prnt(' PyObject *o;')
+ prnt(' int res;')
+ if not is_int:
+ prnt(' %s;' % (vartp or tp).get_c_name(' i', name))
+ else:
+ assert category == 'const'
+ #
+ if check_value is not None:
+ self._check_int_constant_value(name, check_value)
+ #
+ if not is_int:
+ if category == 'var':
+ realexpr = '&' + name
+ else:
+ realexpr = name
+ prnt(' i = (%s);' % (realexpr,))
+ prnt(' o = %s;' % (self._convert_expr_from_c(tp, 'i',
+ 'variable type'),))
+ assert delayed
+ else:
+ prnt(' o = _cffi_from_c_int_const(%s);' % name)
+ prnt(' if (o == NULL)')
+ prnt(' return -1;')
+ if size_too:
+ prnt(' {')
+ prnt(' PyObject *o1 = o;')
+ prnt(' o = Py_BuildValue("On", o1, (Py_ssize_t)sizeof(%s));'
+ % (name,))
+ prnt(' Py_DECREF(o1);')
+ prnt(' if (o == NULL)')
+ prnt(' return -1;')
+ prnt(' }')
+ prnt(' res = PyObject_SetAttrString(lib, "%s", o);' % name)
+ prnt(' Py_DECREF(o);')
+ prnt(' if (res < 0)')
+ prnt(' return -1;')
+ prnt(' return %s;' % self._chained_list_constants[delayed])
+ self._chained_list_constants[delayed] = funcname + '(lib)'
+ prnt('}')
+ prnt()
+
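+ # Each initializer emitted by _generate_cpy_const() ends by returning the
+ # previous head of self._chained_list_constants, so invoking the final
+ # head (from _cffi_setup_custom() or the module init) runs every constant
+ # setup function as one chain of calls.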
+ def _generate_cpy_constant_collecttype(self, tp, name):
+ is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type()
+ if not is_int:
+ self._do_collect_type(tp)
+
+ def _generate_cpy_constant_decl(self, tp, name):
+ is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type()
+ self._generate_cpy_const(is_int, name, tp)
+
+ _generate_cpy_constant_method = _generate_nothing
+ _loading_cpy_constant = _loaded_noop
+ _loaded_cpy_constant = _loaded_noop
+
+ # ----------
+ # enums
+
+ def _check_int_constant_value(self, name, value, err_prefix=''):
+ prnt = self._prnt
+ if value <= 0:
+ prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % (
+ name, name, value))
+ else:
+ prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % (
+ name, name, value))
+ prnt(' char buf[64];')
+ prnt(' if ((%s) <= 0)' % name)
+ prnt(' snprintf(buf, 63, "%%ld", (long)(%s));' % name)
+ prnt(' else')
+ prnt(' snprintf(buf, 63, "%%lu", (unsigned long)(%s));' %
+ name)
+ prnt(' PyErr_Format(_cffi_VerificationError,')
+ prnt(' "%s%s has the real value %s, not %s",')
+ prnt(' "%s", "%s", buf, "%d");' % (
+ err_prefix, name, value))
+ prnt(' return -1;')
+ prnt(' }')
+
+ def _enum_funcname(self, prefix, name):
+ # "$enum_$1" => "___D_enum____D_1"
+ name = name.replace('$', '___D_')
+ return '_cffi_e_%s_%s' % (prefix, name)
+
+ def _generate_cpy_enum_decl(self, tp, name, prefix='enum'):
+ if tp.partial:
+ for enumerator in tp.enumerators:
+ self._generate_cpy_const(True, enumerator, delayed=False)
+ return
+ #
+ funcname = self._enum_funcname(prefix, name)
+ prnt = self._prnt
+ prnt('static int %s(PyObject *lib)' % funcname)
+ prnt('{')
+ for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
+ self._check_int_constant_value(enumerator, enumvalue,
+ "enum %s: " % name)
+ prnt(' return %s;' % self._chained_list_constants[True])
+ self._chained_list_constants[True] = funcname + '(lib)'
+ prnt('}')
+ prnt()
+
+ _generate_cpy_enum_collecttype = _generate_nothing
+ _generate_cpy_enum_method = _generate_nothing
+
+ def _loading_cpy_enum(self, tp, name, module):
+ if tp.partial:
+ enumvalues = [getattr(module, enumerator)
+ for enumerator in tp.enumerators]
+ tp.enumvalues = tuple(enumvalues)
+ tp.partial_resolved = True
+
+ def _loaded_cpy_enum(self, tp, name, module, library):
+ for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
+ setattr(library, enumerator, enumvalue)
+
+ # ----------
+ # macros: for now only for integers
+
+ def _generate_cpy_macro_decl(self, tp, name):
+ if tp == '...':
+ check_value = None
+ else:
+ check_value = tp # an integer
+ self._generate_cpy_const(True, name, check_value=check_value)
+
+ _generate_cpy_macro_collecttype = _generate_nothing
+ _generate_cpy_macro_method = _generate_nothing
+ _loading_cpy_macro = _loaded_noop
+ _loaded_cpy_macro = _loaded_noop
+
+ # ----------
+ # global variables
+
+ def _generate_cpy_variable_collecttype(self, tp, name):
+ if isinstance(tp, model.ArrayType):
+ tp_ptr = model.PointerType(tp.item)
+ else:
+ tp_ptr = model.PointerType(tp)
+ self._do_collect_type(tp_ptr)
+
+ def _generate_cpy_variable_decl(self, tp, name):
+ if isinstance(tp, model.ArrayType):
+ tp_ptr = model.PointerType(tp.item)
+ self._generate_cpy_const(False, name, tp, vartp=tp_ptr,
+ size_too = tp.length_is_unknown())
+ else:
+ tp_ptr = model.PointerType(tp)
+ self._generate_cpy_const(False, name, tp_ptr, category='var')
+
+ _generate_cpy_variable_method = _generate_nothing
+ _loading_cpy_variable = _loaded_noop
+
+ def _loaded_cpy_variable(self, tp, name, module, library):
+ value = getattr(library, name)
+ if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the
+ # sense that "a=..." is forbidden
+ if tp.length_is_unknown():
+ assert isinstance(value, tuple)
+ (value, size) = value
+ BItemType = self.ffi._get_cached_btype(tp.item)
+ length, rest = divmod(size, self.ffi.sizeof(BItemType))
+ if rest != 0:
+ raise VerificationError(
+ "bad size: %r does not seem to be an array of %s" %
+ (name, tp.item))
+ tp = tp.resolve_length(length)
+ # 'value' is a <cdata 'type *'> which we have to replace with
+ # a <cdata 'type[N]'> if the N is actually known
+ if tp.length is not None:
+ BArray = self.ffi._get_cached_btype(tp)
+ value = self.ffi.cast(BArray, value)
+ setattr(library, name, value)
+ return
+ # remove ptr= from the library instance, and replace
+ # it by a property on the class, which reads/writes into ptr[0].
+ ptr = value
+ delattr(library, name)
+ def getter(library):
+ return ptr[0]
+ def setter(library, value):
+ ptr[0] = value
+ setattr(type(library), name, property(getter, setter))
+ type(library)._cffi_dir.append(name)
+
+ # ----------
+
+ def _generate_setup_custom(self):
+ prnt = self._prnt
+ prnt('static int _cffi_setup_custom(PyObject *lib)')
+ prnt('{')
+ prnt(' return %s;' % self._chained_list_constants[True])
+ prnt('}')
+
+cffimod_header = r'''
+#include <Python.h>
+#include <stddef.h>
+
+/* this block of #ifs should be kept exactly identical between
+ c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py
+ and cffi/_cffi_include.h */
+#if defined(_MSC_VER)
+# include <malloc.h> /* for alloca() */
+# if _MSC_VER < 1600 /* MSVC < 2010 */
+ typedef __int8 int8_t;
+ typedef __int16 int16_t;
+ typedef __int32 int32_t;
+ typedef __int64 int64_t;
+ typedef unsigned __int8 uint8_t;
+ typedef unsigned __int16 uint16_t;
+ typedef unsigned __int32 uint32_t;
+ typedef unsigned __int64 uint64_t;
+ typedef __int8 int_least8_t;
+ typedef __int16 int_least16_t;
+ typedef __int32 int_least32_t;
+ typedef __int64 int_least64_t;
+ typedef unsigned __int8 uint_least8_t;
+ typedef unsigned __int16 uint_least16_t;
+ typedef unsigned __int32 uint_least32_t;
+ typedef unsigned __int64 uint_least64_t;
+ typedef __int8 int_fast8_t;
+ typedef __int16 int_fast16_t;
+ typedef __int32 int_fast32_t;
+ typedef __int64 int_fast64_t;
+ typedef unsigned __int8 uint_fast8_t;
+ typedef unsigned __int16 uint_fast16_t;
+ typedef unsigned __int32 uint_fast32_t;
+ typedef unsigned __int64 uint_fast64_t;
+ typedef __int64 intmax_t;
+ typedef unsigned __int64 uintmax_t;
+# else
+# include <stdint.h>
+# endif
+# if _MSC_VER < 1800 /* MSVC < 2013 */
+# ifndef __cplusplus
+ typedef unsigned char _Bool;
+# endif
+# endif
+#else
+# include <stdint.h>
+# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux)
+# include <alloca.h>
+# endif
+#endif
+
+#if PY_MAJOR_VERSION < 3
+# undef PyCapsule_CheckExact
+# undef PyCapsule_GetPointer
+# define PyCapsule_CheckExact(capsule) (PyCObject_Check(capsule))
+# define PyCapsule_GetPointer(capsule, name) \
+ (PyCObject_AsVoidPtr(capsule))
+#endif
+
+#if PY_MAJOR_VERSION >= 3
+# define PyInt_FromLong PyLong_FromLong
+#endif
+
+#define _cffi_from_c_double PyFloat_FromDouble
+#define _cffi_from_c_float PyFloat_FromDouble
+#define _cffi_from_c_long PyInt_FromLong
+#define _cffi_from_c_ulong PyLong_FromUnsignedLong
+#define _cffi_from_c_longlong PyLong_FromLongLong
+#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong
+#define _cffi_from_c__Bool PyBool_FromLong
+
+#define _cffi_to_c_double PyFloat_AsDouble
+#define _cffi_to_c_float PyFloat_AsDouble
+
+#define _cffi_from_c_int_const(x) \
+ (((x) > 0) ? \
+ ((unsigned long long)(x) <= (unsigned long long)LONG_MAX) ? \
+ PyInt_FromLong((long)(x)) : \
+ PyLong_FromUnsignedLongLong((unsigned long long)(x)) : \
+ ((long long)(x) >= (long long)LONG_MIN) ? \
+ PyInt_FromLong((long)(x)) : \
+ PyLong_FromLongLong((long long)(x)))
+
+#define _cffi_from_c_int(x, type) \
+ (((type)-1) > 0 ? /* unsigned */ \
+ (sizeof(type) < sizeof(long) ? \
+ PyInt_FromLong((long)x) : \
+ sizeof(type) == sizeof(long) ? \
+ PyLong_FromUnsignedLong((unsigned long)x) : \
+ PyLong_FromUnsignedLongLong((unsigned long long)x)) : \
+ (sizeof(type) <= sizeof(long) ? \
+ PyInt_FromLong((long)x) : \
+ PyLong_FromLongLong((long long)x)))
+
+#define _cffi_to_c_int(o, type) \
+ ((type)( \
+ sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \
+ : (type)_cffi_to_c_i8(o)) : \
+ sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \
+ : (type)_cffi_to_c_i16(o)) : \
+ sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \
+ : (type)_cffi_to_c_i32(o)) : \
+ sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \
+ : (type)_cffi_to_c_i64(o)) : \
+ (Py_FatalError("unsupported size for type " #type), (type)0)))
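+
+/* Note: the two conversion macros above dispatch purely on sizeof(type) and
+   on the signedness test ((type)-1 > 0, true only for unsigned types), so
+   each expansion reduces to a single helper call. */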
+
+#define _cffi_to_c_i8 \
+ ((int(*)(PyObject *))_cffi_exports[1])
+#define _cffi_to_c_u8 \
+ ((int(*)(PyObject *))_cffi_exports[2])
+#define _cffi_to_c_i16 \
+ ((int(*)(PyObject *))_cffi_exports[3])
+#define _cffi_to_c_u16 \
+ ((int(*)(PyObject *))_cffi_exports[4])
+#define _cffi_to_c_i32 \
+ ((int(*)(PyObject *))_cffi_exports[5])
+#define _cffi_to_c_u32 \
+ ((unsigned int(*)(PyObject *))_cffi_exports[6])
+#define _cffi_to_c_i64 \
+ ((long long(*)(PyObject *))_cffi_exports[7])
+#define _cffi_to_c_u64 \
+ ((unsigned long long(*)(PyObject *))_cffi_exports[8])
+#define _cffi_to_c_char \
+ ((int(*)(PyObject *))_cffi_exports[9])
+#define _cffi_from_c_pointer \
+ ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[10])
+#define _cffi_to_c_pointer \
+ ((char *(*)(PyObject *, CTypeDescrObject *))_cffi_exports[11])
+#define _cffi_get_struct_layout \
+ ((PyObject *(*)(Py_ssize_t[]))_cffi_exports[12])
+#define _cffi_restore_errno \
+ ((void(*)(void))_cffi_exports[13])
+#define _cffi_save_errno \
+ ((void(*)(void))_cffi_exports[14])
+#define _cffi_from_c_char \
+ ((PyObject *(*)(char))_cffi_exports[15])
+#define _cffi_from_c_deref \
+ ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[16])
+#define _cffi_to_c \
+ ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[17])
+#define _cffi_from_c_struct \
+ ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[18])
+#define _cffi_to_c_wchar_t \
+ ((wchar_t(*)(PyObject *))_cffi_exports[19])
+#define _cffi_from_c_wchar_t \
+ ((PyObject *(*)(wchar_t))_cffi_exports[20])
+#define _cffi_to_c_long_double \
+ ((long double(*)(PyObject *))_cffi_exports[21])
+#define _cffi_to_c__Bool \
+ ((_Bool(*)(PyObject *))_cffi_exports[22])
+#define _cffi_prepare_pointer_call_argument \
+ ((Py_ssize_t(*)(CTypeDescrObject *, PyObject *, char **))_cffi_exports[23])
+#define _cffi_convert_array_from_object \
+ ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[24])
+#define _CFFI_NUM_EXPORTS 25
+
+typedef struct _ctypedescr CTypeDescrObject;
+
+static void *_cffi_exports[_CFFI_NUM_EXPORTS];
+static PyObject *_cffi_types, *_cffi_VerificationError;
+
+static int _cffi_setup_custom(PyObject *lib); /* forward */
+
+static PyObject *_cffi_setup(PyObject *self, PyObject *args)
+{
+ PyObject *library;
+ int was_alive = (_cffi_types != NULL);
+ (void)self; /* unused */
+ if (!PyArg_ParseTuple(args, "OOO", &_cffi_types, &_cffi_VerificationError,
+ &library))
+ return NULL;
+ Py_INCREF(_cffi_types);
+ Py_INCREF(_cffi_VerificationError);
+ if (_cffi_setup_custom(library) < 0)
+ return NULL;
+ return PyBool_FromLong(was_alive);
+}
+
+union _cffi_union_alignment_u {
+ unsigned char m_char;
+ unsigned short m_short;
+ unsigned int m_int;
+ unsigned long m_long;
+ unsigned long long m_longlong;
+ float m_float;
+ double m_double;
+ long double m_longdouble;
+};
+
+struct _cffi_freeme_s {
+ struct _cffi_freeme_s *next;
+ union _cffi_union_alignment_u alignment;
+};
+
+#ifdef __GNUC__
+ __attribute__((unused))
+#endif
+static int _cffi_convert_array_argument(CTypeDescrObject *ctptr, PyObject *arg,
+ char **output_data, Py_ssize_t datasize,
+ struct _cffi_freeme_s **freeme)
+{
+ char *p;
+ if (datasize < 0)
+ return -1;
+
+ p = *output_data;
+ if (p == NULL) {
+ struct _cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc(
+ offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize);
+ if (fp == NULL)
+ return -1;
+ fp->next = *freeme;
+ *freeme = fp;
+ p = *output_data = (char *)&fp->alignment;
+ }
+ memset((void *)p, 0, (size_t)datasize);
+ return _cffi_convert_array_from_object(p, ctptr, arg);
+}
+
+#ifdef __GNUC__
+ __attribute__((unused))
+#endif
+static void _cffi_free_array_arguments(struct _cffi_freeme_s *freeme)
+{
+ do {
+ void *p = (void *)freeme;
+ freeme = freeme->next;
+ PyObject_Free(p);
+ } while (freeme != NULL);
+}
+
+static int _cffi_init(void)
+{
+ PyObject *module, *c_api_object = NULL;
+
+ module = PyImport_ImportModule("_cffi_backend");
+ if (module == NULL)
+ goto failure;
+
+ c_api_object = PyObject_GetAttrString(module, "_C_API");
+ if (c_api_object == NULL)
+ goto failure;
+ if (!PyCapsule_CheckExact(c_api_object)) {
+ PyErr_SetNone(PyExc_ImportError);
+ goto failure;
+ }
+ memcpy(_cffi_exports, PyCapsule_GetPointer(c_api_object, "cffi"),
+ _CFFI_NUM_EXPORTS * sizeof(void *));
+
+ Py_DECREF(module);
+ Py_DECREF(c_api_object);
+ return 0;
+
+ failure:
+ Py_XDECREF(module);
+ Py_XDECREF(c_api_object);
+ return -1;
+}
+
+#define _cffi_type(num) ((CTypeDescrObject *)PyList_GET_ITEM(_cffi_types, num))
+
+/**********/
+'''
diff --git a/venv/Lib/site-packages/cffi/vengine_gen.py b/venv/Lib/site-packages/cffi/vengine_gen.py
new file mode 100644
index 000000000..26421526f
--- /dev/null
+++ b/venv/Lib/site-packages/cffi/vengine_gen.py
@@ -0,0 +1,675 @@
+#
+# DEPRECATED: implementation for ffi.verify()
+#
+import sys, os
+import types
+
+from . import model
+from .error import VerificationError
+
+
+class VGenericEngine(object):
+ _class_key = 'g'
+ _gen_python_module = False
+
+ def __init__(self, verifier):
+ self.verifier = verifier
+ self.ffi = verifier.ffi
+ self.export_symbols = []
+ self._struct_pending_verification = {}
+
+ def patch_extension_kwds(self, kwds):
+ # add 'export_symbols' to the dictionary. Note that we add the
+ # list before filling it. When we fill it, it will thus also show
+ # up in kwds['export_symbols'].
+ kwds.setdefault('export_symbols', self.export_symbols)
+
+ def find_module(self, module_name, path, so_suffixes):
+ for so_suffix in so_suffixes:
+ basename = module_name + so_suffix
+ if path is None:
+ path = sys.path
+ for dirname in path:
+ filename = os.path.join(dirname, basename)
+ if os.path.isfile(filename):
+ return filename
+
+ def collect_types(self):
+ pass # not needed in the generic engine
+
+ def _prnt(self, what=''):
+ self._f.write(what + '\n')
+
+ def write_source_to_f(self):
+ prnt = self._prnt
+ # first paste some standard set of lines that are mostly '#include'
+ prnt(cffimod_header)
+ # then paste the C source given by the user, verbatim.
+ prnt(self.verifier.preamble)
+ #
+ # call generate_gen_xxx_decl(), for every xxx found from
+ # ffi._parser._declarations. This generates all the functions.
+ self._generate('decl')
+ #
+ # on Windows, distutils insists on putting init_cffi_xyz in
+ # 'export_symbols', so instead of fighting it, just give up and
+ # give it one
+ if sys.platform == 'win32':
+ if sys.version_info >= (3,):
+ prefix = 'PyInit_'
+ else:
+ prefix = 'init'
+ modname = self.verifier.get_module_name()
+ prnt("void %s%s(void) { }\n" % (prefix, modname))
+
+ def load_library(self, flags=0):
+ # import it with the CFFI backend
+ backend = self.ffi._backend
+ # needs to make a path that contains '/', on Posix
+ filename = os.path.join(os.curdir, self.verifier.modulefilename)
+ module = backend.load_library(filename, flags)
+ #
+ # call loading_gen_struct() to get the struct layout inferred by
+ # the C compiler
+ self._load(module, 'loading')
+
+ # build the FFILibrary class and instance; this is a module subclass
+ # because modules are expected to have usually-constant attributes, and
+ # in PyPy this means the JIT is able to treat attributes as constant,
+ # which we want.
+ class FFILibrary(types.ModuleType):
+ _cffi_generic_module = module
+ _cffi_ffi = self.ffi
+ _cffi_dir = []
+ def __dir__(self):
+ return FFILibrary._cffi_dir
+ library = FFILibrary("")
+ #
+ # finally, call the loaded_gen_xxx() functions. This will set
+ # up the 'library' object.
+ self._load(module, 'loaded', library=library)
+ return library
+
+ def _get_declarations(self):
+ lst = [(key, tp) for (key, (tp, qual)) in
+ self.ffi._parser._declarations.items()]
+ lst.sort()
+ return lst
+
+ def _generate(self, step_name):
+ for name, tp in self._get_declarations():
+ kind, realname = name.split(' ', 1)
+ try:
+ method = getattr(self, '_generate_gen_%s_%s' % (kind,
+ step_name))
+ except AttributeError:
+ raise VerificationError(
+ "not implemented in verify(): %r" % name)
+ try:
+ method(tp, realname)
+ except Exception as e:
+ model.attach_exception_info(e, name)
+ raise
+
+ def _load(self, module, step_name, **kwds):
+ for name, tp in self._get_declarations():
+ kind, realname = name.split(' ', 1)
+ method = getattr(self, '_%s_gen_%s' % (step_name, kind))
+ try:
+ method(tp, realname, module, **kwds)
+ except Exception as e:
+ model.attach_exception_info(e, name)
+ raise
+
+ def _generate_nothing(self, tp, name):
+ pass
+
+ def _loaded_noop(self, tp, name, module, **kwds):
+ pass
+
+ # ----------
+ # typedefs: generates no code so far
+
+ _generate_gen_typedef_decl = _generate_nothing
+ _loading_gen_typedef = _loaded_noop
+ _loaded_gen_typedef = _loaded_noop
+
+ # ----------
+ # function declarations
+
+ def _generate_gen_function_decl(self, tp, name):
+ assert isinstance(tp, model.FunctionPtrType)
+ if tp.ellipsis:
+ # cannot support vararg functions better than this: check for its
+ # exact type (including the fixed arguments), and build it as a
+ # constant function pointer (no _cffi_f_%s wrapper)
+ self._generate_gen_const(False, name, tp)
+ return
+ prnt = self._prnt
+ numargs = len(tp.args)
+ argnames = []
+ for i, type in enumerate(tp.args):
+ indirection = ''
+ if isinstance(type, model.StructOrUnion):
+ indirection = '*'
+ argnames.append('%sx%d' % (indirection, i))
+ context = 'argument of %s' % name
+ arglist = [type.get_c_name(' %s' % arg, context)
+ for type, arg in zip(tp.args, argnames)]
+ tpresult = tp.result
+ if isinstance(tpresult, model.StructOrUnion):
+ arglist.insert(0, tpresult.get_c_name(' *r', context))
+ tpresult = model.void_type
+ arglist = ', '.join(arglist) or 'void'
+ wrappername = '_cffi_f_%s' % name
+ self.export_symbols.append(wrappername)
+ if tp.abi:
+ abi = tp.abi + ' '
+ else:
+ abi = ''
+ funcdecl = ' %s%s(%s)' % (abi, wrappername, arglist)
+ context = 'result of %s' % name
+ prnt(tpresult.get_c_name(funcdecl, context))
+ prnt('{')
+ #
+ if isinstance(tp.result, model.StructOrUnion):
+ result_code = '*r = '
+ elif not isinstance(tp.result, model.VoidType):
+ result_code = 'return '
+ else:
+ result_code = ''
+ prnt(' %s%s(%s);' % (result_code, name, ', '.join(argnames)))
+ prnt('}')
+ prnt()
+
+ _loading_gen_function = _loaded_noop
+
+ def _loaded_gen_function(self, tp, name, module, library):
+ assert isinstance(tp, model.FunctionPtrType)
+ if tp.ellipsis:
+ newfunction = self._load_constant(False, tp, name, module)
+ else:
+ indirections = []
+ base_tp = tp
+ if (any(isinstance(typ, model.StructOrUnion) for typ in tp.args)
+ or isinstance(tp.result, model.StructOrUnion)):
+ indirect_args = []
+ for i, typ in enumerate(tp.args):
+ if isinstance(typ, model.StructOrUnion):
+ typ = model.PointerType(typ)
+ indirections.append((i, typ))
+ indirect_args.append(typ)
+ indirect_result = tp.result
+ if isinstance(indirect_result, model.StructOrUnion):
+ if indirect_result.fldtypes is None:
+ raise TypeError("'%s' is used as result type, "
+ "but is opaque" % (
+ indirect_result._get_c_name(),))
+ indirect_result = model.PointerType(indirect_result)
+ indirect_args.insert(0, indirect_result)
+ indirections.insert(0, ("result", indirect_result))
+ indirect_result = model.void_type
+ tp = model.FunctionPtrType(tuple(indirect_args),
+ indirect_result, tp.ellipsis)
+ BFunc = self.ffi._get_cached_btype(tp)
+ wrappername = '_cffi_f_%s' % name
+ newfunction = module.load_function(BFunc, wrappername)
+ for i, typ in indirections:
+ newfunction = self._make_struct_wrapper(newfunction, i, typ,
+ base_tp)
+ setattr(library, name, newfunction)
+ type(library)._cffi_dir.append(name)
+
+ def _make_struct_wrapper(self, oldfunc, i, tp, base_tp):
+ backend = self.ffi._backend
+ BType = self.ffi._get_cached_btype(tp)
+ if i == "result":
+ ffi = self.ffi
+ def newfunc(*args):
+ res = ffi.new(BType)
+ oldfunc(res, *args)
+ return res[0]
+ else:
+ def newfunc(*args):
+ args = args[:i] + (backend.newp(BType, args[i]),) + args[i+1:]
+ return oldfunc(*args)
+ newfunc._cffi_base_type = base_tp
+ return newfunc
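+
+ # Illustrative effect of the indirection above (hypothetical C signature):
+ # for 'struct point f(struct point a);' the generated wrapper is
+ # 'void _cffi_f_f(struct point *r, struct point *a);', and
+ # _make_struct_wrapper() restores the by-value view so that library.f(p)
+ # still accepts and returns a plain struct.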
+
+ # ----------
+ # named structs
+
+ def _generate_gen_struct_decl(self, tp, name):
+ assert name == tp.name
+ self._generate_struct_or_union_decl(tp, 'struct', name)
+
+ def _loading_gen_struct(self, tp, name, module):
+ self._loading_struct_or_union(tp, 'struct', name, module)
+
+ def _loaded_gen_struct(self, tp, name, module, **kwds):
+ self._loaded_struct_or_union(tp)
+
+ def _generate_gen_union_decl(self, tp, name):
+ assert name == tp.name
+ self._generate_struct_or_union_decl(tp, 'union', name)
+
+ def _loading_gen_union(self, tp, name, module):
+ self._loading_struct_or_union(tp, 'union', name, module)
+
+ def _loaded_gen_union(self, tp, name, module, **kwds):
+ self._loaded_struct_or_union(tp)
+
+ def _generate_struct_or_union_decl(self, tp, prefix, name):
+ if tp.fldnames is None:
+ return # nothing to do with opaque structs
+ checkfuncname = '_cffi_check_%s_%s' % (prefix, name)
+ layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
+ cname = ('%s %s' % (prefix, name)).strip()
+ #
+ prnt = self._prnt
+ prnt('static void %s(%s *p)' % (checkfuncname, cname))
+ prnt('{')
+ prnt(' /* only to generate compile-time warnings or errors */')
+ prnt(' (void)p;')
+ for fname, ftype, fbitsize, fqual in tp.enumfields():
+ if (isinstance(ftype, model.PrimitiveType)
+ and ftype.is_integer_type()) or fbitsize >= 0:
+ # accept all integers, but complain on float or double
+ prnt(' (void)((p->%s) << 1);' % fname)
+ else:
+ # only accept exactly the type declared.
+ try:
+ prnt(' { %s = &p->%s; (void)tmp; }' % (
+ ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual),
+ fname))
+ except VerificationError as e:
+ prnt(' /* %s */' % str(e)) # cannot verify it, ignore
+ prnt('}')
+ self.export_symbols.append(layoutfuncname)
+ prnt('intptr_t %s(intptr_t i)' % (layoutfuncname,))
+ prnt('{')
+ prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname)
+ prnt(' static intptr_t nums[] = {')
+ prnt(' sizeof(%s),' % cname)
+ prnt(' offsetof(struct _cffi_aligncheck, y),')
+ for fname, ftype, fbitsize, fqual in tp.enumfields():
+ if fbitsize >= 0:
+ continue # xxx ignore fbitsize for now
+ prnt(' offsetof(%s, %s),' % (cname, fname))
+ if isinstance(ftype, model.ArrayType) and ftype.length is None:
+ prnt(' 0, /* %s */' % ftype._get_c_name())
+ else:
+ prnt(' sizeof(((%s *)0)->%s),' % (cname, fname))
+ prnt(' -1')
+ prnt(' };')
+ prnt(' return nums[i];')
+ prnt(' /* the next line is not executed, but compiled */')
+ prnt(' %s(0);' % (checkfuncname,))
+ prnt('}')
+ prnt()
+
+ def _loading_struct_or_union(self, tp, prefix, name, module):
+ if tp.fldnames is None:
+ return # nothing to do with opaque structs
+ layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
+ #
+ BFunc = self.ffi._typeof_locked("intptr_t(*)(intptr_t)")[0]
+ function = module.load_function(BFunc, layoutfuncname)
+ layout = []
+ num = 0
+ while True:
+ x = function(num)
+ if x < 0: break
+ layout.append(x)
+ num += 1
+ if isinstance(tp, model.StructOrUnion) and tp.partial:
+ # use the function()'s sizes and offsets to guide the
+ # layout of the struct
+ totalsize = layout[0]
+ totalalignment = layout[1]
+ fieldofs = layout[2::2]
+ fieldsize = layout[3::2]
+ tp.force_flatten()
+ assert len(fieldofs) == len(fieldsize) == len(tp.fldnames)
+ tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment
+ else:
+ cname = ('%s %s' % (prefix, name)).strip()
+ self._struct_pending_verification[tp] = layout, cname
+
+ def _loaded_struct_or_union(self, tp):
+ if tp.fldnames is None:
+ return # nothing to do with opaque structs
+ self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered
+
+ if tp in self._struct_pending_verification:
+ # check that the layout sizes and offsets match the real ones
+ def check(realvalue, expectedvalue, msg):
+ if realvalue != expectedvalue:
+ raise VerificationError(
+ "%s (we have %d, but C compiler says %d)"
+ % (msg, expectedvalue, realvalue))
+ ffi = self.ffi
+ BStruct = ffi._get_cached_btype(tp)
+ layout, cname = self._struct_pending_verification.pop(tp)
+ check(layout[0], ffi.sizeof(BStruct), "wrong total size")
+ check(layout[1], ffi.alignof(BStruct), "wrong total alignment")
+ i = 2
+ for fname, ftype, fbitsize, fqual in tp.enumfields():
+ if fbitsize >= 0:
+ continue # xxx ignore fbitsize for now
+ check(layout[i], ffi.offsetof(BStruct, fname),
+ "wrong offset for field %r" % (fname,))
+ if layout[i+1] != 0:
+ BField = ffi._get_cached_btype(ftype)
+ check(layout[i+1], ffi.sizeof(BField),
+ "wrong size for field %r" % (fname,))
+ i += 2
+ assert i == len(layout)
+
+ # ----------
+ # 'anonymous' declarations. These are produced for anonymous structs
+ # or unions; the 'name' is obtained by a typedef.
+
+ def _generate_gen_anonymous_decl(self, tp, name):
+ if isinstance(tp, model.EnumType):
+ self._generate_gen_enum_decl(tp, name, '')
+ else:
+ self._generate_struct_or_union_decl(tp, '', name)
+
+ def _loading_gen_anonymous(self, tp, name, module):
+ if isinstance(tp, model.EnumType):
+ self._loading_gen_enum(tp, name, module, '')
+ else:
+ self._loading_struct_or_union(tp, '', name, module)
+
+ def _loaded_gen_anonymous(self, tp, name, module, **kwds):
+ if isinstance(tp, model.EnumType):
+ self._loaded_gen_enum(tp, name, module, **kwds)
+ else:
+ self._loaded_struct_or_union(tp)
+
+ # ----------
+ # constants, likely declared with '#define'
+
+ def _generate_gen_const(self, is_int, name, tp=None, category='const',
+ check_value=None):
+ prnt = self._prnt
+ funcname = '_cffi_%s_%s' % (category, name)
+ self.export_symbols.append(funcname)
+ if check_value is not None:
+ assert is_int
+ assert category == 'const'
+ prnt('int %s(char *out_error)' % funcname)
+ prnt('{')
+ self._check_int_constant_value(name, check_value)
+ prnt(' return 0;')
+ prnt('}')
+ elif is_int:
+ assert category == 'const'
+ prnt('int %s(long long *out_value)' % funcname)
+ prnt('{')
+ prnt(' *out_value = (long long)(%s);' % (name,))
+ prnt(' return (%s) <= 0;' % (name,))
+ prnt('}')
+ else:
+ assert tp is not None
+ assert check_value is None
+ if category == 'var':
+ ampersand = '&'
+ else:
+ ampersand = ''
+ extra = ''
+ if category == 'const' and isinstance(tp, model.StructOrUnion):
+ extra = 'const *'
+ ampersand = '&'
+ prnt(tp.get_c_name(' %s%s(void)' % (extra, funcname), name))
+ prnt('{')
+ prnt(' return (%s%s);' % (ampersand, name))
+ prnt('}')
+ prnt()
+
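+    # Hedged sketch of the C emitted above for two hypothetical cases, a
+    # 'double PI' constant and a 'double x' variable (category='var'):
+    #
+    #   double _cffi_const_PI(void)
+    #   {
+    #     return (PI);
+    #   }
+    #   double * _cffi_var_x(void)
+    #   {
+    #     return (&x);
+    #   }
+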
+ def _generate_gen_constant_decl(self, tp, name):
+ is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type()
+ self._generate_gen_const(is_int, name, tp)
+
+ _loading_gen_constant = _loaded_noop
+
+ def _load_constant(self, is_int, tp, name, module, check_value=None):
+ funcname = '_cffi_const_%s' % name
+ if check_value is not None:
+ assert is_int
+ self._load_known_int_constant(module, funcname)
+ value = check_value
+ elif is_int:
+ BType = self.ffi._typeof_locked("long long*")[0]
+ BFunc = self.ffi._typeof_locked("int(*)(long long*)")[0]
+ function = module.load_function(BFunc, funcname)
+ p = self.ffi.new(BType)
+ negative = function(p)
+ value = int(p[0])
+ if value < 0 and not negative:
+ BLongLong = self.ffi._typeof_locked("long long")[0]
+ value += (1 << (8*self.ffi.sizeof(BLongLong)))
+ else:
+ assert check_value is None
+ fntypeextra = '(*)(void)'
+ if isinstance(tp, model.StructOrUnion):
+ fntypeextra = '*' + fntypeextra
+ BFunc = self.ffi._typeof_locked(tp.get_c_name(fntypeextra, name))[0]
+ function = module.load_function(BFunc, funcname)
+ value = function()
+ if isinstance(tp, model.StructOrUnion):
+ value = value[0]
+ return value
+
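+    # Hedged worked example of the sign fix above: a constant equal to
+    # 0xFFFFFFFFFFFFFFFF reads back through the signed 'long long *' as -1,
+    # while the generated '(name) <= 0' test compares unsigned and reports
+    # negative == 0; with an 8-byte long long the correction then gives
+    # -1 + (1 << 64) == 0xFFFFFFFFFFFFFFFF.
+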
+ def _loaded_gen_constant(self, tp, name, module, library):
+ is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type()
+ value = self._load_constant(is_int, tp, name, module)
+ setattr(library, name, value)
+ type(library)._cffi_dir.append(name)
+
+ # ----------
+ # enums
+
+ def _check_int_constant_value(self, name, value):
+ prnt = self._prnt
+ if value <= 0:
+ prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % (
+ name, name, value))
+ else:
+ prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % (
+ name, name, value))
+ prnt(' char buf[64];')
+ prnt(' if ((%s) <= 0)' % name)
+ prnt(' sprintf(buf, "%%ld", (long)(%s));' % name)
+ prnt(' else')
+ prnt(' sprintf(buf, "%%lu", (unsigned long)(%s));' %
+ name)
+ prnt(' sprintf(out_error, "%s has the real value %s, not %s",')
+ prnt(' "%s", buf, "%d");' % (name[:100], value))
+ prnt(' return -1;')
+ prnt(' }')
+
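+    # Hedged sketch of the check emitted above for a constant FOO == 42
+    # (the positive branch):
+    #
+    #   if ((FOO) <= 0 || (unsigned long)(FOO) != 42UL) {
+    #     char buf[64];
+    #     if ((FOO) <= 0)
+    #         sprintf(buf, "%ld", (long)(FOO));
+    #     else
+    #         sprintf(buf, "%lu", (unsigned long)(FOO));
+    #     sprintf(out_error, "%s has the real value %s, not %s",
+    #             "FOO", buf, "42");
+    #     return -1;
+    #   }
+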
+ def _load_known_int_constant(self, module, funcname):
+ BType = self.ffi._typeof_locked("char[]")[0]
+ BFunc = self.ffi._typeof_locked("int(*)(char*)")[0]
+ function = module.load_function(BFunc, funcname)
+ p = self.ffi.new(BType, 256)
+ if function(p) < 0:
+ error = self.ffi.string(p)
+ if sys.version_info >= (3,):
+ error = str(error, 'utf-8')
+ raise VerificationError(error)
+
+ def _enum_funcname(self, prefix, name):
+ # "$enum_$1" => "___D_enum____D_1"
+ name = name.replace('$', '___D_')
+ return '_cffi_e_%s_%s' % (prefix, name)
+
+ def _generate_gen_enum_decl(self, tp, name, prefix='enum'):
+ if tp.partial:
+ for enumerator in tp.enumerators:
+ self._generate_gen_const(True, enumerator)
+ return
+ #
+ funcname = self._enum_funcname(prefix, name)
+ self.export_symbols.append(funcname)
+ prnt = self._prnt
+ prnt('int %s(char *out_error)' % funcname)
+ prnt('{')
+ for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
+ self._check_int_constant_value(enumerator, enumvalue)
+ prnt(' return 0;')
+ prnt('}')
+ prnt()
+
+ def _loading_gen_enum(self, tp, name, module, prefix='enum'):
+ if tp.partial:
+ enumvalues = [self._load_constant(True, tp, enumerator, module)
+ for enumerator in tp.enumerators]
+ tp.enumvalues = tuple(enumvalues)
+ tp.partial_resolved = True
+ else:
+ funcname = self._enum_funcname(prefix, name)
+ self._load_known_int_constant(module, funcname)
+
+ def _loaded_gen_enum(self, tp, name, module, library):
+ for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
+ setattr(library, enumerator, enumvalue)
+ type(library)._cffi_dir.append(enumerator)
+
+ # ----------
+ # macros: for now only for integers
+
+ def _generate_gen_macro_decl(self, tp, name):
+ if tp == '...':
+ check_value = None
+ else:
+ check_value = tp # an integer
+ self._generate_gen_const(True, name, check_value=check_value)
+
+ _loading_gen_macro = _loaded_noop
+
+ def _loaded_gen_macro(self, tp, name, module, library):
+ if tp == '...':
+ check_value = None
+ else:
+ check_value = tp # an integer
+ value = self._load_constant(True, tp, name, module,
+ check_value=check_value)
+ setattr(library, name, value)
+ type(library)._cffi_dir.append(name)
+
+ # ----------
+ # global variables
+
+ def _generate_gen_variable_decl(self, tp, name):
+ if isinstance(tp, model.ArrayType):
+ if tp.length_is_unknown():
+ prnt = self._prnt
+ funcname = '_cffi_sizeof_%s' % (name,)
+ self.export_symbols.append(funcname)
+ prnt("size_t %s(void)" % funcname)
+ prnt("{")
+ prnt(" return sizeof(%s);" % (name,))
+ prnt("}")
+ tp_ptr = model.PointerType(tp.item)
+ self._generate_gen_const(False, name, tp_ptr)
+ else:
+ tp_ptr = model.PointerType(tp)
+ self._generate_gen_const(False, name, tp_ptr, category='var')
+
+ _loading_gen_variable = _loaded_noop
+
+ def _loaded_gen_variable(self, tp, name, module, library):
+ if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the
+ # sense that "a=..." is forbidden
+ if tp.length_is_unknown():
+ funcname = '_cffi_sizeof_%s' % (name,)
+ BFunc = self.ffi._typeof_locked('size_t(*)(void)')[0]
+ function = module.load_function(BFunc, funcname)
+ size = function()
+ BItemType = self.ffi._get_cached_btype(tp.item)
+ length, rest = divmod(size, self.ffi.sizeof(BItemType))
+ if rest != 0:
+ raise VerificationError(
+ "bad size: %r does not seem to be an array of %s" %
+ (name, tp.item))
+ tp = tp.resolve_length(length)
+ tp_ptr = model.PointerType(tp.item)
+ value = self._load_constant(False, tp_ptr, name, module)
+            # 'value' is a <cdata 'type *'> which we have to replace with
+            # a <cdata 'type[N]'> if the N is actually known
+ if tp.length is not None:
+ BArray = self.ffi._get_cached_btype(tp)
+ value = self.ffi.cast(BArray, value)
+ setattr(library, name, value)
+ type(library)._cffi_dir.append(name)
+ return
+ # remove ptr= from the library instance, and replace
+ # it by a property on the class, which reads/writes into ptr[0].
+ funcname = '_cffi_var_%s' % name
+ BFunc = self.ffi._typeof_locked(tp.get_c_name('*(*)(void)', name))[0]
+ function = module.load_function(BFunc, funcname)
+ ptr = function()
+ def getter(library):
+ return ptr[0]
+ def setter(library, value):
+ ptr[0] = value
+ setattr(type(library), name, property(getter, setter))
+ type(library)._cffi_dir.append(name)
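+    # Hedged illustration of the property above: after loading, reading
+    # 'library.name' returns ptr[0] and assigning 'library.name = v' stores
+    # into ptr[0], i.e. the assignment writes through to the C global.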
+
+cffimod_header = r'''
+#include <stdio.h>
+#include <stddef.h>
+#include <stdarg.h>
+#include <errno.h>
+#include <sys/types.h>   /* XXX for ssize_t on some platforms */
+
+/* this block of #ifs should be kept exactly identical between
+ c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py
+ and cffi/_cffi_include.h */
+#if defined(_MSC_VER)
+# include <malloc.h>   /* for alloca() */
+# if _MSC_VER < 1600 /* MSVC < 2010 */
+ typedef __int8 int8_t;
+ typedef __int16 int16_t;
+ typedef __int32 int32_t;
+ typedef __int64 int64_t;
+ typedef unsigned __int8 uint8_t;
+ typedef unsigned __int16 uint16_t;
+ typedef unsigned __int32 uint32_t;
+ typedef unsigned __int64 uint64_t;
+ typedef __int8 int_least8_t;
+ typedef __int16 int_least16_t;
+ typedef __int32 int_least32_t;
+ typedef __int64 int_least64_t;
+ typedef unsigned __int8 uint_least8_t;
+ typedef unsigned __int16 uint_least16_t;
+ typedef unsigned __int32 uint_least32_t;
+ typedef unsigned __int64 uint_least64_t;
+ typedef __int8 int_fast8_t;
+ typedef __int16 int_fast16_t;
+ typedef __int32 int_fast32_t;
+ typedef __int64 int_fast64_t;
+ typedef unsigned __int8 uint_fast8_t;
+ typedef unsigned __int16 uint_fast16_t;
+ typedef unsigned __int32 uint_fast32_t;
+ typedef unsigned __int64 uint_fast64_t;
+ typedef __int64 intmax_t;
+ typedef unsigned __int64 uintmax_t;
+# else
+#   include <stdint.h>
+# endif
+# if _MSC_VER < 1800 /* MSVC < 2013 */
+# ifndef __cplusplus
+ typedef unsigned char _Bool;
+# endif
+# endif
+#else
+# include <stdint.h>
+# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux)
+#   include <alloca.h>
+# endif
+#endif
+'''
diff --git a/venv/Lib/site-packages/cffi/verifier.py b/venv/Lib/site-packages/cffi/verifier.py
new file mode 100644
index 000000000..59b78c216
--- /dev/null
+++ b/venv/Lib/site-packages/cffi/verifier.py
@@ -0,0 +1,306 @@
+#
+# DEPRECATED: implementation for ffi.verify()
+#
+import sys, os, binascii, shutil, io
+from . import __version_verifier_modules__
+from . import ffiplatform
+from .error import VerificationError
+
+if sys.version_info >= (3, 3):
+ import importlib.machinery
+ def _extension_suffixes():
+ return importlib.machinery.EXTENSION_SUFFIXES[:]
+else:
+ import imp
+ def _extension_suffixes():
+ return [suffix for suffix, _, type in imp.get_suffixes()
+ if type == imp.C_EXTENSION]
+
+
+if sys.version_info >= (3,):
+ NativeIO = io.StringIO
+else:
+ class NativeIO(io.BytesIO):
+ def write(self, s):
+ if isinstance(s, unicode):
+ s = s.encode('ascii')
+ super(NativeIO, self).write(s)
+
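+# Hedged illustration: NativeIO accepts the native 'str' type on both Python
+# lines, so e.g. NativeIO().write('int x;\n') works unchanged under 2 and 3
+# (on Python 2 the text is ascii-encoded before reaching BytesIO).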
+
+class Verifier(object):
+
+ def __init__(self, ffi, preamble, tmpdir=None, modulename=None,
+ ext_package=None, tag='', force_generic_engine=False,
+ source_extension='.c', flags=None, relative_to=None, **kwds):
+ if ffi._parser._uses_new_feature:
+ raise VerificationError(
+ "feature not supported with ffi.verify(), but only "
+ "with ffi.set_source(): %s" % (ffi._parser._uses_new_feature,))
+ self.ffi = ffi
+ self.preamble = preamble
+ if not modulename:
+ flattened_kwds = ffiplatform.flatten(kwds)
+ vengine_class = _locate_engine_class(ffi, force_generic_engine)
+ self._vengine = vengine_class(self)
+ self._vengine.patch_extension_kwds(kwds)
+ self.flags = flags
+ self.kwds = self.make_relative_to(kwds, relative_to)
+ #
+ if modulename:
+ if tag:
+ raise TypeError("can't specify both 'modulename' and 'tag'")
+ else:
+ key = '\x00'.join([sys.version[:3], __version_verifier_modules__,
+ preamble, flattened_kwds] +
+ ffi._cdefsources)
+ if sys.version_info >= (3,):
+ key = key.encode('utf-8')
+ k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff)
+ k1 = k1.lstrip('0x').rstrip('L')
+ k2 = hex(binascii.crc32(key[1::2]) & 0xffffffff)
+ k2 = k2.lstrip('0').rstrip('L')
+ modulename = '_cffi_%s_%s%s%s' % (tag, self._vengine._class_key,
+ k1, k2)
+ suffix = _get_so_suffixes()[0]
+ self.tmpdir = tmpdir or _caller_dir_pycache()
+ self.sourcefilename = os.path.join(self.tmpdir, modulename + source_extension)
+ self.modulefilename = os.path.join(self.tmpdir, modulename + suffix)
+ self.ext_package = ext_package
+ self._has_source = False
+ self._has_module = False
+
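+    # Hedged usage sketch of the deprecated flow this class backs (the C
+    # snippet is illustrative, not from cffi itself):
+    #
+    #   import cffi
+    #   ffi = cffi.FFI()
+    #   ffi.cdef("int add1(int);")
+    #   lib = ffi.verify("static int add1(int x) { return x + 1; }")
+    #   assert lib.add1(41) == 42
+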
+ def write_source(self, file=None):
+ """Write the C source code. It is produced in 'self.sourcefilename',
+ which can be tweaked beforehand."""
+ with self.ffi._lock:
+ if self._has_source and file is None:
+ raise VerificationError(
+ "source code already written")
+ self._write_source(file)
+
+ def compile_module(self):
+ """Write the C source code (if not done already) and compile it.
+ This produces a dynamic link library in 'self.modulefilename'."""
+ with self.ffi._lock:
+ if self._has_module:
+ raise VerificationError("module already compiled")
+ if not self._has_source:
+ self._write_source()
+ self._compile_module()
+
+ def load_library(self):
+ """Get a C module from this Verifier instance.
+ Returns an instance of a FFILibrary class that behaves like the
+ objects returned by ffi.dlopen(), but that delegates all
+ operations to the C module. If necessary, the C code is written
+ and compiled first.
+ """
+ with self.ffi._lock:
+ if not self._has_module:
+ self._locate_module()
+ if not self._has_module:
+ if not self._has_source:
+ self._write_source()
+ self._compile_module()
+ return self._load_library()
+
+ def get_module_name(self):
+ basename = os.path.basename(self.modulefilename)
+ # kill both the .so extension and the other .'s, as introduced
+ # by Python 3: 'basename.cpython-33m.so'
+ basename = basename.split('.', 1)[0]
+ # and the _d added in Python 2 debug builds --- but try to be
+ # conservative and not kill a legitimate _d
+ if basename.endswith('_d') and hasattr(sys, 'gettotalrefcount'):
+ basename = basename[:-2]
+ return basename
+
+ def get_extension(self):
+ ffiplatform._hack_at_distutils() # backward compatibility hack
+ if not self._has_source:
+ with self.ffi._lock:
+ if not self._has_source:
+ self._write_source()
+ sourcename = ffiplatform.maybe_relative_path(self.sourcefilename)
+ modname = self.get_module_name()
+ return ffiplatform.get_extension(sourcename, modname, **self.kwds)
+
+ def generates_python_module(self):
+ return self._vengine._gen_python_module
+
+ def make_relative_to(self, kwds, relative_to):
+ if relative_to and os.path.dirname(relative_to):
+ dirname = os.path.dirname(relative_to)
+ kwds = kwds.copy()
+ for key in ffiplatform.LIST_OF_FILE_NAMES:
+ if key in kwds:
+ lst = kwds[key]
+ if not isinstance(lst, (list, tuple)):
+ raise TypeError("keyword '%s' should be a list or tuple"
+ % (key,))
+ lst = [os.path.join(dirname, fn) for fn in lst]
+ kwds[key] = lst
+ return kwds
+
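+    # Hedged example: with relative_to='/proj/build.py' and
+    # kwds={'sources': ['lib.c']}, the result is {'sources': ['/proj/lib.c']}
+    # (the separator is os.path-dependent).
+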
+ # ----------
+
+ def _locate_module(self):
+ if not os.path.isfile(self.modulefilename):
+ if self.ext_package:
+ try:
+ pkg = __import__(self.ext_package, None, None, ['__doc__'])
+ except ImportError:
+ return # cannot import the package itself, give up
+ # (e.g. it might be called differently before installation)
+ path = pkg.__path__
+ else:
+ path = None
+ filename = self._vengine.find_module(self.get_module_name(), path,
+ _get_so_suffixes())
+ if filename is None:
+ return
+ self.modulefilename = filename
+ self._vengine.collect_types()
+ self._has_module = True
+
+ def _write_source_to(self, file):
+ self._vengine._f = file
+ try:
+ self._vengine.write_source_to_f()
+ finally:
+ del self._vengine._f
+
+ def _write_source(self, file=None):
+ if file is not None:
+ self._write_source_to(file)
+ else:
+ # Write our source file to an in memory file.
+ f = NativeIO()
+ self._write_source_to(f)
+ source_data = f.getvalue()
+
+ # Determine if this matches the current file
+ if os.path.exists(self.sourcefilename):
+ with open(self.sourcefilename, "r") as fp:
+ needs_written = not (fp.read() == source_data)
+ else:
+ needs_written = True
+
+ # Actually write the file out if it doesn't match
+ if needs_written:
+ _ensure_dir(self.sourcefilename)
+ with open(self.sourcefilename, "w") as fp:
+ fp.write(source_data)
+
+ # Set this flag
+ self._has_source = True
+
+ def _compile_module(self):
+ # compile this C source
+ tmpdir = os.path.dirname(self.sourcefilename)
+ outputfilename = ffiplatform.compile(tmpdir, self.get_extension())
+ try:
+ same = ffiplatform.samefile(outputfilename, self.modulefilename)
+ except OSError:
+ same = False
+ if not same:
+ _ensure_dir(self.modulefilename)
+ shutil.move(outputfilename, self.modulefilename)
+ self._has_module = True
+
+ def _load_library(self):
+ assert self._has_module
+ if self.flags is not None:
+ return self._vengine.load_library(self.flags)
+ else:
+ return self._vengine.load_library()
+
+# ____________________________________________________________
+
+_FORCE_GENERIC_ENGINE = False # for tests
+
+def _locate_engine_class(ffi, force_generic_engine):
+ if _FORCE_GENERIC_ENGINE:
+ force_generic_engine = True
+ if not force_generic_engine:
+ if '__pypy__' in sys.builtin_module_names:
+ force_generic_engine = True
+ else:
+ try:
+ import _cffi_backend
+ except ImportError:
+ _cffi_backend = '?'
+ if ffi._backend is not _cffi_backend:
+ force_generic_engine = True
+ if force_generic_engine:
+ from . import vengine_gen
+ return vengine_gen.VGenericEngine
+ else:
+ from . import vengine_cpy
+ return vengine_cpy.VCPythonEngine
+
+# ____________________________________________________________
+
+_TMPDIR = None
+
+def _caller_dir_pycache():
+ if _TMPDIR:
+ return _TMPDIR
+ result = os.environ.get('CFFI_TMPDIR')
+ if result:
+ return result
+ filename = sys._getframe(2).f_code.co_filename
+ return os.path.abspath(os.path.join(os.path.dirname(filename),
+ '__pycache__'))
+
+def set_tmpdir(dirname):
+ """Set the temporary directory to use instead of __pycache__."""
+ global _TMPDIR
+ _TMPDIR = dirname
+
+def cleanup_tmpdir(tmpdir=None, keep_so=False):
+ """Clean up the temporary directory by removing all files in it
+ called `_cffi_*.{c,so}` as well as the `build` subdirectory."""
+ tmpdir = tmpdir or _caller_dir_pycache()
+ try:
+ filelist = os.listdir(tmpdir)
+ except OSError:
+ return
+ if keep_so:
+ suffix = '.c' # only remove .c files
+ else:
+ suffix = _get_so_suffixes()[0].lower()
+ for fn in filelist:
+ if fn.lower().startswith('_cffi_') and (
+ fn.lower().endswith(suffix) or fn.lower().endswith('.c')):
+ try:
+ os.unlink(os.path.join(tmpdir, fn))
+ except OSError:
+ pass
+ clean_dir = [os.path.join(tmpdir, 'build')]
+ for dir in clean_dir:
+ try:
+ for fn in os.listdir(dir):
+ fn = os.path.join(dir, fn)
+ if os.path.isdir(fn):
+ clean_dir.append(fn)
+ else:
+ os.unlink(fn)
+ except OSError:
+ pass
+
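+# Hedged usage sketch for the two public helpers above:
+#
+#   from cffi import verifier
+#   verifier.set_tmpdir('/tmp/cffi_build')   # hypothetical directory
+#   # ... run ffi.verify() builds ...
+#   verifier.cleanup_tmpdir(keep_so=True)    # removes _cffi_*.c, keeps modules
+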
+def _get_so_suffixes():
+ suffixes = _extension_suffixes()
+ if not suffixes:
+ # bah, no C_EXTENSION available. Occurs on pypy without cpyext
+ if sys.platform == 'win32':
+ suffixes = [".pyd"]
+ else:
+ suffixes = [".so"]
+
+ return suffixes
+
+def _ensure_dir(filename):
+ dirname = os.path.dirname(filename)
+ if dirname and not os.path.isdir(dirname):
+ os.makedirs(dirname)
diff --git a/venv/Lib/site-packages/chardet-3.0.4.dist-info/DESCRIPTION.rst b/venv/Lib/site-packages/chardet-3.0.4.dist-info/DESCRIPTION.rst
new file mode 100644
index 000000000..c0f044d84
--- /dev/null
+++ b/venv/Lib/site-packages/chardet-3.0.4.dist-info/DESCRIPTION.rst
@@ -0,0 +1,70 @@
+Chardet: The Universal Character Encoding Detector
+--------------------------------------------------
+
+.. image:: https://img.shields.io/travis/chardet/chardet/stable.svg
+ :alt: Build status
+ :target: https://travis-ci.org/chardet/chardet
+
+.. image:: https://img.shields.io/coveralls/chardet/chardet/stable.svg
+ :target: https://coveralls.io/r/chardet/chardet
+
+.. image:: https://img.shields.io/pypi/v/chardet.svg
+ :target: https://warehouse.python.org/project/chardet/
+ :alt: Latest version on PyPI
+
+.. image:: https://img.shields.io/pypi/l/chardet.svg
+ :alt: License
+
+
+Detects
+ - ASCII, UTF-8, UTF-16 (2 variants), UTF-32 (4 variants)
+ - Big5, GB2312, EUC-TW, HZ-GB-2312, ISO-2022-CN (Traditional and Simplified Chinese)
+ - EUC-JP, SHIFT_JIS, CP932, ISO-2022-JP (Japanese)
+ - EUC-KR, ISO-2022-KR (Korean)
+ - KOI8-R, MacCyrillic, IBM855, IBM866, ISO-8859-5, windows-1251 (Cyrillic)
+ - ISO-8859-5, windows-1251 (Bulgarian)
+ - ISO-8859-1, windows-1252 (Western European languages)
+ - ISO-8859-7, windows-1253 (Greek)
+ - ISO-8859-8, windows-1255 (Visual and Logical Hebrew)
+ - TIS-620 (Thai)
+
+.. note::
+ Our ISO-8859-2 and windows-1250 (Hungarian) probers have been temporarily
+ disabled until we can retrain the models.
+
+Requires Python 2.6, 2.7, or 3.3+.
+
+Installation
+------------
+
+Install from `PyPI <https://pypi.python.org/pypi/chardet>`_::
+
+ pip install chardet
+
+Documentation
+-------------
+
+For users, docs are now available at https://chardet.readthedocs.io/.
+
+Command-line Tool
+-----------------
+
+chardet comes with a command-line script which reports on the encodings of one
+or more files::
+
+ % chardetect somefile someotherfile
+ somefile: windows-1252 with confidence 0.5
+ someotherfile: ascii with confidence 1.0
+
+About
+-----
+
+This is a continuation of Mark Pilgrim's excellent chardet. Previously, two
+versions needed to be maintained: one that supported python 2.x and one that
+supported python 3.x.  We've recently merged with `Ian Cordasco <https://github.com/sigmavirus24>`_'s
+`charade <https://github.com/sigmavirus24/charade>`_ fork, so now we have one
+coherent version that works for Python 2.6+.
+
+:maintainer: Dan Blanchard
+
+
diff --git a/venv/Lib/site-packages/chardet-3.0.4.dist-info/INSTALLER b/venv/Lib/site-packages/chardet-3.0.4.dist-info/INSTALLER
new file mode 100644
index 000000000..a1b589e38
--- /dev/null
+++ b/venv/Lib/site-packages/chardet-3.0.4.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/Lib/site-packages/chardet-3.0.4.dist-info/METADATA b/venv/Lib/site-packages/chardet-3.0.4.dist-info/METADATA
new file mode 100644
index 000000000..1427867ab
--- /dev/null
+++ b/venv/Lib/site-packages/chardet-3.0.4.dist-info/METADATA
@@ -0,0 +1,96 @@
+Metadata-Version: 2.0
+Name: chardet
+Version: 3.0.4
+Summary: Universal encoding detector for Python 2 and 3
+Home-page: https://github.com/chardet/chardet
+Author: Daniel Blanchard
+Author-email: dan.blanchard@gmail.com
+License: LGPL
+Keywords: encoding,i18n,xml
+Platform: UNKNOWN
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Text Processing :: Linguistic
+
+Chardet: The Universal Character Encoding Detector
+--------------------------------------------------
+
+.. image:: https://img.shields.io/travis/chardet/chardet/stable.svg
+ :alt: Build status
+ :target: https://travis-ci.org/chardet/chardet
+
+.. image:: https://img.shields.io/coveralls/chardet/chardet/stable.svg
+ :target: https://coveralls.io/r/chardet/chardet
+
+.. image:: https://img.shields.io/pypi/v/chardet.svg
+ :target: https://warehouse.python.org/project/chardet/
+ :alt: Latest version on PyPI
+
+.. image:: https://img.shields.io/pypi/l/chardet.svg
+ :alt: License
+
+
+Detects
+ - ASCII, UTF-8, UTF-16 (2 variants), UTF-32 (4 variants)
+ - Big5, GB2312, EUC-TW, HZ-GB-2312, ISO-2022-CN (Traditional and Simplified Chinese)
+ - EUC-JP, SHIFT_JIS, CP932, ISO-2022-JP (Japanese)
+ - EUC-KR, ISO-2022-KR (Korean)
+ - KOI8-R, MacCyrillic, IBM855, IBM866, ISO-8859-5, windows-1251 (Cyrillic)
+ - ISO-8859-5, windows-1251 (Bulgarian)
+ - ISO-8859-1, windows-1252 (Western European languages)
+ - ISO-8859-7, windows-1253 (Greek)
+ - ISO-8859-8, windows-1255 (Visual and Logical Hebrew)
+ - TIS-620 (Thai)
+
+.. note::
+ Our ISO-8859-2 and windows-1250 (Hungarian) probers have been temporarily
+ disabled until we can retrain the models.
+
+Requires Python 2.6, 2.7, or 3.3+.
+
+Installation
+------------
+
+Install from `PyPI <https://pypi.python.org/pypi/chardet>`_::
+
+ pip install chardet
+
+Documentation
+-------------
+
+For users, docs are now available at https://chardet.readthedocs.io/.
+
+Command-line Tool
+-----------------
+
+chardet comes with a command-line script which reports on the encodings of one
+or more files::
+
+ % chardetect somefile someotherfile
+ somefile: windows-1252 with confidence 0.5
+ someotherfile: ascii with confidence 1.0
+
+About
+-----
+
+This is a continuation of Mark Pilgrim's excellent chardet. Previously, two
+versions needed to be maintained: one that supported python 2.x and one that
+supported python 3.x.  We've recently merged with `Ian Cordasco <https://github.com/sigmavirus24>`_'s
+`charade <https://github.com/sigmavirus24/charade>`_ fork, so now we have one
+coherent version that works for Python 2.6+.
+
+:maintainer: Dan Blanchard
+
+
diff --git a/venv/Lib/site-packages/chardet-3.0.4.dist-info/RECORD b/venv/Lib/site-packages/chardet-3.0.4.dist-info/RECORD
new file mode 100644
index 000000000..94aed0c2c
--- /dev/null
+++ b/venv/Lib/site-packages/chardet-3.0.4.dist-info/RECORD
@@ -0,0 +1,91 @@
+../../Scripts/chardetect.exe,sha256=ppzfbkr-fZFrimgadK1tlo-rInIap9DTd1NT2LxyYSw,97237
+chardet-3.0.4.dist-info/DESCRIPTION.rst,sha256=PQ4sBsMyKFZkjC6QpmbpLn0UtCNyeb-ZqvCGEgyZMGk,2174
+chardet-3.0.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+chardet-3.0.4.dist-info/METADATA,sha256=RV_2I4B1Z586DL8oVO5Kp7X5bUdQ5EuKAvNoAEF8wSw,3239
+chardet-3.0.4.dist-info/RECORD,,
+chardet-3.0.4.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110
+chardet-3.0.4.dist-info/entry_points.txt,sha256=fAMmhu5eJ-zAJ-smfqQwRClQ3-nozOCmvJ6-E8lgGJo,60
+chardet-3.0.4.dist-info/metadata.json,sha256=0htbRM18ujyGZDdfowgAqj6Hq2eQtwzwyhaEveKntgo,1375
+chardet-3.0.4.dist-info/top_level.txt,sha256=AowzBbZy4x8EirABDdJSLJZMkJ_53iIag8xfKR6D7kI,8
+chardet/__init__.py,sha256=YsP5wQlsHJ2auF1RZJfypiSrCA7_bQiRm3ES_NI76-Y,1559
+chardet/__pycache__/__init__.cpython-36.pyc,,
+chardet/__pycache__/big5freq.cpython-36.pyc,,
+chardet/__pycache__/big5prober.cpython-36.pyc,,
+chardet/__pycache__/chardistribution.cpython-36.pyc,,
+chardet/__pycache__/charsetgroupprober.cpython-36.pyc,,
+chardet/__pycache__/charsetprober.cpython-36.pyc,,
+chardet/__pycache__/codingstatemachine.cpython-36.pyc,,
+chardet/__pycache__/compat.cpython-36.pyc,,
+chardet/__pycache__/cp949prober.cpython-36.pyc,,
+chardet/__pycache__/enums.cpython-36.pyc,,
+chardet/__pycache__/escprober.cpython-36.pyc,,
+chardet/__pycache__/escsm.cpython-36.pyc,,
+chardet/__pycache__/eucjpprober.cpython-36.pyc,,
+chardet/__pycache__/euckrfreq.cpython-36.pyc,,
+chardet/__pycache__/euckrprober.cpython-36.pyc,,
+chardet/__pycache__/euctwfreq.cpython-36.pyc,,
+chardet/__pycache__/euctwprober.cpython-36.pyc,,
+chardet/__pycache__/gb2312freq.cpython-36.pyc,,
+chardet/__pycache__/gb2312prober.cpython-36.pyc,,
+chardet/__pycache__/hebrewprober.cpython-36.pyc,,
+chardet/__pycache__/jisfreq.cpython-36.pyc,,
+chardet/__pycache__/jpcntx.cpython-36.pyc,,
+chardet/__pycache__/langbulgarianmodel.cpython-36.pyc,,
+chardet/__pycache__/langcyrillicmodel.cpython-36.pyc,,
+chardet/__pycache__/langgreekmodel.cpython-36.pyc,,
+chardet/__pycache__/langhebrewmodel.cpython-36.pyc,,
+chardet/__pycache__/langhungarianmodel.cpython-36.pyc,,
+chardet/__pycache__/langthaimodel.cpython-36.pyc,,
+chardet/__pycache__/langturkishmodel.cpython-36.pyc,,
+chardet/__pycache__/latin1prober.cpython-36.pyc,,
+chardet/__pycache__/mbcharsetprober.cpython-36.pyc,,
+chardet/__pycache__/mbcsgroupprober.cpython-36.pyc,,
+chardet/__pycache__/mbcssm.cpython-36.pyc,,
+chardet/__pycache__/sbcharsetprober.cpython-36.pyc,,
+chardet/__pycache__/sbcsgroupprober.cpython-36.pyc,,
+chardet/__pycache__/sjisprober.cpython-36.pyc,,
+chardet/__pycache__/universaldetector.cpython-36.pyc,,
+chardet/__pycache__/utf8prober.cpython-36.pyc,,
+chardet/__pycache__/version.cpython-36.pyc,,
+chardet/big5freq.py,sha256=D_zK5GyzoVsRes0HkLJziltFQX0bKCLOrFe9_xDvO_8,31254
+chardet/big5prober.py,sha256=kBxHbdetBpPe7xrlb-e990iot64g_eGSLd32lB7_h3M,1757
+chardet/chardistribution.py,sha256=3woWS62KrGooKyqz4zQSnjFbJpa6V7g02daAibTwcl8,9411
+chardet/charsetgroupprober.py,sha256=6bDu8YIiRuScX4ca9Igb0U69TA2PGXXDej6Cc4_9kO4,3787
+chardet/charsetprober.py,sha256=KSmwJErjypyj0bRZmC5F5eM7c8YQgLYIjZXintZNstg,5110
+chardet/cli/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
+chardet/cli/__pycache__/__init__.cpython-36.pyc,,
+chardet/cli/__pycache__/chardetect.cpython-36.pyc,,
+chardet/cli/chardetect.py,sha256=YBO8L4mXo0WR6_-Fjh_8QxPBoEBNqB9oNxNrdc54AQs,2738
+chardet/codingstatemachine.py,sha256=VYp_6cyyki5sHgXDSZnXW4q1oelHc3cu9AyQTX7uug8,3590
+chardet/compat.py,sha256=PKTzHkSbtbHDqS9PyujMbX74q1a8mMpeQTDVsQhZMRw,1134
+chardet/cp949prober.py,sha256=TZ434QX8zzBsnUvL_8wm4AQVTZ2ZkqEEQL_lNw9f9ow,1855
+chardet/enums.py,sha256=Aimwdb9as1dJKZaFNUH2OhWIVBVd6ZkJJ_WK5sNY8cU,1661
+chardet/escprober.py,sha256=kkyqVg1Yw3DIOAMJ2bdlyQgUFQhuHAW8dUGskToNWSc,3950
+chardet/escsm.py,sha256=RuXlgNvTIDarndvllNCk5WZBIpdCxQ0kcd9EAuxUh84,10510
+chardet/eucjpprober.py,sha256=iD8Jdp0ISRjgjiVN7f0e8xGeQJ5GM2oeZ1dA8nbSeUw,3749
+chardet/euckrfreq.py,sha256=-7GdmvgWez4-eO4SuXpa7tBiDi5vRXQ8WvdFAzVaSfo,13546
+chardet/euckrprober.py,sha256=MqFMTQXxW4HbzIpZ9lKDHB3GN8SP4yiHenTmf8g_PxY,1748
+chardet/euctwfreq.py,sha256=No1WyduFOgB5VITUA7PLyC5oJRNzRyMbBxaKI1l16MA,31621
+chardet/euctwprober.py,sha256=13p6EP4yRaxqnP4iHtxHOJ6R2zxHq1_m8hTRjzVZ95c,1747
+chardet/gb2312freq.py,sha256=JX8lsweKLmnCwmk8UHEQsLgkr_rP_kEbvivC4qPOrlc,20715
+chardet/gb2312prober.py,sha256=gGvIWi9WhDjE-xQXHvNIyrnLvEbMAYgyUSZ65HUfylw,1754
+chardet/hebrewprober.py,sha256=c3SZ-K7hvyzGY6JRAZxJgwJ_sUS9k0WYkvMY00YBYFo,13838
+chardet/jisfreq.py,sha256=vpmJv2Bu0J8gnMVRPHMFefTRvo_ha1mryLig8CBwgOg,25777
+chardet/jpcntx.py,sha256=PYlNqRUQT8LM3cT5FmHGP0iiscFlTWED92MALvBungo,19643
+chardet/langbulgarianmodel.py,sha256=1HqQS9Pbtnj1xQgxitJMvw8X6kKr5OockNCZWfEQrPE,12839
+chardet/langcyrillicmodel.py,sha256=LODajvsetH87yYDDQKA2CULXUH87tI223dhfjh9Zx9c,17948
+chardet/langgreekmodel.py,sha256=8YAW7bU8YwSJap0kIJSbPMw1BEqzGjWzqcqf0WgUKAA,12688
+chardet/langhebrewmodel.py,sha256=JSnqmE5E62tDLTPTvLpQsg5gOMO4PbdWRvV7Avkc0HA,11345
+chardet/langhungarianmodel.py,sha256=RhapYSG5l0ZaO-VV4Fan5sW0WRGQqhwBM61yx3yxyOA,12592
+chardet/langthaimodel.py,sha256=8l0173Gu_W6G8mxmQOTEF4ls2YdE7FxWf3QkSxEGXJQ,11290
+chardet/langturkishmodel.py,sha256=W22eRNJsqI6uWAfwXSKVWWnCerYqrI8dZQTm_M0lRFk,11102
+chardet/latin1prober.py,sha256=S2IoORhFk39FEFOlSFWtgVybRiP6h7BlLldHVclNkU8,5370
+chardet/mbcharsetprober.py,sha256=AR95eFH9vuqSfvLQZN-L5ijea25NOBCoXqw8s5O9xLQ,3413
+chardet/mbcsgroupprober.py,sha256=h6TRnnYq2OxG1WdD5JOyxcdVpn7dG0q-vB8nWr5mbh4,2012
+chardet/mbcssm.py,sha256=SY32wVIF3HzcjY3BaEspy9metbNSKxIIB0RKPn7tjpI,25481
+chardet/sbcharsetprober.py,sha256=LDSpCldDCFlYwUkGkwD2oFxLlPWIWXT09akH_2PiY74,5657
+chardet/sbcsgroupprober.py,sha256=1IprcCB_k1qfmnxGC6MBbxELlKqD3scW6S8YIwdeyXA,3546
+chardet/sjisprober.py,sha256=IIt-lZj0WJqK4rmUZzKZP4GJlE8KUEtFYVuY96ek5MQ,3774
+chardet/universaldetector.py,sha256=qL0174lSZE442eB21nnktT9_VcAye07laFWUeUrjttY,12485
+chardet/utf8prober.py,sha256=IdD8v3zWOsB8OLiyPi-y_fqwipRFxV9Nc1eKBLSuIEw,2766
+chardet/version.py,sha256=sp3B08mrDXB-pf3K9fqJ_zeDHOCLC8RrngQyDFap_7g,242
diff --git a/venv/Lib/site-packages/chardet-3.0.4.dist-info/WHEEL b/venv/Lib/site-packages/chardet-3.0.4.dist-info/WHEEL
new file mode 100644
index 000000000..8b6dd1b5a
--- /dev/null
+++ b/venv/Lib/site-packages/chardet-3.0.4.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.29.0)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/venv/Lib/site-packages/chardet-3.0.4.dist-info/entry_points.txt b/venv/Lib/site-packages/chardet-3.0.4.dist-info/entry_points.txt
new file mode 100644
index 000000000..a884269e7
--- /dev/null
+++ b/venv/Lib/site-packages/chardet-3.0.4.dist-info/entry_points.txt
@@ -0,0 +1,3 @@
+[console_scripts]
+chardetect = chardet.cli.chardetect:main
+
diff --git a/venv/Lib/site-packages/chardet-3.0.4.dist-info/metadata.json b/venv/Lib/site-packages/chardet-3.0.4.dist-info/metadata.json
new file mode 100644
index 000000000..8cdf02560
--- /dev/null
+++ b/venv/Lib/site-packages/chardet-3.0.4.dist-info/metadata.json
@@ -0,0 +1 @@
+{"classifiers": ["Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: Text Processing :: Linguistic"], "extensions": {"python.commands": {"wrap_console": {"chardetect": "chardet.cli.chardetect:main"}}, "python.details": {"contacts": [{"email": "dan.blanchard@gmail.com", "name": "Daniel Blanchard", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://github.com/chardet/chardet"}}, "python.exports": {"console_scripts": {"chardetect": "chardet.cli.chardetect:main"}}}, "generator": "bdist_wheel (0.29.0)", "keywords": ["encoding", "i18n", "xml"], "license": "LGPL", "metadata_version": "2.0", "name": "chardet", "summary": "Universal encoding detector for Python 2 and 3", "test_requires": [{"requires": ["hypothesis", "pytest"]}], "version": "3.0.4"}
\ No newline at end of file
diff --git a/venv/Lib/site-packages/chardet-3.0.4.dist-info/top_level.txt b/venv/Lib/site-packages/chardet-3.0.4.dist-info/top_level.txt
new file mode 100644
index 000000000..79236f25c
--- /dev/null
+++ b/venv/Lib/site-packages/chardet-3.0.4.dist-info/top_level.txt
@@ -0,0 +1 @@
+chardet
diff --git a/venv/Lib/site-packages/requests/packages/chardet/__init__.py b/venv/Lib/site-packages/chardet/__init__.py
similarity index 55%
rename from venv/Lib/site-packages/requests/packages/chardet/__init__.py
rename to venv/Lib/site-packages/chardet/__init__.py
index 82c2a48d2..0f9f820ef 100644
--- a/venv/Lib/site-packages/requests/packages/chardet/__init__.py
+++ b/venv/Lib/site-packages/chardet/__init__.py
@@ -15,18 +15,25 @@
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
-__version__ = "2.3.0"
-from sys import version_info
+
+from .compat import PY2, PY3
+from .universaldetector import UniversalDetector
+from .version import __version__, VERSION
-def detect(aBuf):
- if ((version_info < (3, 0) and isinstance(aBuf, unicode)) or
- (version_info >= (3, 0) and not isinstance(aBuf, bytes))):
- raise ValueError('Expected a bytes object, not a unicode object')
+def detect(byte_str):
+ """
+ Detect the encoding of the given byte string.
- from . import universaldetector
- u = universaldetector.UniversalDetector()
- u.reset()
- u.feed(aBuf)
- u.close()
- return u.result
+ :param byte_str: The byte sequence to examine.
+ :type byte_str: ``bytes`` or ``bytearray``
+ """
+ if not isinstance(byte_str, bytearray):
+ if not isinstance(byte_str, bytes):
+ raise TypeError('Expected object of type bytes or bytearray, got: '
+ '{0}'.format(type(byte_str)))
+ else:
+ byte_str = bytearray(byte_str)
+ detector = UniversalDetector()
+ detector.feed(byte_str)
+ return detector.close()
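+
+# Hedged usage sketch for the rewritten detect() above; the confidence
+# figure is illustrative, not a guaranteed value:
+#
+#   >>> import chardet
+#   >>> chardet.detect(b'\xe4\xbd\xa0\xe5\xa5\xbd')
+#   {'encoding': 'utf-8', 'confidence': 0.7525, 'language': ''}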
diff --git a/venv/Lib/site-packages/chardet/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..8fa125032
Binary files /dev/null and b/venv/Lib/site-packages/chardet/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/chardet/__pycache__/big5freq.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/big5freq.cpython-36.pyc
new file mode 100644
index 000000000..53ba5b2e9
Binary files /dev/null and b/venv/Lib/site-packages/chardet/__pycache__/big5freq.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/chardet/__pycache__/big5prober.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/big5prober.cpython-36.pyc
new file mode 100644
index 000000000..2f39adf3a
Binary files /dev/null and b/venv/Lib/site-packages/chardet/__pycache__/big5prober.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/chardet/__pycache__/chardistribution.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/chardistribution.cpython-36.pyc
new file mode 100644
index 000000000..ea4411763
Binary files /dev/null and b/venv/Lib/site-packages/chardet/__pycache__/chardistribution.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/chardet/__pycache__/charsetgroupprober.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/charsetgroupprober.cpython-36.pyc
new file mode 100644
index 000000000..1de2f280e
Binary files /dev/null and b/venv/Lib/site-packages/chardet/__pycache__/charsetgroupprober.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/chardet/__pycache__/charsetprober.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/charsetprober.cpython-36.pyc
new file mode 100644
index 000000000..07ee4db2e
Binary files /dev/null and b/venv/Lib/site-packages/chardet/__pycache__/charsetprober.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/chardet/__pycache__/codingstatemachine.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/codingstatemachine.cpython-36.pyc
new file mode 100644
index 000000000..64e04708a
Binary files /dev/null and b/venv/Lib/site-packages/chardet/__pycache__/codingstatemachine.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/chardet/__pycache__/compat.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/compat.cpython-36.pyc
new file mode 100644
index 000000000..983fa1864
Binary files /dev/null and b/venv/Lib/site-packages/chardet/__pycache__/compat.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/chardet/__pycache__/cp949prober.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/cp949prober.cpython-36.pyc
new file mode 100644
index 000000000..e88df34b4
Binary files /dev/null and b/venv/Lib/site-packages/chardet/__pycache__/cp949prober.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/chardet/__pycache__/enums.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/enums.cpython-36.pyc
new file mode 100644
index 000000000..10f3c1f81
Binary files /dev/null and b/venv/Lib/site-packages/chardet/__pycache__/enums.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/chardet/__pycache__/escprober.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/escprober.cpython-36.pyc
new file mode 100644
index 000000000..0eff5b93a
Binary files /dev/null and b/venv/Lib/site-packages/chardet/__pycache__/escprober.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/chardet/__pycache__/escsm.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/escsm.cpython-36.pyc
new file mode 100644
index 000000000..e8fe8e4ec
Binary files /dev/null and b/venv/Lib/site-packages/chardet/__pycache__/escsm.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/chardet/__pycache__/eucjpprober.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/eucjpprober.cpython-36.pyc
new file mode 100644
index 000000000..3b4aa70ec
Binary files /dev/null and b/venv/Lib/site-packages/chardet/__pycache__/eucjpprober.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/chardet/__pycache__/euckrfreq.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/euckrfreq.cpython-36.pyc
new file mode 100644
index 000000000..6c702a14b
Binary files /dev/null and b/venv/Lib/site-packages/chardet/__pycache__/euckrfreq.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/chardet/__pycache__/euckrprober.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/euckrprober.cpython-36.pyc
new file mode 100644
index 000000000..93a9ed1d6
Binary files /dev/null and b/venv/Lib/site-packages/chardet/__pycache__/euckrprober.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/requests/packages/chardet/__pycache__/euctwfreq.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/euctwfreq.cpython-36.pyc
similarity index 87%
rename from venv/Lib/site-packages/requests/packages/chardet/__pycache__/euctwfreq.cpython-36.pyc
rename to venv/Lib/site-packages/chardet/__pycache__/euctwfreq.cpython-36.pyc
index 93d9105e9..9622e4de2 100644
Binary files a/venv/Lib/site-packages/requests/packages/chardet/__pycache__/euctwfreq.cpython-36.pyc and b/venv/Lib/site-packages/chardet/__pycache__/euctwfreq.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/chardet/__pycache__/euctwprober.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/euctwprober.cpython-36.pyc
new file mode 100644
index 000000000..c6e0c8090
Binary files /dev/null and b/venv/Lib/site-packages/chardet/__pycache__/euctwprober.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/requests/packages/chardet/__pycache__/gb2312freq.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/gb2312freq.cpython-36.pyc
similarity index 54%
rename from venv/Lib/site-packages/requests/packages/chardet/__pycache__/gb2312freq.cpython-36.pyc
rename to venv/Lib/site-packages/chardet/__pycache__/gb2312freq.cpython-36.pyc
index eef1c1baf..8727d8a6c 100644
Binary files a/venv/Lib/site-packages/requests/packages/chardet/__pycache__/gb2312freq.cpython-36.pyc and b/venv/Lib/site-packages/chardet/__pycache__/gb2312freq.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/chardet/__pycache__/gb2312prober.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/gb2312prober.cpython-36.pyc
new file mode 100644
index 000000000..71361390f
Binary files /dev/null and b/venv/Lib/site-packages/chardet/__pycache__/gb2312prober.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/chardet/__pycache__/hebrewprober.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/hebrewprober.cpython-36.pyc
new file mode 100644
index 000000000..936617b47
Binary files /dev/null and b/venv/Lib/site-packages/chardet/__pycache__/hebrewprober.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/requests/packages/chardet/__pycache__/jisfreq.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/jisfreq.cpython-36.pyc
similarity index 52%
rename from venv/Lib/site-packages/requests/packages/chardet/__pycache__/jisfreq.cpython-36.pyc
rename to venv/Lib/site-packages/chardet/__pycache__/jisfreq.cpython-36.pyc
index 54ecb7b34..f2e2048a9 100644
Binary files a/venv/Lib/site-packages/requests/packages/chardet/__pycache__/jisfreq.cpython-36.pyc and b/venv/Lib/site-packages/chardet/__pycache__/jisfreq.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/chardet/__pycache__/jpcntx.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/jpcntx.cpython-36.pyc
new file mode 100644
index 000000000..771e64bfb
Binary files /dev/null and b/venv/Lib/site-packages/chardet/__pycache__/jpcntx.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/requests/packages/chardet/__pycache__/langbulgarianmodel.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/langbulgarianmodel.cpython-36.pyc
similarity index 93%
rename from venv/Lib/site-packages/requests/packages/chardet/__pycache__/langbulgarianmodel.cpython-36.pyc
rename to venv/Lib/site-packages/chardet/__pycache__/langbulgarianmodel.cpython-36.pyc
index dade07f2d..9e55593d9 100644
Binary files a/venv/Lib/site-packages/requests/packages/chardet/__pycache__/langbulgarianmodel.cpython-36.pyc and b/venv/Lib/site-packages/chardet/__pycache__/langbulgarianmodel.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/requests/packages/chardet/__pycache__/langcyrillicmodel.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/langcyrillicmodel.cpython-36.pyc
similarity index 94%
rename from venv/Lib/site-packages/requests/packages/chardet/__pycache__/langcyrillicmodel.cpython-36.pyc
rename to venv/Lib/site-packages/chardet/__pycache__/langcyrillicmodel.cpython-36.pyc
index 349d1ed2d..91a8c209c 100644
Binary files a/venv/Lib/site-packages/requests/packages/chardet/__pycache__/langcyrillicmodel.cpython-36.pyc and b/venv/Lib/site-packages/chardet/__pycache__/langcyrillicmodel.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/requests/packages/chardet/__pycache__/langgreekmodel.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/langgreekmodel.cpython-36.pyc
similarity index 93%
rename from venv/Lib/site-packages/requests/packages/chardet/__pycache__/langgreekmodel.cpython-36.pyc
rename to venv/Lib/site-packages/chardet/__pycache__/langgreekmodel.cpython-36.pyc
index 243ae38c9..8111240e4 100644
Binary files a/venv/Lib/site-packages/requests/packages/chardet/__pycache__/langgreekmodel.cpython-36.pyc and b/venv/Lib/site-packages/chardet/__pycache__/langgreekmodel.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/requests/packages/chardet/__pycache__/langhebrewmodel.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/langhebrewmodel.cpython-36.pyc
similarity index 92%
rename from venv/Lib/site-packages/requests/packages/chardet/__pycache__/langhebrewmodel.cpython-36.pyc
rename to venv/Lib/site-packages/chardet/__pycache__/langhebrewmodel.cpython-36.pyc
index 59dfbeb04..532429e0a 100644
Binary files a/venv/Lib/site-packages/requests/packages/chardet/__pycache__/langhebrewmodel.cpython-36.pyc and b/venv/Lib/site-packages/chardet/__pycache__/langhebrewmodel.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/requests/packages/chardet/__pycache__/langhungarianmodel.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/langhungarianmodel.cpython-36.pyc
similarity index 95%
rename from venv/Lib/site-packages/requests/packages/chardet/__pycache__/langhungarianmodel.cpython-36.pyc
rename to venv/Lib/site-packages/chardet/__pycache__/langhungarianmodel.cpython-36.pyc
index 1c9cfc191..ea76bb2ea 100644
Binary files a/venv/Lib/site-packages/requests/packages/chardet/__pycache__/langhungarianmodel.cpython-36.pyc and b/venv/Lib/site-packages/chardet/__pycache__/langhungarianmodel.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/requests/packages/chardet/__pycache__/langthaimodel.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/langthaimodel.cpython-36.pyc
similarity index 93%
rename from venv/Lib/site-packages/requests/packages/chardet/__pycache__/langthaimodel.cpython-36.pyc
rename to venv/Lib/site-packages/chardet/__pycache__/langthaimodel.cpython-36.pyc
index 76301be08..d34eadca4 100644
Binary files a/venv/Lib/site-packages/requests/packages/chardet/__pycache__/langthaimodel.cpython-36.pyc and b/venv/Lib/site-packages/chardet/__pycache__/langthaimodel.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/chardet/__pycache__/langturkishmodel.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/langturkishmodel.cpython-36.pyc
new file mode 100644
index 000000000..e20cb3b75
Binary files /dev/null and b/venv/Lib/site-packages/chardet/__pycache__/langturkishmodel.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/chardet/__pycache__/latin1prober.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/latin1prober.cpython-36.pyc
new file mode 100644
index 000000000..a938cfcb0
Binary files /dev/null and b/venv/Lib/site-packages/chardet/__pycache__/latin1prober.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/chardet/__pycache__/mbcharsetprober.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/mbcharsetprober.cpython-36.pyc
new file mode 100644
index 000000000..f6316ebbe
Binary files /dev/null and b/venv/Lib/site-packages/chardet/__pycache__/mbcharsetprober.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/chardet/__pycache__/mbcsgroupprober.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/mbcsgroupprober.cpython-36.pyc
new file mode 100644
index 000000000..4b7476872
Binary files /dev/null and b/venv/Lib/site-packages/chardet/__pycache__/mbcsgroupprober.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/chardet/__pycache__/mbcssm.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/mbcssm.cpython-36.pyc
new file mode 100644
index 000000000..8775b83c9
Binary files /dev/null and b/venv/Lib/site-packages/chardet/__pycache__/mbcssm.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/chardet/__pycache__/sbcharsetprober.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/sbcharsetprober.cpython-36.pyc
new file mode 100644
index 000000000..3ddd2b4e2
Binary files /dev/null and b/venv/Lib/site-packages/chardet/__pycache__/sbcharsetprober.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/chardet/__pycache__/sbcsgroupprober.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/sbcsgroupprober.cpython-36.pyc
new file mode 100644
index 000000000..af6dfc587
Binary files /dev/null and b/venv/Lib/site-packages/chardet/__pycache__/sbcsgroupprober.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/chardet/__pycache__/sjisprober.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/sjisprober.cpython-36.pyc
new file mode 100644
index 000000000..458332159
Binary files /dev/null and b/venv/Lib/site-packages/chardet/__pycache__/sjisprober.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/chardet/__pycache__/universaldetector.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/universaldetector.cpython-36.pyc
new file mode 100644
index 000000000..2ff7c4ca1
Binary files /dev/null and b/venv/Lib/site-packages/chardet/__pycache__/universaldetector.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/chardet/__pycache__/utf8prober.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/utf8prober.cpython-36.pyc
new file mode 100644
index 000000000..b0231185a
Binary files /dev/null and b/venv/Lib/site-packages/chardet/__pycache__/utf8prober.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/chardet/__pycache__/version.cpython-36.pyc b/venv/Lib/site-packages/chardet/__pycache__/version.cpython-36.pyc
new file mode 100644
index 000000000..46309c2d1
Binary files /dev/null and b/venv/Lib/site-packages/chardet/__pycache__/version.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/chardet/big5freq.py b/venv/Lib/site-packages/chardet/big5freq.py
new file mode 100644
index 000000000..38f32517a
--- /dev/null
+++ b/venv/Lib/site-packages/chardet/big5freq.py
@@ -0,0 +1,386 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# Big5 frequency table
+# by Taiwan's Mandarin Promotion Council
+#
+#
+# 128 --> 0.42261
+# 256 --> 0.57851
+# 512 --> 0.74851
+# 1024 --> 0.89384
+# 2048 --> 0.97583
+#
+# Ideal Distribution Ratio = 0.74851/(1-0.74851) =2.98
+# Random Distribution Ration = 512/(5401-512)=0.105
+#
+# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR
+
+BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75
+
+#Char to FreqOrder table
+BIG5_TABLE_SIZE = 5376
+
+BIG5_CHAR_TO_FREQ_ORDER = (
+ 1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16
+3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32
+1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48
+ 63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64
+3682, 3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80
+4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96
+5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112
+ 630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128
+ 179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144
+ 995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160
+2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176
+1376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192
+3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208
+ 706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224
+1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 240
+3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256
+2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272
+ 437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288
+3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304
+1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320
+5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336
+ 266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352
+5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 368
+1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384
+ 32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400
+ 188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 416
+3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432
+3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804, 35, 707, # 448
+ 324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464
+2129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480
+2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496
+ 314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512
+ 287,1577,2116, 768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528
+3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544
+1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560
+1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576
+1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592
+2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608
+ 265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624
+4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640
+1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656
+5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672
+2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688
+ 383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704
+ 98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720
+ 523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736
+ 710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752
+5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768
+ 379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784
+1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800
+ 585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, # 816
+ 690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832
+5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848
+1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864
+ 544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880
+3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896
+4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 912
+3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928
+ 279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944
+ 610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 960
+1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976
+4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992
+3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008
+3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024
+2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040
+5101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 1056
+3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072
+5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088
+1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104
+2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120
+1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136
+ 78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152
+1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 882,4554,3995,2759,3470, # 1168
+4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184
+3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200
+ 534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216
+ 165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232
+ 626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248
+2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264
+5122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280
+1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296
+2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312
+1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328
+1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344
+5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360
+5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376
+5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392
+3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408
+4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424
+4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440
+2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456
+5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472
+3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488
+ 598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504
+5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520
+5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536
+1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552
+2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568
+3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584
+4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600
+5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616
+3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632
+4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648
+1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664
+1871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 1680
+4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696
+1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712
+ 240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728
+1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744
+1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760
+3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776
+ 619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792
+5203,5204,1958,1767,2956,3365,3712,1174, 452,1477,4594,3366,3155,5205,2838,1253, # 1808
+2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824
+1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840
+1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856
+5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872
+ 829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888
+4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904
+ 375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920
+2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936
+ 444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952
+1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968
+1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984
+ 730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000
+4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016
+4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032
+1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048
+3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064
+5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080
+5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096
+1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112
+2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128
+1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144
+3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160
+2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176
+3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192
+2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208
+4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224
+4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240
+3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256
+ 97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272
+3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288
+ 424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304
+3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320
+4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336
+3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352
+1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368
+5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384
+ 199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400
+5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416
+1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432
+ 391, 498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448
+4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464
+4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480
+ 397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496
+2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512
+2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528
+3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544
+1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560
+4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576
+2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592
+1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608
+1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624
+2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640
+3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656
+1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672
+5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688
+1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704
+4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720
+1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736
+ 135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752
+1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768
+4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784
+4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800
+2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816
+1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832
+4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848
+ 660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864
+5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880
+2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896
+3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912
+4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928
+ 790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944
+5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960
+5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976
+1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992
+4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008
+4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024
+2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040
+3874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056
+3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072
+2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088
+1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104
+4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120
+3736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136
+3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152
+2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168
+4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184
+5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200
+3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216
+2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232
+3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248
+1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264
+2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280
+3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296
+4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312
+2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328
+2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344
+5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360
+1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376
+2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392
+1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408
+3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424
+4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440
+2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456
+3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472
+3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488
+2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504
+4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520
+2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536
+3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552
+4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568
+5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584
+3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600
+ 194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616
+1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632
+4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648
+1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664
+4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680
+5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696
+ 510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712
+5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728
+5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744
+2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760
+3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776
+2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792
+2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808
+ 681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824
+1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840
+4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856
+3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872
+3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888
+ 838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904
+2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920
+ 625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936
+2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952
+4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968
+1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984
+4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000
+1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016
+3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032
+ 574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048
+3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064
+5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080
+5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096
+3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112
+3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128
+1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144
+2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160
+5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176
+1561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192
+1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208
+3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224
+ 919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240
+1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256
+4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272
+5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288
+2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304
+3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320
+ 516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336
+1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 4352
+2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368
+2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384
+5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400
+5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416
+5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432
+2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448
+2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464
+1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480
+4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496
+3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512
+3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528
+4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544
+4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560
+2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576
+2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592
+5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608
+4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624
+5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640
+4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656
+ 502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672
+ 121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688
+1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704
+3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720
+4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736
+1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752
+5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768
+2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784
+2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800
+3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816
+5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832
+1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848
+3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864
+5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880
+1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896
+5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912
+2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928
+3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944
+2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960
+3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976
+3932,1988, 618, 427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992
+3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008
+4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024
+ 803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040
+2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056
+4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072
+3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088
+5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104
+1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120
+5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136
+ 425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152
+1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168
+ 479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184
+4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200
+1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216
+4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232
+1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248
+ 433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264
+3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280
+4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296
+5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312
+ 938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328
+3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344
+ 890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360
+2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376
+)
+
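The table above (renamed BIG5_CHAR_TO_FREQ_ORDER in this changeset) maps a character's computed order to its frequency rank, and ranks below 512 are counted as "frequent" by the distribution analysis patched below. A minimal sketch of that lookup, assuming the new chardet package layout from this diff:

    from chardet.big5freq import BIG5_CHAR_TO_FREQ_ORDER, BIG5_TABLE_SIZE

    def is_frequent(order):
        # Orders below 512 count as frequently used characters, mirroring
        # CharDistributionAnalysis.feed further down in this diff.
        return 0 <= order < BIG5_TABLE_SIZE and BIG5_CHAR_TO_FREQ_ORDER[order] < 512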
diff --git a/venv/Lib/site-packages/requests/packages/chardet/big5prober.py b/venv/Lib/site-packages/chardet/big5prober.py
similarity index 82%
rename from venv/Lib/site-packages/requests/packages/chardet/big5prober.py
rename to venv/Lib/site-packages/chardet/big5prober.py
index becce81e5..98f997012 100644
--- a/venv/Lib/site-packages/requests/packages/chardet/big5prober.py
+++ b/venv/Lib/site-packages/chardet/big5prober.py
@@ -28,15 +28,20 @@
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import Big5DistributionAnalysis
-from .mbcssm import Big5SMModel
+from .mbcssm import BIG5_SM_MODEL
class Big5Prober(MultiByteCharSetProber):
def __init__(self):
- MultiByteCharSetProber.__init__(self)
- self._mCodingSM = CodingStateMachine(Big5SMModel)
- self._mDistributionAnalyzer = Big5DistributionAnalysis()
+ super(Big5Prober, self).__init__()
+ self.coding_sm = CodingStateMachine(BIG5_SM_MODEL)
+ self.distribution_analyzer = Big5DistributionAnalysis()
self.reset()
- def get_charset_name(self):
+ @property
+ def charset_name(self):
return "Big5"
+
+ @property
+ def language(self):
+ return "Chinese"
diff --git a/venv/Lib/site-packages/requests/packages/chardet/chardistribution.py b/venv/Lib/site-packages/chardet/chardistribution.py
similarity index 61%
rename from venv/Lib/site-packages/requests/packages/chardet/chardistribution.py
rename to venv/Lib/site-packages/chardet/chardistribution.py
index 4e64a00be..c0395f4a4 100644
--- a/venv/Lib/site-packages/requests/packages/chardet/chardistribution.py
+++ b/venv/Lib/site-packages/chardet/chardistribution.py
@@ -25,82 +25,84 @@
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
-from .euctwfreq import (EUCTWCharToFreqOrder, EUCTW_TABLE_SIZE,
+from .euctwfreq import (EUCTW_CHAR_TO_FREQ_ORDER, EUCTW_TABLE_SIZE,
EUCTW_TYPICAL_DISTRIBUTION_RATIO)
-from .euckrfreq import (EUCKRCharToFreqOrder, EUCKR_TABLE_SIZE,
+from .euckrfreq import (EUCKR_CHAR_TO_FREQ_ORDER, EUCKR_TABLE_SIZE,
EUCKR_TYPICAL_DISTRIBUTION_RATIO)
-from .gb2312freq import (GB2312CharToFreqOrder, GB2312_TABLE_SIZE,
+from .gb2312freq import (GB2312_CHAR_TO_FREQ_ORDER, GB2312_TABLE_SIZE,
GB2312_TYPICAL_DISTRIBUTION_RATIO)
-from .big5freq import (Big5CharToFreqOrder, BIG5_TABLE_SIZE,
+from .big5freq import (BIG5_CHAR_TO_FREQ_ORDER, BIG5_TABLE_SIZE,
BIG5_TYPICAL_DISTRIBUTION_RATIO)
-from .jisfreq import (JISCharToFreqOrder, JIS_TABLE_SIZE,
+from .jisfreq import (JIS_CHAR_TO_FREQ_ORDER, JIS_TABLE_SIZE,
JIS_TYPICAL_DISTRIBUTION_RATIO)
-from .compat import wrap_ord
-
-ENOUGH_DATA_THRESHOLD = 1024
-SURE_YES = 0.99
-SURE_NO = 0.01
-MINIMUM_DATA_THRESHOLD = 3
-class CharDistributionAnalysis:
+class CharDistributionAnalysis(object):
+ ENOUGH_DATA_THRESHOLD = 1024
+ SURE_YES = 0.99
+ SURE_NO = 0.01
+ MINIMUM_DATA_THRESHOLD = 3
+
def __init__(self):
# Mapping table to get frequency order from char order (get from
# GetOrder())
- self._mCharToFreqOrder = None
- self._mTableSize = None # Size of above table
+ self._char_to_freq_order = None
+ self._table_size = None # Size of above table
# This is a constant value which varies from language to language,
# used in calculating confidence. See
# http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html
# for further detail.
- self._mTypicalDistributionRatio = None
+ self.typical_distribution_ratio = None
+ self._done = None
+ self._total_chars = None
+ self._freq_chars = None
self.reset()
def reset(self):
"""reset analyser, clear any state"""
# If this flag is set to True, detection is done and conclusion has
# been made
- self._mDone = False
- self._mTotalChars = 0 # Total characters encountered
+ self._done = False
+ self._total_chars = 0 # Total characters encountered
# The number of characters whose frequency order is less than 512
- self._mFreqChars = 0
+ self._freq_chars = 0
- def feed(self, aBuf, aCharLen):
+ def feed(self, char, char_len):
"""feed a character with known length"""
- if aCharLen == 2:
+ if char_len == 2:
# we only care about 2-byte characters in our distribution analysis
- order = self.get_order(aBuf)
+ order = self.get_order(char)
else:
order = -1
if order >= 0:
- self._mTotalChars += 1
+ self._total_chars += 1
# order is valid
- if order < self._mTableSize:
- if 512 > self._mCharToFreqOrder[order]:
- self._mFreqChars += 1
+ if order < self._table_size:
+ if 512 > self._char_to_freq_order[order]:
+ self._freq_chars += 1
def get_confidence(self):
"""return confidence based on existing data"""
# if we didn't receive any character in our consideration range,
# return negative answer
- if self._mTotalChars <= 0 or self._mFreqChars <= MINIMUM_DATA_THRESHOLD:
- return SURE_NO
+ if self._total_chars <= 0 or self._freq_chars <= self.MINIMUM_DATA_THRESHOLD:
+ return self.SURE_NO
- if self._mTotalChars != self._mFreqChars:
- r = (self._mFreqChars / ((self._mTotalChars - self._mFreqChars)
- * self._mTypicalDistributionRatio))
- if r < SURE_YES:
+ if self._total_chars != self._freq_chars:
+ r = (self._freq_chars / ((self._total_chars - self._freq_chars)
+ * self.typical_distribution_ratio))
+ if r < self.SURE_YES:
return r
# normalize confidence (we don't want to be 100% sure)
- return SURE_YES
+ return self.SURE_YES
def got_enough_data(self):
# It is not necessary to receive all data to draw conclusion.
# For charset detection, certain amount of data is enough
- return self._mTotalChars > ENOUGH_DATA_THRESHOLD
+ return self._total_chars > self.ENOUGH_DATA_THRESHOLD
- def get_order(self, aBuf):
+ def get_order(self, byte_str):
# We do not handle characters based on the original encoding string,
# but convert this encoding string to a number, here called order.
# This allows multiple encodings of a language to share one frequency
@@ -110,55 +112,55 @@ class CharDistributionAnalysis:
class EUCTWDistributionAnalysis(CharDistributionAnalysis):
def __init__(self):
- CharDistributionAnalysis.__init__(self)
- self._mCharToFreqOrder = EUCTWCharToFreqOrder
- self._mTableSize = EUCTW_TABLE_SIZE
- self._mTypicalDistributionRatio = EUCTW_TYPICAL_DISTRIBUTION_RATIO
+ super(EUCTWDistributionAnalysis, self).__init__()
+ self._char_to_freq_order = EUCTW_CHAR_TO_FREQ_ORDER
+ self._table_size = EUCTW_TABLE_SIZE
+ self.typical_distribution_ratio = EUCTW_TYPICAL_DISTRIBUTION_RATIO
- def get_order(self, aBuf):
+ def get_order(self, byte_str):
# for euc-TW encoding, we are interested
# first byte range: 0xc4 -- 0xfe
# second byte range: 0xa1 -- 0xfe
# no validation needed here. State machine has done that
- first_char = wrap_ord(aBuf[0])
+ first_char = byte_str[0]
if first_char >= 0xC4:
- return 94 * (first_char - 0xC4) + wrap_ord(aBuf[1]) - 0xA1
+ return 94 * (first_char - 0xC4) + byte_str[1] - 0xA1
else:
return -1
class EUCKRDistributionAnalysis(CharDistributionAnalysis):
def __init__(self):
- CharDistributionAnalysis.__init__(self)
- self._mCharToFreqOrder = EUCKRCharToFreqOrder
- self._mTableSize = EUCKR_TABLE_SIZE
- self._mTypicalDistributionRatio = EUCKR_TYPICAL_DISTRIBUTION_RATIO
+ super(EUCKRDistributionAnalysis, self).__init__()
+ self._char_to_freq_order = EUCKR_CHAR_TO_FREQ_ORDER
+ self._table_size = EUCKR_TABLE_SIZE
+ self.typical_distribution_ratio = EUCKR_TYPICAL_DISTRIBUTION_RATIO
- def get_order(self, aBuf):
+ def get_order(self, byte_str):
# for euc-KR encoding, we are interested
# first byte range: 0xb0 -- 0xfe
# second byte range: 0xa1 -- 0xfe
# no validation needed here. State machine has done that
- first_char = wrap_ord(aBuf[0])
+ first_char = byte_str[0]
if first_char >= 0xB0:
- return 94 * (first_char - 0xB0) + wrap_ord(aBuf[1]) - 0xA1
+ return 94 * (first_char - 0xB0) + byte_str[1] - 0xA1
else:
return -1
class GB2312DistributionAnalysis(CharDistributionAnalysis):
def __init__(self):
- CharDistributionAnalysis.__init__(self)
- self._mCharToFreqOrder = GB2312CharToFreqOrder
- self._mTableSize = GB2312_TABLE_SIZE
- self._mTypicalDistributionRatio = GB2312_TYPICAL_DISTRIBUTION_RATIO
+ super(GB2312DistributionAnalysis, self).__init__()
+ self._char_to_freq_order = GB2312_CHAR_TO_FREQ_ORDER
+ self._table_size = GB2312_TABLE_SIZE
+ self.typical_distribution_ratio = GB2312_TYPICAL_DISTRIBUTION_RATIO
- def get_order(self, aBuf):
+ def get_order(self, byte_str):
# for GB2312 encoding, we are interested
# first byte range: 0xb0 -- 0xfe
# second byte range: 0xa1 -- 0xfe
# no validation needed here. State machine has done that
- first_char, second_char = wrap_ord(aBuf[0]), wrap_ord(aBuf[1])
+ first_char, second_char = byte_str[0], byte_str[1]
if (first_char >= 0xB0) and (second_char >= 0xA1):
return 94 * (first_char - 0xB0) + second_char - 0xA1
else:
@@ -167,17 +169,17 @@ class GB2312DistributionAnalysis(CharDistributionAnalysis):
class Big5DistributionAnalysis(CharDistributionAnalysis):
def __init__(self):
- CharDistributionAnalysis.__init__(self)
- self._mCharToFreqOrder = Big5CharToFreqOrder
- self._mTableSize = BIG5_TABLE_SIZE
- self._mTypicalDistributionRatio = BIG5_TYPICAL_DISTRIBUTION_RATIO
+ super(Big5DistributionAnalysis, self).__init__()
+ self._char_to_freq_order = BIG5_CHAR_TO_FREQ_ORDER
+ self._table_size = BIG5_TABLE_SIZE
+ self.typical_distribution_ratio = BIG5_TYPICAL_DISTRIBUTION_RATIO
- def get_order(self, aBuf):
+ def get_order(self, byte_str):
# for big5 encoding, we are interested
# first byte range: 0xa4 -- 0xfe
# second byte range: 0x40 -- 0x7e, 0xa1 -- 0xfe
# no validation needed here. State machine has done that
- first_char, second_char = wrap_ord(aBuf[0]), wrap_ord(aBuf[1])
+ first_char, second_char = byte_str[0], byte_str[1]
if first_char >= 0xA4:
if second_char >= 0xA1:
return 157 * (first_char - 0xA4) + second_char - 0xA1 + 63
@@ -189,17 +191,17 @@ class Big5DistributionAnalysis(CharDistributionAnalysis):
class SJISDistributionAnalysis(CharDistributionAnalysis):
def __init__(self):
- CharDistributionAnalysis.__init__(self)
- self._mCharToFreqOrder = JISCharToFreqOrder
- self._mTableSize = JIS_TABLE_SIZE
- self._mTypicalDistributionRatio = JIS_TYPICAL_DISTRIBUTION_RATIO
+ super(SJISDistributionAnalysis, self).__init__()
+ self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER
+ self._table_size = JIS_TABLE_SIZE
+ self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO
- def get_order(self, aBuf):
+ def get_order(self, byte_str):
# for sjis encoding, we are interested
# first byte range: 0x81 -- 0x9f , 0xe0 -- 0xfe
# second byte range: 0x40 -- 0x7e, 0x81 -- 0xfe
# no validation needed here. State machine has done that
- first_char, second_char = wrap_ord(aBuf[0]), wrap_ord(aBuf[1])
+ first_char, second_char = byte_str[0], byte_str[1]
if (first_char >= 0x81) and (first_char <= 0x9F):
order = 188 * (first_char - 0x81)
elif (first_char >= 0xE0) and (first_char <= 0xEF):
@@ -214,18 +216,18 @@ class SJISDistributionAnalysis(CharDistributionAnalysis):
class EUCJPDistributionAnalysis(CharDistributionAnalysis):
def __init__(self):
- CharDistributionAnalysis.__init__(self)
- self._mCharToFreqOrder = JISCharToFreqOrder
- self._mTableSize = JIS_TABLE_SIZE
- self._mTypicalDistributionRatio = JIS_TYPICAL_DISTRIBUTION_RATIO
+ super(EUCJPDistributionAnalysis, self).__init__()
+ self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER
+ self._table_size = JIS_TABLE_SIZE
+ self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO
- def get_order(self, aBuf):
+ def get_order(self, byte_str):
# for euc-JP encoding, we are interested
# first byte range: 0xa0 -- 0xfe
# second byte range: 0xa1 -- 0xfe
# no validation needed here. State machine has done that
- char = wrap_ord(aBuf[0])
+ char = byte_str[0]
if char >= 0xA0:
- return 94 * (char - 0xA1) + wrap_ord(aBuf[1]) - 0xa1
+ return 94 * (char - 0xA1) + byte_str[1] - 0xa1
else:
return -1
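Each get_order variant linearizes a two-byte code point into an index for its frequency table. A worked instance of the Big5 formula shown above, for the branch where the second byte is at least 0xA1:

    first_char, second_char = 0xA4, 0xA1
    order = 157 * (first_char - 0xA4) + second_char - 0xA1 + 63
    assert order == 63  # first cell of the 0xA4 row, upper second-byte range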
diff --git a/venv/Lib/site-packages/chardet/charsetgroupprober.py b/venv/Lib/site-packages/chardet/charsetgroupprober.py
new file mode 100644
index 000000000..8b3738efd
--- /dev/null
+++ b/venv/Lib/site-packages/chardet/charsetgroupprober.py
@@ -0,0 +1,106 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .enums import ProbingState
+from .charsetprober import CharSetProber
+
+
+class CharSetGroupProber(CharSetProber):
+ def __init__(self, lang_filter=None):
+ super(CharSetGroupProber, self).__init__(lang_filter=lang_filter)
+ self._active_num = 0
+ self.probers = []
+ self._best_guess_prober = None
+
+ def reset(self):
+ super(CharSetGroupProber, self).reset()
+ self._active_num = 0
+ for prober in self.probers:
+ if prober:
+ prober.reset()
+ prober.active = True
+ self._active_num += 1
+ self._best_guess_prober = None
+
+ @property
+ def charset_name(self):
+ if not self._best_guess_prober:
+ self.get_confidence()
+ if not self._best_guess_prober:
+ return None
+ return self._best_guess_prober.charset_name
+
+ @property
+ def language(self):
+ if not self._best_guess_prober:
+ self.get_confidence()
+ if not self._best_guess_prober:
+ return None
+ return self._best_guess_prober.language
+
+ def feed(self, byte_str):
+ for prober in self.probers:
+ if not prober:
+ continue
+ if not prober.active:
+ continue
+ state = prober.feed(byte_str)
+ if not state:
+ continue
+ if state == ProbingState.FOUND_IT:
+ self._best_guess_prober = prober
+ return self.state
+ elif state == ProbingState.NOT_ME:
+ prober.active = False
+ self._active_num -= 1
+ if self._active_num <= 0:
+ self._state = ProbingState.NOT_ME
+ return self.state
+ return self.state
+
+ def get_confidence(self):
+ state = self.state
+ if state == ProbingState.FOUND_IT:
+ return 0.99
+ elif state == ProbingState.NOT_ME:
+ return 0.01
+ best_conf = 0.0
+ self._best_guess_prober = None
+ for prober in self.probers:
+ if not prober:
+ continue
+ if not prober.active:
+ self.logger.debug('%s not active', prober.charset_name)
+ continue
+ conf = prober.get_confidence()
+ self.logger.debug('%s %s confidence = %s', prober.charset_name, prober.language, conf)
+ if best_conf < conf:
+ best_conf = conf
+ self._best_guess_prober = prober
+ if not self._best_guess_prober:
+ return 0.0
+ return best_conf
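CharSetGroupProber fans each feed() out to its child probers and tracks the best guess; the base class starts with an empty probers list, which concrete groups (e.g. chardet's MBCSGroupProber, not part of this hunk) populate. A hedged sketch, assuming that group class is available:

    from chardet.mbcsgroupprober import MBCSGroupProber  # fills in self.probers

    group = MBCSGroupProber()
    group.feed(b'\xa4\x40' * 64)  # illustrative multi-byte input
    print(group.charset_name, group.get_confidence())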
diff --git a/venv/Lib/site-packages/chardet/charsetprober.py b/venv/Lib/site-packages/chardet/charsetprober.py
new file mode 100644
index 000000000..eac4e5986
--- /dev/null
+++ b/venv/Lib/site-packages/chardet/charsetprober.py
@@ -0,0 +1,145 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+# Shy Shalom - original C code
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+import logging
+import re
+
+from .enums import ProbingState
+
+
+class CharSetProber(object):
+
+ SHORTCUT_THRESHOLD = 0.95
+
+ def __init__(self, lang_filter=None):
+ self._state = None
+ self.lang_filter = lang_filter
+ self.logger = logging.getLogger(__name__)
+
+ def reset(self):
+ self._state = ProbingState.DETECTING
+
+ @property
+ def charset_name(self):
+ return None
+
+ def feed(self, buf):
+ pass
+
+ @property
+ def state(self):
+ return self._state
+
+ def get_confidence(self):
+ return 0.0
+
+ @staticmethod
+ def filter_high_byte_only(buf):
+ buf = re.sub(b'([\x00-\x7F])+', b' ', buf)
+ return buf
+
+ @staticmethod
+ def filter_international_words(buf):
+ """
+ We define three types of bytes:
+ alphabet: English letters [a-zA-Z]
+ international: international characters [\x80-\xFF]
+ marker: everything else [^a-zA-Z\x80-\xFF]
+
+ The input buffer can be thought of as a series of words delimited
+ by markers. This function keeps only the words that contain at
+ least one international character. All contiguous sequences of markers
+ are replaced by a single ASCII space character.
+
+ This filter applies to all scripts which do not use English characters.
+ """
+ filtered = bytearray()
+
+ # This regex matches words that contain at least one international
+ # character; a word may also include a single trailing marker
+ # character.
+ words = re.findall(b'[a-zA-Z]*[\x80-\xFF]+[a-zA-Z]*[^a-zA-Z\x80-\xFF]?',
+ buf)
+
+ for word in words:
+ filtered.extend(word[:-1])
+
+ # If the last character in the word is a marker, replace it with a
+ # space as markers shouldn't affect our analysis (they are used
+ # similarly across all languages and may thus have similar
+ # frequencies).
+ last_char = word[-1:]
+ if not last_char.isalpha() and last_char < b'\x80':
+ last_char = b' '
+ filtered.extend(last_char)
+
+ return filtered
+
+ @staticmethod
+ def filter_with_english_letters(buf):
+ """
+ Returns a copy of ``buf`` that retains only the sequences of English
+ alphabet and high byte characters that are not between <> characters.
+ Also retains English alphabet and high byte characters immediately
+ before occurrences of >.
+
+ This filter can be applied to all scripts which contain both English
+ characters and extended ASCII characters, but is currently only used by
+ ``Latin1Prober``.
+ """
+ filtered = bytearray()
+ in_tag = False
+ prev = 0
+
+ for curr in range(len(buf)):
+ # Slice here to get bytes instead of an int with Python 3
+ buf_char = buf[curr:curr + 1]
+ # Check if we're coming out of or entering an HTML tag
+ if buf_char == b'>':
+ in_tag = False
+ elif buf_char == b'<':
+ in_tag = True
+
+ # If current character is not extended-ASCII and not alphabetic...
+ if buf_char < b'\x80' and not buf_char.isalpha():
+ # ...and we're not in a tag
+ if curr > prev and not in_tag:
+ # Keep everything after last non-extended-ASCII,
+ # non-alphabetic character
+ filtered.extend(buf[prev:curr])
+ # Output a space to delimit the stretch we kept
+ filtered.extend(b' ')
+ prev = curr + 1
+
+ # If we're not in a tag...
+ if not in_tag:
+ # Keep everything after last non-extended-ASCII, non-alphabetic
+ # character
+ filtered.extend(buf[prev:])
+
+ return filtered
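The two static filters above can be exercised directly, since they take and return plain byte sequences:

    from chardet.charsetprober import CharSetProber

    buf = b'plain ascii, caf\xc3\xa9 and <tag> text'
    # Runs of pure ASCII collapse to single spaces:
    print(CharSetProber.filter_high_byte_only(buf))
    # Only words containing at least one byte >= 0x80 survive:
    print(bytes(CharSetProber.filter_international_words(buf)))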
diff --git a/venv/Lib/site-packages/chardet/cli/__init__.py b/venv/Lib/site-packages/chardet/cli/__init__.py
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/venv/Lib/site-packages/chardet/cli/__init__.py
@@ -0,0 +1 @@
+
diff --git a/venv/Lib/site-packages/chardet/cli/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/chardet/cli/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..c8138669e
Binary files /dev/null and b/venv/Lib/site-packages/chardet/cli/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/chardet/cli/__pycache__/chardetect.cpython-36.pyc b/venv/Lib/site-packages/chardet/cli/__pycache__/chardetect.cpython-36.pyc
new file mode 100644
index 000000000..99dd97317
Binary files /dev/null and b/venv/Lib/site-packages/chardet/cli/__pycache__/chardetect.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/requests/packages/chardet/chardetect.py b/venv/Lib/site-packages/chardet/cli/chardetect.py
similarity index 83%
rename from venv/Lib/site-packages/requests/packages/chardet/chardetect.py
rename to venv/Lib/site-packages/chardet/cli/chardetect.py
index ffe892f25..f0a4cc5d7 100644
--- a/venv/Lib/site-packages/requests/packages/chardet/chardetect.py
+++ b/venv/Lib/site-packages/chardet/cli/chardetect.py
@@ -17,9 +17,9 @@ from __future__ import absolute_import, print_function, unicode_literals
import argparse
import sys
-from io import open
from chardet import __version__
+from chardet.compat import PY2
from chardet.universaldetector import UniversalDetector
@@ -35,9 +35,15 @@ def description_of(lines, name='stdin'):
"""
u = UniversalDetector()
for line in lines:
+ line = bytearray(line)
u.feed(line)
+ # shortcut out of the loop to save reading further - particularly useful if we read a BOM.
+ if u.done:
+ break
u.close()
result = u.result
+ if PY2:
+ name = name.decode(sys.getfilesystemencoding(), 'ignore')
if result['encoding']:
return '{0}: {1} with confidence {2}'.format(name, result['encoding'],
result['confidence'])
@@ -46,23 +52,22 @@ def description_of(lines, name='stdin'):
def main(argv=None):
- '''
+ """
Handles command line arguments and gets things started.
:param argv: List of arguments, as if specified on the command-line.
If None, ``sys.argv[1:]`` is used instead.
:type argv: list of str
- '''
+ """
# Get command line arguments
parser = argparse.ArgumentParser(
description="Takes one or more file paths and reports their detected \
- encodings",
- formatter_class=argparse.ArgumentDefaultsHelpFormatter,
- conflict_handler='resolve')
+ encodings")
parser.add_argument('input',
- help='File whose encoding we would like to determine.',
+ help='File whose encoding we would like to determine. \
+ (default: stdin)',
type=argparse.FileType('rb'), nargs='*',
- default=[sys.stdin])
+ default=[sys.stdin if PY2 else sys.stdin.buffer])
parser.add_argument('--version', action='version',
version='%(prog)s {0}'.format(__version__))
args = parser.parse_args(argv)
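description_of drives a UniversalDetector over any iterable of byte lines, so it can also be called outside the CLI. A sketch ('sample.txt' is an illustrative path, not from this repo):

    from chardet.cli.chardetect import description_of

    with open('sample.txt', 'rb') as f:
        # Prints e.g. "sample.txt: utf-8 with confidence 0.99" (output varies).
        print(description_of(f, f.name))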
diff --git a/venv/Lib/site-packages/chardet/codingstatemachine.py b/venv/Lib/site-packages/chardet/codingstatemachine.py
new file mode 100644
index 000000000..68fba44f1
--- /dev/null
+++ b/venv/Lib/site-packages/chardet/codingstatemachine.py
@@ -0,0 +1,88 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+import logging
+
+from .enums import MachineState
+
+
+class CodingStateMachine(object):
+ """
+ A state machine to verify a byte sequence for a particular encoding. For
+ each byte the detector receives, it will feed that byte to every active
+ state machine available, one byte at a time. The state machine changes its
+ state based on its previous state and the byte it receives. There are 3
+ states in a state machine that are of interest to an auto-detector:
+
+ START state: This is the state to start with, or the state reached after
+ a legal byte sequence (i.e. a valid code point) for a character has been
+ identified.
+
+ ME state: This indicates that the state machine identified a byte sequence
+ that is specific to the charset it is designed for and that
+ there is no other possible encoding which can contain this byte
+ sequence. This will lead to an immediate positive answer for
+ the detector.
+
+ ERROR state: This indicates the state machine identified an illegal byte
+ sequence for that encoding. This will lead to an immediate
+ negative answer for this encoding. The detector will exclude this
+ encoding from consideration from here on.
+ """
+ def __init__(self, sm):
+ self._model = sm
+ self._curr_byte_pos = 0
+ self._curr_char_len = 0
+ self._curr_state = None
+ self.logger = logging.getLogger(__name__)
+ self.reset()
+
+ def reset(self):
+ self._curr_state = MachineState.START
+
+ def next_state(self, c):
+ # for each byte we get its class
+ # if it is first byte, we also get byte length
+ byte_class = self._model['class_table'][c]
+ if self._curr_state == MachineState.START:
+ self._curr_byte_pos = 0
+ self._curr_char_len = self._model['char_len_table'][byte_class]
+ # from byte's class and state_table, we get its next state
+ curr_state = (self._curr_state * self._model['class_factor']
+ + byte_class)
+ self._curr_state = self._model['state_table'][curr_state]
+ self._curr_byte_pos += 1
+ return self._curr_state
+
+ def get_current_charlen(self):
+ return self._curr_char_len
+
+ def get_coding_state_machine(self):
+ return self._model['name']
+
+ @property
+ def language(self):
+ return self._model['language']
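A sketch of stepping the machine over a single multi-byte character, assuming the renamed BIG5_SM_MODEL used elsewhere in this diff:

    from chardet.codingstatemachine import CodingStateMachine
    from chardet.enums import MachineState
    from chardet.mbcssm import BIG5_SM_MODEL

    sm = CodingStateMachine(BIG5_SM_MODEL)
    for byte in b'\xa4\x40':  # one legal two-byte Big5 sequence
        state = sm.next_state(byte)
    # A legal sequence should come back to START with a char length of 2.
    print(state == MachineState.START, sm.get_current_charlen())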
diff --git a/venv/Lib/site-packages/requests/packages/chardet/compat.py b/venv/Lib/site-packages/chardet/compat.py
similarity index 85%
rename from venv/Lib/site-packages/requests/packages/chardet/compat.py
rename to venv/Lib/site-packages/chardet/compat.py
index d9e30addf..ddd74687c 100644
--- a/venv/Lib/site-packages/requests/packages/chardet/compat.py
+++ b/venv/Lib/site-packages/chardet/compat.py
@@ -1,6 +1,7 @@
######################## BEGIN LICENSE BLOCK ########################
# Contributor(s):
-# Ian Cordasco - port to Python
+# Dan Blanchard
+# Ian Cordasco
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
@@ -22,13 +23,12 @@ import sys
if sys.version_info < (3, 0):
+ PY2 = True
+ PY3 = False
base_str = (str, unicode)
+ text_type = unicode
else:
+ PY2 = False
+ PY3 = True
base_str = (bytes, str)
-
-
-def wrap_ord(a):
- if sys.version_info < (3, 0) and isinstance(a, base_str):
- return ord(a)
- else:
- return a
+ text_type = str
diff --git a/venv/Lib/site-packages/requests/packages/chardet/cp949prober.py b/venv/Lib/site-packages/chardet/cp949prober.py
similarity index 83%
rename from venv/Lib/site-packages/requests/packages/chardet/cp949prober.py
rename to venv/Lib/site-packages/chardet/cp949prober.py
index ff4272f82..efd793abc 100644
--- a/venv/Lib/site-packages/requests/packages/chardet/cp949prober.py
+++ b/venv/Lib/site-packages/chardet/cp949prober.py
@@ -25,20 +25,25 @@
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
-from .mbcharsetprober import MultiByteCharSetProber
-from .codingstatemachine import CodingStateMachine
from .chardistribution import EUCKRDistributionAnalysis
-from .mbcssm import CP949SMModel
+from .codingstatemachine import CodingStateMachine
+from .mbcharsetprober import MultiByteCharSetProber
+from .mbcssm import CP949_SM_MODEL
class CP949Prober(MultiByteCharSetProber):
def __init__(self):
- MultiByteCharSetProber.__init__(self)
- self._mCodingSM = CodingStateMachine(CP949SMModel)
+ super(CP949Prober, self).__init__()
+ self.coding_sm = CodingStateMachine(CP949_SM_MODEL)
# NOTE: CP949 is a superset of EUC-KR, so the distribution should be
# similar.
- self._mDistributionAnalyzer = EUCKRDistributionAnalysis()
+ self.distribution_analyzer = EUCKRDistributionAnalysis()
self.reset()
- def get_charset_name(self):
+ @property
+ def charset_name(self):
return "CP949"
+
+ @property
+ def language(self):
+ return "Korean"
diff --git a/venv/Lib/site-packages/chardet/enums.py b/venv/Lib/site-packages/chardet/enums.py
new file mode 100644
index 000000000..045120722
--- /dev/null
+++ b/venv/Lib/site-packages/chardet/enums.py
@@ -0,0 +1,76 @@
+"""
+All of the Enums that are used throughout the chardet package.
+
+:author: Dan Blanchard (dan.blanchard@gmail.com)
+"""
+
+
+class InputState(object):
+ """
+ This enum represents the different states a universal detector can be in.
+ """
+ PURE_ASCII = 0
+ ESC_ASCII = 1
+ HIGH_BYTE = 2
+
+
+class LanguageFilter(object):
+ """
+ This enum represents the different language filters we can apply to a
+ ``UniversalDetector``.
+ """
+ CHINESE_SIMPLIFIED = 0x01
+ CHINESE_TRADITIONAL = 0x02
+ JAPANESE = 0x04
+ KOREAN = 0x08
+ NON_CJK = 0x10
+ ALL = 0x1F
+ CHINESE = CHINESE_SIMPLIFIED | CHINESE_TRADITIONAL
+ CJK = CHINESE | JAPANESE | KOREAN
+
+
+class ProbingState(object):
+ """
+ This enum represents the different states a prober can be in.
+ """
+ DETECTING = 0
+ FOUND_IT = 1
+ NOT_ME = 2
+
+
+class MachineState(object):
+ """
+ This enum represents the different states a state machine can be in.
+ """
+ START = 0
+ ERROR = 1
+ ITS_ME = 2
+
+
+class SequenceLikelihood(object):
+ """
+ This enum represents the likelihood of a character following the previous one.
+ """
+ NEGATIVE = 0
+ UNLIKELY = 1
+ LIKELY = 2
+ POSITIVE = 3
+
+ @classmethod
+ def get_num_categories(cls):
+ """:returns: The number of likelihood categories in the enum."""
+ return 4
+
+
+class CharacterCategory(object):
+ """
+ This enum represents the different categories language models for
+ ``SingleByteCharsetProber`` put characters into.
+
+ Anything less than CONTROL is considered a letter.
+ """
+ UNDEFINED = 255
+ LINE_BREAK = 254
+ SYMBOL = 253
+ DIGIT = 252
+ CONTROL = 251
diff --git a/venv/Lib/site-packages/chardet/escprober.py b/venv/Lib/site-packages/chardet/escprober.py
new file mode 100644
index 000000000..c70493f2b
--- /dev/null
+++ b/venv/Lib/site-packages/chardet/escprober.py
@@ -0,0 +1,101 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .charsetprober import CharSetProber
+from .codingstatemachine import CodingStateMachine
+from .enums import LanguageFilter, ProbingState, MachineState
+from .escsm import (HZ_SM_MODEL, ISO2022CN_SM_MODEL, ISO2022JP_SM_MODEL,
+ ISO2022KR_SM_MODEL)
+
+
+class EscCharSetProber(CharSetProber):
+ """
+ This CharSetProber uses a "code scheme" approach for detecting encodings,
+ whereby easily recognizable escape or shift sequences are relied on to
+ identify these encodings.
+ """
+
+ def __init__(self, lang_filter=None):
+ super(EscCharSetProber, self).__init__(lang_filter=lang_filter)
+ self.coding_sm = []
+ if self.lang_filter & LanguageFilter.CHINESE_SIMPLIFIED:
+ self.coding_sm.append(CodingStateMachine(HZ_SM_MODEL))
+ self.coding_sm.append(CodingStateMachine(ISO2022CN_SM_MODEL))
+ if self.lang_filter & LanguageFilter.JAPANESE:
+ self.coding_sm.append(CodingStateMachine(ISO2022JP_SM_MODEL))
+ if self.lang_filter & LanguageFilter.KOREAN:
+ self.coding_sm.append(CodingStateMachine(ISO2022KR_SM_MODEL))
+ self.active_sm_count = None
+ self._detected_charset = None
+ self._detected_language = None
+ self._state = None
+ self.reset()
+
+ def reset(self):
+ super(EscCharSetProber, self).reset()
+ for coding_sm in self.coding_sm:
+ if not coding_sm:
+ continue
+ coding_sm.active = True
+ coding_sm.reset()
+ self.active_sm_count = len(self.coding_sm)
+ self._detected_charset = None
+ self._detected_language = None
+
+ @property
+ def charset_name(self):
+ return self._detected_charset
+
+ @property
+ def language(self):
+ return self._detected_language
+
+ def get_confidence(self):
+ if self._detected_charset:
+ return 0.99
+ else:
+ return 0.00
+
+ def feed(self, byte_str):
+ for c in byte_str:
+ for coding_sm in self.coding_sm:
+ if not coding_sm or not coding_sm.active:
+ continue
+ coding_state = coding_sm.next_state(c)
+ if coding_state == MachineState.ERROR:
+ coding_sm.active = False
+ self.active_sm_count -= 1
+ if self.active_sm_count <= 0:
+ self._state = ProbingState.NOT_ME
+ return self.state
+ elif coding_state == MachineState.ITS_ME:
+ self._state = ProbingState.FOUND_IT
+ self._detected_charset = coding_sm.get_coding_state_machine()
+ self._detected_language = coding_sm.language
+ return self.state
+
+ return self.state
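
Note: a minimal sketch of driving this prober directly, assuming the modules land as in this diff (normally UniversalDetector instantiates it):

    from chardet.enums import LanguageFilter, ProbingState
    from chardet.escprober import EscCharSetProber

    prober = EscCharSetProber(lang_filter=LanguageFilter.ALL)
    # b'\x1b$B' is the ISO-2022-JP escape that switches to JIS X 0208
    if prober.feed(b'\x1b$B') == ProbingState.FOUND_IT:
        print(prober.charset_name, prober.get_confidence())  # ISO-2022-JP 0.99

Observe that lang_filter must be truthy here: the constructor ANDs it against LanguageFilter flags, so passing the default None would raise a TypeError.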
diff --git a/venv/Lib/site-packages/chardet/escsm.py b/venv/Lib/site-packages/chardet/escsm.py
new file mode 100644
index 000000000..0069523a0
--- /dev/null
+++ b/venv/Lib/site-packages/chardet/escsm.py
@@ -0,0 +1,246 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .enums import MachineState
+
+HZ_CLS = (
+1,0,0,0,0,0,0,0, # 00 - 07
+0,0,0,0,0,0,0,0, # 08 - 0f
+0,0,0,0,0,0,0,0, # 10 - 17
+0,0,0,1,0,0,0,0, # 18 - 1f
+0,0,0,0,0,0,0,0, # 20 - 27
+0,0,0,0,0,0,0,0, # 28 - 2f
+0,0,0,0,0,0,0,0, # 30 - 37
+0,0,0,0,0,0,0,0, # 38 - 3f
+0,0,0,0,0,0,0,0, # 40 - 47
+0,0,0,0,0,0,0,0, # 48 - 4f
+0,0,0,0,0,0,0,0, # 50 - 57
+0,0,0,0,0,0,0,0, # 58 - 5f
+0,0,0,0,0,0,0,0, # 60 - 67
+0,0,0,0,0,0,0,0, # 68 - 6f
+0,0,0,0,0,0,0,0, # 70 - 77
+0,0,0,4,0,5,2,0, # 78 - 7f
+1,1,1,1,1,1,1,1, # 80 - 87
+1,1,1,1,1,1,1,1, # 88 - 8f
+1,1,1,1,1,1,1,1, # 90 - 97
+1,1,1,1,1,1,1,1, # 98 - 9f
+1,1,1,1,1,1,1,1, # a0 - a7
+1,1,1,1,1,1,1,1, # a8 - af
+1,1,1,1,1,1,1,1, # b0 - b7
+1,1,1,1,1,1,1,1, # b8 - bf
+1,1,1,1,1,1,1,1, # c0 - c7
+1,1,1,1,1,1,1,1, # c8 - cf
+1,1,1,1,1,1,1,1, # d0 - d7
+1,1,1,1,1,1,1,1, # d8 - df
+1,1,1,1,1,1,1,1, # e0 - e7
+1,1,1,1,1,1,1,1, # e8 - ef
+1,1,1,1,1,1,1,1, # f0 - f7
+1,1,1,1,1,1,1,1, # f8 - ff
+)
+
+HZ_ST = (
+MachineState.START,MachineState.ERROR, 3,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07
+MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f
+MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START, 4,MachineState.ERROR,# 10-17
+ 5,MachineState.ERROR, 6,MachineState.ERROR, 5, 5, 4,MachineState.ERROR,# 18-1f
+ 4,MachineState.ERROR, 4, 4, 4,MachineState.ERROR, 4,MachineState.ERROR,# 20-27
+ 4,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 28-2f
+)
+
+HZ_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0)
+
+HZ_SM_MODEL = {'class_table': HZ_CLS,
+ 'class_factor': 6,
+ 'state_table': HZ_ST,
+ 'char_len_table': HZ_CHAR_LEN_TABLE,
+ 'name': "HZ-GB-2312",
+ 'language': 'Chinese'}
+
+ISO2022CN_CLS = (
+2,0,0,0,0,0,0,0, # 00 - 07
+0,0,0,0,0,0,0,0, # 08 - 0f
+0,0,0,0,0,0,0,0, # 10 - 17
+0,0,0,1,0,0,0,0, # 18 - 1f
+0,0,0,0,0,0,0,0, # 20 - 27
+0,3,0,0,0,0,0,0, # 28 - 2f
+0,0,0,0,0,0,0,0, # 30 - 37
+0,0,0,0,0,0,0,0, # 38 - 3f
+0,0,0,4,0,0,0,0, # 40 - 47
+0,0,0,0,0,0,0,0, # 48 - 4f
+0,0,0,0,0,0,0,0, # 50 - 57
+0,0,0,0,0,0,0,0, # 58 - 5f
+0,0,0,0,0,0,0,0, # 60 - 67
+0,0,0,0,0,0,0,0, # 68 - 6f
+0,0,0,0,0,0,0,0, # 70 - 77
+0,0,0,0,0,0,0,0, # 78 - 7f
+2,2,2,2,2,2,2,2, # 80 - 87
+2,2,2,2,2,2,2,2, # 88 - 8f
+2,2,2,2,2,2,2,2, # 90 - 97
+2,2,2,2,2,2,2,2, # 98 - 9f
+2,2,2,2,2,2,2,2, # a0 - a7
+2,2,2,2,2,2,2,2, # a8 - af
+2,2,2,2,2,2,2,2, # b0 - b7
+2,2,2,2,2,2,2,2, # b8 - bf
+2,2,2,2,2,2,2,2, # c0 - c7
+2,2,2,2,2,2,2,2, # c8 - cf
+2,2,2,2,2,2,2,2, # d0 - d7
+2,2,2,2,2,2,2,2, # d8 - df
+2,2,2,2,2,2,2,2, # e0 - e7
+2,2,2,2,2,2,2,2, # e8 - ef
+2,2,2,2,2,2,2,2, # f0 - f7
+2,2,2,2,2,2,2,2, # f8 - ff
+)
+
+ISO2022CN_ST = (
+MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07
+MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f
+MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17
+MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,# 18-1f
+MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 20-27
+ 5, 6,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 28-2f
+MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 30-37
+MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,# 38-3f
+)
+
+ISO2022CN_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0)
+
+ISO2022CN_SM_MODEL = {'class_table': ISO2022CN_CLS,
+ 'class_factor': 9,
+ 'state_table': ISO2022CN_ST,
+ 'char_len_table': ISO2022CN_CHAR_LEN_TABLE,
+ 'name': "ISO-2022-CN",
+ 'language': 'Chinese'}
+
+ISO2022JP_CLS = (
+2,0,0,0,0,0,0,0, # 00 - 07
+0,0,0,0,0,0,2,2, # 08 - 0f
+0,0,0,0,0,0,0,0, # 10 - 17
+0,0,0,1,0,0,0,0, # 18 - 1f
+0,0,0,0,7,0,0,0, # 20 - 27
+3,0,0,0,0,0,0,0, # 28 - 2f
+0,0,0,0,0,0,0,0, # 30 - 37
+0,0,0,0,0,0,0,0, # 38 - 3f
+6,0,4,0,8,0,0,0, # 40 - 47
+0,9,5,0,0,0,0,0, # 48 - 4f
+0,0,0,0,0,0,0,0, # 50 - 57
+0,0,0,0,0,0,0,0, # 58 - 5f
+0,0,0,0,0,0,0,0, # 60 - 67
+0,0,0,0,0,0,0,0, # 68 - 6f
+0,0,0,0,0,0,0,0, # 70 - 77
+0,0,0,0,0,0,0,0, # 78 - 7f
+2,2,2,2,2,2,2,2, # 80 - 87
+2,2,2,2,2,2,2,2, # 88 - 8f
+2,2,2,2,2,2,2,2, # 90 - 97
+2,2,2,2,2,2,2,2, # 98 - 9f
+2,2,2,2,2,2,2,2, # a0 - a7
+2,2,2,2,2,2,2,2, # a8 - af
+2,2,2,2,2,2,2,2, # b0 - b7
+2,2,2,2,2,2,2,2, # b8 - bf
+2,2,2,2,2,2,2,2, # c0 - c7
+2,2,2,2,2,2,2,2, # c8 - cf
+2,2,2,2,2,2,2,2, # d0 - d7
+2,2,2,2,2,2,2,2, # d8 - df
+2,2,2,2,2,2,2,2, # e0 - e7
+2,2,2,2,2,2,2,2, # e8 - ef
+2,2,2,2,2,2,2,2, # f0 - f7
+2,2,2,2,2,2,2,2, # f8 - ff
+)
+
+ISO2022JP_ST = (
+MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07
+MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f
+MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17
+MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,# 18-1f
+MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 20-27
+MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 6,MachineState.ITS_ME,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,# 28-2f
+MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,# 30-37
+MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 38-3f
+MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.START,# 40-47
+)
+
+ISO2022JP_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
+
+ISO2022JP_SM_MODEL = {'class_table': ISO2022JP_CLS,
+ 'class_factor': 10,
+ 'state_table': ISO2022JP_ST,
+ 'char_len_table': ISO2022JP_CHAR_LEN_TABLE,
+ 'name': "ISO-2022-JP",
+ 'language': 'Japanese'}
+
+ISO2022KR_CLS = (
+2,0,0,0,0,0,0,0, # 00 - 07
+0,0,0,0,0,0,0,0, # 08 - 0f
+0,0,0,0,0,0,0,0, # 10 - 17
+0,0,0,1,0,0,0,0, # 18 - 1f
+0,0,0,0,3,0,0,0, # 20 - 27
+0,4,0,0,0,0,0,0, # 28 - 2f
+0,0,0,0,0,0,0,0, # 30 - 37
+0,0,0,0,0,0,0,0, # 38 - 3f
+0,0,0,5,0,0,0,0, # 40 - 47
+0,0,0,0,0,0,0,0, # 48 - 4f
+0,0,0,0,0,0,0,0, # 50 - 57
+0,0,0,0,0,0,0,0, # 58 - 5f
+0,0,0,0,0,0,0,0, # 60 - 67
+0,0,0,0,0,0,0,0, # 68 - 6f
+0,0,0,0,0,0,0,0, # 70 - 77
+0,0,0,0,0,0,0,0, # 78 - 7f
+2,2,2,2,2,2,2,2, # 80 - 87
+2,2,2,2,2,2,2,2, # 88 - 8f
+2,2,2,2,2,2,2,2, # 90 - 97
+2,2,2,2,2,2,2,2, # 98 - 9f
+2,2,2,2,2,2,2,2, # a0 - a7
+2,2,2,2,2,2,2,2, # a8 - af
+2,2,2,2,2,2,2,2, # b0 - b7
+2,2,2,2,2,2,2,2, # b8 - bf
+2,2,2,2,2,2,2,2, # c0 - c7
+2,2,2,2,2,2,2,2, # c8 - cf
+2,2,2,2,2,2,2,2, # d0 - d7
+2,2,2,2,2,2,2,2, # d8 - df
+2,2,2,2,2,2,2,2, # e0 - e7
+2,2,2,2,2,2,2,2, # e8 - ef
+2,2,2,2,2,2,2,2, # f0 - f7
+2,2,2,2,2,2,2,2, # f8 - ff
+)
+
+ISO2022KR_ST = (
+MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07
+MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f
+MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 10-17
+MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 18-1f
+MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 20-27
+)
+
+ISO2022KR_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0)
+
+ISO2022KR_SM_MODEL = {'class_table': ISO2022KR_CLS,
+ 'class_factor': 6,
+ 'state_table': ISO2022KR_ST,
+ 'char_len_table': ISO2022KR_CHAR_LEN_TABLE,
+ 'name': "ISO-2022-KR",
+ 'language': 'Korean'}
+
+
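Note: these _SM_MODEL dicts are consumed by CodingStateMachine (not part of this hunk). Roughly, each incoming byte is mapped to a character class via class_table, and the flat state table is indexed by state * class_factor + class. A simplified sketch, omitting the char_len bookkeeping:

    def next_state(model, curr_state, byte):
        byte_class = model['class_table'][byte]                   # classify the byte
        index = curr_state * model['class_factor'] + byte_class   # flat 2-D lookup
        return model['state_table'][index]                        # transition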
diff --git a/venv/Lib/site-packages/chardet/eucjpprober.py b/venv/Lib/site-packages/chardet/eucjpprober.py
new file mode 100644
index 000000000..20ce8f7d1
--- /dev/null
+++ b/venv/Lib/site-packages/chardet/eucjpprober.py
@@ -0,0 +1,92 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .enums import ProbingState, MachineState
+from .mbcharsetprober import MultiByteCharSetProber
+from .codingstatemachine import CodingStateMachine
+from .chardistribution import EUCJPDistributionAnalysis
+from .jpcntx import EUCJPContextAnalysis
+from .mbcssm import EUCJP_SM_MODEL
+
+
+class EUCJPProber(MultiByteCharSetProber):
+ def __init__(self):
+ super(EUCJPProber, self).__init__()
+ self.coding_sm = CodingStateMachine(EUCJP_SM_MODEL)
+ self.distribution_analyzer = EUCJPDistributionAnalysis()
+ self.context_analyzer = EUCJPContextAnalysis()
+ self.reset()
+
+ def reset(self):
+ super(EUCJPProber, self).reset()
+ self.context_analyzer.reset()
+
+ @property
+ def charset_name(self):
+ return "EUC-JP"
+
+ @property
+ def language(self):
+ return "Japanese"
+
+ def feed(self, byte_str):
+ for i in range(len(byte_str)):
+ # PY3K: byte_str is a byte array, so byte_str[i] is an int, not a byte
+ coding_state = self.coding_sm.next_state(byte_str[i])
+ if coding_state == MachineState.ERROR:
+ self.logger.debug('%s %s prober hit error at byte %s',
+ self.charset_name, self.language, i)
+ self._state = ProbingState.NOT_ME
+ break
+ elif coding_state == MachineState.ITS_ME:
+ self._state = ProbingState.FOUND_IT
+ break
+ elif coding_state == MachineState.START:
+ char_len = self.coding_sm.get_current_charlen()
+ if i == 0:
+ self._last_char[1] = byte_str[0]
+ self.context_analyzer.feed(self._last_char, char_len)
+ self.distribution_analyzer.feed(self._last_char, char_len)
+ else:
+ self.context_analyzer.feed(byte_str[i - 1:i + 1],
+ char_len)
+ self.distribution_analyzer.feed(byte_str[i - 1:i + 1],
+ char_len)
+
+ self._last_char[0] = byte_str[-1]
+
+ if self.state == ProbingState.DETECTING:
+ if (self.context_analyzer.got_enough_data() and
+ (self.get_confidence() > self.SHORTCUT_THRESHOLD)):
+ self._state = ProbingState.FOUND_IT
+
+ return self.state
+
+ def get_confidence(self):
+ context_conf = self.context_analyzer.get_confidence()
+ distrib_conf = self.distribution_analyzer.get_confidence()
+ return max(context_conf, distrib_conf)
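
Note: the overall EUC-JP score is the max of the context and distribution analyses. End users normally reach this prober through the package's top-level helper; a quick sanity check, assuming this vendored copy is importable:

    import chardet

    print(chardet.detect('こんにちは、世界'.encode('euc_jp')))
    # e.g. {'encoding': 'EUC-JP', 'confidence': 0.99, 'language': 'Japanese'}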
diff --git a/venv/Lib/site-packages/chardet/euckrfreq.py b/venv/Lib/site-packages/chardet/euckrfreq.py
new file mode 100644
index 000000000..b68078cb9
--- /dev/null
+++ b/venv/Lib/site-packages/chardet/euckrfreq.py
@@ -0,0 +1,195 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# Sampling from about 20M text materials, including literature and computer technology
+
+# 128 --> 0.79
+# 256 --> 0.92
+# 512 --> 0.986
+# 1024 --> 0.99944
+# 2048 --> 0.99999
+#
+# Ideal Distribution Ratio = 0.98653 / (1-0.98653) = 73.24
+# Random Distribution Ratio = 512 / (2350-512) = 0.279
+#
+# Typical Distribution Ratio
+
+EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0
+
+EUCKR_TABLE_SIZE = 2352
+
+# Char to FreqOrder table
+EUCKR_CHAR_TO_FREQ_ORDER = (
+ 13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722, 87,
+1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398,
+1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488, 20,1733,1269,1734,
+ 945,1400,1735, 47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739,
+ 116, 987, 813,1401, 683, 75,1204, 145,1740,1741,1742,1743, 16, 847, 667, 622,
+ 708,1744,1745,1746, 966, 787, 304, 129,1747, 60, 820, 123, 676,1748,1749,1750,
+1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856,
+ 344,1763,1764,1765,1766, 89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205,
+ 709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779,
+1780, 337, 751,1058, 28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782, 19,
+1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567,
+1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797,
+1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802,
+1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899,
+ 885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818,
+1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409,
+1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697,
+1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770,
+1412,1837,1838, 39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723,
+ 544,1023,1081, 869, 91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416,
+1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300,
+ 119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083,
+ 893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857,
+1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871,
+ 282, 96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420,
+1421, 268,1877,1422,1878,1879,1880, 308,1881, 2, 537,1882,1883,1215,1884,1885,
+ 127, 791,1886,1273,1423,1887, 34, 336, 404, 643,1888, 571, 654, 894, 840,1889,
+ 0, 886,1274, 122, 575, 260, 908, 938,1890,1275, 410, 316,1891,1892, 100,1893,
+1894,1123, 48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317,
+1899, 694,1900, 909, 734,1424, 572, 866,1425, 691, 85, 524,1010, 543, 394, 841,
+1901,1902,1903,1026,1904,1905,1906,1907,1908,1909, 30, 451, 651, 988, 310,1910,
+1911,1426, 810,1216, 93,1912,1913,1277,1217,1914, 858, 759, 45, 58, 181, 610,
+ 269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375,
+1919, 359,1920, 687,1921, 822,1922, 293,1923,1924, 40, 662, 118, 692, 29, 939,
+ 887, 640, 482, 174,1925, 69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870,
+ 217, 854,1163, 823,1927,1928,1929,1930, 834,1931, 78,1932, 859,1933,1063,1934,
+1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888,
+1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950,
+1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065,
+1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002,
+1283,1222,1960,1961,1962,1963, 36, 383, 228, 753, 247, 454,1964, 876, 678,1965,
+1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467,
+ 50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285,
+ 639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971, 7,
+ 103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979,
+1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985,
+ 818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994,
+1995, 560, 223,1287, 98, 8, 189, 650, 978,1288,1996,1437,1997, 17, 345, 250,
+ 423, 277, 234, 512, 226, 97, 289, 42, 167,1998, 201,1999,2000, 843, 836, 824,
+ 532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003,
+2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008, 71,1440, 745,
+ 619, 688,2009, 829,2010,2011, 147,2012, 33, 948,2013,2014, 74, 224,2015, 61,
+ 191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023,
+2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591, 52, 724, 246,2031,2032,
+2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912,
+2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224,
+ 719,1170, 959, 440, 437, 534, 84, 388, 480,1131, 159, 220, 198, 679,2044,1012,
+ 819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050,
+2051,2052,2053, 59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681,
+ 499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414,
+1444,2064,2065, 41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068,
+2069,1292,2070,2071,1445,2072,1446,2073,2074, 55, 588, 66,1447, 271,1092,2075,
+1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850,
+2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606,
+2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449,
+1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452,
+ 949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112,
+2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121,
+2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130,
+ 22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174, 73,1096, 231, 274,
+ 962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139,
+2141,2142,2143,2144, 11, 374, 844,2145, 154,1232, 46,1461,2146, 838, 830, 721,
+1233, 106,2147, 90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298,
+2150,1462, 761, 565,2151, 686,2152, 649,2153, 72, 173,2154, 460, 415,2155,1463,
+2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747,
+2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177, 23, 530, 285,
+2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187,
+2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193, 10,
+2194, 613, 424,2195, 979, 108, 449, 589, 27, 172, 81,1031, 80, 774, 281, 350,
+1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201,
+2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972,
+2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219,
+2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233,
+2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242,
+2243, 521, 486, 548,2244,2245,2246,1473,1300, 53, 549, 137, 875, 76, 158,2247,
+1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178,
+1475,2249, 82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255,
+2256, 18, 450, 206,2257, 290, 292,1142,2258, 511, 162, 99, 346, 164, 735,2259,
+1476,1477, 4, 554, 343, 798,1099,2260,1100,2261, 43, 171,1303, 139, 215,2262,
+2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702,
+1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272, 67,2273,
+ 295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541,
+2282,2283,2284,2285,2286, 70, 852,1071,2287,2288,2289,2290, 21, 56, 509, 117,
+ 432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187,
+2294,1046,1479,2295, 340,2296, 63,1047, 230,2297,2298,1305, 763,1306, 101, 800,
+ 808, 494,2299,2300,2301, 903,2302, 37,1072, 14, 5,2303, 79, 675,2304, 312,
+2305,2306,2307,2308,2309,1480, 6,1307,2310,2311,2312, 1, 470, 35, 24, 229,
+2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, 77, 855, 964,2314, 259,2315,
+ 501, 380,2316,2317, 83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484,
+2320,2321,2322,2323,2324,2325,1485,2326,2327, 128, 57, 68, 261,1048, 211, 170,
+1240, 31,2328, 51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335,
+ 425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601,
+1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395,
+2351,1490,1491, 62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354,
+1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476,
+2361,2362, 332, 12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035,
+ 416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498,
+2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310,
+1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389,
+2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504,
+1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505,
+2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145,
+1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624,
+ 593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700,
+2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 608,2426,2427,2428,2429, 221,
+2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377,
+ 644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448,
+ 915, 489,2449,1514,1184,2450,2451, 515, 64, 427, 495,2452, 583,2453, 483, 485,
+1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705,
+1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465,
+ 291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471,
+2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997,
+2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486,
+ 797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187, 65,2494,
+ 434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771,
+ 585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323,
+2499,2500, 49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491,
+ 95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510,
+ 161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519,
+2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532,
+2533, 25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199,
+ 704, 504, 468, 758, 657,1528, 196, 44, 839,1246, 272, 750,2543, 765, 862,2544,
+2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247,
+1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441,
+ 249,1075,2556,2557,2558, 466, 743,2559,2560,2561, 92, 514, 426, 420, 526,2562,
+2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362,
+2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583,
+2584,1532, 54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465,
+ 3, 458, 9, 38,2588, 107, 110, 890, 209, 26, 737, 498,2589,1534,2590, 431,
+ 202, 88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151,
+ 974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596,
+2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601, 94, 175, 197, 406,
+2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611,
+2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619,
+1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628,
+2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042,
+ 670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642, # 512, 256
+)
+
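Note: a sketch of how this table is consumed (simplified from chardet's CharDistributionAnalysis, which is not in this hunk): each two-byte character is mapped to its frequency rank, ranks below 512 count as "frequent", and confidence grows with the frequent-to-rare ratio scaled by EUCKR_TYPICAL_DISTRIBUTION_RATIO:

    def confidence(freq_orders, typical_ratio=6.0):
        freq = sum(1 for order in freq_orders if 0 <= order < 512)  # top-512 chars
        total = len(freq_orders)
        if total <= freq:
            return 0.99
        return min(freq / ((total - freq) * typical_ratio), 0.99)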
diff --git a/venv/Lib/site-packages/requests/packages/chardet/euckrprober.py b/venv/Lib/site-packages/chardet/euckrprober.py
similarity index 82%
rename from venv/Lib/site-packages/requests/packages/chardet/euckrprober.py
rename to venv/Lib/site-packages/chardet/euckrprober.py
index 5982a46b6..345a060d0 100644
--- a/venv/Lib/site-packages/requests/packages/chardet/euckrprober.py
+++ b/venv/Lib/site-packages/chardet/euckrprober.py
@@ -28,15 +28,20 @@
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import EUCKRDistributionAnalysis
-from .mbcssm import EUCKRSMModel
+from .mbcssm import EUCKR_SM_MODEL
class EUCKRProber(MultiByteCharSetProber):
def __init__(self):
- MultiByteCharSetProber.__init__(self)
- self._mCodingSM = CodingStateMachine(EUCKRSMModel)
- self._mDistributionAnalyzer = EUCKRDistributionAnalysis()
+ super(EUCKRProber, self).__init__()
+ self.coding_sm = CodingStateMachine(EUCKR_SM_MODEL)
+ self.distribution_analyzer = EUCKRDistributionAnalysis()
self.reset()
- def get_charset_name(self):
+ @property
+ def charset_name(self):
return "EUC-KR"
+
+ @property
+ def language(self):
+ return "Korean"
diff --git a/venv/Lib/site-packages/requests/packages/chardet/euctwfreq.py b/venv/Lib/site-packages/chardet/euctwfreq.py
similarity index 66%
rename from venv/Lib/site-packages/requests/packages/chardet/euctwfreq.py
rename to venv/Lib/site-packages/chardet/euctwfreq.py
index 576e7504d..ed7a995a3 100644
--- a/venv/Lib/site-packages/requests/packages/chardet/euctwfreq.py
+++ b/venv/Lib/site-packages/chardet/euctwfreq.py
@@ -44,385 +44,344 @@
EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75
 # Char to FreqOrder table
-EUCTW_TABLE_SIZE = 8102
+EUCTW_TABLE_SIZE = 5376
-EUCTWCharToFreqOrder = (
- 1,1800,1506, 255,1431, 198, 9, 82, 6,7310, 177, 202,3615,1256,2808, 110, # 2742
-3735, 33,3241, 261, 76, 44,2113, 16,2931,2184,1176, 659,3868, 26,3404,2643, # 2758
-1198,3869,3313,4060, 410,2211, 302, 590, 361,1963, 8, 204, 58,4296,7311,1931, # 2774
- 63,7312,7313, 317,1614, 75, 222, 159,4061,2412,1480,7314,3500,3068, 224,2809, # 2790
-3616, 3, 10,3870,1471, 29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315, # 2806
-4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932, 34,3501,3173, 64, 604, # 2822
-7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337, 72, 406,7319, 80, # 2838
- 630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449, 69,2969, 591, # 2854
- 179,2095, 471, 115,2034,1843, 60, 50,2970, 134, 806,1868, 734,2035,3407, 180, # 2870
- 995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359, # 2886
-2495, 90,2707,1338, 663, 11, 906,1099,2545, 20,2436, 182, 532,1716,7321, 732, # 2902
-1376,4062,1311,1420,3175, 25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529, # 2918
-3243, 475,1447,3617,7322, 117, 21, 656, 810,1297,2295,2329,3502,7323, 126,4063, # 2934
- 706, 456, 150, 613,4299, 71,1118,2036,4064, 145,3069, 85, 835, 486,2114,1246, # 2950
-1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221, # 2966
-3503,3110,7325,1955,1153,4065, 83, 296,1199,3070, 192, 624, 93,7326, 822,1897, # 2982
-2810,3111, 795,2064, 991,1554,1542,1592, 27, 43,2853, 859, 139,1456, 860,4300, # 2998
- 437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618, # 3014
-3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228, # 3030
-1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077, # 3046
-7328,7329,2173,3176,3619,2673, 593, 845,1062,3244, 88,1723,2037,3875,1950, 212, # 3062
- 266, 152, 149, 468,1898,4066,4302, 77, 187,7330,3018, 37, 5,2972,7331,3876, # 3078
-7332,7333, 39,2517,4303,2894,3177,2078, 55, 148, 74,4304, 545, 483,1474,1029, # 3094
-1665, 217,1869,1531,3113,1104,2645,4067, 24, 172,3507, 900,3877,3508,3509,4305, # 3110
- 32,1408,2811,1312, 329, 487,2355,2247,2708, 784,2674, 4,3019,3314,1427,1788, # 3126
- 188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520, # 3142
-3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 439, 38,7339,1063,7340, 794, # 3158
-3879,1435,2296, 46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804, 35, 707, # 3174
- 324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409, # 3190
-2128,1363,3623,1423, 697, 100,3071, 48, 70,1231, 495,3114,2193,7345,1294,7346, # 3206
-2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411, # 3222
- 314,2615,2775,4308,2330,2331, 569,2280, 637,1816,2518, 757,1162,1878,1616,3412, # 3238
- 287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933, # 3254
-3738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895, # 3270
-1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369, # 3286
-1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000, # 3302
-1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381, 7, # 3318
-2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313, # 3334
- 265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513, # 3350
-4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647, # 3366
-1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357, # 3382
-7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438, # 3398
-2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978, # 3414
- 383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210, # 3430
- 98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642, # 3446
- 523,2776,2777,2648,7364, 141,2231,1333, 68, 176, 441, 876, 907,4077, 603,2592, # 3462
- 710, 171,3417, 404, 549, 18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320, # 3478
-7366,2973, 368,7367, 146, 366, 99, 871,3627,1543, 748, 807,1586,1185, 22,2258, # 3494
- 379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702, # 3510
-1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371, 59,7372, # 3526
- 585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836, # 3542
- 690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629, # 3558
-7375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686, # 3574
-1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496, # 3590
- 544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560, # 3606
-3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496, # 3622
-4081, 57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082, # 3638
-3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083, # 3654
- 279,3120, 51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264, # 3670
- 610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411, # 3686
-1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483, # 3702
-4084,2468,1436, 953,4085,2054,4331, 671,2395, 79,4086,2441,3252, 608, 567,2680, # 3718
-3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672, # 3734
-3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681, # 3750
-2397,7400,7401,7402,4089,3025, 0,7403,2469, 315, 231,2442, 301,3319,4335,2380, # 3766
-7404, 233,4090,3631,1818,4336,4337,7405, 96,1776,1315,2082,7406, 257,7407,1809, # 3782
-3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183, # 3798
-7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934, # 3814
-1484,7413,1712, 127, 67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351, # 3830
-2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545, # 3846
-1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358, # 3862
- 78,3750,3751, 267,1289,2099,2001,1594,4094, 348, 369,1274,2194,2175,1837,4338, # 3878
-1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423, # 3894
-4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859, # 3910
-3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636, # 3926
- 534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344, # 3942
- 165, 243,4345,3637,2521, 123, 683,4096, 764,4346, 36,3895,1792, 589,2902, 816, # 3958
- 626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891, # 3974
-2083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662, # 3990
-7425, 611,1156, 854,2381,1316,2861, 2, 386, 515,2904,7426,7427,3253, 868,2234, # 4006
-1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431, # 4022
-2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676, # 4038
-1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437, # 4054
-1993,7438,4350,7439,7440,2195, 13,2779,3638,2980,3124,1229,1916,7441,3756,2131, # 4070
-7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307, # 4086
-7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519, # 4102
-7452, 128,2132, 92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980, # 4118
-3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401, # 4134
-4353,2248, 94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101, # 4150
-1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937, # 4166
-7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466, # 4182
-2332,2067, 23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526, # 4198
-7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598, # 4214
-3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471, # 4230
-3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863, 41,7473, # 4246
-7474,4361,7475,1657,2333, 19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323, # 4262
-2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416, # 4278
-7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427, # 4294
- 862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110, # 4310
-4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485, # 4326
-2683, 733, 40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428, # 4342
-7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907, # 4358
-3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901, # 4374
-2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870, # 4390
-2752,2986,7490, 435,7491, 343,1108, 596, 17,1751,4365,2235,3430,3643,7492,4366, # 4406
- 294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031, # 4422
-2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240, # 4438
-1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521, # 4454
-1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673, # 4470
-2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260, # 4486
-1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619, # 4502
-7505,3129,3261, 215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506, # 4518
-7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382, # 4534
-2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324, # 4550
-4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384, # 4566
-1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551, 30,2263,4122, # 4582
-7511,2026,4386,3534,7512, 501,7513,4123, 594,3431,2165,1821,3535,3432,3536,3192, # 4598
- 829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388, # 4614
-4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129, # 4630
- 375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523, # 4646
-2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692, # 4662
- 444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915, # 4678
-1041,2987, 293,1168, 87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219, # 4694
-1478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825, # 4710
- 730,1515, 184,2827, 66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975, # 4726
-3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394, # 4742
-3918,7535,7536,1186, 15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758, # 4758
-1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434, # 4774
-3541,1342,1681,1718, 766,3264, 286, 89,2946,3649,7540,1713,7541,2597,3334,2990, # 4790
-7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335, # 4806
-7544,3265, 310, 313,3435,2299, 770,4134, 54,3034, 189,4397,3082,3769,3922,7545, # 4822
-1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137, # 4838
-2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471, # 4854
-1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555, # 4870
-3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139, # 4886
-2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729, # 4902
-3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482, # 4918
-2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652, # 4934
-4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867, # 4950
-4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499, # 4966
-3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250, # 4982
- 97, 81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830, # 4998
-3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188, # 5014
- 424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408, # 5030
-3337,3270, 220,2557,1009,7572,3776, 670,2992, 332,1208, 717,7573,7574,3548,2447, # 5046
-3929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527, # 5062
-3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932, # 5078
-1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411, # 5094
-7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270, # 5110
- 199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589, # 5126
-7590, 587, 14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591, # 5142
-1702,1226, 102,1547, 62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756, # 5158
- 391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145, # 5174
-4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598, 86,1494,1730, # 5190
-3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069, # 5206
- 397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938, # 5222
-2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625, # 5238
-2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885, 28,2686, # 5254
-3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797, # 5270
-1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958, # 5286
-4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528, # 5302
-2600, 161,1178,4156,1982, 987,4423,1101,4157, 631,3943,1157,3198,2420,1343,1241, # 5318
-1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169, # 5334
-1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540, # 5350
-2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342, # 5366
-3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425, # 5382
-1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427, # 5398
-7617,3446,7618,7619,7620,3277,2689,1433,3278, 131, 95,1504,3946, 723,4159,3141, # 5414
-1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949, # 5430
-4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654, 53,7624,2996,7625, # 5446
-1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202, # 5462
- 135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640, # 5478
-1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936, # 5494
-3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955, # 5510
-3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910, # 5526
-2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325, # 5542
-1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024, # 5558
-4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340, # 5574
- 660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918, # 5590
-7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439, # 5606
-2317,3283,7650,7651,4164,7652,4165, 84,4166, 112, 989,7653, 547,1059,3961, 701, # 5622
-3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494, # 5638
-4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285, # 5654
- 790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077, # 5670
-7662,3349,3041,3451, 511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443, # 5686
-7663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169, # 5702
-1238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906, # 5718
-4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968, # 5734
-3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804, # 5750
-2690,1516,3559,1121,1082,1329,3284,3970,1449,3794, 65,1128,2835,2913,2759,1590, # 5766
-3795,7674,7675, 12,2658, 45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676, # 5782
-3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680, # 5798
-2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285, # 5814
-1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687, # 5830
-4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454, # 5846
-3670,1858, 91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403, # 5862
-3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973, # 5878
-2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454, # 5894
-4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761, 61,3976,3672,1822,3977, # 5910
-7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695, # 5926
-3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945, # 5942
-2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460, # 5958
-3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179, # 5974
-1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706, # 5990
-2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982, # 6006
-3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183, # 6022
-4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043, 56,1396,3090, # 6038
-2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717, # 6054
-2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985, # 6070
-7722,1076, 49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184, # 6086
-1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472, # 6102
-2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351, # 6118
-1618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714, # 6134
-3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404, # 6150
-4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629, 31,2838, # 6166
-2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620, # 6182
-3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738, # 6198
-3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869, # 6214
-2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558, # 6230
-4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107, # 6246
-2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216, # 6262
-3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984, # 6278
-4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705, # 6294
-7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687, # 6310
-3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840, # 6326
- 194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521, # 6342
-1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412, 42,3096, 464,7759,2632, # 6358
-4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295, # 6374
-1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765, # 6390
-4487,7766,3002, 962, 588,3574, 289,3219,2634,1116, 52,7767,3047,1796,7768,7769, # 6406
-7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572, # 6422
- 510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776, # 6438
-7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911, # 6454
-2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693, # 6470
-1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672, # 6486
-1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013, # 6502
-3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816, # 6518
- 509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010, # 6534
- 552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175, # 6550
- 478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473, # 6566
-3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298, # 6582
-2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359, # 6598
- 751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805, # 6614
-7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807, # 6630
-1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810, # 6646
-3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812, # 6662
-7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814, # 6678
-1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818, # 6694
-7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821, # 6710
-4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877, # 6726
-1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702, # 6742
-2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813, # 6758
-2253, 574,3822,1603, 295,1535, 705,3823,4219, 283, 858, 417,7828,7829,3224,4503, # 6774
-4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484, # 6790
- 802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833, # 6806
- 809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457, # 6822
-3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704, # 6838
-3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878, # 6854
-1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508, # 6870
-2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451, # 6886
-7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509, # 6902
-1561,2664,1452,4010,1375,7855,7856, 47,2959, 316,7857,1406,1591,2923,3156,7858, # 6918
-1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428, # 6934
-3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800, # 6950
- 919,2347,2960,2348,1270,4511,4012, 73,7862,7863, 647,7864,3228,2843,2255,1550, # 6966
-1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347, # 6982
-4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515, # 6998
-7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665, # 7014
-2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518, # 7030
-3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833, # 7046
- 516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961, # 7062
-1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508, # 7078
-2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482, # 7094
-2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098, # 7110
-7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483, # 7126
-7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834, # 7142
-7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904, # 7158
-2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724, # 7174
-2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910, # 7190
-1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701, # 7206
-4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062, # 7222
-3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922, # 7238
-3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925, # 7254
-4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248, # 7270
-4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487, # 7286
-2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015, # 7302
-2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935, # 7318
-7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104, # 7334
-4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580, # 7350
-7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380, # 7366
-2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951, # 7382
-1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948, # 7398
-3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488, # 7414
-4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737, # 7430
-2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017, # 7446
- 120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047, # 7462
-2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967, # 7478
-1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385, # 7494
-2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975, # 7510
-2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979, # 7526
-4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982, # 7542
-7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306, # 7558
-1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270, # 7574
-3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 538,3065,1012, # 7590
-7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236, # 7606
-1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550, # 7622
-8000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746, # 7638
-2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066, # 7654
-8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977, # 7670
-2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009, # 7686
-2328,3852, 533,4273,3605,2181, 617,8010,2464,3308,3492,2310,8011,8012,3165,8013, # 7702
-8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552, # 7718
-8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023, # 7734
-8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143, # 7750
- 408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278, # 7766
-8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698, # 7782
-4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706, # 7798
-3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859, # 7814
-8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344, # 7830
-1173, 288,2311, 454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894, # 7846
-8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194, # 7862
- 425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760, # 7878
-1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210, # 7894
- 479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642, # 7910
-4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013, # 7926
-1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889, # 7942
-4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239, # 7958
-1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240, # 7974
- 433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083, # 7990
-3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088, # 8006
-4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094, # 8022
-8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101, # 8038
- 938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104, # 8054
-3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015, # 8070
- 890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941, # 8086
-2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118, # 8102
-#Everything below is of no interest for detection purpose
-2515,1613,4582,8119,3312,3866,2516,8120,4058,8121,1637,4059,2466,4583,3867,8122, # 8118
-2493,3016,3734,8123,8124,2192,8125,8126,2162,8127,8128,8129,8130,8131,8132,8133, # 8134
-8134,8135,8136,8137,8138,8139,8140,8141,8142,8143,8144,8145,8146,8147,8148,8149, # 8150
-8150,8151,8152,8153,8154,8155,8156,8157,8158,8159,8160,8161,8162,8163,8164,8165, # 8166
-8166,8167,8168,8169,8170,8171,8172,8173,8174,8175,8176,8177,8178,8179,8180,8181, # 8182
-8182,8183,8184,8185,8186,8187,8188,8189,8190,8191,8192,8193,8194,8195,8196,8197, # 8198
-8198,8199,8200,8201,8202,8203,8204,8205,8206,8207,8208,8209,8210,8211,8212,8213, # 8214
-8214,8215,8216,8217,8218,8219,8220,8221,8222,8223,8224,8225,8226,8227,8228,8229, # 8230
-8230,8231,8232,8233,8234,8235,8236,8237,8238,8239,8240,8241,8242,8243,8244,8245, # 8246
-8246,8247,8248,8249,8250,8251,8252,8253,8254,8255,8256,8257,8258,8259,8260,8261, # 8262
-8262,8263,8264,8265,8266,8267,8268,8269,8270,8271,8272,8273,8274,8275,8276,8277, # 8278
-8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,8288,8289,8290,8291,8292,8293, # 8294
-8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,8304,8305,8306,8307,8308,8309, # 8310
-8310,8311,8312,8313,8314,8315,8316,8317,8318,8319,8320,8321,8322,8323,8324,8325, # 8326
-8326,8327,8328,8329,8330,8331,8332,8333,8334,8335,8336,8337,8338,8339,8340,8341, # 8342
-8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,8352,8353,8354,8355,8356,8357, # 8358
-8358,8359,8360,8361,8362,8363,8364,8365,8366,8367,8368,8369,8370,8371,8372,8373, # 8374
-8374,8375,8376,8377,8378,8379,8380,8381,8382,8383,8384,8385,8386,8387,8388,8389, # 8390
-8390,8391,8392,8393,8394,8395,8396,8397,8398,8399,8400,8401,8402,8403,8404,8405, # 8406
-8406,8407,8408,8409,8410,8411,8412,8413,8414,8415,8416,8417,8418,8419,8420,8421, # 8422
-8422,8423,8424,8425,8426,8427,8428,8429,8430,8431,8432,8433,8434,8435,8436,8437, # 8438
-8438,8439,8440,8441,8442,8443,8444,8445,8446,8447,8448,8449,8450,8451,8452,8453, # 8454
-8454,8455,8456,8457,8458,8459,8460,8461,8462,8463,8464,8465,8466,8467,8468,8469, # 8470
-8470,8471,8472,8473,8474,8475,8476,8477,8478,8479,8480,8481,8482,8483,8484,8485, # 8486
-8486,8487,8488,8489,8490,8491,8492,8493,8494,8495,8496,8497,8498,8499,8500,8501, # 8502
-8502,8503,8504,8505,8506,8507,8508,8509,8510,8511,8512,8513,8514,8515,8516,8517, # 8518
-8518,8519,8520,8521,8522,8523,8524,8525,8526,8527,8528,8529,8530,8531,8532,8533, # 8534
-8534,8535,8536,8537,8538,8539,8540,8541,8542,8543,8544,8545,8546,8547,8548,8549, # 8550
-8550,8551,8552,8553,8554,8555,8556,8557,8558,8559,8560,8561,8562,8563,8564,8565, # 8566
-8566,8567,8568,8569,8570,8571,8572,8573,8574,8575,8576,8577,8578,8579,8580,8581, # 8582
-8582,8583,8584,8585,8586,8587,8588,8589,8590,8591,8592,8593,8594,8595,8596,8597, # 8598
-8598,8599,8600,8601,8602,8603,8604,8605,8606,8607,8608,8609,8610,8611,8612,8613, # 8614
-8614,8615,8616,8617,8618,8619,8620,8621,8622,8623,8624,8625,8626,8627,8628,8629, # 8630
-8630,8631,8632,8633,8634,8635,8636,8637,8638,8639,8640,8641,8642,8643,8644,8645, # 8646
-8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,8656,8657,8658,8659,8660,8661, # 8662
-8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,8672,8673,8674,8675,8676,8677, # 8678
-8678,8679,8680,8681,8682,8683,8684,8685,8686,8687,8688,8689,8690,8691,8692,8693, # 8694
-8694,8695,8696,8697,8698,8699,8700,8701,8702,8703,8704,8705,8706,8707,8708,8709, # 8710
-8710,8711,8712,8713,8714,8715,8716,8717,8718,8719,8720,8721,8722,8723,8724,8725, # 8726
-8726,8727,8728,8729,8730,8731,8732,8733,8734,8735,8736,8737,8738,8739,8740,8741) # 8742
+EUCTW_CHAR_TO_FREQ_ORDER = (
+ 1,1800,1506, 255,1431, 198, 9, 82, 6,7310, 177, 202,3615,1256,2808, 110, # 2742
+3735, 33,3241, 261, 76, 44,2113, 16,2931,2184,1176, 659,3868, 26,3404,2643, # 2758
+1198,3869,3313,4060, 410,2211, 302, 590, 361,1963, 8, 204, 58,4296,7311,1931, # 2774
+ 63,7312,7313, 317,1614, 75, 222, 159,4061,2412,1480,7314,3500,3068, 224,2809, # 2790
+3616, 3, 10,3870,1471, 29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315, # 2806
+4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932, 34,3501,3173, 64, 604, # 2822
+7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337, 72, 406,7319, 80, # 2838
+ 630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449, 69,2969, 591, # 2854
+ 179,2095, 471, 115,2034,1843, 60, 50,2970, 134, 806,1868, 734,2035,3407, 180, # 2870
+ 995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359, # 2886
+2495, 90,2707,1338, 663, 11, 906,1099,2545, 20,2436, 182, 532,1716,7321, 732, # 2902
+1376,4062,1311,1420,3175, 25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529, # 2918
+3243, 475,1447,3617,7322, 117, 21, 656, 810,1297,2295,2329,3502,7323, 126,4063, # 2934
+ 706, 456, 150, 613,4299, 71,1118,2036,4064, 145,3069, 85, 835, 486,2114,1246, # 2950
+1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221, # 2966
+3503,3110,7325,1955,1153,4065, 83, 296,1199,3070, 192, 624, 93,7326, 822,1897, # 2982
+2810,3111, 795,2064, 991,1554,1542,1592, 27, 43,2853, 859, 139,1456, 860,4300, # 2998
+ 437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618, # 3014
+3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228, # 3030
+1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077, # 3046
+7328,7329,2173,3176,3619,2673, 593, 845,1062,3244, 88,1723,2037,3875,1950, 212, # 3062
+ 266, 152, 149, 468,1898,4066,4302, 77, 187,7330,3018, 37, 5,2972,7331,3876, # 3078
+7332,7333, 39,2517,4303,2894,3177,2078, 55, 148, 74,4304, 545, 483,1474,1029, # 3094
+1665, 217,1869,1531,3113,1104,2645,4067, 24, 172,3507, 900,3877,3508,3509,4305, # 3110
+ 32,1408,2811,1312, 329, 487,2355,2247,2708, 784,2674, 4,3019,3314,1427,1788, # 3126
+ 188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520, # 3142
+3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 439, 38,7339,1063,7340, 794, # 3158
+3879,1435,2296, 46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804, 35, 707, # 3174
+ 324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409, # 3190
+2128,1363,3623,1423, 697, 100,3071, 48, 70,1231, 495,3114,2193,7345,1294,7346, # 3206
+2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411, # 3222
+ 314,2615,2775,4308,2330,2331, 569,2280, 637,1816,2518, 757,1162,1878,1616,3412, # 3238
+ 287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933, # 3254
+3738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895, # 3270
+1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369, # 3286
+1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000, # 3302
+1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381, 7, # 3318
+2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313, # 3334
+ 265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513, # 3350
+4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647, # 3366
+1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357, # 3382
+7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438, # 3398
+2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978, # 3414
+ 383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210, # 3430
+ 98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642, # 3446
+ 523,2776,2777,2648,7364, 141,2231,1333, 68, 176, 441, 876, 907,4077, 603,2592, # 3462
+ 710, 171,3417, 404, 549, 18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320, # 3478
+7366,2973, 368,7367, 146, 366, 99, 871,3627,1543, 748, 807,1586,1185, 22,2258, # 3494
+ 379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702, # 3510
+1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371, 59,7372, # 3526
+ 585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836, # 3542
+ 690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629, # 3558
+7375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686, # 3574
+1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496, # 3590
+ 544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560, # 3606
+3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496, # 3622
+4081, 57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082, # 3638
+3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083, # 3654
+ 279,3120, 51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264, # 3670
+ 610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411, # 3686
+1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483, # 3702
+4084,2468,1436, 953,4085,2054,4331, 671,2395, 79,4086,2441,3252, 608, 567,2680, # 3718
+3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672, # 3734
+3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681, # 3750
+2397,7400,7401,7402,4089,3025, 0,7403,2469, 315, 231,2442, 301,3319,4335,2380, # 3766
+7404, 233,4090,3631,1818,4336,4337,7405, 96,1776,1315,2082,7406, 257,7407,1809, # 3782
+3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183, # 3798
+7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934, # 3814
+1484,7413,1712, 127, 67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351, # 3830
+2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545, # 3846
+1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358, # 3862
+ 78,3750,3751, 267,1289,2099,2001,1594,4094, 348, 369,1274,2194,2175,1837,4338, # 3878
+1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423, # 3894
+4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859, # 3910
+3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636, # 3926
+ 534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344, # 3942
+ 165, 243,4345,3637,2521, 123, 683,4096, 764,4346, 36,3895,1792, 589,2902, 816, # 3958
+ 626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891, # 3974
+2083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662, # 3990
+7425, 611,1156, 854,2381,1316,2861, 2, 386, 515,2904,7426,7427,3253, 868,2234, # 4006
+1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431, # 4022
+2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676, # 4038
+1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437, # 4054
+1993,7438,4350,7439,7440,2195, 13,2779,3638,2980,3124,1229,1916,7441,3756,2131, # 4070
+7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307, # 4086
+7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519, # 4102
+7452, 128,2132, 92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980, # 4118
+3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401, # 4134
+4353,2248, 94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101, # 4150
+1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937, # 4166
+7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466, # 4182
+2332,2067, 23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526, # 4198
+7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598, # 4214
+3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471, # 4230
+3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863, 41,7473, # 4246
+7474,4361,7475,1657,2333, 19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323, # 4262
+2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416, # 4278
+7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427, # 4294
+ 862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110, # 4310
+4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485, # 4326
+2683, 733, 40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428, # 4342
+7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907, # 4358
+3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901, # 4374
+2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870, # 4390
+2752,2986,7490, 435,7491, 343,1108, 596, 17,1751,4365,2235,3430,3643,7492,4366, # 4406
+ 294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031, # 4422
+2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240, # 4438
+1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521, # 4454
+1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673, # 4470
+2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260, # 4486
+1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619, # 4502
+7505,3129,3261, 215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506, # 4518
+7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382, # 4534
+2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324, # 4550
+4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384, # 4566
+1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551, 30,2263,4122, # 4582
+7511,2026,4386,3534,7512, 501,7513,4123, 594,3431,2165,1821,3535,3432,3536,3192, # 4598
+ 829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388, # 4614
+4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129, # 4630
+ 375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523, # 4646
+2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692, # 4662
+ 444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915, # 4678
+1041,2987, 293,1168, 87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219, # 4694
+1478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825, # 4710
+ 730,1515, 184,2827, 66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975, # 4726
+3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394, # 4742
+3918,7535,7536,1186, 15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758, # 4758
+1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434, # 4774
+3541,1342,1681,1718, 766,3264, 286, 89,2946,3649,7540,1713,7541,2597,3334,2990, # 4790
+7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335, # 4806
+7544,3265, 310, 313,3435,2299, 770,4134, 54,3034, 189,4397,3082,3769,3922,7545, # 4822
+1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137, # 4838
+2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471, # 4854
+1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555, # 4870
+3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139, # 4886
+2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729, # 4902
+3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482, # 4918
+2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652, # 4934
+4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867, # 4950
+4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499, # 4966
+3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250, # 4982
+ 97, 81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830, # 4998
+3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188, # 5014
+ 424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408, # 5030
+3337,3270, 220,2557,1009,7572,3776, 670,2992, 332,1208, 717,7573,7574,3548,2447, # 5046
+3929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527, # 5062
+3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932, # 5078
+1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411, # 5094
+7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270, # 5110
+ 199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589, # 5126
+7590, 587, 14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591, # 5142
+1702,1226, 102,1547, 62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756, # 5158
+ 391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145, # 5174
+4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598, 86,1494,1730, # 5190
+3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069, # 5206
+ 397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938, # 5222
+2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625, # 5238
+2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885, 28,2686, # 5254
+3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797, # 5270
+1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958, # 5286
+4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528, # 5302
+2600, 161,1178,4156,1982, 987,4423,1101,4157, 631,3943,1157,3198,2420,1343,1241, # 5318
+1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169, # 5334
+1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540, # 5350
+2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342, # 5366
+3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425, # 5382
+1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427, # 5398
+7617,3446,7618,7619,7620,3277,2689,1433,3278, 131, 95,1504,3946, 723,4159,3141, # 5414
+1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949, # 5430
+4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654, 53,7624,2996,7625, # 5446
+1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202, # 5462
+ 135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640, # 5478
+1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936, # 5494
+3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955, # 5510
+3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910, # 5526
+2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325, # 5542
+1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024, # 5558
+4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340, # 5574
+ 660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918, # 5590
+7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439, # 5606
+2317,3283,7650,7651,4164,7652,4165, 84,4166, 112, 989,7653, 547,1059,3961, 701, # 5622
+3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494, # 5638
+4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285, # 5654
+ 790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077, # 5670
+7662,3349,3041,3451, 511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443, # 5686
+7663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169, # 5702
+1238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906, # 5718
+4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968, # 5734
+3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804, # 5750
+2690,1516,3559,1121,1082,1329,3284,3970,1449,3794, 65,1128,2835,2913,2759,1590, # 5766
+3795,7674,7675, 12,2658, 45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676, # 5782
+3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680, # 5798
+2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285, # 5814
+1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687, # 5830
+4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454, # 5846
+3670,1858, 91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403, # 5862
+3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973, # 5878
+2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454, # 5894
+4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761, 61,3976,3672,1822,3977, # 5910
+7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695, # 5926
+3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945, # 5942
+2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460, # 5958
+3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179, # 5974
+1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706, # 5990
+2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982, # 6006
+3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183, # 6022
+4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043, 56,1396,3090, # 6038
+2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717, # 6054
+2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985, # 6070
+7722,1076, 49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184, # 6086
+1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472, # 6102
+2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351, # 6118
+1618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714, # 6134
+3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404, # 6150
+4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629, 31,2838, # 6166
+2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620, # 6182
+3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738, # 6198
+3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869, # 6214
+2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558, # 6230
+4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107, # 6246
+2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216, # 6262
+3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984, # 6278
+4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705, # 6294
+7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687, # 6310
+3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840, # 6326
+ 194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521, # 6342
+1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412, 42,3096, 464,7759,2632, # 6358
+4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295, # 6374
+1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765, # 6390
+4487,7766,3002, 962, 588,3574, 289,3219,2634,1116, 52,7767,3047,1796,7768,7769, # 6406
+7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572, # 6422
+ 510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776, # 6438
+7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911, # 6454
+2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693, # 6470
+1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672, # 6486
+1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013, # 6502
+3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816, # 6518
+ 509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010, # 6534
+ 552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175, # 6550
+ 478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473, # 6566
+3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298, # 6582
+2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359, # 6598
+ 751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805, # 6614
+7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807, # 6630
+1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810, # 6646
+3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812, # 6662
+7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814, # 6678
+1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818, # 6694
+7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821, # 6710
+4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877, # 6726
+1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702, # 6742
+2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813, # 6758
+2253, 574,3822,1603, 295,1535, 705,3823,4219, 283, 858, 417,7828,7829,3224,4503, # 6774
+4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484, # 6790
+ 802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833, # 6806
+ 809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457, # 6822
+3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704, # 6838
+3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878, # 6854
+1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508, # 6870
+2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451, # 6886
+7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509, # 6902
+1561,2664,1452,4010,1375,7855,7856, 47,2959, 316,7857,1406,1591,2923,3156,7858, # 6918
+1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428, # 6934
+3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800, # 6950
+ 919,2347,2960,2348,1270,4511,4012, 73,7862,7863, 647,7864,3228,2843,2255,1550, # 6966
+1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347, # 6982
+4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515, # 6998
+7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665, # 7014
+2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518, # 7030
+3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833, # 7046
+ 516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961, # 7062
+1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508, # 7078
+2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482, # 7094
+2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098, # 7110
+7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483, # 7126
+7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834, # 7142
+7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904, # 7158
+2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724, # 7174
+2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910, # 7190
+1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701, # 7206
+4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062, # 7222
+3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922, # 7238
+3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925, # 7254
+4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248, # 7270
+4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487, # 7286
+2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015, # 7302
+2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935, # 7318
+7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104, # 7334
+4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580, # 7350
+7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380, # 7366
+2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951, # 7382
+1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948, # 7398
+3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488, # 7414
+4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737, # 7430
+2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017, # 7446
+ 120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047, # 7462
+2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967, # 7478
+1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385, # 7494
+2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975, # 7510
+2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979, # 7526
+4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982, # 7542
+7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306, # 7558
+1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270, # 7574
+3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 538,3065,1012, # 7590
+7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236, # 7606
+1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550, # 7622
+8000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746, # 7638
+2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066, # 7654
+8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977, # 7670
+2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009, # 7686
+2328,3852, 533,4273,3605,2181, 617,8010,2464,3308,3492,2310,8011,8012,3165,8013, # 7702
+8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552, # 7718
+8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023, # 7734
+8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143, # 7750
+ 408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278, # 7766
+8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698, # 7782
+4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706, # 7798
+3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859, # 7814
+8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344, # 7830
+1173, 288,2311, 454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894, # 7846
+8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194, # 7862
+ 425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760, # 7878
+1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210, # 7894
+ 479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642, # 7910
+4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013, # 7926
+1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889, # 7942
+4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239, # 7958
+1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240, # 7974
+ 433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083, # 7990
+3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088, # 8006
+4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094, # 8022
+8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101, # 8038
+ 938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104, # 8054
+3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015, # 8070
+ 890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941, # 8086
+2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118, # 8102
+)
-# flake8: noqa
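The hunk above is the tail of the euctwfreq.py rename: the table keeps its values, gains the PEP 8 name EUCTW_CHAR_TO_FREQ_ORDER, and loses the trailing block marked "of no interest for detection purpose". To make the role of these frequency tables concrete, here is a minimal sketch of how chardet's distribution analysis consumes one — simplified names and bookkeeping, not the exact upstream CharDistributionAnalysis:

    class DistributionSketch(object):
        def __init__(self, char_to_freq_order, table_size, typical_ratio):
            self._table = char_to_freq_order   # e.g. EUCTW_CHAR_TO_FREQ_ORDER
            self._table_size = table_size
            self._typical_ratio = typical_ratio
            self._total_chars = 0  # two-byte characters sampled so far
            self._freq_chars = 0   # of those, ranked in the top 512 by frequency

        def feed(self, order):
            # 'order' is the table index derived from one decoded two-byte char.
            if 0 <= order < self._table_size:
                self._total_chars += 1
                # Ranks of 512 and above never raise the score, which is why the
                # tail marked "of no interest for detection purpose" could go.
                if self._table[order] < 512:
                    self._freq_chars += 1

        def get_confidence(self):
            if self._total_chars == 0:
                return 0.01
            if self._freq_chars == self._total_chars:
                return 0.99
            r = self._freq_chars / float(
                (self._total_chars - self._freq_chars) * self._typical_ratio)
            return min(r, 0.99)

Text that really is in the target encoding puts most sampled characters into the low frequency ranks, so the ratio climbs toward the 0.99 ceiling; random bytes scatter across the table and keep it low.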
diff --git a/venv/Lib/site-packages/requests/packages/chardet/euctwprober.py b/venv/Lib/site-packages/chardet/euctwprober.py
similarity index 82%
rename from venv/Lib/site-packages/requests/packages/chardet/euctwprober.py
rename to venv/Lib/site-packages/chardet/euctwprober.py
index fe652fe37..35669cc4d 100644
--- a/venv/Lib/site-packages/requests/packages/chardet/euctwprober.py
+++ b/venv/Lib/site-packages/chardet/euctwprober.py
@@ -13,12 +13,12 @@
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
-#
+#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
-#
+#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
@@ -28,14 +28,19 @@
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import EUCTWDistributionAnalysis
-from .mbcssm import EUCTWSMModel
+from .mbcssm import EUCTW_SM_MODEL
class EUCTWProber(MultiByteCharSetProber):
def __init__(self):
- MultiByteCharSetProber.__init__(self)
- self._mCodingSM = CodingStateMachine(EUCTWSMModel)
- self._mDistributionAnalyzer = EUCTWDistributionAnalysis()
+ super(EUCTWProber, self).__init__()
+ self.coding_sm = CodingStateMachine(EUCTW_SM_MODEL)
+ self.distribution_analyzer = EUCTWDistributionAnalysis()
self.reset()
- def get_charset_name(self):
+ @property
+ def charset_name(self):
return "EUC-TW"
+
+ @property
+ def language(self):
+ return "Taiwan"
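Every prober file in this diff applies the same three-part modernization: the explicit MultiByteCharSetProber.__init__(self) call becomes super(...), the private _mCodingSM / _mDistributionAnalyzer attributes get the PEP 8 names coding_sm / distribution_analyzer, and the get_charset_name() accessor is replaced by read-only charset_name and language properties. A self-contained sketch of the pattern — the _StubBase and ExampleProber names below are invented stand-ins, not part of chardet:

    class _StubBase(object):              # stands in for MultiByteCharSetProber
        def __init__(self):
            self.coding_sm = None
            self.distribution_analyzer = None
        def reset(self):
            pass

    class ExampleProber(_StubBase):
        def __init__(self):
            super(ExampleProber, self).__init__()  # was _StubBase.__init__(self)
            self.coding_sm = object()              # was self._mCodingSM = ...
            self.distribution_analyzer = object()  # was self._mDistributionAnalyzer = ...
            self.reset()

        @property
        def charset_name(self):                    # was def get_charset_name(self)
            return "EXAMPLE"

        @property
        def language(self):                        # new in this chardet release
            return "Example"

    assert ExampleProber().charset_name == "EXAMPLE"  # attribute access, no call

Callers that used prober.get_charset_name() must switch to prober.charset_name, which is the main externally visible consequence of these hunks.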
diff --git a/venv/Lib/site-packages/requests/packages/chardet/gb2312freq.py b/venv/Lib/site-packages/chardet/gb2312freq.py
similarity index 57%
rename from venv/Lib/site-packages/requests/packages/chardet/gb2312freq.py
rename to venv/Lib/site-packages/chardet/gb2312freq.py
index 1238f510f..697837bd9 100644
--- a/venv/Lib/site-packages/requests/packages/chardet/gb2312freq.py
+++ b/venv/Lib/site-packages/chardet/gb2312freq.py
@@ -43,7 +43,7 @@ GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9
GB2312_TABLE_SIZE = 3760
-GB2312CharToFreqOrder = (
+GB2312_CHAR_TO_FREQ_ORDER = (
1671, 749,1443,2364,3924,3807,2330,3921,1704,3463,2691,1511,1515, 572,3191,2205,
2361, 224,2558, 479,1711, 963,3162, 440,4060,1905,2966,2947,3580,2647,3961,3842,
2204, 869,4207, 970,2678,5626,2944,2956,1479,4048, 514,3595, 588,1346,2820,3409,
@@ -278,195 +278,6 @@ GB2312CharToFreqOrder = (
1718,1717,2655,3453,3143,4465, 161,2889,2980,2009,1421, 56,1908,1640,2387,2232,
1917,1874,2477,4921, 148, 83,3438, 592,4245,2882,1822,1055, 741, 115,1496,1624,
381,1638,4592,1020, 516,3214, 458, 947,4575,1432, 211,1514,2926,1865,2142, 189,
- 852,1221,1400,1486, 882,2299,4036, 351, 28,1122, 700,6479,6480,6481,6482,6483, # last 512
-#Everything below is of no interest for detection purpose
-5508,6484,3900,3414,3974,4441,4024,3537,4037,5628,5099,3633,6485,3148,6486,3636,
-5509,3257,5510,5973,5445,5872,4941,4403,3174,4627,5873,6276,2286,4230,5446,5874,
-5122,6102,6103,4162,5447,5123,5323,4849,6277,3980,3851,5066,4246,5774,5067,6278,
-3001,2807,5695,3346,5775,5974,5158,5448,6487,5975,5976,5776,3598,6279,5696,4806,
-4211,4154,6280,6488,6489,6490,6281,4212,5037,3374,4171,6491,4562,4807,4722,4827,
-5977,6104,4532,4079,5159,5324,5160,4404,3858,5359,5875,3975,4288,4610,3486,4512,
-5325,3893,5360,6282,6283,5560,2522,4231,5978,5186,5449,2569,3878,6284,5401,3578,
-4415,6285,4656,5124,5979,2506,4247,4449,3219,3417,4334,4969,4329,6492,4576,4828,
-4172,4416,4829,5402,6286,3927,3852,5361,4369,4830,4477,4867,5876,4173,6493,6105,
-4657,6287,6106,5877,5450,6494,4155,4868,5451,3700,5629,4384,6288,6289,5878,3189,
-4881,6107,6290,6495,4513,6496,4692,4515,4723,5100,3356,6497,6291,3810,4080,5561,
-3570,4430,5980,6498,4355,5697,6499,4724,6108,6109,3764,4050,5038,5879,4093,3226,
-6292,5068,5217,4693,3342,5630,3504,4831,4377,4466,4309,5698,4431,5777,6293,5778,
-4272,3706,6110,5326,3752,4676,5327,4273,5403,4767,5631,6500,5699,5880,3475,5039,
-6294,5562,5125,4348,4301,4482,4068,5126,4593,5700,3380,3462,5981,5563,3824,5404,
-4970,5511,3825,4738,6295,6501,5452,4516,6111,5881,5564,6502,6296,5982,6503,4213,
-4163,3454,6504,6112,4009,4450,6113,4658,6297,6114,3035,6505,6115,3995,4904,4739,
-4563,4942,4110,5040,3661,3928,5362,3674,6506,5292,3612,4791,5565,4149,5983,5328,
-5259,5021,4725,4577,4564,4517,4364,6298,5405,4578,5260,4594,4156,4157,5453,3592,
-3491,6507,5127,5512,4709,4922,5984,5701,4726,4289,6508,4015,6116,5128,4628,3424,
-4241,5779,6299,4905,6509,6510,5454,5702,5780,6300,4365,4923,3971,6511,5161,3270,
-3158,5985,4100, 867,5129,5703,6117,5363,3695,3301,5513,4467,6118,6512,5455,4232,
-4242,4629,6513,3959,4478,6514,5514,5329,5986,4850,5162,5566,3846,4694,6119,5456,
-4869,5781,3779,6301,5704,5987,5515,4710,6302,5882,6120,4392,5364,5705,6515,6121,
-6516,6517,3736,5988,5457,5989,4695,2457,5883,4551,5782,6303,6304,6305,5130,4971,
-6122,5163,6123,4870,3263,5365,3150,4871,6518,6306,5783,5069,5706,3513,3498,4409,
-5330,5632,5366,5458,5459,3991,5990,4502,3324,5991,5784,3696,4518,5633,4119,6519,
-4630,5634,4417,5707,4832,5992,3418,6124,5993,5567,4768,5218,6520,4595,3458,5367,
-6125,5635,6126,4202,6521,4740,4924,6307,3981,4069,4385,6308,3883,2675,4051,3834,
-4302,4483,5568,5994,4972,4101,5368,6309,5164,5884,3922,6127,6522,6523,5261,5460,
-5187,4164,5219,3538,5516,4111,3524,5995,6310,6311,5369,3181,3386,2484,5188,3464,
-5569,3627,5708,6524,5406,5165,4677,4492,6312,4872,4851,5885,4468,5996,6313,5709,
-5710,6128,2470,5886,6314,5293,4882,5785,3325,5461,5101,6129,5711,5786,6525,4906,
-6526,6527,4418,5887,5712,4808,2907,3701,5713,5888,6528,3765,5636,5331,6529,6530,
-3593,5889,3637,4943,3692,5714,5787,4925,6315,6130,5462,4405,6131,6132,6316,5262,
-6531,6532,5715,3859,5716,5070,4696,5102,3929,5788,3987,4792,5997,6533,6534,3920,
-4809,5000,5998,6535,2974,5370,6317,5189,5263,5717,3826,6536,3953,5001,4883,3190,
-5463,5890,4973,5999,4741,6133,6134,3607,5570,6000,4711,3362,3630,4552,5041,6318,
-6001,2950,2953,5637,4646,5371,4944,6002,2044,4120,3429,6319,6537,5103,4833,6538,
-6539,4884,4647,3884,6003,6004,4758,3835,5220,5789,4565,5407,6540,6135,5294,4697,
-4852,6320,6321,3206,4907,6541,6322,4945,6542,6136,6543,6323,6005,4631,3519,6544,
-5891,6545,5464,3784,5221,6546,5571,4659,6547,6324,6137,5190,6548,3853,6549,4016,
-4834,3954,6138,5332,3827,4017,3210,3546,4469,5408,5718,3505,4648,5790,5131,5638,
-5791,5465,4727,4318,6325,6326,5792,4553,4010,4698,3439,4974,3638,4335,3085,6006,
-5104,5042,5166,5892,5572,6327,4356,4519,5222,5573,5333,5793,5043,6550,5639,5071,
-4503,6328,6139,6551,6140,3914,3901,5372,6007,5640,4728,4793,3976,3836,4885,6552,
-4127,6553,4451,4102,5002,6554,3686,5105,6555,5191,5072,5295,4611,5794,5296,6556,
-5893,5264,5894,4975,5466,5265,4699,4976,4370,4056,3492,5044,4886,6557,5795,4432,
-4769,4357,5467,3940,4660,4290,6141,4484,4770,4661,3992,6329,4025,4662,5022,4632,
-4835,4070,5297,4663,4596,5574,5132,5409,5895,6142,4504,5192,4664,5796,5896,3885,
-5575,5797,5023,4810,5798,3732,5223,4712,5298,4084,5334,5468,6143,4052,4053,4336,
-4977,4794,6558,5335,4908,5576,5224,4233,5024,4128,5469,5225,4873,6008,5045,4729,
-4742,4633,3675,4597,6559,5897,5133,5577,5003,5641,5719,6330,6560,3017,2382,3854,
-4406,4811,6331,4393,3964,4946,6561,2420,3722,6562,4926,4378,3247,1736,4442,6332,
-5134,6333,5226,3996,2918,5470,4319,4003,4598,4743,4744,4485,3785,3902,5167,5004,
-5373,4394,5898,6144,4874,1793,3997,6334,4085,4214,5106,5642,4909,5799,6009,4419,
-4189,3330,5899,4165,4420,5299,5720,5227,3347,6145,4081,6335,2876,3930,6146,3293,
-3786,3910,3998,5900,5300,5578,2840,6563,5901,5579,6147,3531,5374,6564,6565,5580,
-4759,5375,6566,6148,3559,5643,6336,6010,5517,6337,6338,5721,5902,3873,6011,6339,
-6567,5518,3868,3649,5722,6568,4771,4947,6569,6149,4812,6570,2853,5471,6340,6341,
-5644,4795,6342,6012,5723,6343,5724,6013,4349,6344,3160,6150,5193,4599,4514,4493,
-5168,4320,6345,4927,3666,4745,5169,5903,5005,4928,6346,5725,6014,4730,4203,5046,
-4948,3395,5170,6015,4150,6016,5726,5519,6347,5047,3550,6151,6348,4197,4310,5904,
-6571,5581,2965,6152,4978,3960,4291,5135,6572,5301,5727,4129,4026,5905,4853,5728,
-5472,6153,6349,4533,2700,4505,5336,4678,3583,5073,2994,4486,3043,4554,5520,6350,
-6017,5800,4487,6351,3931,4103,5376,6352,4011,4321,4311,4190,5136,6018,3988,3233,
-4350,5906,5645,4198,6573,5107,3432,4191,3435,5582,6574,4139,5410,6353,5411,3944,
-5583,5074,3198,6575,6354,4358,6576,5302,4600,5584,5194,5412,6577,6578,5585,5413,
-5303,4248,5414,3879,4433,6579,4479,5025,4854,5415,6355,4760,4772,3683,2978,4700,
-3797,4452,3965,3932,3721,4910,5801,6580,5195,3551,5907,3221,3471,3029,6019,3999,
-5908,5909,5266,5267,3444,3023,3828,3170,4796,5646,4979,4259,6356,5647,5337,3694,
-6357,5648,5338,4520,4322,5802,3031,3759,4071,6020,5586,4836,4386,5048,6581,3571,
-4679,4174,4949,6154,4813,3787,3402,3822,3958,3215,3552,5268,4387,3933,4950,4359,
-6021,5910,5075,3579,6358,4234,4566,5521,6359,3613,5049,6022,5911,3375,3702,3178,
-4911,5339,4521,6582,6583,4395,3087,3811,5377,6023,6360,6155,4027,5171,5649,4421,
-4249,2804,6584,2270,6585,4000,4235,3045,6156,5137,5729,4140,4312,3886,6361,4330,
-6157,4215,6158,3500,3676,4929,4331,3713,4930,5912,4265,3776,3368,5587,4470,4855,
-3038,4980,3631,6159,6160,4132,4680,6161,6362,3923,4379,5588,4255,6586,4121,6587,
-6363,4649,6364,3288,4773,4774,6162,6024,6365,3543,6588,4274,3107,3737,5050,5803,
-4797,4522,5589,5051,5730,3714,4887,5378,4001,4523,6163,5026,5522,4701,4175,2791,
-3760,6589,5473,4224,4133,3847,4814,4815,4775,3259,5416,6590,2738,6164,6025,5304,
-3733,5076,5650,4816,5590,6591,6165,6592,3934,5269,6593,3396,5340,6594,5804,3445,
-3602,4042,4488,5731,5732,3525,5591,4601,5196,6166,6026,5172,3642,4612,3202,4506,
-4798,6366,3818,5108,4303,5138,5139,4776,3332,4304,2915,3415,4434,5077,5109,4856,
-2879,5305,4817,6595,5913,3104,3144,3903,4634,5341,3133,5110,5651,5805,6167,4057,
-5592,2945,4371,5593,6596,3474,4182,6367,6597,6168,4507,4279,6598,2822,6599,4777,
-4713,5594,3829,6169,3887,5417,6170,3653,5474,6368,4216,2971,5228,3790,4579,6369,
-5733,6600,6601,4951,4746,4555,6602,5418,5475,6027,3400,4665,5806,6171,4799,6028,
-5052,6172,3343,4800,4747,5006,6370,4556,4217,5476,4396,5229,5379,5477,3839,5914,
-5652,5807,4714,3068,4635,5808,6173,5342,4192,5078,5419,5523,5734,6174,4557,6175,
-4602,6371,6176,6603,5809,6372,5735,4260,3869,5111,5230,6029,5112,6177,3126,4681,
-5524,5915,2706,3563,4748,3130,6178,4018,5525,6604,6605,5478,4012,4837,6606,4534,
-4193,5810,4857,3615,5479,6030,4082,3697,3539,4086,5270,3662,4508,4931,5916,4912,
-5811,5027,3888,6607,4397,3527,3302,3798,2775,2921,2637,3966,4122,4388,4028,4054,
-1633,4858,5079,3024,5007,3982,3412,5736,6608,3426,3236,5595,3030,6179,3427,3336,
-3279,3110,6373,3874,3039,5080,5917,5140,4489,3119,6374,5812,3405,4494,6031,4666,
-4141,6180,4166,6032,5813,4981,6609,5081,4422,4982,4112,3915,5653,3296,3983,6375,
-4266,4410,5654,6610,6181,3436,5082,6611,5380,6033,3819,5596,4535,5231,5306,5113,
-6612,4952,5918,4275,3113,6613,6376,6182,6183,5814,3073,4731,4838,5008,3831,6614,
-4888,3090,3848,4280,5526,5232,3014,5655,5009,5737,5420,5527,6615,5815,5343,5173,
-5381,4818,6616,3151,4953,6617,5738,2796,3204,4360,2989,4281,5739,5174,5421,5197,
-3132,5141,3849,5142,5528,5083,3799,3904,4839,5480,2880,4495,3448,6377,6184,5271,
-5919,3771,3193,6034,6035,5920,5010,6036,5597,6037,6378,6038,3106,5422,6618,5423,
-5424,4142,6619,4889,5084,4890,4313,5740,6620,3437,5175,5307,5816,4199,5198,5529,
-5817,5199,5656,4913,5028,5344,3850,6185,2955,5272,5011,5818,4567,4580,5029,5921,
-3616,5233,6621,6622,6186,4176,6039,6379,6380,3352,5200,5273,2908,5598,5234,3837,
-5308,6623,6624,5819,4496,4323,5309,5201,6625,6626,4983,3194,3838,4167,5530,5922,
-5274,6381,6382,3860,3861,5599,3333,4292,4509,6383,3553,5481,5820,5531,4778,6187,
-3955,3956,4324,4389,4218,3945,4325,3397,2681,5923,4779,5085,4019,5482,4891,5382,
-5383,6040,4682,3425,5275,4094,6627,5310,3015,5483,5657,4398,5924,3168,4819,6628,
-5925,6629,5532,4932,4613,6041,6630,4636,6384,4780,4204,5658,4423,5821,3989,4683,
-5822,6385,4954,6631,5345,6188,5425,5012,5384,3894,6386,4490,4104,6632,5741,5053,
-6633,5823,5926,5659,5660,5927,6634,5235,5742,5824,4840,4933,4820,6387,4859,5928,
-4955,6388,4143,3584,5825,5346,5013,6635,5661,6389,5014,5484,5743,4337,5176,5662,
-6390,2836,6391,3268,6392,6636,6042,5236,6637,4158,6638,5744,5663,4471,5347,3663,
-4123,5143,4293,3895,6639,6640,5311,5929,5826,3800,6189,6393,6190,5664,5348,3554,
-3594,4749,4603,6641,5385,4801,6043,5827,4183,6642,5312,5426,4761,6394,5665,6191,
-4715,2669,6643,6644,5533,3185,5427,5086,5930,5931,5386,6192,6044,6645,4781,4013,
-5745,4282,4435,5534,4390,4267,6045,5746,4984,6046,2743,6193,3501,4087,5485,5932,
-5428,4184,4095,5747,4061,5054,3058,3862,5933,5600,6646,5144,3618,6395,3131,5055,
-5313,6396,4650,4956,3855,6194,3896,5202,4985,4029,4225,6195,6647,5828,5486,5829,
-3589,3002,6648,6397,4782,5276,6649,6196,6650,4105,3803,4043,5237,5830,6398,4096,
-3643,6399,3528,6651,4453,3315,4637,6652,3984,6197,5535,3182,3339,6653,3096,2660,
-6400,6654,3449,5934,4250,4236,6047,6401,5831,6655,5487,3753,4062,5832,6198,6199,
-6656,3766,6657,3403,4667,6048,6658,4338,2897,5833,3880,2797,3780,4326,6659,5748,
-5015,6660,5387,4351,5601,4411,6661,3654,4424,5935,4339,4072,5277,4568,5536,6402,
-6662,5238,6663,5349,5203,6200,5204,6201,5145,4536,5016,5056,4762,5834,4399,4957,
-6202,6403,5666,5749,6664,4340,6665,5936,5177,5667,6666,6667,3459,4668,6404,6668,
-6669,4543,6203,6670,4276,6405,4480,5537,6671,4614,5205,5668,6672,3348,2193,4763,
-6406,6204,5937,5602,4177,5669,3419,6673,4020,6205,4443,4569,5388,3715,3639,6407,
-6049,4058,6206,6674,5938,4544,6050,4185,4294,4841,4651,4615,5488,6207,6408,6051,
-5178,3241,3509,5835,6208,4958,5836,4341,5489,5278,6209,2823,5538,5350,5206,5429,
-6675,4638,4875,4073,3516,4684,4914,4860,5939,5603,5389,6052,5057,3237,5490,3791,
-6676,6409,6677,4821,4915,4106,5351,5058,4243,5539,4244,5604,4842,4916,5239,3028,
-3716,5837,5114,5605,5390,5940,5430,6210,4332,6678,5540,4732,3667,3840,6053,4305,
-3408,5670,5541,6410,2744,5240,5750,6679,3234,5606,6680,5607,5671,3608,4283,4159,
-4400,5352,4783,6681,6411,6682,4491,4802,6211,6412,5941,6413,6414,5542,5751,6683,
-4669,3734,5942,6684,6415,5943,5059,3328,4670,4144,4268,6685,6686,6687,6688,4372,
-3603,6689,5944,5491,4373,3440,6416,5543,4784,4822,5608,3792,4616,5838,5672,3514,
-5391,6417,4892,6690,4639,6691,6054,5673,5839,6055,6692,6056,5392,6212,4038,5544,
-5674,4497,6057,6693,5840,4284,5675,4021,4545,5609,6418,4454,6419,6213,4113,4472,
-5314,3738,5087,5279,4074,5610,4959,4063,3179,4750,6058,6420,6214,3476,4498,4716,
-5431,4960,4685,6215,5241,6694,6421,6216,6695,5841,5945,6422,3748,5946,5179,3905,
-5752,5545,5947,4374,6217,4455,6423,4412,6218,4803,5353,6696,3832,5280,6219,4327,
-4702,6220,6221,6059,4652,5432,6424,3749,4751,6425,5753,4986,5393,4917,5948,5030,
-5754,4861,4733,6426,4703,6697,6222,4671,5949,4546,4961,5180,6223,5031,3316,5281,
-6698,4862,4295,4934,5207,3644,6427,5842,5950,6428,6429,4570,5843,5282,6430,6224,
-5088,3239,6060,6699,5844,5755,6061,6431,2701,5546,6432,5115,5676,4039,3993,3327,
-4752,4425,5315,6433,3941,6434,5677,4617,4604,3074,4581,6225,5433,6435,6226,6062,
-4823,5756,5116,6227,3717,5678,4717,5845,6436,5679,5846,6063,5847,6064,3977,3354,
-6437,3863,5117,6228,5547,5394,4499,4524,6229,4605,6230,4306,4500,6700,5951,6065,
-3693,5952,5089,4366,4918,6701,6231,5548,6232,6702,6438,4704,5434,6703,6704,5953,
-4168,6705,5680,3420,6706,5242,4407,6066,3812,5757,5090,5954,4672,4525,3481,5681,
-4618,5395,5354,5316,5955,6439,4962,6707,4526,6440,3465,4673,6067,6441,5682,6708,
-5435,5492,5758,5683,4619,4571,4674,4804,4893,4686,5493,4753,6233,6068,4269,6442,
-6234,5032,4705,5146,5243,5208,5848,6235,6443,4963,5033,4640,4226,6236,5849,3387,
-6444,6445,4436,4437,5850,4843,5494,4785,4894,6709,4361,6710,5091,5956,3331,6237,
-4987,5549,6069,6711,4342,3517,4473,5317,6070,6712,6071,4706,6446,5017,5355,6713,
-6714,4988,5436,6447,4734,5759,6715,4735,4547,4456,4754,6448,5851,6449,6450,3547,
-5852,5318,6451,6452,5092,4205,6716,6238,4620,4219,5611,6239,6072,4481,5760,5957,
-5958,4059,6240,6453,4227,4537,6241,5761,4030,4186,5244,5209,3761,4457,4876,3337,
-5495,5181,6242,5959,5319,5612,5684,5853,3493,5854,6073,4169,5613,5147,4895,6074,
-5210,6717,5182,6718,3830,6243,2798,3841,6075,6244,5855,5614,3604,4606,5496,5685,
-5118,5356,6719,6454,5960,5357,5961,6720,4145,3935,4621,5119,5962,4261,6721,6455,
-4786,5963,4375,4582,6245,6246,6247,6076,5437,4877,5856,3376,4380,6248,4160,6722,
-5148,6456,5211,6457,6723,4718,6458,6724,6249,5358,4044,3297,6459,6250,5857,5615,
-5497,5245,6460,5498,6725,6251,6252,5550,3793,5499,2959,5396,6461,6462,4572,5093,
-5500,5964,3806,4146,6463,4426,5762,5858,6077,6253,4755,3967,4220,5965,6254,4989,
-5501,6464,4352,6726,6078,4764,2290,5246,3906,5438,5283,3767,4964,2861,5763,5094,
-6255,6256,4622,5616,5859,5860,4707,6727,4285,4708,4824,5617,6257,5551,4787,5212,
-4965,4935,4687,6465,6728,6466,5686,6079,3494,4413,2995,5247,5966,5618,6729,5967,
-5764,5765,5687,5502,6730,6731,6080,5397,6467,4990,6258,6732,4538,5060,5619,6733,
-4719,5688,5439,5018,5149,5284,5503,6734,6081,4607,6259,5120,3645,5861,4583,6260,
-4584,4675,5620,4098,5440,6261,4863,2379,3306,4585,5552,5689,4586,5285,6735,4864,
-6736,5286,6082,6737,4623,3010,4788,4381,4558,5621,4587,4896,3698,3161,5248,4353,
-4045,6262,3754,5183,4588,6738,6263,6739,6740,5622,3936,6741,6468,6742,6264,5095,
-6469,4991,5968,6743,4992,6744,6083,4897,6745,4256,5766,4307,3108,3968,4444,5287,
-3889,4343,6084,4510,6085,4559,6086,4898,5969,6746,5623,5061,4919,5249,5250,5504,
-5441,6265,5320,4878,3242,5862,5251,3428,6087,6747,4237,5624,5442,6266,5553,4539,
-6748,2585,3533,5398,4262,6088,5150,4736,4438,6089,6267,5505,4966,6749,6268,6750,
-6269,5288,5554,3650,6090,6091,4624,6092,5690,6751,5863,4270,5691,4277,5555,5864,
-6752,5692,4720,4865,6470,5151,4688,4825,6753,3094,6754,6471,3235,4653,6755,5213,
-5399,6756,3201,4589,5865,4967,6472,5866,6473,5019,3016,6757,5321,4756,3957,4573,
-6093,4993,5767,4721,6474,6758,5625,6759,4458,6475,6270,6760,5556,4994,5214,5252,
-6271,3875,5768,6094,5034,5506,4376,5769,6761,2120,6476,5253,5770,6762,5771,5970,
-3990,5971,5557,5558,5772,6477,6095,2787,4641,5972,5121,6096,6097,6272,6763,3703,
-5867,5507,6273,4206,6274,4789,6098,6764,3619,3646,3833,3804,2394,3788,4936,3978,
-4866,4899,6099,6100,5559,6478,6765,3599,5868,6101,5869,5870,6275,6766,4527,6767)
+ 852,1221,1400,1486, 882,2299,4036, 351, 28,1122, 700,6479,6480,6481,6482,6483, #last 512
+)
-# flake8: noqa
diff --git a/venv/Lib/site-packages/requests/packages/chardet/gb2312prober.py b/venv/Lib/site-packages/chardet/gb2312prober.py
similarity index 81%
rename from venv/Lib/site-packages/requests/packages/chardet/gb2312prober.py
rename to venv/Lib/site-packages/chardet/gb2312prober.py
index 0325a2d86..8446d2dd9 100644
--- a/venv/Lib/site-packages/requests/packages/chardet/gb2312prober.py
+++ b/venv/Lib/site-packages/chardet/gb2312prober.py
@@ -13,12 +13,12 @@
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
-#
+#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
-#
+#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
@@ -28,14 +28,19 @@
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import GB2312DistributionAnalysis
-from .mbcssm import GB2312SMModel
+from .mbcssm import GB2312_SM_MODEL
class GB2312Prober(MultiByteCharSetProber):
def __init__(self):
- MultiByteCharSetProber.__init__(self)
- self._mCodingSM = CodingStateMachine(GB2312SMModel)
- self._mDistributionAnalyzer = GB2312DistributionAnalysis()
+ super(GB2312Prober, self).__init__()
+ self.coding_sm = CodingStateMachine(GB2312_SM_MODEL)
+ self.distribution_analyzer = GB2312DistributionAnalysis()
self.reset()
- def get_charset_name(self):
+ @property
+ def charset_name(self):
return "GB2312"
+
+ @property
+ def language(self):
+ return "Chinese"
diff --git a/venv/Lib/site-packages/requests/packages/chardet/hebrewprober.py b/venv/Lib/site-packages/chardet/hebrewprober.py
similarity index 76%
rename from venv/Lib/site-packages/requests/packages/chardet/hebrewprober.py
rename to venv/Lib/site-packages/chardet/hebrewprober.py
index ba225c5ef..b0e1bf492 100644
--- a/venv/Lib/site-packages/requests/packages/chardet/hebrewprober.py
+++ b/venv/Lib/site-packages/chardet/hebrewprober.py
@@ -26,8 +26,7 @@
######################### END LICENSE BLOCK #########################
from .charsetprober import CharSetProber
-from .constants import eNotMe, eDetecting
-from .compat import wrap_ord
+from .enums import ProbingState
# This prober doesn't actually recognize a language or a charset.
# It is a helper prober for the use of the Hebrew model probers
@@ -126,56 +125,59 @@ from .compat import wrap_ord
# model probers scores. The answer is returned in the form of the name of the
# charset identified, either "windows-1255" or "ISO-8859-8".
-# windows-1255 / ISO-8859-8 code points of interest
-FINAL_KAF = 0xea
-NORMAL_KAF = 0xeb
-FINAL_MEM = 0xed
-NORMAL_MEM = 0xee
-FINAL_NUN = 0xef
-NORMAL_NUN = 0xf0
-FINAL_PE = 0xf3
-NORMAL_PE = 0xf4
-FINAL_TSADI = 0xf5
-NORMAL_TSADI = 0xf6
-
-# Minimum Visual vs Logical final letter score difference.
-# If the difference is below this, don't rely solely on the final letter score
-# distance.
-MIN_FINAL_CHAR_DISTANCE = 5
-
-# Minimum Visual vs Logical model score difference.
-# If the difference is below this, don't rely at all on the model score
-# distance.
-MIN_MODEL_DISTANCE = 0.01
-
-VISUAL_HEBREW_NAME = "ISO-8859-8"
-LOGICAL_HEBREW_NAME = "windows-1255"
-
-
class HebrewProber(CharSetProber):
+ # windows-1255 / ISO-8859-8 code points of interest
+ FINAL_KAF = 0xea
+ NORMAL_KAF = 0xeb
+ FINAL_MEM = 0xed
+ NORMAL_MEM = 0xee
+ FINAL_NUN = 0xef
+ NORMAL_NUN = 0xf0
+ FINAL_PE = 0xf3
+ NORMAL_PE = 0xf4
+ FINAL_TSADI = 0xf5
+ NORMAL_TSADI = 0xf6
+
+ # Minimum Visual vs Logical final letter score difference.
+ # If the difference is below this, don't rely solely on the final letter score
+ # distance.
+ MIN_FINAL_CHAR_DISTANCE = 5
+
+ # Minimum Visual vs Logical model score difference.
+ # If the difference is below this, don't rely at all on the model score
+ # distance.
+ MIN_MODEL_DISTANCE = 0.01
+
+ VISUAL_HEBREW_NAME = "ISO-8859-8"
+ LOGICAL_HEBREW_NAME = "windows-1255"
+
def __init__(self):
- CharSetProber.__init__(self)
- self._mLogicalProber = None
- self._mVisualProber = None
+ super(HebrewProber, self).__init__()
+ self._final_char_logical_score = None
+ self._final_char_visual_score = None
+ self._prev = None
+ self._before_prev = None
+ self._logical_prober = None
+ self._visual_prober = None
self.reset()
def reset(self):
- self._mFinalCharLogicalScore = 0
- self._mFinalCharVisualScore = 0
+ self._final_char_logical_score = 0
+ self._final_char_visual_score = 0
# The last two characters seen in the previous buffer,
# _prev and _before_prev, are initialized to space in order to simulate
# a word delimiter at the beginning of the data
- self._mPrev = ' '
- self._mBeforePrev = ' '
+ self._prev = ' '
+ self._before_prev = ' '
# These probers are owned by the group prober.
def set_model_probers(self, logicalProber, visualProber):
- self._mLogicalProber = logicalProber
- self._mVisualProber = visualProber
+ self._logical_prober = logicalProber
+ self._visual_prober = visualProber
def is_final(self, c):
- return wrap_ord(c) in [FINAL_KAF, FINAL_MEM, FINAL_NUN, FINAL_PE,
- FINAL_TSADI]
+ return c in [self.FINAL_KAF, self.FINAL_MEM, self.FINAL_NUN,
+ self.FINAL_PE, self.FINAL_TSADI]
def is_non_final(self, c):
# The normal Tsadi is not a good Non-Final letter due to words like
@@ -188,9 +190,10 @@ class HebrewProber(CharSetProber):
# for example legally end with a Non-Final Pe or Kaf. However, the
# benefit of these letters as Non-Final letters outweighs the damage
# since these words are quite rare.
- return wrap_ord(c) in [NORMAL_KAF, NORMAL_MEM, NORMAL_NUN, NORMAL_PE]
+ return c in [self.NORMAL_KAF, self.NORMAL_MEM,
+ self.NORMAL_NUN, self.NORMAL_PE]
- def feed(self, aBuf):
+ def feed(self, byte_str):
# Final letter analysis for logical-visual decision.
# Look for evidence that the received buffer is either logical Hebrew
# or visual Hebrew.
@@ -217,67 +220,73 @@ class HebrewProber(CharSetProber):
# We automatically filter out all 7-bit characters (replace them with
# spaces) so the word boundary detection works properly. [MAP]
- if self.get_state() == eNotMe:
+ if self.state == ProbingState.NOT_ME:
# Both model probers say it's not them. No reason to continue.
- return eNotMe
+ return ProbingState.NOT_ME
- aBuf = self.filter_high_bit_only(aBuf)
+ byte_str = self.filter_high_byte_only(byte_str)
- for cur in aBuf:
+ for cur in byte_str:
if cur == ' ':
# We stand on a space - a word just ended
- if self._mBeforePrev != ' ':
- # next-to-last char was not a space so self._mPrev is not a
+ if self._before_prev != ' ':
+ # next-to-last char was not a space so self._prev is not a
# 1 letter word
- if self.is_final(self._mPrev):
+ if self.is_final(self._prev):
# case (1) [-2:not space][-1:final letter][cur:space]
- self._mFinalCharLogicalScore += 1
- elif self.is_non_final(self._mPrev):
+ self._final_char_logical_score += 1
+ elif self.is_non_final(self._prev):
# case (2) [-2:not space][-1:Non-Final letter][
# cur:space]
- self._mFinalCharVisualScore += 1
+ self._final_char_visual_score += 1
else:
# Not standing on a space
- if ((self._mBeforePrev == ' ') and
- (self.is_final(self._mPrev)) and (cur != ' ')):
+ if ((self._before_prev == ' ') and
+ (self.is_final(self._prev)) and (cur != ' ')):
# case (3) [-2:space][-1:final letter][cur:not space]
- self._mFinalCharVisualScore += 1
- self._mBeforePrev = self._mPrev
- self._mPrev = cur
+ self._final_char_visual_score += 1
+ self._before_prev = self._prev
+ self._prev = cur
# Forever detecting, till the end or until both model probers return
- # eNotMe (handled above)
- return eDetecting
+ # ProbingState.NOT_ME (handled above)
+ return ProbingState.DETECTING
- def get_charset_name(self):
+ @property
+ def charset_name(self):
# Make the decision: is it Logical or Visual?
# If the final letter score distance is dominant enough, rely on it.
- finalsub = self._mFinalCharLogicalScore - self._mFinalCharVisualScore
- if finalsub >= MIN_FINAL_CHAR_DISTANCE:
- return LOGICAL_HEBREW_NAME
- if finalsub <= -MIN_FINAL_CHAR_DISTANCE:
- return VISUAL_HEBREW_NAME
+ finalsub = self._final_char_logical_score - self._final_char_visual_score
+ if finalsub >= self.MIN_FINAL_CHAR_DISTANCE:
+ return self.LOGICAL_HEBREW_NAME
+ if finalsub <= -self.MIN_FINAL_CHAR_DISTANCE:
+ return self.VISUAL_HEBREW_NAME
# It's not dominant enough, try to rely on the model scores instead.
- modelsub = (self._mLogicalProber.get_confidence()
- - self._mVisualProber.get_confidence())
- if modelsub > MIN_MODEL_DISTANCE:
- return LOGICAL_HEBREW_NAME
- if modelsub < -MIN_MODEL_DISTANCE:
- return VISUAL_HEBREW_NAME
+ modelsub = (self._logical_prober.get_confidence()
+ - self._visual_prober.get_confidence())
+ if modelsub > self.MIN_MODEL_DISTANCE:
+ return self.LOGICAL_HEBREW_NAME
+ if modelsub < -self.MIN_MODEL_DISTANCE:
+ return self.VISUAL_HEBREW_NAME
# Still no good, back to final letter distance, maybe it'll save the
# day.
if finalsub < 0.0:
- return VISUAL_HEBREW_NAME
+ return self.VISUAL_HEBREW_NAME
# (finalsub > 0 - Logical) or (don't know what to do) default to
# Logical.
- return LOGICAL_HEBREW_NAME
+ return self.LOGICAL_HEBREW_NAME
- def get_state(self):
+ @property
+ def language(self):
+ return 'Hebrew'
+
+ @property
+ def state(self):
# Remain active as long as any of the model probers are active.
- if (self._mLogicalProber.get_state() == eNotMe) and \
- (self._mVisualProber.get_state() == eNotMe):
- return eNotMe
- return eDetecting
+ if (self._logical_prober.state == ProbingState.NOT_ME) and \
+ (self._visual_prober.state == ProbingState.NOT_ME):
+ return ProbingState.NOT_ME
+ return ProbingState.DETECTING
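
The scoring cases numbered (1)-(3) in feed() above are compact enough to lift out. Below is a standalone paraphrase of the final-letter heuristic, not the chardet class itself: logically ordered Hebrew ends words with final letters, visually (reversed) ordered Hebrew appears to begin them with final letters, and the charset_name decision fires once the score gap reaches MIN_FINAL_CHAR_DISTANCE.

FINAL = {0xEA, 0xED, 0xEF, 0xF3, 0xF5}      # final kaf, mem, nun, pe, tsadi
NON_FINAL = {0xEB, 0xEE, 0xF0, 0xF4}        # their normal forms (minus tsadi)
MIN_FINAL_CHAR_DISTANCE = 5
SPACE = 0x20

def score_buffer(byte_str):
    logical = visual = 0
    before_prev = prev = SPACE              # simulate a leading word break
    for cur in byte_str:
        if cur == SPACE:
            if before_prev != SPACE:        # prev closed a real word
                if prev in FINAL:
                    logical += 1            # case (1): word ends in a final
                elif prev in NON_FINAL:
                    visual += 1             # case (2): word ends in a normal
        elif before_prev == SPACE and prev in FINAL:
            visual += 1                     # case (3): word starts with a final
        before_prev, prev = prev, cur
    return logical, visual

# Six words, each ending in FINAL_KAF: a clearly logical buffer.
logical, visual = score_buffer(bytes([0xE9, 0xEA, SPACE]) * 6)
assert logical - visual >= MIN_FINAL_CHAR_DISTANCE   # would pick windows-1255
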
diff --git a/venv/Lib/site-packages/requests/packages/chardet/jisfreq.py b/venv/Lib/site-packages/chardet/jisfreq.py
similarity index 54%
rename from venv/Lib/site-packages/requests/packages/chardet/jisfreq.py
rename to venv/Lib/site-packages/chardet/jisfreq.py
index 064345b08..83fc082b5 100644
--- a/venv/Lib/site-packages/requests/packages/chardet/jisfreq.py
+++ b/venv/Lib/site-packages/chardet/jisfreq.py
@@ -46,7 +46,7 @@ JIS_TYPICAL_DISTRIBUTION_RATIO = 3.0
# Char to FreqOrder table
JIS_TABLE_SIZE = 4368
-JISCharToFreqOrder = (
+JIS_CHAR_TO_FREQ_ORDER = (
40, 1, 6, 182, 152, 180, 295,2127, 285, 381,3295,4304,3068,4606,3165,3510, # 16
3511,1822,2785,4607,1193,2226,5070,4608, 171,2996,1247, 18, 179,5071, 856,1661, # 32
1262,5072, 619, 127,3431,3512,3230,1899,1700, 232, 228,1294,1298, 284, 283,2041, # 48
@@ -320,250 +320,6 @@ JISCharToFreqOrder = (
2413,2477,1216,2725,2159, 334,3840,1328,3624,2921,1525,4132, 564,1056, 891,4363, # 4336
1444,1698,2385,2251,3729,1365,2281,2235,1717,6188, 864,3841,2515, 444, 527,2767, # 4352
2922,3625, 544, 461,6189, 566, 209,2437,3398,2098,1065,2068,3331,3626,3257,2137, # 4368 #last 512
-#Everything below is of no interest for detection purpose
-2138,2122,3730,2888,1995,1820,1044,6190,6191,6192,6193,6194,6195,6196,6197,6198, # 4384
-6199,6200,6201,6202,6203,6204,6205,4670,6206,6207,6208,6209,6210,6211,6212,6213, # 4400
-6214,6215,6216,6217,6218,6219,6220,6221,6222,6223,6224,6225,6226,6227,6228,6229, # 4416
-6230,6231,6232,6233,6234,6235,6236,6237,3187,6238,6239,3969,6240,6241,6242,6243, # 4432
-6244,4671,6245,6246,4672,6247,6248,4133,6249,6250,4364,6251,2923,2556,2613,4673, # 4448
-4365,3970,6252,6253,6254,6255,4674,6256,6257,6258,2768,2353,4366,4675,4676,3188, # 4464
-4367,3463,6259,4134,4677,4678,6260,2267,6261,3842,3332,4368,3543,6262,6263,6264, # 4480
-3013,1954,1928,4135,4679,6265,6266,2478,3091,6267,4680,4369,6268,6269,1699,6270, # 4496
-3544,4136,4681,6271,4137,6272,4370,2804,6273,6274,2593,3971,3972,4682,6275,2236, # 4512
-4683,6276,6277,4684,6278,6279,4138,3973,4685,6280,6281,3258,6282,6283,6284,6285, # 4528
-3974,4686,2841,3975,6286,6287,3545,6288,6289,4139,4687,4140,6290,4141,6291,4142, # 4544
-6292,6293,3333,6294,6295,6296,4371,6297,3399,6298,6299,4372,3976,6300,6301,6302, # 4560
-4373,6303,6304,3843,3731,6305,4688,4374,6306,6307,3259,2294,6308,3732,2530,4143, # 4576
-6309,4689,6310,6311,6312,3048,6313,6314,4690,3733,2237,6315,6316,2282,3334,6317, # 4592
-6318,3844,6319,6320,4691,6321,3400,4692,6322,4693,6323,3049,6324,4375,6325,3977, # 4608
-6326,6327,6328,3546,6329,4694,3335,6330,4695,4696,6331,6332,6333,6334,4376,3978, # 4624
-6335,4697,3979,4144,6336,3980,4698,6337,6338,6339,6340,6341,4699,4700,4701,6342, # 4640
-6343,4702,6344,6345,4703,6346,6347,4704,6348,4705,4706,3135,6349,4707,6350,4708, # 4656
-6351,4377,6352,4709,3734,4145,6353,2506,4710,3189,6354,3050,4711,3981,6355,3547, # 4672
-3014,4146,4378,3735,2651,3845,3260,3136,2224,1986,6356,3401,6357,4712,2594,3627, # 4688
-3137,2573,3736,3982,4713,3628,4714,4715,2682,3629,4716,6358,3630,4379,3631,6359, # 4704
-6360,6361,3983,6362,6363,6364,6365,4147,3846,4717,6366,6367,3737,2842,6368,4718, # 4720
-2628,6369,3261,6370,2386,6371,6372,3738,3984,4719,3464,4720,3402,6373,2924,3336, # 4736
-4148,2866,6374,2805,3262,4380,2704,2069,2531,3138,2806,2984,6375,2769,6376,4721, # 4752
-4722,3403,6377,6378,3548,6379,6380,2705,3092,1979,4149,2629,3337,2889,6381,3338, # 4768
-4150,2557,3339,4381,6382,3190,3263,3739,6383,4151,4723,4152,2558,2574,3404,3191, # 4784
-6384,6385,4153,6386,4724,4382,6387,6388,4383,6389,6390,4154,6391,4725,3985,6392, # 4800
-3847,4155,6393,6394,6395,6396,6397,3465,6398,4384,6399,6400,6401,6402,6403,6404, # 4816
-4156,6405,6406,6407,6408,2123,6409,6410,2326,3192,4726,6411,6412,6413,6414,4385, # 4832
-4157,6415,6416,4158,6417,3093,3848,6418,3986,6419,6420,3849,6421,6422,6423,4159, # 4848
-6424,6425,4160,6426,3740,6427,6428,6429,6430,3987,6431,4727,6432,2238,6433,6434, # 4864
-4386,3988,6435,6436,3632,6437,6438,2843,6439,6440,6441,6442,3633,6443,2958,6444, # 4880
-6445,3466,6446,2364,4387,3850,6447,4388,2959,3340,6448,3851,6449,4728,6450,6451, # 4896
-3264,4729,6452,3193,6453,4389,4390,2706,3341,4730,6454,3139,6455,3194,6456,3051, # 4912
-2124,3852,1602,4391,4161,3853,1158,3854,4162,3989,4392,3990,4731,4732,4393,2040, # 4928
-4163,4394,3265,6457,2807,3467,3855,6458,6459,6460,3991,3468,4733,4734,6461,3140, # 4944
-2960,6462,4735,6463,6464,6465,6466,4736,4737,4738,4739,6467,6468,4164,2403,3856, # 4960
-6469,6470,2770,2844,6471,4740,6472,6473,6474,6475,6476,6477,6478,3195,6479,4741, # 4976
-4395,6480,2867,6481,4742,2808,6482,2493,4165,6483,6484,6485,6486,2295,4743,6487, # 4992
-6488,6489,3634,6490,6491,6492,6493,6494,6495,6496,2985,4744,6497,6498,4745,6499, # 5008
-6500,2925,3141,4166,6501,6502,4746,6503,6504,4747,6505,6506,6507,2890,6508,6509, # 5024
-6510,6511,6512,6513,6514,6515,6516,6517,6518,6519,3469,4167,6520,6521,6522,4748, # 5040
-4396,3741,4397,4749,4398,3342,2125,4750,6523,4751,4752,4753,3052,6524,2961,4168, # 5056
-6525,4754,6526,4755,4399,2926,4169,6527,3857,6528,4400,4170,6529,4171,6530,6531, # 5072
-2595,6532,6533,6534,6535,3635,6536,6537,6538,6539,6540,6541,6542,4756,6543,6544, # 5088
-6545,6546,6547,6548,4401,6549,6550,6551,6552,4402,3405,4757,4403,6553,6554,6555, # 5104
-4172,3742,6556,6557,6558,3992,3636,6559,6560,3053,2726,6561,3549,4173,3054,4404, # 5120
-6562,6563,3993,4405,3266,3550,2809,4406,6564,6565,6566,4758,4759,6567,3743,6568, # 5136
-4760,3744,4761,3470,6569,6570,6571,4407,6572,3745,4174,6573,4175,2810,4176,3196, # 5152
-4762,6574,4177,6575,6576,2494,2891,3551,6577,6578,3471,6579,4408,6580,3015,3197, # 5168
-6581,3343,2532,3994,3858,6582,3094,3406,4409,6583,2892,4178,4763,4410,3016,4411, # 5184
-6584,3995,3142,3017,2683,6585,4179,6586,6587,4764,4412,6588,6589,4413,6590,2986, # 5200
-6591,2962,3552,6592,2963,3472,6593,6594,4180,4765,6595,6596,2225,3267,4414,6597, # 5216
-3407,3637,4766,6598,6599,3198,6600,4415,6601,3859,3199,6602,3473,4767,2811,4416, # 5232
-1856,3268,3200,2575,3996,3997,3201,4417,6603,3095,2927,6604,3143,6605,2268,6606, # 5248
-3998,3860,3096,2771,6607,6608,3638,2495,4768,6609,3861,6610,3269,2745,4769,4181, # 5264
-3553,6611,2845,3270,6612,6613,6614,3862,6615,6616,4770,4771,6617,3474,3999,4418, # 5280
-4419,6618,3639,3344,6619,4772,4182,6620,2126,6621,6622,6623,4420,4773,6624,3018, # 5296
-6625,4774,3554,6626,4183,2025,3746,6627,4184,2707,6628,4421,4422,3097,1775,4185, # 5312
-3555,6629,6630,2868,6631,6632,4423,6633,6634,4424,2414,2533,2928,6635,4186,2387, # 5328
-6636,4775,6637,4187,6638,1891,4425,3202,3203,6639,6640,4776,6641,3345,6642,6643, # 5344
-3640,6644,3475,3346,3641,4000,6645,3144,6646,3098,2812,4188,3642,3204,6647,3863, # 5360
-3476,6648,3864,6649,4426,4001,6650,6651,6652,2576,6653,4189,4777,6654,6655,6656, # 5376
-2846,6657,3477,3205,4002,6658,4003,6659,3347,2252,6660,6661,6662,4778,6663,6664, # 5392
-6665,6666,6667,6668,6669,4779,4780,2048,6670,3478,3099,6671,3556,3747,4004,6672, # 5408
-6673,6674,3145,4005,3748,6675,6676,6677,6678,6679,3408,6680,6681,6682,6683,3206, # 5424
-3207,6684,6685,4781,4427,6686,4782,4783,4784,6687,6688,6689,4190,6690,6691,3479, # 5440
-6692,2746,6693,4428,6694,6695,6696,6697,6698,6699,4785,6700,6701,3208,2727,6702, # 5456
-3146,6703,6704,3409,2196,6705,4429,6706,6707,6708,2534,1996,6709,6710,6711,2747, # 5472
-6712,6713,6714,4786,3643,6715,4430,4431,6716,3557,6717,4432,4433,6718,6719,6720, # 5488
-6721,3749,6722,4006,4787,6723,6724,3644,4788,4434,6725,6726,4789,2772,6727,6728, # 5504
-6729,6730,6731,2708,3865,2813,4435,6732,6733,4790,4791,3480,6734,6735,6736,6737, # 5520
-4436,3348,6738,3410,4007,6739,6740,4008,6741,6742,4792,3411,4191,6743,6744,6745, # 5536
-6746,6747,3866,6748,3750,6749,6750,6751,6752,6753,6754,6755,3867,6756,4009,6757, # 5552
-4793,4794,6758,2814,2987,6759,6760,6761,4437,6762,6763,6764,6765,3645,6766,6767, # 5568
-3481,4192,6768,3751,6769,6770,2174,6771,3868,3752,6772,6773,6774,4193,4795,4438, # 5584
-3558,4796,4439,6775,4797,6776,6777,4798,6778,4799,3559,4800,6779,6780,6781,3482, # 5600
-6782,2893,6783,6784,4194,4801,4010,6785,6786,4440,6787,4011,6788,6789,6790,6791, # 5616
-6792,6793,4802,6794,6795,6796,4012,6797,6798,6799,6800,3349,4803,3483,6801,4804, # 5632
-4195,6802,4013,6803,6804,4196,6805,4014,4015,6806,2847,3271,2848,6807,3484,6808, # 5648
-6809,6810,4441,6811,4442,4197,4443,3272,4805,6812,3412,4016,1579,6813,6814,4017, # 5664
-6815,3869,6816,2964,6817,4806,6818,6819,4018,3646,6820,6821,4807,4019,4020,6822, # 5680
-6823,3560,6824,6825,4021,4444,6826,4198,6827,6828,4445,6829,6830,4199,4808,6831, # 5696
-6832,6833,3870,3019,2458,6834,3753,3413,3350,6835,4809,3871,4810,3561,4446,6836, # 5712
-6837,4447,4811,4812,6838,2459,4448,6839,4449,6840,6841,4022,3872,6842,4813,4814, # 5728
-6843,6844,4815,4200,4201,4202,6845,4023,6846,6847,4450,3562,3873,6848,6849,4816, # 5744
-4817,6850,4451,4818,2139,6851,3563,6852,6853,3351,6854,6855,3352,4024,2709,3414, # 5760
-4203,4452,6856,4204,6857,6858,3874,3875,6859,6860,4819,6861,6862,6863,6864,4453, # 5776
-3647,6865,6866,4820,6867,6868,6869,6870,4454,6871,2869,6872,6873,4821,6874,3754, # 5792
-6875,4822,4205,6876,6877,6878,3648,4206,4455,6879,4823,6880,4824,3876,6881,3055, # 5808
-4207,6882,3415,6883,6884,6885,4208,4209,6886,4210,3353,6887,3354,3564,3209,3485, # 5824
-2652,6888,2728,6889,3210,3755,6890,4025,4456,6891,4825,6892,6893,6894,6895,4211, # 5840
-6896,6897,6898,4826,6899,6900,4212,6901,4827,6902,2773,3565,6903,4828,6904,6905, # 5856
-6906,6907,3649,3650,6908,2849,3566,6909,3567,3100,6910,6911,6912,6913,6914,6915, # 5872
-4026,6916,3355,4829,3056,4457,3756,6917,3651,6918,4213,3652,2870,6919,4458,6920, # 5888
-2438,6921,6922,3757,2774,4830,6923,3356,4831,4832,6924,4833,4459,3653,2507,6925, # 5904
-4834,2535,6926,6927,3273,4027,3147,6928,3568,6929,6930,6931,4460,6932,3877,4461, # 5920
-2729,3654,6933,6934,6935,6936,2175,4835,2630,4214,4028,4462,4836,4215,6937,3148, # 5936
-4216,4463,4837,4838,4217,6938,6939,2850,4839,6940,4464,6941,6942,6943,4840,6944, # 5952
-4218,3274,4465,6945,6946,2710,6947,4841,4466,6948,6949,2894,6950,6951,4842,6952, # 5968
-4219,3057,2871,6953,6954,6955,6956,4467,6957,2711,6958,6959,6960,3275,3101,4843, # 5984
-6961,3357,3569,6962,4844,6963,6964,4468,4845,3570,6965,3102,4846,3758,6966,4847, # 6000
-3878,4848,4849,4029,6967,2929,3879,4850,4851,6968,6969,1733,6970,4220,6971,6972, # 6016
-6973,6974,6975,6976,4852,6977,6978,6979,6980,6981,6982,3759,6983,6984,6985,3486, # 6032
-3487,6986,3488,3416,6987,6988,6989,6990,6991,6992,6993,6994,6995,6996,6997,4853, # 6048
-6998,6999,4030,7000,7001,3211,7002,7003,4221,7004,7005,3571,4031,7006,3572,7007, # 6064
-2614,4854,2577,7008,7009,2965,3655,3656,4855,2775,3489,3880,4222,4856,3881,4032, # 6080
-3882,3657,2730,3490,4857,7010,3149,7011,4469,4858,2496,3491,4859,2283,7012,7013, # 6096
-7014,2365,4860,4470,7015,7016,3760,7017,7018,4223,1917,7019,7020,7021,4471,7022, # 6112
-2776,4472,7023,7024,7025,7026,4033,7027,3573,4224,4861,4034,4862,7028,7029,1929, # 6128
-3883,4035,7030,4473,3058,7031,2536,3761,3884,7032,4036,7033,2966,2895,1968,4474, # 6144
-3276,4225,3417,3492,4226,2105,7034,7035,1754,2596,3762,4227,4863,4475,3763,4864, # 6160
-3764,2615,2777,3103,3765,3658,3418,4865,2296,3766,2815,7036,7037,7038,3574,2872, # 6176
-3277,4476,7039,4037,4477,7040,7041,4038,7042,7043,7044,7045,7046,7047,2537,7048, # 6192
-7049,7050,7051,7052,7053,7054,4478,7055,7056,3767,3659,4228,3575,7057,7058,4229, # 6208
-7059,7060,7061,3660,7062,3212,7063,3885,4039,2460,7064,7065,7066,7067,7068,7069, # 6224
-7070,7071,7072,7073,7074,4866,3768,4867,7075,7076,7077,7078,4868,3358,3278,2653, # 6240
-7079,7080,4479,3886,7081,7082,4869,7083,7084,7085,7086,7087,7088,2538,7089,7090, # 6256
-7091,4040,3150,3769,4870,4041,2896,3359,4230,2930,7092,3279,7093,2967,4480,3213, # 6272
-4481,3661,7094,7095,7096,7097,7098,7099,7100,7101,7102,2461,3770,7103,7104,4231, # 6288
-3151,7105,7106,7107,4042,3662,7108,7109,4871,3663,4872,4043,3059,7110,7111,7112, # 6304
-3493,2988,7113,4873,7114,7115,7116,3771,4874,7117,7118,4232,4875,7119,3576,2336, # 6320
-4876,7120,4233,3419,4044,4877,4878,4482,4483,4879,4484,4234,7121,3772,4880,1045, # 6336
-3280,3664,4881,4882,7122,7123,7124,7125,4883,7126,2778,7127,4485,4486,7128,4884, # 6352
-3214,3887,7129,7130,3215,7131,4885,4045,7132,7133,4046,7134,7135,7136,7137,7138, # 6368
-7139,7140,7141,7142,7143,4235,7144,4886,7145,7146,7147,4887,7148,7149,7150,4487, # 6384
-4047,4488,7151,7152,4888,4048,2989,3888,7153,3665,7154,4049,7155,7156,7157,7158, # 6400
-7159,7160,2931,4889,4890,4489,7161,2631,3889,4236,2779,7162,7163,4891,7164,3060, # 6416
-7165,1672,4892,7166,4893,4237,3281,4894,7167,7168,3666,7169,3494,7170,7171,4050, # 6432
-7172,7173,3104,3360,3420,4490,4051,2684,4052,7174,4053,7175,7176,7177,2253,4054, # 6448
-7178,7179,4895,7180,3152,3890,3153,4491,3216,7181,7182,7183,2968,4238,4492,4055, # 6464
-7184,2990,7185,2479,7186,7187,4493,7188,7189,7190,7191,7192,4896,7193,4897,2969, # 6480
-4494,4898,7194,3495,7195,7196,4899,4495,7197,3105,2731,7198,4900,7199,7200,7201, # 6496
-4056,7202,3361,7203,7204,4496,4901,4902,7205,4497,7206,7207,2315,4903,7208,4904, # 6512
-7209,4905,2851,7210,7211,3577,7212,3578,4906,7213,4057,3667,4907,7214,4058,2354, # 6528
-3891,2376,3217,3773,7215,7216,7217,7218,7219,4498,7220,4908,3282,2685,7221,3496, # 6544
-4909,2632,3154,4910,7222,2337,7223,4911,7224,7225,7226,4912,4913,3283,4239,4499, # 6560
-7227,2816,7228,7229,7230,7231,7232,7233,7234,4914,4500,4501,7235,7236,7237,2686, # 6576
-7238,4915,7239,2897,4502,7240,4503,7241,2516,7242,4504,3362,3218,7243,7244,7245, # 6592
-4916,7246,7247,4505,3363,7248,7249,7250,7251,3774,4506,7252,7253,4917,7254,7255, # 6608
-3284,2991,4918,4919,3219,3892,4920,3106,3497,4921,7256,7257,7258,4922,7259,4923, # 6624
-3364,4507,4508,4059,7260,4240,3498,7261,7262,4924,7263,2992,3893,4060,3220,7264, # 6640
-7265,7266,7267,7268,7269,4509,3775,7270,2817,7271,4061,4925,4510,3776,7272,4241, # 6656
-4511,3285,7273,7274,3499,7275,7276,7277,4062,4512,4926,7278,3107,3894,7279,7280, # 6672
-4927,7281,4513,7282,7283,3668,7284,7285,4242,4514,4243,7286,2058,4515,4928,4929, # 6688
-4516,7287,3286,4244,7288,4517,7289,7290,7291,3669,7292,7293,4930,4931,4932,2355, # 6704
-4933,7294,2633,4518,7295,4245,7296,7297,4519,7298,7299,4520,4521,4934,7300,4246, # 6720
-4522,7301,7302,7303,3579,7304,4247,4935,7305,4936,7306,7307,7308,7309,3777,7310, # 6736
-4523,7311,7312,7313,4248,3580,7314,4524,3778,4249,7315,3581,7316,3287,7317,3221, # 6752
-7318,4937,7319,7320,7321,7322,7323,7324,4938,4939,7325,4525,7326,7327,7328,4063, # 6768
-7329,7330,4940,7331,7332,4941,7333,4526,7334,3500,2780,1741,4942,2026,1742,7335, # 6784
-7336,3582,4527,2388,7337,7338,7339,4528,7340,4250,4943,7341,7342,7343,4944,7344, # 6800
-7345,7346,3020,7347,4945,7348,7349,7350,7351,3895,7352,3896,4064,3897,7353,7354, # 6816
-7355,4251,7356,7357,3898,7358,3779,7359,3780,3288,7360,7361,4529,7362,4946,4530, # 6832
-2027,7363,3899,4531,4947,3222,3583,7364,4948,7365,7366,7367,7368,4949,3501,4950, # 6848
-3781,4951,4532,7369,2517,4952,4252,4953,3155,7370,4954,4955,4253,2518,4533,7371, # 6864
-7372,2712,4254,7373,7374,7375,3670,4956,3671,7376,2389,3502,4065,7377,2338,7378, # 6880
-7379,7380,7381,3061,7382,4957,7383,7384,7385,7386,4958,4534,7387,7388,2993,7389, # 6896
-3062,7390,4959,7391,7392,7393,4960,3108,4961,7394,4535,7395,4962,3421,4536,7396, # 6912
-4963,7397,4964,1857,7398,4965,7399,7400,2176,3584,4966,7401,7402,3422,4537,3900, # 6928
-3585,7403,3782,7404,2852,7405,7406,7407,4538,3783,2654,3423,4967,4539,7408,3784, # 6944
-3586,2853,4540,4541,7409,3901,7410,3902,7411,7412,3785,3109,2327,3903,7413,7414, # 6960
-2970,4066,2932,7415,7416,7417,3904,3672,3424,7418,4542,4543,4544,7419,4968,7420, # 6976
-7421,4255,7422,7423,7424,7425,7426,4067,7427,3673,3365,4545,7428,3110,2559,3674, # 6992
-7429,7430,3156,7431,7432,3503,7433,3425,4546,7434,3063,2873,7435,3223,4969,4547, # 7008
-4548,2898,4256,4068,7436,4069,3587,3786,2933,3787,4257,4970,4971,3788,7437,4972, # 7024
-3064,7438,4549,7439,7440,7441,7442,7443,4973,3905,7444,2874,7445,7446,7447,7448, # 7040
-3021,7449,4550,3906,3588,4974,7450,7451,3789,3675,7452,2578,7453,4070,7454,7455, # 7056
-7456,4258,3676,7457,4975,7458,4976,4259,3790,3504,2634,4977,3677,4551,4260,7459, # 7072
-7460,7461,7462,3907,4261,4978,7463,7464,7465,7466,4979,4980,7467,7468,2213,4262, # 7088
-7469,7470,7471,3678,4981,7472,2439,7473,4263,3224,3289,7474,3908,2415,4982,7475, # 7104
-4264,7476,4983,2655,7477,7478,2732,4552,2854,2875,7479,7480,4265,7481,4553,4984, # 7120
-7482,7483,4266,7484,3679,3366,3680,2818,2781,2782,3367,3589,4554,3065,7485,4071, # 7136
-2899,7486,7487,3157,2462,4072,4555,4073,4985,4986,3111,4267,2687,3368,4556,4074, # 7152
-3791,4268,7488,3909,2783,7489,2656,1962,3158,4557,4987,1963,3159,3160,7490,3112, # 7168
-4988,4989,3022,4990,4991,3792,2855,7491,7492,2971,4558,7493,7494,4992,7495,7496, # 7184
-7497,7498,4993,7499,3426,4559,4994,7500,3681,4560,4269,4270,3910,7501,4075,4995, # 7200
-4271,7502,7503,4076,7504,4996,7505,3225,4997,4272,4077,2819,3023,7506,7507,2733, # 7216
-4561,7508,4562,7509,3369,3793,7510,3590,2508,7511,7512,4273,3113,2994,2616,7513, # 7232
-7514,7515,7516,7517,7518,2820,3911,4078,2748,7519,7520,4563,4998,7521,7522,7523, # 7248
-7524,4999,4274,7525,4564,3682,2239,4079,4565,7526,7527,7528,7529,5000,7530,7531, # 7264
-5001,4275,3794,7532,7533,7534,3066,5002,4566,3161,7535,7536,4080,7537,3162,7538, # 7280
-7539,4567,7540,7541,7542,7543,7544,7545,5003,7546,4568,7547,7548,7549,7550,7551, # 7296
-7552,7553,7554,7555,7556,5004,7557,7558,7559,5005,7560,3795,7561,4569,7562,7563, # 7312
-7564,2821,3796,4276,4277,4081,7565,2876,7566,5006,7567,7568,2900,7569,3797,3912, # 7328
-7570,7571,7572,4278,7573,7574,7575,5007,7576,7577,5008,7578,7579,4279,2934,7580, # 7344
-7581,5009,7582,4570,7583,4280,7584,7585,7586,4571,4572,3913,7587,4573,3505,7588, # 7360
-5010,7589,7590,7591,7592,3798,4574,7593,7594,5011,7595,4281,7596,7597,7598,4282, # 7376
-5012,7599,7600,5013,3163,7601,5014,7602,3914,7603,7604,2734,4575,4576,4577,7605, # 7392
-7606,7607,7608,7609,3506,5015,4578,7610,4082,7611,2822,2901,2579,3683,3024,4579, # 7408
-3507,7612,4580,7613,3226,3799,5016,7614,7615,7616,7617,7618,7619,7620,2995,3290, # 7424
-7621,4083,7622,5017,7623,7624,7625,7626,7627,4581,3915,7628,3291,7629,5018,7630, # 7440
-7631,7632,7633,4084,7634,7635,3427,3800,7636,7637,4582,7638,5019,4583,5020,7639, # 7456
-3916,7640,3801,5021,4584,4283,7641,7642,3428,3591,2269,7643,2617,7644,4585,3592, # 7472
-7645,4586,2902,7646,7647,3227,5022,7648,4587,7649,4284,7650,7651,7652,4588,2284, # 7488
-7653,5023,7654,7655,7656,4589,5024,3802,7657,7658,5025,3508,4590,7659,7660,7661, # 7504
-1969,5026,7662,7663,3684,1821,2688,7664,2028,2509,4285,7665,2823,1841,7666,2689, # 7520
-3114,7667,3917,4085,2160,5027,5028,2972,7668,5029,7669,7670,7671,3593,4086,7672, # 7536
-4591,4087,5030,3803,7673,7674,7675,7676,7677,7678,7679,4286,2366,4592,4593,3067, # 7552
-2328,7680,7681,4594,3594,3918,2029,4287,7682,5031,3919,3370,4288,4595,2856,7683, # 7568
-3509,7684,7685,5032,5033,7686,7687,3804,2784,7688,7689,7690,7691,3371,7692,7693, # 7584
-2877,5034,7694,7695,3920,4289,4088,7696,7697,7698,5035,7699,5036,4290,5037,5038, # 7600
-5039,7700,7701,7702,5040,5041,3228,7703,1760,7704,5042,3229,4596,2106,4089,7705, # 7616
-4597,2824,5043,2107,3372,7706,4291,4090,5044,7707,4091,7708,5045,3025,3805,4598, # 7632
-4292,4293,4294,3373,7709,4599,7710,5046,7711,7712,5047,5048,3806,7713,7714,7715, # 7648
-5049,7716,7717,7718,7719,4600,5050,7720,7721,7722,5051,7723,4295,3429,7724,7725, # 7664
-7726,7727,3921,7728,3292,5052,4092,7729,7730,7731,7732,7733,7734,7735,5053,5054, # 7680
-7736,7737,7738,7739,3922,3685,7740,7741,7742,7743,2635,5055,7744,5056,4601,7745, # 7696
-7746,2560,7747,7748,7749,7750,3923,7751,7752,7753,7754,7755,4296,2903,7756,7757, # 7712
-7758,7759,7760,3924,7761,5057,4297,7762,7763,5058,4298,7764,4093,7765,7766,5059, # 7728
-3925,7767,7768,7769,7770,7771,7772,7773,7774,7775,7776,3595,7777,4299,5060,4094, # 7744
-7778,3293,5061,7779,7780,4300,7781,7782,4602,7783,3596,7784,7785,3430,2367,7786, # 7760
-3164,5062,5063,4301,7787,7788,4095,5064,5065,7789,3374,3115,7790,7791,7792,7793, # 7776
-7794,7795,7796,3597,4603,7797,7798,3686,3116,3807,5066,7799,7800,5067,7801,7802, # 7792
-4604,4302,5068,4303,4096,7803,7804,3294,7805,7806,5069,4605,2690,7807,3026,7808, # 7808
-7809,7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,7824, # 7824
-7825,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,7840, # 7840
-7841,7842,7843,7844,7845,7846,7847,7848,7849,7850,7851,7852,7853,7854,7855,7856, # 7856
-7857,7858,7859,7860,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870,7871,7872, # 7872
-7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886,7887,7888, # 7888
-7889,7890,7891,7892,7893,7894,7895,7896,7897,7898,7899,7900,7901,7902,7903,7904, # 7904
-7905,7906,7907,7908,7909,7910,7911,7912,7913,7914,7915,7916,7917,7918,7919,7920, # 7920
-7921,7922,7923,7924,3926,7925,7926,7927,7928,7929,7930,7931,7932,7933,7934,7935, # 7936
-7936,7937,7938,7939,7940,7941,7942,7943,7944,7945,7946,7947,7948,7949,7950,7951, # 7952
-7952,7953,7954,7955,7956,7957,7958,7959,7960,7961,7962,7963,7964,7965,7966,7967, # 7968
-7968,7969,7970,7971,7972,7973,7974,7975,7976,7977,7978,7979,7980,7981,7982,7983, # 7984
-7984,7985,7986,7987,7988,7989,7990,7991,7992,7993,7994,7995,7996,7997,7998,7999, # 8000
-8000,8001,8002,8003,8004,8005,8006,8007,8008,8009,8010,8011,8012,8013,8014,8015, # 8016
-8016,8017,8018,8019,8020,8021,8022,8023,8024,8025,8026,8027,8028,8029,8030,8031, # 8032
-8032,8033,8034,8035,8036,8037,8038,8039,8040,8041,8042,8043,8044,8045,8046,8047, # 8048
-8048,8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063, # 8064
-8064,8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079, # 8080
-8080,8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095, # 8096
-8096,8097,8098,8099,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110,8111, # 8112
-8112,8113,8114,8115,8116,8117,8118,8119,8120,8121,8122,8123,8124,8125,8126,8127, # 8128
-8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141,8142,8143, # 8144
-8144,8145,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155,8156,8157,8158,8159, # 8160
-8160,8161,8162,8163,8164,8165,8166,8167,8168,8169,8170,8171,8172,8173,8174,8175, # 8176
-8176,8177,8178,8179,8180,8181,8182,8183,8184,8185,8186,8187,8188,8189,8190,8191, # 8192
-8192,8193,8194,8195,8196,8197,8198,8199,8200,8201,8202,8203,8204,8205,8206,8207, # 8208
-8208,8209,8210,8211,8212,8213,8214,8215,8216,8217,8218,8219,8220,8221,8222,8223, # 8224
-8224,8225,8226,8227,8228,8229,8230,8231,8232,8233,8234,8235,8236,8237,8238,8239, # 8240
-8240,8241,8242,8243,8244,8245,8246,8247,8248,8249,8250,8251,8252,8253,8254,8255, # 8256
-8256,8257,8258,8259,8260,8261,8262,8263,8264,8265,8266,8267,8268,8269,8270,8271) # 8272
+)
+
-# flake8: noqa
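
For context on why the deleted tail was marked "of no interest for detection purpose": chardet's CharDistributionAnalysis (in chardistribution.py) treats a character as frequent only when its table entry ranks below 512, and never indexes past JIS_TABLE_SIZE, so entries beyond rank 4368 could never influence the score. A simplified paraphrase of that confidence computation, assuming the caller has already mapped decoded characters to table indices:

JIS_TABLE_SIZE = 4368                 # from this file
JIS_TYPICAL_DISTRIBUTION_RATIO = 3.0  # from this file
FREQ_CUTOFF = 512                     # ranks below this count as "frequent"

def distribution_confidence(orders, char_to_freq_order):
    total = freq = 0
    for order in orders:              # one table index per 2-byte character
        if order < 0:
            continue                  # -1 means "not a tracked character"
        total += 1
        if order < JIS_TABLE_SIZE and char_to_freq_order[order] < FREQ_CUTOFF:
            freq += 1
    if total == 0:
        return -1                     # not enough data to say anything
    if total == freq:
        return 0.99
    return min(freq / ((total - freq) * JIS_TYPICAL_DISTRIBUTION_RATIO), 0.99)

# Toy table: the first three ranks are "hot", the rest are rare.
table = [40, 1, 6] + [9999] * (JIS_TABLE_SIZE - 3)
print(distribution_confidence([0, 1, 2, 3], table))  # 3 frequent, 1 rare -> 0.99
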
diff --git a/venv/Lib/site-packages/requests/packages/chardet/jpcntx.py b/venv/Lib/site-packages/chardet/jpcntx.py
similarity index 87%
rename from venv/Lib/site-packages/requests/packages/chardet/jpcntx.py
rename to venv/Lib/site-packages/chardet/jpcntx.py
index 59aeb6a87..20044e4bc 100644
--- a/venv/Lib/site-packages/requests/packages/chardet/jpcntx.py
+++ b/venv/Lib/site-packages/chardet/jpcntx.py
@@ -25,13 +25,6 @@
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
-from .compat import wrap_ord
-
-NUM_OF_CATEGORY = 6
-DONT_KNOW = -1
-ENOUGH_REL_THRESHOLD = 100
-MAX_REL_THRESHOLD = 1000
-MINIMUM_DATA_THRESHOLD = 4
# This is the hiragana 2-char sequence table; the number in each cell represents its frequency category.
jp2CharContext = (
@@ -120,24 +113,35 @@ jp2CharContext = (
(0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1),
)
-class JapaneseContextAnalysis:
+class JapaneseContextAnalysis(object):
+ NUM_OF_CATEGORY = 6
+ DONT_KNOW = -1
+ ENOUGH_REL_THRESHOLD = 100
+ MAX_REL_THRESHOLD = 1000
+ MINIMUM_DATA_THRESHOLD = 4
+
def __init__(self):
+ self._total_rel = None
+ self._rel_sample = None
+ self._need_to_skip_char_num = None
+ self._last_char_order = None
+ self._done = None
self.reset()
def reset(self):
- self._mTotalRel = 0 # total sequence received
- # category counters, each interger counts sequence in its category
- self._mRelSample = [0] * NUM_OF_CATEGORY
+ self._total_rel = 0 # total sequence received
+ # category counters, each integer counts sequence in its category
+ self._rel_sample = [0] * self.NUM_OF_CATEGORY
# if last byte in current buffer is not the last byte of a character,
# we need to know how many bytes to skip in next buffer
- self._mNeedToSkipCharNum = 0
- self._mLastCharOrder = -1 # The order of previous char
+ self._need_to_skip_char_num = 0
+ self._last_char_order = -1 # The order of previous char
# If this flag is set to True, detection is done and conclusion has
# been made
- self._mDone = False
+ self._done = False
- def feed(self, aBuf, aLen):
- if self._mDone:
+ def feed(self, byte_str, num_bytes):
+ if self._done:
return
# The buffer we got is byte oriented, and a character may span in more than one
@@ -147,81 +151,83 @@ class JapaneseContextAnalysis:
# well and analyse the character once it is complete, but since a
# character will not make much difference, simply skipping
# this character will simplify our logic and improve performance.
- i = self._mNeedToSkipCharNum
- while i < aLen:
- order, charLen = self.get_order(aBuf[i:i + 2])
- i += charLen
- if i > aLen:
- self._mNeedToSkipCharNum = i - aLen
- self._mLastCharOrder = -1
+ i = self._need_to_skip_char_num
+ while i < num_bytes:
+ order, char_len = self.get_order(byte_str[i:i + 2])
+ i += char_len
+ if i > num_bytes:
+ self._need_to_skip_char_num = i - num_bytes
+ self._last_char_order = -1
else:
- if (order != -1) and (self._mLastCharOrder != -1):
- self._mTotalRel += 1
- if self._mTotalRel > MAX_REL_THRESHOLD:
- self._mDone = True
+ if (order != -1) and (self._last_char_order != -1):
+ self._total_rel += 1
+ if self._total_rel > self.MAX_REL_THRESHOLD:
+ self._done = True
break
- self._mRelSample[jp2CharContext[self._mLastCharOrder][order]] += 1
- self._mLastCharOrder = order
+ self._rel_sample[jp2CharContext[self._last_char_order][order]] += 1
+ self._last_char_order = order
def got_enough_data(self):
- return self._mTotalRel > ENOUGH_REL_THRESHOLD
+ return self._total_rel > self.ENOUGH_REL_THRESHOLD
def get_confidence(self):
# This is just one way to calculate confidence. It works well for me.
- if self._mTotalRel > MINIMUM_DATA_THRESHOLD:
- return (self._mTotalRel - self._mRelSample[0]) / self._mTotalRel
+ if self._total_rel > self.MINIMUM_DATA_THRESHOLD:
+ return (self._total_rel - self._rel_sample[0]) / self._total_rel
else:
- return DONT_KNOW
+ return self.DONT_KNOW
- def get_order(self, aBuf):
+ def get_order(self, byte_str):
return -1, 1
class SJISContextAnalysis(JapaneseContextAnalysis):
def __init__(self):
- self.charset_name = "SHIFT_JIS"
+ super(SJISContextAnalysis, self).__init__()
+ self._charset_name = "SHIFT_JIS"
- def get_charset_name(self):
- return self.charset_name
+ @property
+ def charset_name(self):
+ return self._charset_name
- def get_order(self, aBuf):
- if not aBuf:
+ def get_order(self, byte_str):
+ if not byte_str:
return -1, 1
# find out current char's byte length
- first_char = wrap_ord(aBuf[0])
- if ((0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC)):
- charLen = 2
+ first_char = byte_str[0]
+ if (0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC):
+ char_len = 2
if (first_char == 0x87) or (0xFA <= first_char <= 0xFC):
- self.charset_name = "CP932"
+ self._charset_name = "CP932"
else:
- charLen = 1
+ char_len = 1
# return its order if it is hiragana
- if len(aBuf) > 1:
- second_char = wrap_ord(aBuf[1])
+ if len(byte_str) > 1:
+ second_char = byte_str[1]
if (first_char == 202) and (0x9F <= second_char <= 0xF1):
- return second_char - 0x9F, charLen
+ return second_char - 0x9F, char_len
- return -1, charLen
+ return -1, char_len
class EUCJPContextAnalysis(JapaneseContextAnalysis):
- def get_order(self, aBuf):
- if not aBuf:
+ def get_order(self, byte_str):
+ if not byte_str:
return -1, 1
# find out current char's byte length
- first_char = wrap_ord(aBuf[0])
+ first_char = byte_str[0]
if (first_char == 0x8E) or (0xA1 <= first_char <= 0xFE):
- charLen = 2
+ char_len = 2
elif first_char == 0x8F:
- charLen = 3
+ char_len = 3
else:
- charLen = 1
+ char_len = 1
# return its order if it is hiragana
- if len(aBuf) > 1:
- second_char = wrap_ord(aBuf[1])
+ if len(byte_str) > 1:
+ second_char = byte_str[1]
if (first_char == 0xA4) and (0xA1 <= second_char <= 0xF3):
- return second_char - 0xA1, charLen
+ return second_char - 0xA1, char_len
+
+ return -1, char_len
- return -1, charLen
-# flake8: noqa
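
The get_order() rules above encode the byte-length layout of the two encodings. A self-contained restatement, runnable without chardet (the helper names are ours, not the library's):

def sjis_char_len(first_byte):
    # SHIFT_JIS lead bytes 0x81-0x9F and 0xE0-0xFC start 2-byte characters.
    if 0x81 <= first_byte <= 0x9F or 0xE0 <= first_byte <= 0xFC:
        return 2
    return 1

def eucjp_char_len(first_byte):
    # EUC-JP: 0x8E (half-width kana) and 0xA1-0xFE lead 2-byte characters;
    # 0x8F leads a 3-byte JIS X 0212 character.
    if first_byte == 0x8E or 0xA1 <= first_byte <= 0xFE:
        return 2
    if first_byte == 0x8F:
        return 3
    return 1

assert eucjp_char_len("ひ".encode("euc_jp")[0]) == 2    # 0xA4 lead byte
assert sjis_char_len("ひ".encode("shift_jis")[0]) == 2  # 0x82 lead byte
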
diff --git a/venv/Lib/site-packages/requests/packages/chardet/langbulgarianmodel.py b/venv/Lib/site-packages/chardet/langbulgarianmodel.py
similarity index 96%
rename from venv/Lib/site-packages/requests/packages/chardet/langbulgarianmodel.py
rename to venv/Lib/site-packages/chardet/langbulgarianmodel.py
index e5788fc64..2aa4fb2e2 100644
--- a/venv/Lib/site-packages/requests/packages/chardet/langbulgarianmodel.py
+++ b/venv/Lib/site-packages/chardet/langbulgarianmodel.py
@@ -210,20 +210,19 @@ BulgarianLangModel = (
)
Latin5BulgarianModel = {
- 'charToOrderMap': Latin5_BulgarianCharToOrderMap,
- 'precedenceMatrix': BulgarianLangModel,
- 'mTypicalPositiveRatio': 0.969392,
- 'keepEnglishLetter': False,
- 'charsetName': "ISO-8859-5"
+ 'char_to_order_map': Latin5_BulgarianCharToOrderMap,
+ 'precedence_matrix': BulgarianLangModel,
+ 'typical_positive_ratio': 0.969392,
+ 'keep_english_letter': False,
+ 'charset_name': "ISO-8859-5",
+ 'language': 'Bulgarian',
}
Win1251BulgarianModel = {
- 'charToOrderMap': win1251BulgarianCharToOrderMap,
- 'precedenceMatrix': BulgarianLangModel,
- 'mTypicalPositiveRatio': 0.969392,
- 'keepEnglishLetter': False,
- 'charsetName': "windows-1251"
+ 'char_to_order_map': win1251BulgarianCharToOrderMap,
+ 'precedence_matrix': BulgarianLangModel,
+ 'typical_positive_ratio': 0.969392,
+ 'keep_english_letter': False,
+ 'charset_name': "windows-1251",
+ 'language': 'Bulgarian',
}
-
-
-# flake8: noqa
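
The model-dict hunk just above, and every langXXXmodel.py hunk that follows, applies the same pattern: the camelCase keys become snake_case and each model gains a 'language' entry. An illustrative consumer (the real one is chardet's SingleByteCharSetProber; the dict below is abbreviated, with the two large tables stubbed out):

def describe(model):
    return "{} ({}): typical positive ratio {:.4f}".format(
        model['charset_name'], model['language'],
        model['typical_positive_ratio'])

Win1251BulgarianModelStub = {
    'char_to_order_map': (),          # 256-entry table in the real file
    'precedence_matrix': (),          # flattened 64x64 table in the real file
    'typical_positive_ratio': 0.969392,
    'keep_english_letter': False,
    'charset_name': "windows-1251",
    'language': 'Bulgarian',
}

print(describe(Win1251BulgarianModelStub))
# windows-1251 (Bulgarian): typical positive ratio 0.9694
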
diff --git a/venv/Lib/site-packages/requests/packages/chardet/langcyrillicmodel.py b/venv/Lib/site-packages/chardet/langcyrillicmodel.py
similarity index 91%
rename from venv/Lib/site-packages/requests/packages/chardet/langcyrillicmodel.py
rename to venv/Lib/site-packages/chardet/langcyrillicmodel.py
index a86f54bd5..e5f9a1fd1 100644
--- a/venv/Lib/site-packages/requests/packages/chardet/langcyrillicmodel.py
+++ b/venv/Lib/site-packages/chardet/langcyrillicmodel.py
@@ -27,7 +27,7 @@
# KOI8-R language model
# Character Mapping Table:
-KOI8R_CharToOrderMap = (
+KOI8R_char_to_order_map = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
@@ -46,7 +46,7 @@ KOI8R_CharToOrderMap = (
35, 43, 45, 32, 40, 52, 56, 33, 61, 62, 51, 57, 47, 63, 50, 70, # f0
)
-win1251_CharToOrderMap = (
+win1251_char_to_order_map = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
@@ -65,7 +65,7 @@ win1251_CharToOrderMap = (
9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,
)
-latin5_CharToOrderMap = (
+latin5_char_to_order_map = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
@@ -84,7 +84,7 @@ latin5_CharToOrderMap = (
239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255,
)
-macCyrillic_CharToOrderMap = (
+macCyrillic_char_to_order_map = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
@@ -103,7 +103,7 @@ macCyrillic_CharToOrderMap = (
9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27,255,
)
-IBM855_CharToOrderMap = (
+IBM855_char_to_order_map = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
@@ -122,7 +122,7 @@ IBM855_CharToOrderMap = (
250, 18, 62, 20, 51, 25, 57, 30, 47, 29, 63, 22, 50,251,252,255,
)
-IBM866_CharToOrderMap = (
+IBM866_char_to_order_map = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
@@ -279,51 +279,55 @@ RussianLangModel = (
)
Koi8rModel = {
- 'charToOrderMap': KOI8R_CharToOrderMap,
- 'precedenceMatrix': RussianLangModel,
- 'mTypicalPositiveRatio': 0.976601,
- 'keepEnglishLetter': False,
- 'charsetName': "KOI8-R"
+ 'char_to_order_map': KOI8R_char_to_order_map,
+ 'precedence_matrix': RussianLangModel,
+ 'typical_positive_ratio': 0.976601,
+ 'keep_english_letter': False,
+ 'charset_name': "KOI8-R",
+ 'language': 'Russian',
}
Win1251CyrillicModel = {
- 'charToOrderMap': win1251_CharToOrderMap,
- 'precedenceMatrix': RussianLangModel,
- 'mTypicalPositiveRatio': 0.976601,
- 'keepEnglishLetter': False,
- 'charsetName': "windows-1251"
+ 'char_to_order_map': win1251_char_to_order_map,
+ 'precedence_matrix': RussianLangModel,
+ 'typical_positive_ratio': 0.976601,
+ 'keep_english_letter': False,
+ 'charset_name': "windows-1251",
+ 'language': 'Russian',
}
Latin5CyrillicModel = {
- 'charToOrderMap': latin5_CharToOrderMap,
- 'precedenceMatrix': RussianLangModel,
- 'mTypicalPositiveRatio': 0.976601,
- 'keepEnglishLetter': False,
- 'charsetName': "ISO-8859-5"
+ 'char_to_order_map': latin5_char_to_order_map,
+ 'precedence_matrix': RussianLangModel,
+ 'typical_positive_ratio': 0.976601,
+ 'keep_english_letter': False,
+ 'charset_name': "ISO-8859-5",
+ 'language': 'Russian',
}
MacCyrillicModel = {
- 'charToOrderMap': macCyrillic_CharToOrderMap,
- 'precedenceMatrix': RussianLangModel,
- 'mTypicalPositiveRatio': 0.976601,
- 'keepEnglishLetter': False,
- 'charsetName': "MacCyrillic"
-};
+ 'char_to_order_map': macCyrillic_char_to_order_map,
+ 'precedence_matrix': RussianLangModel,
+ 'typical_positive_ratio': 0.976601,
+ 'keep_english_letter': False,
+ 'charset_name': "MacCyrillic",
+ 'language': 'Russian',
+}
Ibm866Model = {
- 'charToOrderMap': IBM866_CharToOrderMap,
- 'precedenceMatrix': RussianLangModel,
- 'mTypicalPositiveRatio': 0.976601,
- 'keepEnglishLetter': False,
- 'charsetName': "IBM866"
+ 'char_to_order_map': IBM866_char_to_order_map,
+ 'precedence_matrix': RussianLangModel,
+ 'typical_positive_ratio': 0.976601,
+ 'keep_english_letter': False,
+ 'charset_name': "IBM866",
+ 'language': 'Russian',
}
Ibm855Model = {
- 'charToOrderMap': IBM855_CharToOrderMap,
- 'precedenceMatrix': RussianLangModel,
- 'mTypicalPositiveRatio': 0.976601,
- 'keepEnglishLetter': False,
- 'charsetName': "IBM855"
+ 'char_to_order_map': IBM855_char_to_order_map,
+ 'precedence_matrix': RussianLangModel,
+ 'typical_positive_ratio': 0.976601,
+ 'keep_english_letter': False,
+ 'charset_name': "IBM855",
+ 'language': 'Russian',
}
-
-# flake8: noqa
diff --git a/venv/Lib/site-packages/requests/packages/chardet/langgreekmodel.py b/venv/Lib/site-packages/chardet/langgreekmodel.py
similarity index 96%
rename from venv/Lib/site-packages/requests/packages/chardet/langgreekmodel.py
rename to venv/Lib/site-packages/chardet/langgreekmodel.py
index ddb583765..533222166 100644
--- a/venv/Lib/site-packages/requests/packages/chardet/langgreekmodel.py
+++ b/venv/Lib/site-packages/chardet/langgreekmodel.py
@@ -31,7 +31,7 @@
# 252: 0 - 9
# Character Mapping Table:
-Latin7_CharToOrderMap = (
+Latin7_char_to_order_map = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
@@ -50,7 +50,7 @@ Latin7_CharToOrderMap = (
9, 8, 14, 7, 2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0
)
-win1253_CharToOrderMap = (
+win1253_char_to_order_map = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
@@ -207,19 +207,19 @@ GreekLangModel = (
)
Latin7GreekModel = {
- 'charToOrderMap': Latin7_CharToOrderMap,
- 'precedenceMatrix': GreekLangModel,
- 'mTypicalPositiveRatio': 0.982851,
- 'keepEnglishLetter': False,
- 'charsetName': "ISO-8859-7"
+ 'char_to_order_map': Latin7_char_to_order_map,
+ 'precedence_matrix': GreekLangModel,
+ 'typical_positive_ratio': 0.982851,
+ 'keep_english_letter': False,
+ 'charset_name': "ISO-8859-7",
+ 'language': 'Greek',
}
Win1253GreekModel = {
- 'charToOrderMap': win1253_CharToOrderMap,
- 'precedenceMatrix': GreekLangModel,
- 'mTypicalPositiveRatio': 0.982851,
- 'keepEnglishLetter': False,
- 'charsetName': "windows-1253"
+ 'char_to_order_map': win1253_char_to_order_map,
+ 'precedence_matrix': GreekLangModel,
+ 'typical_positive_ratio': 0.982851,
+ 'keep_english_letter': False,
+ 'charset_name': "windows-1253",
+ 'language': 'Greek',
}
-
-# flake8: noqa
diff --git a/venv/Lib/site-packages/requests/packages/chardet/langhebrewmodel.py b/venv/Lib/site-packages/chardet/langhebrewmodel.py
similarity index 97%
rename from venv/Lib/site-packages/requests/packages/chardet/langhebrewmodel.py
rename to venv/Lib/site-packages/chardet/langhebrewmodel.py
index 75f2bc7fe..58f4c875e 100644
--- a/venv/Lib/site-packages/requests/packages/chardet/langhebrewmodel.py
+++ b/venv/Lib/site-packages/chardet/langhebrewmodel.py
@@ -34,7 +34,7 @@
# Windows-1255 language model
# Character Mapping Table:
-win1255_CharToOrderMap = (
+WIN1255_CHAR_TO_ORDER_MAP = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
@@ -59,7 +59,7 @@ win1255_CharToOrderMap = (
# first 1024 sequences: 1.5981%
# rest sequences: 0.087%
# negative sequences: 0.0015%
-HebrewLangModel = (
+HEBREW_LANG_MODEL = (
0,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,3,2,1,2,0,1,0,0,
3,0,3,1,0,0,1,3,2,0,1,1,2,0,2,2,2,1,1,1,1,2,1,1,1,2,0,0,2,2,0,1,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,
@@ -191,11 +191,10 @@ HebrewLangModel = (
)
Win1255HebrewModel = {
- 'charToOrderMap': win1255_CharToOrderMap,
- 'precedenceMatrix': HebrewLangModel,
- 'mTypicalPositiveRatio': 0.984004,
- 'keepEnglishLetter': False,
- 'charsetName': "windows-1255"
+ 'char_to_order_map': WIN1255_CHAR_TO_ORDER_MAP,
+ 'precedence_matrix': HEBREW_LANG_MODEL,
+ 'typical_positive_ratio': 0.984004,
+ 'keep_english_letter': False,
+ 'charset_name': "windows-1255",
+ 'language': 'Hebrew',
}
-
-# flake8: noqa
diff --git a/venv/Lib/site-packages/requests/packages/chardet/langhungarianmodel.py b/venv/Lib/site-packages/chardet/langhungarianmodel.py
similarity index 96%
rename from venv/Lib/site-packages/requests/packages/chardet/langhungarianmodel.py
rename to venv/Lib/site-packages/chardet/langhungarianmodel.py
index 49d2f0fe7..bb7c095e1 100644
--- a/venv/Lib/site-packages/requests/packages/chardet/langhungarianmodel.py
+++ b/venv/Lib/site-packages/chardet/langhungarianmodel.py
@@ -207,19 +207,19 @@ HungarianLangModel = (
)
Latin2HungarianModel = {
- 'charToOrderMap': Latin2_HungarianCharToOrderMap,
- 'precedenceMatrix': HungarianLangModel,
- 'mTypicalPositiveRatio': 0.947368,
- 'keepEnglishLetter': True,
- 'charsetName': "ISO-8859-2"
+ 'char_to_order_map': Latin2_HungarianCharToOrderMap,
+ 'precedence_matrix': HungarianLangModel,
+ 'typical_positive_ratio': 0.947368,
+ 'keep_english_letter': True,
+ 'charset_name': "ISO-8859-2",
+ 'language': 'Hungarian',
}
Win1250HungarianModel = {
- 'charToOrderMap': win1250HungarianCharToOrderMap,
- 'precedenceMatrix': HungarianLangModel,
- 'mTypicalPositiveRatio': 0.947368,
- 'keepEnglishLetter': True,
- 'charsetName': "windows-1250"
+ 'char_to_order_map': win1250HungarianCharToOrderMap,
+ 'precedence_matrix': HungarianLangModel,
+ 'typical_positive_ratio': 0.947368,
+ 'keep_english_letter': True,
+ 'charset_name': "windows-1250",
+ 'language': 'Hungarian',
}
-
-# flake8: noqa
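
Hungarian is one of the few models shipped with keep_english_letter set to True: its sequence model was trained with Latin letters left in place, so the prober must not strip ASCII letters before scoring. A toy pre-filter that mimics the idea; the real logic lives in chardet's filter_with_english_letters / filter_international_words and differs in detail:

def prefilter(byte_str, keep_english_letter):
    out = bytearray()
    for b in byte_str:
        is_ascii_letter = 0x41 <= b <= 0x5A or 0x61 <= b <= 0x7A
        if b >= 0x80 or (keep_english_letter and is_ascii_letter):
            out.append(b)             # keep high bytes, maybe ASCII letters
        else:
            out.append(0x20)          # everything else becomes a delimiter
    return bytes(out)

print(prefilter(b"abc\xe9def", keep_english_letter=True))   # b'abc\xe9def'
print(prefilter(b"abc\xe9def", keep_english_letter=False))  # b'   \xe9   '
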
diff --git a/venv/Lib/site-packages/requests/packages/chardet/langthaimodel.py b/venv/Lib/site-packages/chardet/langthaimodel.py
similarity index 98%
rename from venv/Lib/site-packages/requests/packages/chardet/langthaimodel.py
rename to venv/Lib/site-packages/chardet/langthaimodel.py
index 0508b1b1a..15f94c2df 100644
--- a/venv/Lib/site-packages/requests/packages/chardet/langthaimodel.py
+++ b/venv/Lib/site-packages/chardet/langthaimodel.py
@@ -190,11 +190,10 @@ ThaiLangModel = (
)
TIS620ThaiModel = {
- 'charToOrderMap': TIS620CharToOrderMap,
- 'precedenceMatrix': ThaiLangModel,
- 'mTypicalPositiveRatio': 0.926386,
- 'keepEnglishLetter': False,
- 'charsetName': "TIS-620"
+ 'char_to_order_map': TIS620CharToOrderMap,
+ 'precedence_matrix': ThaiLangModel,
+ 'typical_positive_ratio': 0.926386,
+ 'keep_english_letter': False,
+ 'charset_name': "TIS-620",
+ 'language': 'Thai',
}
-
-# flake8: noqa
diff --git a/venv/Lib/site-packages/chardet/langturkishmodel.py b/venv/Lib/site-packages/chardet/langturkishmodel.py
new file mode 100644
index 000000000..a427a4573
--- /dev/null
+++ b/venv/Lib/site-packages/chardet/langturkishmodel.py
@@ -0,0 +1,193 @@
+# -*- coding: utf-8 -*-
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+# Özgür Baskın - Turkish Language Model
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# 255: Control characters that usually do not exist in any text
+# 254: Carriage/Return
+# 253: symbol (punctuation) that does not belong to word
+# 252: 0 - 9
+
+# Character Mapping Table:
+Latin5_TurkishCharToOrderMap = (
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,
+255, 23, 37, 47, 39, 29, 52, 36, 45, 53, 60, 16, 49, 20, 46, 42,
+ 48, 69, 44, 35, 31, 51, 38, 62, 65, 43, 56,255,255,255,255,255,
+255, 1, 21, 28, 12, 2, 18, 27, 25, 3, 24, 10, 5, 13, 4, 15,
+ 26, 64, 7, 8, 9, 14, 32, 57, 58, 11, 22,255,255,255,255,255,
+180,179,178,177,176,175,174,173,172,171,170,169,168,167,166,165,
+164,163,162,161,160,159,101,158,157,156,155,154,153,152,151,106,
+150,149,148,147,146,145,144,100,143,142,141,140,139,138,137,136,
+ 94, 80, 93,135,105,134,133, 63,132,131,130,129,128,127,126,125,
+124,104, 73, 99, 79, 85,123, 54,122, 98, 92,121,120, 91,103,119,
+ 68,118,117, 97,116,115, 50, 90,114,113,112,111, 55, 41, 40, 86,
+ 89, 70, 59, 78, 71, 82, 88, 33, 77, 66, 84, 83,110, 75, 61, 96,
+ 30, 67,109, 74, 87,102, 34, 95, 81,108, 76, 72, 17, 6, 19,107,
+)
+
+TurkishLangModel = (
+3,2,3,3,3,1,3,3,3,3,3,3,3,3,2,1,1,3,3,1,3,3,0,3,3,3,3,3,0,3,1,3,
+3,2,1,0,0,1,1,0,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,2,2,0,0,1,0,0,1,
+3,2,2,3,3,0,3,3,3,3,3,3,3,2,3,1,0,3,3,1,3,3,0,3,3,3,3,3,0,3,0,3,
+3,1,1,0,1,0,1,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,2,2,0,0,0,1,0,1,
+3,3,2,3,3,0,3,3,3,3,3,3,3,2,3,1,1,3,3,0,3,3,1,2,3,3,3,3,0,3,0,3,
+3,1,1,0,0,0,1,0,0,0,0,1,1,0,1,2,1,0,0,0,1,0,0,0,0,2,0,0,0,0,0,1,
+3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,1,3,3,2,0,3,2,1,2,2,1,3,3,0,0,0,2,
+2,2,0,1,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,1,0,0,1,
+3,3,3,2,3,3,1,2,3,3,3,3,3,3,3,1,3,2,1,0,3,2,0,1,2,3,3,2,1,0,0,2,
+2,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,2,0,0,0,
+1,0,1,3,3,1,3,3,3,3,3,3,3,1,2,0,0,2,3,0,2,3,0,0,2,2,2,3,0,3,0,1,
+2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,0,3,2,0,2,3,2,3,3,1,0,0,2,
+3,2,0,0,1,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,1,1,1,0,2,0,0,1,
+3,3,3,2,3,3,2,3,3,3,3,2,3,3,3,0,3,3,0,0,2,1,0,0,2,3,2,2,0,0,0,2,
+2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,0,1,0,2,0,0,1,
+3,3,3,2,3,3,3,3,3,3,3,2,3,3,3,0,3,2,0,1,3,2,1,1,3,2,3,2,1,0,0,2,
+2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,
+3,3,3,2,3,3,3,3,3,3,3,2,3,3,3,0,3,2,2,0,2,3,0,0,2,2,2,2,0,0,0,2,
+3,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,1,0,0,0,
+3,3,3,3,3,3,3,2,2,2,2,3,2,3,3,0,3,3,1,1,2,2,0,0,2,2,3,2,0,0,1,3,
+0,3,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,
+3,3,3,2,3,3,3,2,1,2,2,3,2,3,3,0,3,2,0,0,1,1,0,1,1,2,1,2,0,0,0,1,
+0,3,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0,
+3,3,3,2,3,3,2,3,2,2,2,3,3,3,3,1,3,1,1,0,3,2,1,1,3,3,2,3,1,0,0,1,
+1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,0,1,
+3,2,2,3,3,0,3,3,3,3,3,3,3,2,2,1,0,3,3,1,3,3,0,1,3,3,2,3,0,3,0,3,
+2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,
+2,2,2,3,3,0,3,3,3,3,3,3,3,3,3,0,0,3,2,0,3,3,0,3,2,3,3,3,0,3,1,3,
+2,0,0,0,0,0,0,0,0,0,0,1,0,1,2,0,1,0,0,0,0,0,0,0,2,2,0,0,1,0,0,1,
+3,3,3,1,2,3,3,1,0,0,1,0,0,3,3,2,3,0,0,2,0,0,2,0,2,0,0,0,2,0,2,0,
+0,3,1,0,1,0,0,0,2,2,1,0,1,1,2,1,2,2,2,0,2,1,1,0,0,0,2,0,0,0,0,0,
+1,2,1,3,3,0,3,3,3,3,3,2,3,0,0,0,0,2,3,0,2,3,1,0,2,3,1,3,0,3,0,2,
+3,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,1,3,3,2,2,3,2,2,0,1,2,3,0,1,2,1,0,1,0,0,0,1,0,2,2,0,0,0,1,
+1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,
+3,3,3,1,3,3,1,1,3,3,1,1,3,3,1,0,2,1,2,0,2,1,0,0,1,1,2,1,0,0,0,2,
+2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,1,0,2,1,3,0,0,2,0,0,3,3,0,3,0,0,1,0,1,2,0,0,1,1,2,2,0,1,0,
+0,1,2,1,1,0,1,0,1,1,1,1,1,0,1,1,1,2,2,1,2,0,1,0,0,0,0,0,0,1,0,0,
+3,3,3,2,3,2,3,3,0,2,2,2,3,3,3,0,3,0,0,0,2,2,0,1,2,1,1,1,0,0,0,1,
+0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,
+3,3,3,3,3,3,2,1,2,2,3,3,3,3,2,0,2,0,0,0,2,2,0,0,2,1,3,3,0,0,1,1,
+1,1,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,
+1,1,2,3,3,0,3,3,3,3,3,3,2,2,0,2,0,2,3,2,3,2,2,2,2,2,2,2,1,3,2,3,
+2,0,2,1,2,2,2,2,1,1,2,2,1,2,2,1,2,0,0,2,1,1,0,2,1,0,0,1,0,0,0,1,
+2,3,3,1,1,1,0,1,1,1,2,3,2,1,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,
+0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,2,2,2,3,2,3,2,2,1,3,3,3,0,2,1,2,0,2,1,0,0,1,1,1,1,1,0,0,1,
+2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,2,0,1,0,0,0,
+3,3,3,2,3,3,3,3,3,2,3,1,2,3,3,1,2,0,0,0,0,0,0,0,3,2,1,1,0,0,0,0,
+2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,
+3,3,3,2,2,3,3,2,1,1,1,1,1,3,3,0,3,1,0,0,1,1,0,0,3,1,2,1,0,0,0,0,
+0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,
+3,3,3,2,2,3,2,2,2,3,2,1,1,3,3,0,3,0,0,0,0,1,0,0,3,1,1,2,0,0,0,1,
+1,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+1,1,1,3,3,0,3,3,3,3,3,2,2,2,1,2,0,2,1,2,2,1,1,0,1,2,2,2,2,2,2,2,
+0,0,2,1,2,1,2,1,0,1,1,3,1,2,1,1,2,0,0,2,0,1,0,1,0,1,0,0,0,1,0,1,
+3,3,3,1,3,3,3,0,1,1,0,2,2,3,1,0,3,0,0,0,1,0,0,0,1,0,0,1,0,1,0,0,
+1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,2,0,0,2,2,1,0,0,1,0,0,3,3,1,3,0,0,1,1,0,2,0,3,0,0,0,2,0,1,1,
+0,1,2,0,1,2,2,0,2,2,2,2,1,0,2,1,1,0,2,0,2,1,2,0,0,0,0,0,0,0,0,0,
+3,3,3,1,3,2,3,2,0,2,2,2,1,3,2,0,2,1,2,0,1,2,0,0,1,0,2,2,0,0,0,2,
+1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,
+3,3,3,0,3,3,1,1,2,3,1,0,3,2,3,0,3,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,
+1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,3,3,0,3,3,2,3,3,2,2,0,0,0,0,1,2,0,1,3,0,0,0,3,1,1,0,3,0,2,
+2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,1,2,2,1,0,3,1,1,1,1,3,3,2,3,0,0,1,0,1,2,0,2,2,0,2,2,0,2,1,
+0,2,2,1,1,1,1,0,2,1,1,0,1,1,1,1,2,1,2,1,2,0,1,0,1,0,0,0,0,0,0,0,
+3,3,3,0,1,1,3,0,0,1,1,0,0,2,2,0,3,0,0,1,1,0,1,0,0,0,0,0,2,0,0,0,
+0,3,1,0,1,0,1,0,2,0,0,1,0,1,0,1,1,1,2,1,1,0,2,0,0,0,0,0,0,0,0,0,
+3,3,3,0,2,0,2,0,1,1,1,0,0,3,3,0,2,0,0,1,0,0,2,1,1,0,1,0,1,0,1,0,
+0,2,0,1,2,0,2,0,2,1,1,0,1,0,2,1,1,0,2,1,1,0,1,0,0,0,1,1,0,0,0,0,
+3,2,3,0,1,0,0,0,0,0,0,0,0,1,2,0,1,0,0,1,0,0,1,0,0,0,0,0,2,0,0,0,
+0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,2,1,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,0,0,2,3,0,0,1,0,1,0,2,3,2,3,0,0,1,3,0,2,1,0,0,0,0,2,0,1,0,
+0,2,1,0,0,1,1,0,2,1,0,0,1,0,0,1,1,0,1,1,2,0,1,0,0,0,0,1,0,0,0,0,
+3,2,2,0,0,1,1,0,0,0,0,0,0,3,1,1,1,0,0,0,0,0,1,0,0,0,0,0,2,0,1,0,
+0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,
+0,0,0,3,3,0,2,3,2,2,1,2,2,1,1,2,0,1,3,2,2,2,0,0,2,2,0,0,0,1,2,1,
+3,0,2,1,1,0,1,1,1,0,1,2,2,2,1,1,2,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,
+0,1,1,2,3,0,3,3,3,2,2,2,2,1,0,1,0,1,0,1,2,2,0,0,2,2,1,3,1,1,2,1,
+0,0,1,1,2,0,1,1,0,0,1,2,0,2,1,1,2,0,0,1,0,0,0,1,0,1,0,1,0,0,0,0,
+3,3,2,0,0,3,1,0,0,0,0,0,0,3,2,1,2,0,0,1,0,0,2,0,0,0,0,0,2,0,1,0,
+0,2,1,1,0,0,1,0,1,2,0,0,1,1,0,0,2,1,1,1,1,0,2,0,0,0,0,0,0,0,0,0,
+3,3,2,0,0,1,0,0,0,0,1,0,0,3,3,2,2,0,0,1,0,0,2,0,1,0,0,0,2,0,1,0,
+0,0,1,1,0,0,2,0,2,1,0,0,1,1,2,1,2,0,2,1,2,1,1,1,0,0,1,1,0,0,0,0,
+3,3,2,0,0,2,2,0,0,0,1,1,0,2,2,1,3,1,0,1,0,1,2,0,0,0,0,0,1,0,1,0,
+0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,2,0,0,0,1,0,0,1,0,0,2,3,1,2,0,0,1,0,0,2,0,0,0,1,0,2,0,2,0,
+0,1,1,2,2,1,2,0,2,1,1,0,0,1,1,0,1,1,1,1,2,1,1,0,0,0,0,0,0,0,0,0,
+3,3,3,0,2,1,2,1,0,0,1,1,0,3,3,1,2,0,0,1,0,0,2,0,2,0,1,1,2,0,0,0,
+0,0,1,1,1,1,2,0,1,1,0,1,1,1,1,0,0,0,1,1,1,0,1,0,0,0,1,0,0,0,0,0,
+3,3,3,0,2,2,3,2,0,0,1,0,0,2,3,1,0,0,0,0,0,0,2,0,2,0,0,0,2,0,0,0,
+0,1,1,0,0,0,1,0,0,1,0,1,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,
+3,2,3,0,0,0,0,0,0,0,1,0,0,2,2,2,2,0,0,1,0,0,2,0,0,0,0,0,2,0,1,0,
+0,0,2,1,1,0,1,0,2,1,1,0,0,1,1,2,1,0,2,0,2,0,1,0,0,0,2,0,0,0,0,0,
+0,0,0,2,2,0,2,1,1,1,1,2,2,0,0,1,0,1,0,0,1,3,0,0,0,0,1,0,0,2,1,0,
+0,0,1,0,1,0,0,0,0,0,2,1,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,
+2,0,0,2,3,0,2,3,1,2,2,0,2,0,0,2,0,2,1,1,1,2,1,0,0,1,2,1,1,2,1,0,
+1,0,2,0,1,0,1,1,0,0,2,2,1,2,1,1,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,0,2,1,2,0,0,0,1,0,0,3,2,0,1,0,0,1,0,0,2,0,0,0,1,2,1,0,1,0,
+0,0,0,0,1,0,1,0,0,1,0,0,0,0,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0,
+0,0,0,2,2,0,2,2,1,1,0,1,1,1,1,1,0,0,1,2,1,1,1,0,1,0,0,0,1,1,1,1,
+0,0,2,1,0,1,1,1,0,1,1,2,1,2,1,1,2,0,1,1,2,1,0,2,0,0,0,0,0,0,0,0,
+3,2,2,0,0,2,0,0,0,0,0,0,0,2,2,0,2,0,0,1,0,0,2,0,0,0,0,0,2,0,0,0,
+0,2,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,
+0,0,0,3,2,0,2,2,0,1,1,0,1,0,0,1,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,0,
+2,0,1,0,1,0,1,1,0,0,1,2,0,1,0,1,1,0,0,1,0,1,0,2,0,0,0,0,0,0,0,0,
+2,2,2,0,1,1,0,0,0,1,0,0,0,1,2,0,1,0,0,1,0,0,1,0,0,0,0,1,2,0,1,0,
+0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,
+2,2,2,2,1,0,1,1,1,0,0,0,0,1,2,0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
+1,1,2,0,1,0,0,0,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,1,
+0,0,1,2,2,0,2,1,2,1,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,0,0,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,
+2,2,2,0,0,0,1,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
+0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,2,2,0,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,1,0,0,0,0,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+)
+
+Latin5TurkishModel = {
+ 'char_to_order_map': Latin5_TurkishCharToOrderMap,
+ 'precedence_matrix': TurkishLangModel,
+ 'typical_positive_ratio': 0.970290,
+ 'keep_english_letter': True,
+ 'charset_name': "ISO-8859-9",
+ 'language': 'Turkish',
+}
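+
+# A minimal usage sketch (illustrative only, not part of the upstream
+# module; assumes the package is importable as ``chardet``):
+#
+#     from chardet.sbcharsetprober import SingleByteCharSetProber
+#     from chardet.langturkishmodel import Latin5TurkishModel
+#
+#     prober = SingleByteCharSetProber(Latin5TurkishModel)
+#     prober.feed('değerlendirme raporu'.encode('iso-8859-9'))
+#     print(prober.charset_name, prober.get_confidence())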
diff --git a/venv/Lib/site-packages/requests/packages/chardet/latin1prober.py b/venv/Lib/site-packages/chardet/latin1prober.py
similarity index 81%
rename from venv/Lib/site-packages/requests/packages/chardet/latin1prober.py
rename to venv/Lib/site-packages/chardet/latin1prober.py
index eef357354..7d1e8c20f 100644
--- a/venv/Lib/site-packages/requests/packages/chardet/latin1prober.py
+++ b/venv/Lib/site-packages/chardet/latin1prober.py
@@ -27,8 +27,7 @@
######################### END LICENSE BLOCK #########################
from .charsetprober import CharSetProber
-from .constants import eNotMe
-from .compat import wrap_ord
+from .enums import ProbingState
FREQ_CAT_NUM = 4
@@ -82,7 +81,7 @@ Latin1_CharToClass = (
# 2 : normal
# 3 : very likely
Latin1ClassModel = (
- # UDF OTH ASC ASS ACV ACO ASV ASO
+# UDF OTH ASC ASS ACV ACO ASV ASO
0, 0, 0, 0, 0, 0, 0, 0, # UDF
0, 3, 3, 3, 3, 3, 3, 3, # OTH
0, 3, 3, 3, 3, 3, 3, 3, # ASC
@@ -96,40 +95,47 @@ Latin1ClassModel = (
class Latin1Prober(CharSetProber):
def __init__(self):
- CharSetProber.__init__(self)
+ super(Latin1Prober, self).__init__()
+ self._last_char_class = None
+ self._freq_counter = None
self.reset()
def reset(self):
- self._mLastCharClass = OTH
- self._mFreqCounter = [0] * FREQ_CAT_NUM
+ self._last_char_class = OTH
+ self._freq_counter = [0] * FREQ_CAT_NUM
CharSetProber.reset(self)
- def get_charset_name(self):
- return "windows-1252"
+ @property
+ def charset_name(self):
+ return "ISO-8859-1"
- def feed(self, aBuf):
- aBuf = self.filter_with_english_letters(aBuf)
- for c in aBuf:
- charClass = Latin1_CharToClass[wrap_ord(c)]
- freq = Latin1ClassModel[(self._mLastCharClass * CLASS_NUM)
- + charClass]
+ @property
+ def language(self):
+ return ""
+
+ def feed(self, byte_str):
+ byte_str = self.filter_with_english_letters(byte_str)
+ for c in byte_str:
+ char_class = Latin1_CharToClass[c]
+ freq = Latin1ClassModel[(self._last_char_class * CLASS_NUM)
+ + char_class]
if freq == 0:
- self._mState = eNotMe
+ self._state = ProbingState.NOT_ME
break
- self._mFreqCounter[freq] += 1
- self._mLastCharClass = charClass
+ self._freq_counter[freq] += 1
+ self._last_char_class = char_class
- return self.get_state()
+ return self.state
def get_confidence(self):
- if self.get_state() == eNotMe:
+ if self.state == ProbingState.NOT_ME:
return 0.01
- total = sum(self._mFreqCounter)
+ total = sum(self._freq_counter)
if total < 0.01:
confidence = 0.0
else:
- confidence = ((self._mFreqCounter[3] - self._mFreqCounter[1] * 20.0)
+ confidence = ((self._freq_counter[3] - self._freq_counter[1] * 20.0)
/ total)
if confidence < 0.0:
confidence = 0.0
diff --git a/venv/Lib/site-packages/chardet/mbcharsetprober.py b/venv/Lib/site-packages/chardet/mbcharsetprober.py
new file mode 100644
index 000000000..6256ecfd1
--- /dev/null
+++ b/venv/Lib/site-packages/chardet/mbcharsetprober.py
@@ -0,0 +1,91 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+# Shy Shalom - original C code
+# Proofpoint, Inc.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .charsetprober import CharSetProber
+from .enums import ProbingState, MachineState
+
+
+class MultiByteCharSetProber(CharSetProber):
+ """
+ MultiByteCharSetProber
+ """
+
+ def __init__(self, lang_filter=None):
+ super(MultiByteCharSetProber, self).__init__(lang_filter=lang_filter)
+ self.distribution_analyzer = None
+ self.coding_sm = None
+ self._last_char = [0, 0]
+
+ def reset(self):
+ super(MultiByteCharSetProber, self).reset()
+ if self.coding_sm:
+ self.coding_sm.reset()
+ if self.distribution_analyzer:
+ self.distribution_analyzer.reset()
+ self._last_char = [0, 0]
+
+ @property
+ def charset_name(self):
+ raise NotImplementedError
+
+ @property
+ def language(self):
+ raise NotImplementedError
+
+ def feed(self, byte_str):
+ for i in range(len(byte_str)):
+ coding_state = self.coding_sm.next_state(byte_str[i])
+ if coding_state == MachineState.ERROR:
+ self.logger.debug('%s %s prober hit error at byte %s',
+ self.charset_name, self.language, i)
+ self._state = ProbingState.NOT_ME
+ break
+ elif coding_state == MachineState.ITS_ME:
+ self._state = ProbingState.FOUND_IT
+ break
+ elif coding_state == MachineState.START:
+ char_len = self.coding_sm.get_current_charlen()
+ if i == 0:
+ self._last_char[1] = byte_str[0]
+ self.distribution_analyzer.feed(self._last_char, char_len)
+ else:
+ self.distribution_analyzer.feed(byte_str[i - 1:i + 1],
+ char_len)
+
+ self._last_char[0] = byte_str[-1]
+
+ if self.state == ProbingState.DETECTING:
+ if (self.distribution_analyzer.got_enough_data() and
+ (self.get_confidence() > self.SHORTCUT_THRESHOLD)):
+ self._state = ProbingState.FOUND_IT
+
+ return self.state
+
+ def get_confidence(self):
+ return self.distribution_analyzer.get_confidence()
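+
+# A minimal usage sketch (illustrative only): concrete subclasses such as
+# Big5Prober wire a CodingStateMachine and a distribution analyzer into
+# this base class, e.g.:
+#
+#     from chardet.big5prober import Big5Prober
+#
+#     prober = Big5Prober()
+#     prober.feed('中文字'.encode('big5'))
+#     print(prober.charset_name, prober.get_confidence())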
diff --git a/venv/Lib/site-packages/requests/packages/chardet/mbcsgroupprober.py b/venv/Lib/site-packages/chardet/mbcsgroupprober.py
similarity index 93%
rename from venv/Lib/site-packages/requests/packages/chardet/mbcsgroupprober.py
rename to venv/Lib/site-packages/chardet/mbcsgroupprober.py
index 03c9dcf3e..530abe75e 100644
--- a/venv/Lib/site-packages/requests/packages/chardet/mbcsgroupprober.py
+++ b/venv/Lib/site-packages/chardet/mbcsgroupprober.py
@@ -39,9 +39,9 @@ from .euctwprober import EUCTWProber
class MBCSGroupProber(CharSetGroupProber):
- def __init__(self):
- CharSetGroupProber.__init__(self)
- self._mProbers = [
+ def __init__(self, lang_filter=None):
+ super(MBCSGroupProber, self).__init__(lang_filter=lang_filter)
+ self.probers = [
UTF8Prober(),
SJISProber(),
EUCJPProber(),
diff --git a/venv/Lib/site-packages/chardet/mbcssm.py b/venv/Lib/site-packages/chardet/mbcssm.py
new file mode 100644
index 000000000..8360d0f28
--- /dev/null
+++ b/venv/Lib/site-packages/chardet/mbcssm.py
@@ -0,0 +1,572 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .enums import MachineState
+
+# BIG5
+
+BIG5_CLS = (
+ 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as legal value
+ 1,1,1,1,1,1,0,0, # 08 - 0f
+ 1,1,1,1,1,1,1,1, # 10 - 17
+ 1,1,1,0,1,1,1,1, # 18 - 1f
+ 1,1,1,1,1,1,1,1, # 20 - 27
+ 1,1,1,1,1,1,1,1, # 28 - 2f
+ 1,1,1,1,1,1,1,1, # 30 - 37
+ 1,1,1,1,1,1,1,1, # 38 - 3f
+ 2,2,2,2,2,2,2,2, # 40 - 47
+ 2,2,2,2,2,2,2,2, # 48 - 4f
+ 2,2,2,2,2,2,2,2, # 50 - 57
+ 2,2,2,2,2,2,2,2, # 58 - 5f
+ 2,2,2,2,2,2,2,2, # 60 - 67
+ 2,2,2,2,2,2,2,2, # 68 - 6f
+ 2,2,2,2,2,2,2,2, # 70 - 77
+ 2,2,2,2,2,2,2,1, # 78 - 7f
+ 4,4,4,4,4,4,4,4, # 80 - 87
+ 4,4,4,4,4,4,4,4, # 88 - 8f
+ 4,4,4,4,4,4,4,4, # 90 - 97
+ 4,4,4,4,4,4,4,4, # 98 - 9f
+ 4,3,3,3,3,3,3,3, # a0 - a7
+ 3,3,3,3,3,3,3,3, # a8 - af
+ 3,3,3,3,3,3,3,3, # b0 - b7
+ 3,3,3,3,3,3,3,3, # b8 - bf
+ 3,3,3,3,3,3,3,3, # c0 - c7
+ 3,3,3,3,3,3,3,3, # c8 - cf
+ 3,3,3,3,3,3,3,3, # d0 - d7
+ 3,3,3,3,3,3,3,3, # d8 - df
+ 3,3,3,3,3,3,3,3, # e0 - e7
+ 3,3,3,3,3,3,3,3, # e8 - ef
+ 3,3,3,3,3,3,3,3, # f0 - f7
+ 3,3,3,3,3,3,3,0 # f8 - ff
+)
+
+BIG5_ST = (
+ MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07
+ MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,#08-0f
+ MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START#10-17
+)
+
+BIG5_CHAR_LEN_TABLE = (0, 1, 1, 2, 0)
+
+BIG5_SM_MODEL = {'class_table': BIG5_CLS,
+ 'class_factor': 5,
+ 'state_table': BIG5_ST,
+ 'char_len_table': BIG5_CHAR_LEN_TABLE,
+ 'name': 'Big5'}
+
+# CP949
+
+CP949_CLS = (
+ 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,0,0, # 00 - 0f
+ 1,1,1,1,1,1,1,1, 1,1,1,0,1,1,1,1, # 10 - 1f
+ 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 20 - 2f
+ 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 30 - 3f
+ 1,4,4,4,4,4,4,4, 4,4,4,4,4,4,4,4, # 40 - 4f
+ 4,4,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 50 - 5f
+ 1,5,5,5,5,5,5,5, 5,5,5,5,5,5,5,5, # 60 - 6f
+ 5,5,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 70 - 7f
+ 0,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 80 - 8f
+ 6,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 90 - 9f
+ 6,7,7,7,7,7,7,7, 7,7,7,7,7,8,8,8, # a0 - af
+ 7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7, # b0 - bf
+ 7,7,7,7,7,7,9,2, 2,3,2,2,2,2,2,2, # c0 - cf
+ 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # d0 - df
+ 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # e0 - ef
+ 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,0, # f0 - ff
+)
+
+CP949_ST = (
+#cls= 0 1 2 3 4 5 6 7 8 9 # previous state =
+ MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START, 4, 5,MachineState.ERROR, 6, # MachineState.START
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, # MachineState.ERROR
+ MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME, # MachineState.ITS_ME
+ MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 3
+ MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 4
+ MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 5
+ MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 6
+)
+
+CP949_CHAR_LEN_TABLE = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2)
+
+CP949_SM_MODEL = {'class_table': CP949_CLS,
+ 'class_factor': 10,
+ 'state_table': CP949_ST,
+ 'char_len_table': CP949_CHAR_LEN_TABLE,
+ 'name': 'CP949'}
+
+# EUC-JP
+
+EUCJP_CLS = (
+ 4,4,4,4,4,4,4,4, # 00 - 07
+ 4,4,4,4,4,4,5,5, # 08 - 0f
+ 4,4,4,4,4,4,4,4, # 10 - 17
+ 4,4,4,5,4,4,4,4, # 18 - 1f
+ 4,4,4,4,4,4,4,4, # 20 - 27
+ 4,4,4,4,4,4,4,4, # 28 - 2f
+ 4,4,4,4,4,4,4,4, # 30 - 37
+ 4,4,4,4,4,4,4,4, # 38 - 3f
+ 4,4,4,4,4,4,4,4, # 40 - 47
+ 4,4,4,4,4,4,4,4, # 48 - 4f
+ 4,4,4,4,4,4,4,4, # 50 - 57
+ 4,4,4,4,4,4,4,4, # 58 - 5f
+ 4,4,4,4,4,4,4,4, # 60 - 67
+ 4,4,4,4,4,4,4,4, # 68 - 6f
+ 4,4,4,4,4,4,4,4, # 70 - 77
+ 4,4,4,4,4,4,4,4, # 78 - 7f
+ 5,5,5,5,5,5,5,5, # 80 - 87
+ 5,5,5,5,5,5,1,3, # 88 - 8f
+ 5,5,5,5,5,5,5,5, # 90 - 97
+ 5,5,5,5,5,5,5,5, # 98 - 9f
+ 5,2,2,2,2,2,2,2, # a0 - a7
+ 2,2,2,2,2,2,2,2, # a8 - af
+ 2,2,2,2,2,2,2,2, # b0 - b7
+ 2,2,2,2,2,2,2,2, # b8 - bf
+ 2,2,2,2,2,2,2,2, # c0 - c7
+ 2,2,2,2,2,2,2,2, # c8 - cf
+ 2,2,2,2,2,2,2,2, # d0 - d7
+ 2,2,2,2,2,2,2,2, # d8 - df
+ 0,0,0,0,0,0,0,0, # e0 - e7
+ 0,0,0,0,0,0,0,0, # e8 - ef
+ 0,0,0,0,0,0,0,0, # f0 - f7
+ 0,0,0,0,0,0,0,5 # f8 - ff
+)
+
+EUCJP_ST = (
+ 3, 4, 3, 5,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f
+ MachineState.ITS_ME,MachineState.ITS_ME,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17
+ MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 3,MachineState.ERROR,#18-1f
+ 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START#20-27
+)
+
+EUCJP_CHAR_LEN_TABLE = (2, 2, 2, 3, 1, 0)
+
+EUCJP_SM_MODEL = {'class_table': EUCJP_CLS,
+ 'class_factor': 6,
+ 'state_table': EUCJP_ST,
+ 'char_len_table': EUCJP_CHAR_LEN_TABLE,
+ 'name': 'EUC-JP'}
+
+# EUC-KR
+
+EUCKR_CLS = (
+ 1,1,1,1,1,1,1,1, # 00 - 07
+ 1,1,1,1,1,1,0,0, # 08 - 0f
+ 1,1,1,1,1,1,1,1, # 10 - 17
+ 1,1,1,0,1,1,1,1, # 18 - 1f
+ 1,1,1,1,1,1,1,1, # 20 - 27
+ 1,1,1,1,1,1,1,1, # 28 - 2f
+ 1,1,1,1,1,1,1,1, # 30 - 37
+ 1,1,1,1,1,1,1,1, # 38 - 3f
+ 1,1,1,1,1,1,1,1, # 40 - 47
+ 1,1,1,1,1,1,1,1, # 48 - 4f
+ 1,1,1,1,1,1,1,1, # 50 - 57
+ 1,1,1,1,1,1,1,1, # 58 - 5f
+ 1,1,1,1,1,1,1,1, # 60 - 67
+ 1,1,1,1,1,1,1,1, # 68 - 6f
+ 1,1,1,1,1,1,1,1, # 70 - 77
+ 1,1,1,1,1,1,1,1, # 78 - 7f
+ 0,0,0,0,0,0,0,0, # 80 - 87
+ 0,0,0,0,0,0,0,0, # 88 - 8f
+ 0,0,0,0,0,0,0,0, # 90 - 97
+ 0,0,0,0,0,0,0,0, # 98 - 9f
+ 0,2,2,2,2,2,2,2, # a0 - a7
+ 2,2,2,2,2,3,3,3, # a8 - af
+ 2,2,2,2,2,2,2,2, # b0 - b7
+ 2,2,2,2,2,2,2,2, # b8 - bf
+ 2,2,2,2,2,2,2,2, # c0 - c7
+ 2,3,2,2,2,2,2,2, # c8 - cf
+ 2,2,2,2,2,2,2,2, # d0 - d7
+ 2,2,2,2,2,2,2,2, # d8 - df
+ 2,2,2,2,2,2,2,2, # e0 - e7
+ 2,2,2,2,2,2,2,2, # e8 - ef
+ 2,2,2,2,2,2,2,2, # f0 - f7
+ 2,2,2,2,2,2,2,0 # f8 - ff
+)
+
+EUCKR_ST = (
+ MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07
+ MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #08-0f
+)
+
+EUCKR_CHAR_LEN_TABLE = (0, 1, 2, 0)
+
+EUCKR_SM_MODEL = {'class_table': EUCKR_CLS,
+ 'class_factor': 4,
+ 'state_table': EUCKR_ST,
+ 'char_len_table': EUCKR_CHAR_LEN_TABLE,
+ 'name': 'EUC-KR'}
+
+# EUC-TW
+
+EUCTW_CLS = (
+ 2,2,2,2,2,2,2,2, # 00 - 07
+ 2,2,2,2,2,2,0,0, # 08 - 0f
+ 2,2,2,2,2,2,2,2, # 10 - 17
+ 2,2,2,0,2,2,2,2, # 18 - 1f
+ 2,2,2,2,2,2,2,2, # 20 - 27
+ 2,2,2,2,2,2,2,2, # 28 - 2f
+ 2,2,2,2,2,2,2,2, # 30 - 37
+ 2,2,2,2,2,2,2,2, # 38 - 3f
+ 2,2,2,2,2,2,2,2, # 40 - 47
+ 2,2,2,2,2,2,2,2, # 48 - 4f
+ 2,2,2,2,2,2,2,2, # 50 - 57
+ 2,2,2,2,2,2,2,2, # 58 - 5f
+ 2,2,2,2,2,2,2,2, # 60 - 67
+ 2,2,2,2,2,2,2,2, # 68 - 6f
+ 2,2,2,2,2,2,2,2, # 70 - 77
+ 2,2,2,2,2,2,2,2, # 78 - 7f
+ 0,0,0,0,0,0,0,0, # 80 - 87
+ 0,0,0,0,0,0,6,0, # 88 - 8f
+ 0,0,0,0,0,0,0,0, # 90 - 97
+ 0,0,0,0,0,0,0,0, # 98 - 9f
+ 0,3,4,4,4,4,4,4, # a0 - a7
+ 5,5,1,1,1,1,1,1, # a8 - af
+ 1,1,1,1,1,1,1,1, # b0 - b7
+ 1,1,1,1,1,1,1,1, # b8 - bf
+ 1,1,3,1,3,3,3,3, # c0 - c7
+ 3,3,3,3,3,3,3,3, # c8 - cf
+ 3,3,3,3,3,3,3,3, # d0 - d7
+ 3,3,3,3,3,3,3,3, # d8 - df
+ 3,3,3,3,3,3,3,3, # e0 - e7
+ 3,3,3,3,3,3,3,3, # e8 - ef
+ 3,3,3,3,3,3,3,3, # f0 - f7
+ 3,3,3,3,3,3,3,0 # f8 - ff
+)
+
+EUCTW_ST = (
+ MachineState.ERROR,MachineState.ERROR,MachineState.START, 3, 3, 3, 4,MachineState.ERROR,#00-07
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f
+ MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.ERROR,#10-17
+ MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f
+ 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,#20-27
+ MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f
+)
+
+EUCTW_CHAR_LEN_TABLE = (0, 0, 1, 2, 2, 2, 3)
+
+EUCTW_SM_MODEL = {'class_table': EUCTW_CLS,
+ 'class_factor': 7,
+ 'state_table': EUCTW_ST,
+ 'char_len_table': EUCTW_CHAR_LEN_TABLE,
+ 'name': 'x-euc-tw'}
+
+# GB2312
+
+GB2312_CLS = (
+ 1,1,1,1,1,1,1,1, # 00 - 07
+ 1,1,1,1,1,1,0,0, # 08 - 0f
+ 1,1,1,1,1,1,1,1, # 10 - 17
+ 1,1,1,0,1,1,1,1, # 18 - 1f
+ 1,1,1,1,1,1,1,1, # 20 - 27
+ 1,1,1,1,1,1,1,1, # 28 - 2f
+ 3,3,3,3,3,3,3,3, # 30 - 37
+ 3,3,1,1,1,1,1,1, # 38 - 3f
+ 2,2,2,2,2,2,2,2, # 40 - 47
+ 2,2,2,2,2,2,2,2, # 48 - 4f
+ 2,2,2,2,2,2,2,2, # 50 - 57
+ 2,2,2,2,2,2,2,2, # 58 - 5f
+ 2,2,2,2,2,2,2,2, # 60 - 67
+ 2,2,2,2,2,2,2,2, # 68 - 6f
+ 2,2,2,2,2,2,2,2, # 70 - 77
+ 2,2,2,2,2,2,2,4, # 78 - 7f
+ 5,6,6,6,6,6,6,6, # 80 - 87
+ 6,6,6,6,6,6,6,6, # 88 - 8f
+ 6,6,6,6,6,6,6,6, # 90 - 97
+ 6,6,6,6,6,6,6,6, # 98 - 9f
+ 6,6,6,6,6,6,6,6, # a0 - a7
+ 6,6,6,6,6,6,6,6, # a8 - af
+ 6,6,6,6,6,6,6,6, # b0 - b7
+ 6,6,6,6,6,6,6,6, # b8 - bf
+ 6,6,6,6,6,6,6,6, # c0 - c7
+ 6,6,6,6,6,6,6,6, # c8 - cf
+ 6,6,6,6,6,6,6,6, # d0 - d7
+ 6,6,6,6,6,6,6,6, # d8 - df
+ 6,6,6,6,6,6,6,6, # e0 - e7
+ 6,6,6,6,6,6,6,6, # e8 - ef
+ 6,6,6,6,6,6,6,6, # f0 - f7
+ 6,6,6,6,6,6,6,0 # f8 - ff
+)
+
+GB2312_ST = (
+ MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, 3,MachineState.ERROR,#00-07
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f
+ MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,#10-17
+ 4,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f
+ MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#20-27
+ MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f
+)
+
+# To be accurate, the length of class 6 can be either 2 or 4.
+# But it is not necessary to discriminate between the two since
+# it is used for frequency analysis only, and we are validating
+# each code range there as well. So it is safe to set it to be
+# 2 here.
+GB2312_CHAR_LEN_TABLE = (0, 1, 1, 1, 1, 1, 2)
+
+GB2312_SM_MODEL = {'class_table': GB2312_CLS,
+ 'class_factor': 7,
+ 'state_table': GB2312_ST,
+ 'char_len_table': GB2312_CHAR_LEN_TABLE,
+ 'name': 'GB2312'}
+
+# Shift_JIS
+
+SJIS_CLS = (
+ 1,1,1,1,1,1,1,1, # 00 - 07
+ 1,1,1,1,1,1,0,0, # 08 - 0f
+ 1,1,1,1,1,1,1,1, # 10 - 17
+ 1,1,1,0,1,1,1,1, # 18 - 1f
+ 1,1,1,1,1,1,1,1, # 20 - 27
+ 1,1,1,1,1,1,1,1, # 28 - 2f
+ 1,1,1,1,1,1,1,1, # 30 - 37
+ 1,1,1,1,1,1,1,1, # 38 - 3f
+ 2,2,2,2,2,2,2,2, # 40 - 47
+ 2,2,2,2,2,2,2,2, # 48 - 4f
+ 2,2,2,2,2,2,2,2, # 50 - 57
+ 2,2,2,2,2,2,2,2, # 58 - 5f
+ 2,2,2,2,2,2,2,2, # 60 - 67
+ 2,2,2,2,2,2,2,2, # 68 - 6f
+ 2,2,2,2,2,2,2,2, # 70 - 77
+ 2,2,2,2,2,2,2,1, # 78 - 7f
+ 3,3,3,3,3,2,2,3, # 80 - 87
+ 3,3,3,3,3,3,3,3, # 88 - 8f
+ 3,3,3,3,3,3,3,3, # 90 - 97
+ 3,3,3,3,3,3,3,3, # 98 - 9f
+    # 0xa0 is illegal in Shift_JIS encoding, but some pages do contain
+    # such bytes, so we need to be more forgiving of errors.
+ 2,2,2,2,2,2,2,2, # a0 - a7
+ 2,2,2,2,2,2,2,2, # a8 - af
+ 2,2,2,2,2,2,2,2, # b0 - b7
+ 2,2,2,2,2,2,2,2, # b8 - bf
+ 2,2,2,2,2,2,2,2, # c0 - c7
+ 2,2,2,2,2,2,2,2, # c8 - cf
+ 2,2,2,2,2,2,2,2, # d0 - d7
+ 2,2,2,2,2,2,2,2, # d8 - df
+ 3,3,3,3,3,3,3,3, # e0 - e7
+ 3,3,3,3,3,4,4,4, # e8 - ef
+ 3,3,3,3,3,3,3,3, # f0 - f7
+ 3,3,3,3,3,0,0,0) # f8 - ff
+
+
+SJIS_ST = (
+ MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f
+ MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START #10-17
+)
+
+SJIS_CHAR_LEN_TABLE = (0, 1, 1, 2, 0, 0)
+
+SJIS_SM_MODEL = {'class_table': SJIS_CLS,
+ 'class_factor': 6,
+ 'state_table': SJIS_ST,
+ 'char_len_table': SJIS_CHAR_LEN_TABLE,
+ 'name': 'Shift_JIS'}
+
+# UCS2-BE
+
+UCS2BE_CLS = (
+ 0,0,0,0,0,0,0,0, # 00 - 07
+ 0,0,1,0,0,2,0,0, # 08 - 0f
+ 0,0,0,0,0,0,0,0, # 10 - 17
+ 0,0,0,3,0,0,0,0, # 18 - 1f
+ 0,0,0,0,0,0,0,0, # 20 - 27
+ 0,3,3,3,3,3,0,0, # 28 - 2f
+ 0,0,0,0,0,0,0,0, # 30 - 37
+ 0,0,0,0,0,0,0,0, # 38 - 3f
+ 0,0,0,0,0,0,0,0, # 40 - 47
+ 0,0,0,0,0,0,0,0, # 48 - 4f
+ 0,0,0,0,0,0,0,0, # 50 - 57
+ 0,0,0,0,0,0,0,0, # 58 - 5f
+ 0,0,0,0,0,0,0,0, # 60 - 67
+ 0,0,0,0,0,0,0,0, # 68 - 6f
+ 0,0,0,0,0,0,0,0, # 70 - 77
+ 0,0,0,0,0,0,0,0, # 78 - 7f
+ 0,0,0,0,0,0,0,0, # 80 - 87
+ 0,0,0,0,0,0,0,0, # 88 - 8f
+ 0,0,0,0,0,0,0,0, # 90 - 97
+ 0,0,0,0,0,0,0,0, # 98 - 9f
+ 0,0,0,0,0,0,0,0, # a0 - a7
+ 0,0,0,0,0,0,0,0, # a8 - af
+ 0,0,0,0,0,0,0,0, # b0 - b7
+ 0,0,0,0,0,0,0,0, # b8 - bf
+ 0,0,0,0,0,0,0,0, # c0 - c7
+ 0,0,0,0,0,0,0,0, # c8 - cf
+ 0,0,0,0,0,0,0,0, # d0 - d7
+ 0,0,0,0,0,0,0,0, # d8 - df
+ 0,0,0,0,0,0,0,0, # e0 - e7
+ 0,0,0,0,0,0,0,0, # e8 - ef
+ 0,0,0,0,0,0,0,0, # f0 - f7
+ 0,0,0,0,0,0,4,5 # f8 - ff
+)
+
+UCS2BE_ST = (
+ 5, 7, 7,MachineState.ERROR, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f
+ MachineState.ITS_ME,MachineState.ITS_ME, 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,#10-17
+ 6, 6, 6, 6, 6,MachineState.ITS_ME, 6, 6,#18-1f
+ 6, 6, 6, 6, 5, 7, 7,MachineState.ERROR,#20-27
+ 5, 8, 6, 6,MachineState.ERROR, 6, 6, 6,#28-2f
+ 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #30-37
+)
+
+UCS2BE_CHAR_LEN_TABLE = (2, 2, 2, 0, 2, 2)
+
+UCS2BE_SM_MODEL = {'class_table': UCS2BE_CLS,
+ 'class_factor': 6,
+ 'state_table': UCS2BE_ST,
+ 'char_len_table': UCS2BE_CHAR_LEN_TABLE,
+ 'name': 'UTF-16BE'}
+
+# UCS2-LE
+
+UCS2LE_CLS = (
+ 0,0,0,0,0,0,0,0, # 00 - 07
+ 0,0,1,0,0,2,0,0, # 08 - 0f
+ 0,0,0,0,0,0,0,0, # 10 - 17
+ 0,0,0,3,0,0,0,0, # 18 - 1f
+ 0,0,0,0,0,0,0,0, # 20 - 27
+ 0,3,3,3,3,3,0,0, # 28 - 2f
+ 0,0,0,0,0,0,0,0, # 30 - 37
+ 0,0,0,0,0,0,0,0, # 38 - 3f
+ 0,0,0,0,0,0,0,0, # 40 - 47
+ 0,0,0,0,0,0,0,0, # 48 - 4f
+ 0,0,0,0,0,0,0,0, # 50 - 57
+ 0,0,0,0,0,0,0,0, # 58 - 5f
+ 0,0,0,0,0,0,0,0, # 60 - 67
+ 0,0,0,0,0,0,0,0, # 68 - 6f
+ 0,0,0,0,0,0,0,0, # 70 - 77
+ 0,0,0,0,0,0,0,0, # 78 - 7f
+ 0,0,0,0,0,0,0,0, # 80 - 87
+ 0,0,0,0,0,0,0,0, # 88 - 8f
+ 0,0,0,0,0,0,0,0, # 90 - 97
+ 0,0,0,0,0,0,0,0, # 98 - 9f
+ 0,0,0,0,0,0,0,0, # a0 - a7
+ 0,0,0,0,0,0,0,0, # a8 - af
+ 0,0,0,0,0,0,0,0, # b0 - b7
+ 0,0,0,0,0,0,0,0, # b8 - bf
+ 0,0,0,0,0,0,0,0, # c0 - c7
+ 0,0,0,0,0,0,0,0, # c8 - cf
+ 0,0,0,0,0,0,0,0, # d0 - d7
+ 0,0,0,0,0,0,0,0, # d8 - df
+ 0,0,0,0,0,0,0,0, # e0 - e7
+ 0,0,0,0,0,0,0,0, # e8 - ef
+ 0,0,0,0,0,0,0,0, # f0 - f7
+ 0,0,0,0,0,0,4,5 # f8 - ff
+)
+
+UCS2LE_ST = (
+ 6, 6, 7, 6, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f
+ MachineState.ITS_ME,MachineState.ITS_ME, 5, 5, 5,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#10-17
+ 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR, 6, 6,#18-1f
+ 7, 6, 8, 8, 5, 5, 5,MachineState.ERROR,#20-27
+ 5, 5, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5,#28-2f
+ 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR,MachineState.START,MachineState.START #30-37
+)
+
+UCS2LE_CHAR_LEN_TABLE = (2, 2, 2, 2, 2, 2)
+
+UCS2LE_SM_MODEL = {'class_table': UCS2LE_CLS,
+ 'class_factor': 6,
+ 'state_table': UCS2LE_ST,
+ 'char_len_table': UCS2LE_CHAR_LEN_TABLE,
+ 'name': 'UTF-16LE'}
+
+# UTF-8
+
+UTF8_CLS = (
+ 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as a legal value
+ 1,1,1,1,1,1,0,0, # 08 - 0f
+ 1,1,1,1,1,1,1,1, # 10 - 17
+ 1,1,1,0,1,1,1,1, # 18 - 1f
+ 1,1,1,1,1,1,1,1, # 20 - 27
+ 1,1,1,1,1,1,1,1, # 28 - 2f
+ 1,1,1,1,1,1,1,1, # 30 - 37
+ 1,1,1,1,1,1,1,1, # 38 - 3f
+ 1,1,1,1,1,1,1,1, # 40 - 47
+ 1,1,1,1,1,1,1,1, # 48 - 4f
+ 1,1,1,1,1,1,1,1, # 50 - 57
+ 1,1,1,1,1,1,1,1, # 58 - 5f
+ 1,1,1,1,1,1,1,1, # 60 - 67
+ 1,1,1,1,1,1,1,1, # 68 - 6f
+ 1,1,1,1,1,1,1,1, # 70 - 77
+ 1,1,1,1,1,1,1,1, # 78 - 7f
+ 2,2,2,2,3,3,3,3, # 80 - 87
+ 4,4,4,4,4,4,4,4, # 88 - 8f
+ 4,4,4,4,4,4,4,4, # 90 - 97
+ 4,4,4,4,4,4,4,4, # 98 - 9f
+ 5,5,5,5,5,5,5,5, # a0 - a7
+ 5,5,5,5,5,5,5,5, # a8 - af
+ 5,5,5,5,5,5,5,5, # b0 - b7
+ 5,5,5,5,5,5,5,5, # b8 - bf
+ 0,0,6,6,6,6,6,6, # c0 - c7
+ 6,6,6,6,6,6,6,6, # c8 - cf
+ 6,6,6,6,6,6,6,6, # d0 - d7
+ 6,6,6,6,6,6,6,6, # d8 - df
+ 7,8,8,8,8,8,8,8, # e0 - e7
+ 8,8,8,8,8,9,8,8, # e8 - ef
+ 10,11,11,11,11,11,11,11, # f0 - f7
+ 12,13,13,13,14,15,0,0 # f8 - ff
+)
+
+UTF8_ST = (
+ MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12, 10,#00-07
+ 9, 11, 8, 7, 6, 5, 4, 3,#08-0f
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f
+ MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#20-27
+ MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#28-2f
+ MachineState.ERROR,MachineState.ERROR, 5, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#30-37
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#38-3f
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#40-47
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#48-4f
+ MachineState.ERROR,MachineState.ERROR, 7, 7, 7, 7,MachineState.ERROR,MachineState.ERROR,#50-57
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#58-5f
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 7, 7,MachineState.ERROR,MachineState.ERROR,#60-67
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#68-6f
+ MachineState.ERROR,MachineState.ERROR, 9, 9, 9, 9,MachineState.ERROR,MachineState.ERROR,#70-77
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#78-7f
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 9,MachineState.ERROR,MachineState.ERROR,#80-87
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#88-8f
+ MachineState.ERROR,MachineState.ERROR, 12, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,#90-97
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#98-9f
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12,MachineState.ERROR,MachineState.ERROR,#a0-a7
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#a8-af
+ MachineState.ERROR,MachineState.ERROR, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b0-b7
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b8-bf
+ MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,#c0-c7
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR #c8-cf
+)
+
+UTF8_CHAR_LEN_TABLE = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6)
+
+UTF8_SM_MODEL = {'class_table': UTF8_CLS,
+ 'class_factor': 16,
+ 'state_table': UTF8_ST,
+ 'char_len_table': UTF8_CHAR_LEN_TABLE,
+ 'name': 'UTF-8'}
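+
+# A minimal sketch of how these tables are consumed (illustrative only,
+# assuming chardet's CodingStateMachine):
+#
+#     from chardet.codingstatemachine import CodingStateMachine
+#     from chardet.enums import MachineState
+#
+#     sm = CodingStateMachine(UTF8_SM_MODEL)
+#     for byte in 'naïve'.encode('utf-8'):
+#         state = sm.next_state(byte)
+#         if state == MachineState.ERROR:
+#             break  # the byte sequence is not valid UTF-8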
diff --git a/venv/Lib/site-packages/chardet/sbcharsetprober.py b/venv/Lib/site-packages/chardet/sbcharsetprober.py
new file mode 100644
index 000000000..0adb51de5
--- /dev/null
+++ b/venv/Lib/site-packages/chardet/sbcharsetprober.py
@@ -0,0 +1,132 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+# Shy Shalom - original C code
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .charsetprober import CharSetProber
+from .enums import CharacterCategory, ProbingState, SequenceLikelihood
+
+
+class SingleByteCharSetProber(CharSetProber):
+ SAMPLE_SIZE = 64
+ SB_ENOUGH_REL_THRESHOLD = 1024 # 0.25 * SAMPLE_SIZE^2
+ POSITIVE_SHORTCUT_THRESHOLD = 0.95
+ NEGATIVE_SHORTCUT_THRESHOLD = 0.05
+
+ def __init__(self, model, reversed=False, name_prober=None):
+ super(SingleByteCharSetProber, self).__init__()
+ self._model = model
+ # TRUE if we need to reverse every pair in the model lookup
+ self._reversed = reversed
+ # Optional auxiliary prober for name decision
+ self._name_prober = name_prober
+ self._last_order = None
+ self._seq_counters = None
+ self._total_seqs = None
+ self._total_char = None
+ self._freq_char = None
+ self.reset()
+
+ def reset(self):
+ super(SingleByteCharSetProber, self).reset()
+ # char order of last character
+ self._last_order = 255
+ self._seq_counters = [0] * SequenceLikelihood.get_num_categories()
+ self._total_seqs = 0
+ self._total_char = 0
+ # characters that fall in our sampling range
+ self._freq_char = 0
+
+ @property
+ def charset_name(self):
+ if self._name_prober:
+ return self._name_prober.charset_name
+ else:
+ return self._model['charset_name']
+
+ @property
+ def language(self):
+ if self._name_prober:
+ return self._name_prober.language
+ else:
+ return self._model.get('language')
+
+ def feed(self, byte_str):
+ if not self._model['keep_english_letter']:
+ byte_str = self.filter_international_words(byte_str)
+ if not byte_str:
+ return self.state
+ char_to_order_map = self._model['char_to_order_map']
+ for i, c in enumerate(byte_str):
+ # XXX: Order is in range 1-64, so one would think we want 0-63 here,
+ # but that leads to 27 more test failures than before.
+ order = char_to_order_map[c]
+ # XXX: This was SYMBOL_CAT_ORDER before, with a value of 250, but
+ # CharacterCategory.SYMBOL is actually 253, so we use CONTROL
+ # to make it closer to the original intent. The only difference
+ # is whether or not we count digits and control characters for
+ # _total_char purposes.
+ if order < CharacterCategory.CONTROL:
+ self._total_char += 1
+ if order < self.SAMPLE_SIZE:
+ self._freq_char += 1
+ if self._last_order < self.SAMPLE_SIZE:
+ self._total_seqs += 1
+ if not self._reversed:
+ i = (self._last_order * self.SAMPLE_SIZE) + order
+ model = self._model['precedence_matrix'][i]
+ else: # reverse the order of the letters in the lookup
+ i = (order * self.SAMPLE_SIZE) + self._last_order
+ model = self._model['precedence_matrix'][i]
+ self._seq_counters[model] += 1
+ self._last_order = order
+
+ charset_name = self._model['charset_name']
+ if self.state == ProbingState.DETECTING:
+ if self._total_seqs > self.SB_ENOUGH_REL_THRESHOLD:
+ confidence = self.get_confidence()
+ if confidence > self.POSITIVE_SHORTCUT_THRESHOLD:
+ self.logger.debug('%s confidence = %s, we have a winner',
+ charset_name, confidence)
+ self._state = ProbingState.FOUND_IT
+ elif confidence < self.NEGATIVE_SHORTCUT_THRESHOLD:
+ self.logger.debug('%s confidence = %s, below negative '
+                                      'shortcut threshold %s', charset_name,
+ confidence,
+ self.NEGATIVE_SHORTCUT_THRESHOLD)
+ self._state = ProbingState.NOT_ME
+
+ return self.state
+
+ def get_confidence(self):
+ r = 0.01
+ if self._total_seqs > 0:
+ r = ((1.0 * self._seq_counters[SequenceLikelihood.POSITIVE]) /
+ self._total_seqs / self._model['typical_positive_ratio'])
+ r = r * self._freq_char / self._total_char
+ if r >= 1.0:
+ r = 0.99
+ return r
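+
+# Worked example (illustrative numbers): with 800 positive sequences out
+# of 1000 total, a typical_positive_ratio of 0.97, and 900 of 1000
+# characters inside the sampling range, the confidence is
+# (800 / 1000) / 0.97 * (900 / 1000) ≈ 0.742.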
diff --git a/venv/Lib/site-packages/requests/packages/chardet/sbcsgroupprober.py b/venv/Lib/site-packages/chardet/sbcsgroupprober.py
similarity index 70%
rename from venv/Lib/site-packages/requests/packages/chardet/sbcsgroupprober.py
rename to venv/Lib/site-packages/chardet/sbcsgroupprober.py
index 1b6196cd1..98e95dc1a 100644
--- a/venv/Lib/site-packages/requests/packages/chardet/sbcsgroupprober.py
+++ b/venv/Lib/site-packages/chardet/sbcsgroupprober.py
@@ -33,16 +33,17 @@ from .langcyrillicmodel import (Win1251CyrillicModel, Koi8rModel,
Ibm866Model, Ibm855Model)
from .langgreekmodel import Latin7GreekModel, Win1253GreekModel
from .langbulgarianmodel import Latin5BulgarianModel, Win1251BulgarianModel
-from .langhungarianmodel import Latin2HungarianModel, Win1250HungarianModel
+# from .langhungarianmodel import Latin2HungarianModel, Win1250HungarianModel
from .langthaimodel import TIS620ThaiModel
from .langhebrewmodel import Win1255HebrewModel
from .hebrewprober import HebrewProber
+from .langturkishmodel import Latin5TurkishModel
class SBCSGroupProber(CharSetGroupProber):
def __init__(self):
- CharSetGroupProber.__init__(self)
- self._mProbers = [
+ super(SBCSGroupProber, self).__init__()
+ self.probers = [
SingleByteCharSetProber(Win1251CyrillicModel),
SingleByteCharSetProber(Koi8rModel),
SingleByteCharSetProber(Latin5CyrillicModel),
@@ -53,17 +54,20 @@ class SBCSGroupProber(CharSetGroupProber):
SingleByteCharSetProber(Win1253GreekModel),
SingleByteCharSetProber(Latin5BulgarianModel),
SingleByteCharSetProber(Win1251BulgarianModel),
- SingleByteCharSetProber(Latin2HungarianModel),
- SingleByteCharSetProber(Win1250HungarianModel),
+ # TODO: Restore Hungarian encodings (iso-8859-2 and windows-1250)
+ # after we retrain model.
+ # SingleByteCharSetProber(Latin2HungarianModel),
+ # SingleByteCharSetProber(Win1250HungarianModel),
SingleByteCharSetProber(TIS620ThaiModel),
+ SingleByteCharSetProber(Latin5TurkishModel),
]
- hebrewProber = HebrewProber()
- logicalHebrewProber = SingleByteCharSetProber(Win1255HebrewModel,
- False, hebrewProber)
- visualHebrewProber = SingleByteCharSetProber(Win1255HebrewModel, True,
- hebrewProber)
- hebrewProber.set_model_probers(logicalHebrewProber, visualHebrewProber)
- self._mProbers.extend([hebrewProber, logicalHebrewProber,
- visualHebrewProber])
+ hebrew_prober = HebrewProber()
+ logical_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel,
+ False, hebrew_prober)
+ visual_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel, True,
+ hebrew_prober)
+ hebrew_prober.set_model_probers(logical_hebrew_prober, visual_hebrew_prober)
+ self.probers.extend([hebrew_prober, logical_hebrew_prober,
+ visual_hebrew_prober])
self.reset()
diff --git a/venv/Lib/site-packages/chardet/sjisprober.py b/venv/Lib/site-packages/chardet/sjisprober.py
new file mode 100644
index 000000000..9e29623bd
--- /dev/null
+++ b/venv/Lib/site-packages/chardet/sjisprober.py
@@ -0,0 +1,92 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .mbcharsetprober import MultiByteCharSetProber
+from .codingstatemachine import CodingStateMachine
+from .chardistribution import SJISDistributionAnalysis
+from .jpcntx import SJISContextAnalysis
+from .mbcssm import SJIS_SM_MODEL
+from .enums import ProbingState, MachineState
+
+
+class SJISProber(MultiByteCharSetProber):
+ def __init__(self):
+ super(SJISProber, self).__init__()
+ self.coding_sm = CodingStateMachine(SJIS_SM_MODEL)
+ self.distribution_analyzer = SJISDistributionAnalysis()
+ self.context_analyzer = SJISContextAnalysis()
+ self.reset()
+
+ def reset(self):
+ super(SJISProber, self).reset()
+ self.context_analyzer.reset()
+
+ @property
+ def charset_name(self):
+ return self.context_analyzer.charset_name
+
+ @property
+ def language(self):
+ return "Japanese"
+
+ def feed(self, byte_str):
+ for i in range(len(byte_str)):
+ coding_state = self.coding_sm.next_state(byte_str[i])
+ if coding_state == MachineState.ERROR:
+ self.logger.debug('%s %s prober hit error at byte %s',
+ self.charset_name, self.language, i)
+ self._state = ProbingState.NOT_ME
+ break
+ elif coding_state == MachineState.ITS_ME:
+ self._state = ProbingState.FOUND_IT
+ break
+ elif coding_state == MachineState.START:
+ char_len = self.coding_sm.get_current_charlen()
+ if i == 0:
+ self._last_char[1] = byte_str[0]
+ self.context_analyzer.feed(self._last_char[2 - char_len:],
+ char_len)
+ self.distribution_analyzer.feed(self._last_char, char_len)
+ else:
+ self.context_analyzer.feed(byte_str[i + 1 - char_len:i + 3
+ - char_len], char_len)
+ self.distribution_analyzer.feed(byte_str[i - 1:i + 1],
+ char_len)
+
+ self._last_char[0] = byte_str[-1]
+
+ if self.state == ProbingState.DETECTING:
+ if (self.context_analyzer.got_enough_data() and
+ (self.get_confidence() > self.SHORTCUT_THRESHOLD)):
+ self._state = ProbingState.FOUND_IT
+
+ return self.state
+
+ def get_confidence(self):
+ context_conf = self.context_analyzer.get_confidence()
+ distrib_conf = self.distribution_analyzer.get_confidence()
+ return max(context_conf, distrib_conf)
diff --git a/venv/Lib/site-packages/chardet/universaldetector.py b/venv/Lib/site-packages/chardet/universaldetector.py
new file mode 100644
index 000000000..7b4e92d61
--- /dev/null
+++ b/venv/Lib/site-packages/chardet/universaldetector.py
@@ -0,0 +1,286 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+# Shy Shalom - original C code
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+"""
+Module containing the UniversalDetector detector class, which is the primary
+class a user of ``chardet`` should use.
+
+:author: Mark Pilgrim (initial port to Python)
+:author: Shy Shalom (original C code)
+:author: Dan Blanchard (major refactoring for 3.0)
+:author: Ian Cordasco
+"""
+
+
+import codecs
+import logging
+import re
+
+from .charsetgroupprober import CharSetGroupProber
+from .enums import InputState, LanguageFilter, ProbingState
+from .escprober import EscCharSetProber
+from .latin1prober import Latin1Prober
+from .mbcsgroupprober import MBCSGroupProber
+from .sbcsgroupprober import SBCSGroupProber
+
+
+class UniversalDetector(object):
+ """
+ The ``UniversalDetector`` class underlies the ``chardet.detect`` function
+ and coordinates all of the different charset probers.
+
+ To get a ``dict`` containing an encoding and its confidence, you can simply
+ run:
+
+ .. code::
+
+ u = UniversalDetector()
+ u.feed(some_bytes)
+ u.close()
+ detected = u.result
+
+ """
+
+ MINIMUM_THRESHOLD = 0.20
+ HIGH_BYTE_DETECTOR = re.compile(b'[\x80-\xFF]')
+ ESC_DETECTOR = re.compile(b'(\033|~{)')
+ WIN_BYTE_DETECTOR = re.compile(b'[\x80-\x9F]')
+ ISO_WIN_MAP = {'iso-8859-1': 'Windows-1252',
+ 'iso-8859-2': 'Windows-1250',
+ 'iso-8859-5': 'Windows-1251',
+ 'iso-8859-6': 'Windows-1256',
+ 'iso-8859-7': 'Windows-1253',
+ 'iso-8859-8': 'Windows-1255',
+ 'iso-8859-9': 'Windows-1254',
+ 'iso-8859-13': 'Windows-1257'}
+
+ def __init__(self, lang_filter=LanguageFilter.ALL):
+ self._esc_charset_prober = None
+ self._charset_probers = []
+ self.result = None
+ self.done = None
+ self._got_data = None
+ self._input_state = None
+ self._last_char = None
+ self.lang_filter = lang_filter
+ self.logger = logging.getLogger(__name__)
+ self._has_win_bytes = None
+ self.reset()
+
+ def reset(self):
+ """
+ Reset the UniversalDetector and all of its probers back to their
+ initial states. This is called by ``__init__``, so you only need to
+ call this directly in between analyses of different documents.
+ """
+ self.result = {'encoding': None, 'confidence': 0.0, 'language': None}
+ self.done = False
+ self._got_data = False
+ self._has_win_bytes = False
+ self._input_state = InputState.PURE_ASCII
+ self._last_char = b''
+ if self._esc_charset_prober:
+ self._esc_charset_prober.reset()
+ for prober in self._charset_probers:
+ prober.reset()
+
+ def feed(self, byte_str):
+ """
+ Takes a chunk of a document and feeds it through all of the relevant
+ charset probers.
+
+ After calling ``feed``, you can check the value of the ``done``
+ attribute to see if you need to continue feeding the
+ ``UniversalDetector`` more data, or if it has made a prediction
+ (in the ``result`` attribute).
+
+ .. note::
+ You should always call ``close`` when you're done feeding in your
+ document if ``done`` is not already ``True``.
+ """
+ if self.done:
+ return
+
+ if not len(byte_str):
+ return
+
+ if not isinstance(byte_str, bytearray):
+ byte_str = bytearray(byte_str)
+
+ # First check for known BOMs, since these are guaranteed to be correct
+ if not self._got_data:
+ # If the data starts with BOM, we know it is UTF
+ if byte_str.startswith(codecs.BOM_UTF8):
+ # EF BB BF UTF-8 with BOM
+ self.result = {'encoding': "UTF-8-SIG",
+ 'confidence': 1.0,
+ 'language': ''}
+ elif byte_str.startswith((codecs.BOM_UTF32_LE,
+ codecs.BOM_UTF32_BE)):
+ # FF FE 00 00 UTF-32, little-endian BOM
+ # 00 00 FE FF UTF-32, big-endian BOM
+ self.result = {'encoding': "UTF-32",
+ 'confidence': 1.0,
+ 'language': ''}
+ elif byte_str.startswith(b'\xFE\xFF\x00\x00'):
+ # FE FF 00 00 UCS-4, unusual octet order BOM (3412)
+ self.result = {'encoding': "X-ISO-10646-UCS-4-3412",
+ 'confidence': 1.0,
+ 'language': ''}
+ elif byte_str.startswith(b'\x00\x00\xFF\xFE'):
+ # 00 00 FF FE UCS-4, unusual octet order BOM (2143)
+ self.result = {'encoding': "X-ISO-10646-UCS-4-2143",
+ 'confidence': 1.0,
+ 'language': ''}
+ elif byte_str.startswith((codecs.BOM_LE, codecs.BOM_BE)):
+ # FF FE UTF-16, little endian BOM
+ # FE FF UTF-16, big endian BOM
+ self.result = {'encoding': "UTF-16",
+ 'confidence': 1.0,
+ 'language': ''}
+
+ self._got_data = True
+ if self.result['encoding'] is not None:
+ self.done = True
+ return
+
+        # If none of those matched and we've only seen ASCII so far, check
+ # for high bytes and escape sequences
+ if self._input_state == InputState.PURE_ASCII:
+ if self.HIGH_BYTE_DETECTOR.search(byte_str):
+ self._input_state = InputState.HIGH_BYTE
+ elif self._input_state == InputState.PURE_ASCII and \
+ self.ESC_DETECTOR.search(self._last_char + byte_str):
+ self._input_state = InputState.ESC_ASCII
+
+ self._last_char = byte_str[-1:]
+
+ # If we've seen escape sequences, use the EscCharSetProber, which
+ # uses a simple state machine to check for known escape sequences in
+ # HZ and ISO-2022 encodings, since those are the only encodings that
+ # use such sequences.
+ if self._input_state == InputState.ESC_ASCII:
+ if not self._esc_charset_prober:
+ self._esc_charset_prober = EscCharSetProber(self.lang_filter)
+ if self._esc_charset_prober.feed(byte_str) == ProbingState.FOUND_IT:
+ self.result = {'encoding':
+ self._esc_charset_prober.charset_name,
+ 'confidence':
+ self._esc_charset_prober.get_confidence(),
+ 'language':
+ self._esc_charset_prober.language}
+ self.done = True
+ # If we've seen high bytes (i.e., those with values greater than 127),
+ # we need to do more complicated checks using all our multi-byte and
+ # single-byte probers that are left. The single-byte probers
+ # use character bigram distributions to determine the encoding, whereas
+ # the multi-byte probers use a combination of character unigram and
+ # bigram distributions.
+ elif self._input_state == InputState.HIGH_BYTE:
+ if not self._charset_probers:
+ self._charset_probers = [MBCSGroupProber(self.lang_filter)]
+ # If we're checking non-CJK encodings, use single-byte prober
+ if self.lang_filter & LanguageFilter.NON_CJK:
+ self._charset_probers.append(SBCSGroupProber())
+ self._charset_probers.append(Latin1Prober())
+ for prober in self._charset_probers:
+ if prober.feed(byte_str) == ProbingState.FOUND_IT:
+ self.result = {'encoding': prober.charset_name,
+ 'confidence': prober.get_confidence(),
+ 'language': prober.language}
+ self.done = True
+ break
+ if self.WIN_BYTE_DETECTOR.search(byte_str):
+ self._has_win_bytes = True
+
+ def close(self):
+ """
+ Stop analyzing the current document and come up with a final
+ prediction.
+
+ :returns: The ``result`` attribute, a ``dict`` with the keys
+ `encoding`, `confidence`, and `language`.
+ """
+ # Don't bother with checks if we're already done
+ if self.done:
+ return self.result
+ self.done = True
+
+ if not self._got_data:
+ self.logger.debug('no data received!')
+
+ # Default to ASCII if it is all we've seen so far
+ elif self._input_state == InputState.PURE_ASCII:
+ self.result = {'encoding': 'ascii',
+ 'confidence': 1.0,
+ 'language': ''}
+
+ # If we have seen non-ASCII, return the best that met MINIMUM_THRESHOLD
+ elif self._input_state == InputState.HIGH_BYTE:
+ prober_confidence = None
+ max_prober_confidence = 0.0
+ max_prober = None
+ for prober in self._charset_probers:
+ if not prober:
+ continue
+ prober_confidence = prober.get_confidence()
+ if prober_confidence > max_prober_confidence:
+ max_prober_confidence = prober_confidence
+ max_prober = prober
+ if max_prober and (max_prober_confidence > self.MINIMUM_THRESHOLD):
+ charset_name = max_prober.charset_name
+ lower_charset_name = max_prober.charset_name.lower()
+ confidence = max_prober.get_confidence()
+ # Use Windows encoding name instead of ISO-8859 if we saw any
+ # extra Windows-specific bytes
+ if lower_charset_name.startswith('iso-8859'):
+ if self._has_win_bytes:
+ charset_name = self.ISO_WIN_MAP.get(lower_charset_name,
+ charset_name)
+ self.result = {'encoding': charset_name,
+ 'confidence': confidence,
+ 'language': max_prober.language}
+
+ # Log all prober confidences if none met MINIMUM_THRESHOLD
+ if self.logger.getEffectiveLevel() == logging.DEBUG:
+ if self.result['encoding'] is None:
+ self.logger.debug('no probers hit minimum threshold')
+ for group_prober in self._charset_probers:
+ if not group_prober:
+ continue
+ if isinstance(group_prober, CharSetGroupProber):
+ for prober in group_prober.probers:
+ self.logger.debug('%s %s confidence = %s',
+ prober.charset_name,
+ prober.language,
+ prober.get_confidence())
+ else:
+                        # Log the group prober itself; ``prober`` here would
+                        # be stale or unbound from the inner loop above.
+                        self.logger.debug('%s %s confidence = %s',
+                                          group_prober.charset_name,
+                                          group_prober.language,
+                                          group_prober.get_confidence())
+ return self.result
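+
+
+# A minimal usage sketch for the detector above (illustrative only; the file
+# name is a placeholder):
+#
+#     from chardet.universaldetector import UniversalDetector
+#
+#     detector = UniversalDetector()
+#     with open('some_file.bin', 'rb') as fp:
+#         for line in fp:
+#             detector.feed(line)
+#             if detector.done:
+#                 break
+#     result = detector.close()
+#     # e.g. {'encoding': 'utf-8', 'confidence': 0.99, 'language': ''}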
diff --git a/venv/Lib/site-packages/requests/packages/chardet/utf8prober.py b/venv/Lib/site-packages/chardet/utf8prober.py
similarity index 56%
rename from venv/Lib/site-packages/requests/packages/chardet/utf8prober.py
rename to venv/Lib/site-packages/chardet/utf8prober.py
index 1c0bb5d8f..6c3196cc2 100644
--- a/venv/Lib/site-packages/requests/packages/chardet/utf8prober.py
+++ b/venv/Lib/site-packages/chardet/utf8prober.py
@@ -25,52 +25,58 @@
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
-from . import constants
from .charsetprober import CharSetProber
+from .enums import ProbingState, MachineState
from .codingstatemachine import CodingStateMachine
-from .mbcssm import UTF8SMModel
+from .mbcssm import UTF8_SM_MODEL
-ONE_CHAR_PROB = 0.5
class UTF8Prober(CharSetProber):
+ ONE_CHAR_PROB = 0.5
+
def __init__(self):
- CharSetProber.__init__(self)
- self._mCodingSM = CodingStateMachine(UTF8SMModel)
+ super(UTF8Prober, self).__init__()
+ self.coding_sm = CodingStateMachine(UTF8_SM_MODEL)
+ self._num_mb_chars = None
self.reset()
def reset(self):
- CharSetProber.reset(self)
- self._mCodingSM.reset()
- self._mNumOfMBChar = 0
+ super(UTF8Prober, self).reset()
+ self.coding_sm.reset()
+ self._num_mb_chars = 0
- def get_charset_name(self):
+ @property
+ def charset_name(self):
return "utf-8"
- def feed(self, aBuf):
- for c in aBuf:
- codingState = self._mCodingSM.next_state(c)
- if codingState == constants.eError:
- self._mState = constants.eNotMe
- break
- elif codingState == constants.eItsMe:
- self._mState = constants.eFoundIt
- break
- elif codingState == constants.eStart:
- if self._mCodingSM.get_current_charlen() >= 2:
- self._mNumOfMBChar += 1
+ @property
+ def language(self):
+ return ""
- if self.get_state() == constants.eDetecting:
- if self.get_confidence() > constants.SHORTCUT_THRESHOLD:
- self._mState = constants.eFoundIt
+ def feed(self, byte_str):
+ for c in byte_str:
+ coding_state = self.coding_sm.next_state(c)
+ if coding_state == MachineState.ERROR:
+ self._state = ProbingState.NOT_ME
+ break
+ elif coding_state == MachineState.ITS_ME:
+ self._state = ProbingState.FOUND_IT
+ break
+ elif coding_state == MachineState.START:
+ if self.coding_sm.get_current_charlen() >= 2:
+ self._num_mb_chars += 1
- return self.get_state()
+ if self.state == ProbingState.DETECTING:
+ if self.get_confidence() > self.SHORTCUT_THRESHOLD:
+ self._state = ProbingState.FOUND_IT
+
+ return self.state
def get_confidence(self):
unlike = 0.99
- if self._mNumOfMBChar < 6:
- for i in range(0, self._mNumOfMBChar):
- unlike = unlike * ONE_CHAR_PROB
+ if self._num_mb_chars < 6:
+ unlike *= self.ONE_CHAR_PROB ** self._num_mb_chars
return 1.0 - unlike
else:
return unlike
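+
+# Worked example of the confidence formula above (illustrative): after three
+# multi-byte sequences, confidence = 1 - 0.99 * 0.5**3 ~= 0.876; once six or
+# more multi-byte sequences have been seen, the prober returns 0.99 directly.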
diff --git a/venv/Lib/site-packages/chardet/version.py b/venv/Lib/site-packages/chardet/version.py
new file mode 100644
index 000000000..bb2a34a70
--- /dev/null
+++ b/venv/Lib/site-packages/chardet/version.py
@@ -0,0 +1,9 @@
+"""
+This module exists only to simplify retrieving the version number of chardet
+from within setup.py and from chardet subpackages.
+
+:author: Dan Blanchard (dan.blanchard@gmail.com)
+"""
+
+__version__ = "3.0.4"
+VERSION = __version__.split('.')
diff --git a/venv/Lib/site-packages/firebase_admin-4.4.0.dist-info/INSTALLER b/venv/Lib/site-packages/firebase_admin-4.4.0.dist-info/INSTALLER
new file mode 100644
index 000000000..a1b589e38
--- /dev/null
+++ b/venv/Lib/site-packages/firebase_admin-4.4.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/Lib/site-packages/firebase_admin-4.4.0.dist-info/LICENSE b/venv/Lib/site-packages/firebase_admin-4.4.0.dist-info/LICENSE
new file mode 100644
index 000000000..261eeb9e9
--- /dev/null
+++ b/venv/Lib/site-packages/firebase_admin-4.4.0.dist-info/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/venv/Lib/site-packages/firebase_admin-4.4.0.dist-info/METADATA b/venv/Lib/site-packages/firebase_admin-4.4.0.dist-info/METADATA
new file mode 100644
index 000000000..b168bea41
--- /dev/null
+++ b/venv/Lib/site-packages/firebase_admin-4.4.0.dist-info/METADATA
@@ -0,0 +1,27 @@
+Metadata-Version: 2.1
+Name: firebase-admin
+Version: 4.4.0
+Summary: Firebase Admin Python SDK
+Home-page: https://firebase.google.com/docs/admin/setup/
+Author: Firebase
+License: Apache License 2.0
+Keywords: firebase cloud development
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Topic :: Software Development :: Build Tools
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: License :: OSI Approved :: Apache Software License
+Requires-Python: >=3.5
+Requires-Dist: cachecontrol (>=0.12.6)
+Requires-Dist: google-api-python-client (>=1.7.8)
+Requires-Dist: google-cloud-storage (>=1.18.0)
+Requires-Dist: google-api-core[grpc] (<2.0.0dev,>=1.14.0) ; platform_python_implementation != "PyPy"
+Requires-Dist: google-cloud-firestore (>=1.4.0) ; platform_python_implementation != "PyPy"
+
+The Firebase Admin Python SDK enables server-side (backend) Python developers to integrate Firebase into their services and applications.
+
+
diff --git a/venv/Lib/site-packages/firebase_admin-4.4.0.dist-info/RECORD b/venv/Lib/site-packages/firebase_admin-4.4.0.dist-info/RECORD
new file mode 100644
index 000000000..0b2d9d260
--- /dev/null
+++ b/venv/Lib/site-packages/firebase_admin-4.4.0.dist-info/RECORD
@@ -0,0 +1,59 @@
+firebase_admin-4.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+firebase_admin-4.4.0.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+firebase_admin-4.4.0.dist-info/METADATA,sha256=-8pgZa6XSLYrRE4KAAIktoN9MNaTzBg8oczVmFMbOoE,1160
+firebase_admin-4.4.0.dist-info/RECORD,,
+firebase_admin-4.4.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+firebase_admin-4.4.0.dist-info/WHEEL,sha256=EVRjI69F5qVjm_YgqcTXPnTAv3BfSUr0WVAHuSP3Xoo,92
+firebase_admin-4.4.0.dist-info/top_level.txt,sha256=j8IXOwvHIaefIBBzAehVx1lROs_uinrBtfYXG-wDO_A,15
+firebase_admin/__about__.py,sha256=5mU5mINso34PbcaxMmj1aX7BUjTyc8pgxAVfhEF8xZQ,808
+firebase_admin/__init__.py,sha256=PnuQ64TnrHDQdKtVAbVDMU9Pr9AuAJ2pOwrG-jjqxmg,11961
+firebase_admin/__pycache__/__about__.cpython-36.pyc,,
+firebase_admin/__pycache__/__init__.cpython-36.pyc,,
+firebase_admin/__pycache__/_auth_client.cpython-36.pyc,,
+firebase_admin/__pycache__/_auth_providers.cpython-36.pyc,,
+firebase_admin/__pycache__/_auth_utils.cpython-36.pyc,,
+firebase_admin/__pycache__/_http_client.cpython-36.pyc,,
+firebase_admin/__pycache__/_messaging_encoder.cpython-36.pyc,,
+firebase_admin/__pycache__/_messaging_utils.cpython-36.pyc,,
+firebase_admin/__pycache__/_rfc3339.cpython-36.pyc,,
+firebase_admin/__pycache__/_sseclient.cpython-36.pyc,,
+firebase_admin/__pycache__/_token_gen.cpython-36.pyc,,
+firebase_admin/__pycache__/_user_identifier.cpython-36.pyc,,
+firebase_admin/__pycache__/_user_import.cpython-36.pyc,,
+firebase_admin/__pycache__/_user_mgt.cpython-36.pyc,,
+firebase_admin/__pycache__/_utils.cpython-36.pyc,,
+firebase_admin/__pycache__/auth.cpython-36.pyc,,
+firebase_admin/__pycache__/credentials.cpython-36.pyc,,
+firebase_admin/__pycache__/db.cpython-36.pyc,,
+firebase_admin/__pycache__/exceptions.cpython-36.pyc,,
+firebase_admin/__pycache__/firestore.cpython-36.pyc,,
+firebase_admin/__pycache__/instance_id.cpython-36.pyc,,
+firebase_admin/__pycache__/messaging.cpython-36.pyc,,
+firebase_admin/__pycache__/ml.cpython-36.pyc,,
+firebase_admin/__pycache__/project_management.cpython-36.pyc,,
+firebase_admin/__pycache__/storage.cpython-36.pyc,,
+firebase_admin/__pycache__/tenant_mgt.cpython-36.pyc,,
+firebase_admin/_auth_client.py,sha256=Ni04rDESj1nJ44JxQKkMaErCX4y4BG5pgcT9tdov2NA,33179
+firebase_admin/_auth_providers.py,sha256=kf0_zIkUnnAq_pCvJN12oWJSqtafoOCoTRvmVBp7MUo,15358
+firebase_admin/_auth_utils.py,sha256=j1yI_0o7KxYDCpqux8rSiHtH2uA-2iekxdfAaT-OQFk,16481
+firebase_admin/_http_client.py,sha256=2NcFOqJBUfgtnbPjFZWlS7lIKkUC_DhTJJXr-f1GnLM,5459
+firebase_admin/_messaging_encoder.py,sha256=-F-59VH037rM7n5HVHiZujIGz73E0LCjABJjx8w1vuw,32878
+firebase_admin/_messaging_utils.py,sha256=8kPD2Vrxl-R3AVHt4O2puZa8HY5pf_cg6zwHlIUgtn0,23448
+firebase_admin/_rfc3339.py,sha256=mh_P5KPRwSEbzhryMfx5-CvVocnmc7wN8Q1YLplRwzw,3189
+firebase_admin/_sseclient.py,sha256=JgG34KhR_nMyAhqyyY6JUfhiHy9YUqNmKXP86wkiq5c,6886
+firebase_admin/_token_gen.py,sha256=AZAbBLW6IAUjORXNtu9PorkszAdooQ5kDDbCkDcayxM,17633
+firebase_admin/_user_identifier.py,sha256=jiV5Y-q99-35HE5PZ-V-FHhH-I2uOQeyjsqtfMoQ0Xk,2721
+firebase_admin/_user_import.py,sha256=7h4efZDR9LEsIuMRJbR4maJbLYi5DSi-DTy0xfi_7B0,17392
+firebase_admin/_user_mgt.py,sha256=w8trY-mHdGPfdOop0wS-f2_bXGwSH15NXEdES1UsXoA,32316
+firebase_admin/_utils.py,sha256=jxZ4wxp9dIAng5DmHAWhznbkQ8TFDn-nn42SddRi0As,13330
+firebase_admin/auth.py,sha256=7HYMDM3JYL12-rWmfdMWZkg4pLwc032NHB6y9wfYE6Q,35606
+firebase_admin/credentials.py,sha256=JH2HfY2tXBpyt1AzSEs2lGDZ0KAM4eOMx3OUB4XwsM8,7986
+firebase_admin/db.py,sha256=8Vnd-RXFdkdUSjAyRSRvJ2UNTTYxr7q-fOmoWKcgCbs,38298
+firebase_admin/exceptions.py,sha256=K6QCnP2fT2x23Na9-lYhq2AfU_yCkmsXmG7XCJkBZss,8503
+firebase_admin/firestore.py,sha256=ClQU0Bw6ZvlfpL8lvqWBz-deciSTTZ7KQDxPZ3YBuZo,2789
+firebase_admin/instance_id.py,sha256=CrJhUKrc-CZSziqB2QO54v7wG9ae2N6ziPZmoF6wSNk,3795
+firebase_admin/messaging.py,sha256=DrM0awnCwLKin3tN6uUEwQ6hZdxOhhHKDHVvtYyNQvU,18135
+firebase_admin/ml.py,sha256=VZUgL_L1yRZp1C_U00H-kv30rvokNXYAWVb6f1cwhqk,36536
+firebase_admin/project_management.py,sha256=0-8k-h6_mfR0NGK3pURxQtzWitNS3_1UnAQ2pDlkGek,24991
+firebase_admin/storage.py,sha256=lC7evcKOE033GCx-tt_Db2ZAPR0mamzxcUmq769E2ng,3335
+firebase_admin/tenant_mgt.py,sha256=1t-RHPRQP4zZFFmAdJLZalO89B9NWtbKsqPwh5DajV8,16976
diff --git a/venv/Lib/site-packages/requests/packages/urllib3/contrib/__init__.py b/venv/Lib/site-packages/firebase_admin-4.4.0.dist-info/REQUESTED
similarity index 100%
rename from venv/Lib/site-packages/requests/packages/urllib3/contrib/__init__.py
rename to venv/Lib/site-packages/firebase_admin-4.4.0.dist-info/REQUESTED
diff --git a/venv/Lib/site-packages/firebase_admin-4.4.0.dist-info/WHEEL b/venv/Lib/site-packages/firebase_admin-4.4.0.dist-info/WHEEL
new file mode 100644
index 000000000..83ff02e96
--- /dev/null
+++ b/venv/Lib/site-packages/firebase_admin-4.4.0.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.35.1)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/venv/Lib/site-packages/firebase_admin-4.4.0.dist-info/top_level.txt b/venv/Lib/site-packages/firebase_admin-4.4.0.dist-info/top_level.txt
new file mode 100644
index 000000000..9b59c15ec
--- /dev/null
+++ b/venv/Lib/site-packages/firebase_admin-4.4.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+firebase_admin
diff --git a/venv/Lib/site-packages/firebase_admin/__about__.py b/venv/Lib/site-packages/firebase_admin/__about__.py
new file mode 100644
index 000000000..de6a75223
--- /dev/null
+++ b/venv/Lib/site-packages/firebase_admin/__about__.py
@@ -0,0 +1,21 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""About information (version, etc) for Firebase Admin SDK."""
+
+__version__ = '4.4.0'
+__title__ = 'firebase_admin'
+__author__ = 'Firebase'
+__license__ = 'Apache License 2.0'
+__url__ = 'https://firebase.google.com/docs/admin/setup/'
diff --git a/venv/Lib/site-packages/firebase_admin/__init__.py b/venv/Lib/site-packages/firebase_admin/__init__.py
new file mode 100644
index 000000000..7e3b2eab0
--- /dev/null
+++ b/venv/Lib/site-packages/firebase_admin/__init__.py
@@ -0,0 +1,309 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Firebase Admin SDK for Python."""
+import datetime
+import json
+import os
+import threading
+
+from firebase_admin import credentials
+from firebase_admin.__about__ import __version__
+
+
+_apps = {}
+_apps_lock = threading.RLock()
+_clock = datetime.datetime.utcnow
+
+_DEFAULT_APP_NAME = '[DEFAULT]'
+_FIREBASE_CONFIG_ENV_VAR = 'FIREBASE_CONFIG'
+_CONFIG_VALID_KEYS = ['databaseAuthVariableOverride', 'databaseURL', 'httpTimeout', 'projectId',
+ 'storageBucket']
+
+def initialize_app(credential=None, options=None, name=_DEFAULT_APP_NAME):
+ """Initializes and returns a new App instance.
+
+ Creates a new App instance using the specified options
+    and the app name. If an instance with the same app name already
+    exists, a ValueError is raised.
+    If options are not provided, an attempt is made to load the options from the environment.
+ This is done by looking up the ``FIREBASE_CONFIG`` environment variable. If the value of
+ the variable starts with ``"{"``, it is parsed as a JSON object. Otherwise it is treated
+ as a file name and the JSON content is read from the corresponding file.
+ Use this function whenever a new App instance is required. Do not directly invoke the
+ App constructor.
+
+ Args:
+ credential: A credential object used to initialize the SDK (optional). If none is provided,
+ Google Application Default Credentials are used.
+ options: A dictionary of configuration options (optional). Supported options include
+ ``databaseURL``, ``storageBucket``, ``projectId``, ``databaseAuthVariableOverride``,
+ ``serviceAccountId`` and ``httpTimeout``. If ``httpTimeout`` is not set, the SDK
+ uses a default timeout of 120 seconds.
+ name: Name of the app (optional).
+ Returns:
+ App: A newly initialized instance of App.
+
+ Raises:
+ ValueError: If the app name is already in use, or any of the
+ provided arguments are invalid.
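+
+    Example (an illustrative sketch; the credential file name is a
+    placeholder)::
+
+        import firebase_admin
+        from firebase_admin import credentials
+
+        # A service account key file downloaded from the Firebase console
+        # (placeholder name).
+        cred = credentials.Certificate('serviceAccountKey.json')
+        default_app = firebase_admin.initialize_app(cred)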
+ """
+ if credential is None:
+ credential = credentials.ApplicationDefault()
+ app = App(name, credential, options)
+ with _apps_lock:
+ if app.name not in _apps:
+ _apps[app.name] = app
+ return app
+
+ if name == _DEFAULT_APP_NAME:
+ raise ValueError((
+ 'The default Firebase app already exists. This means you called '
+ 'initialize_app() more than once without providing an app name as '
+ 'the second argument. In most cases you only need to call '
+ 'initialize_app() once. But if you do want to initialize multiple '
+ 'apps, pass a second argument to initialize_app() to give each app '
+ 'a unique name.'))
+
+ raise ValueError((
+ 'Firebase app named "{0}" already exists. This means you called '
+ 'initialize_app() more than once with the same app name as the '
+ 'second argument. Make sure you provide a unique name every time '
+ 'you call initialize_app().').format(name))
+
+
+def delete_app(app):
+ """Gracefully deletes an App instance.
+
+ Args:
+ app: The app instance to be deleted.
+
+ Raises:
+ ValueError: If the app is not initialized.
+ """
+ if not isinstance(app, App):
+ raise ValueError('Illegal app argument type: "{}". Argument must be of '
+ 'type App.'.format(type(app)))
+ with _apps_lock:
+ if _apps.get(app.name) is app:
+ del _apps[app.name]
+ app._cleanup() # pylint: disable=protected-access
+ return
+ if app.name == _DEFAULT_APP_NAME:
+ raise ValueError(
+ 'The default Firebase app is not initialized. Make sure to initialize '
+ 'the default app by calling initialize_app().')
+
+ raise ValueError(
+ ('Firebase app named "{0}" is not initialized. Make sure to initialize '
+ 'the app by calling initialize_app() with your app name as the '
+ 'second argument.').format(app.name))
+
+
+def get_app(name=_DEFAULT_APP_NAME):
+ """Retrieves an App instance by name.
+
+ Args:
+ name: Name of the App instance to retrieve (optional).
+
+ Returns:
+ App: An App instance with the given name.
+
+ Raises:
+ ValueError: If the specified name is not a string, or if the specified
+ app does not exist.
+ """
+ if not isinstance(name, str):
+ raise ValueError('Illegal app name argument type: "{}". App name '
+ 'must be a string.'.format(type(name)))
+ with _apps_lock:
+ if name in _apps:
+ return _apps[name]
+
+ if name == _DEFAULT_APP_NAME:
+ raise ValueError(
+ 'The default Firebase app does not exist. Make sure to initialize '
+ 'the SDK by calling initialize_app().')
+
+ raise ValueError(
+ ('Firebase app named "{0}" does not exist. Make sure to initialize '
+ 'the SDK by calling initialize_app() with your app name as the '
+ 'second argument.').format(name))
+
+
+class _AppOptions:
+ """A collection of configuration options for an App."""
+
+ def __init__(self, options):
+ if options is None:
+ options = self._load_from_environment()
+
+ if not isinstance(options, dict):
+ raise ValueError('Illegal Firebase app options type: {0}. Options '
+ 'must be a dictionary.'.format(type(options)))
+ self._options = options
+
+ def get(self, key, default=None):
+ """Returns the option identified by the provided key."""
+ return self._options.get(key, default)
+
+ def _load_from_environment(self):
+ """Invoked when no options are passed to __init__, loads options from FIREBASE_CONFIG.
+
+ If the value of the FIREBASE_CONFIG environment variable starts with "{" an attempt is made
+ to parse it as a JSON object, otherwise it is assumed to be pointing to a JSON file.
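+
+        Example (an illustrative sketch; both values are placeholders)::
+
+            # Inline JSON form:
+            #   FIREBASE_CONFIG='{"projectId": "my-project"}'
+            # File form:
+            #   FIREBASE_CONFIG=/path/to/firebase-config.json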
+ """
+
+ config_file = os.getenv(_FIREBASE_CONFIG_ENV_VAR)
+ if not config_file:
+ return {}
+ if config_file.startswith('{'):
+ json_str = config_file
+ else:
+ try:
+ with open(config_file, 'r') as json_file:
+ json_str = json_file.read()
+ except Exception as err:
+ raise ValueError('Unable to read file {}. {}'.format(config_file, err))
+ try:
+ json_data = json.loads(json_str)
+ except Exception as err:
+ raise ValueError('JSON string "{0}" is not valid json. {1}'.format(json_str, err))
+ return {k: v for k, v in json_data.items() if k in _CONFIG_VALID_KEYS}
+
+
+class App:
+ """The entry point for Firebase Python SDK.
+
+ Represents a Firebase app, while holding the configuration and state
+ common to all Firebase APIs.
+ """
+
+ def __init__(self, name, credential, options):
+ """Constructs a new App using the provided name and options.
+
+ Args:
+ name: Name of the application.
+ credential: A credential object.
+ options: A dictionary of configuration options.
+
+ Raises:
+ ValueError: If an argument is None or invalid.
+ """
+ if not name or not isinstance(name, str):
+ raise ValueError('Illegal Firebase app name "{0}" provided. App name must be a '
+ 'non-empty string.'.format(name))
+ self._name = name
+
+ if not isinstance(credential, credentials.Base):
+ raise ValueError('Illegal Firebase credential provided. App must be initialized '
+ 'with a valid credential instance.')
+ self._credential = credential
+ self._options = _AppOptions(options)
+ self._lock = threading.RLock()
+ self._services = {}
+
+ App._validate_project_id(self._options.get('projectId'))
+ self._project_id_initialized = False
+
+ @classmethod
+ def _validate_project_id(cls, project_id):
+ if project_id is not None and not isinstance(project_id, str):
+ raise ValueError(
+                'Invalid project ID: "{0}". Project ID must be a string.'.format(project_id))
+
+ @property
+ def name(self):
+ return self._name
+
+ @property
+ def credential(self):
+ return self._credential
+
+ @property
+ def options(self):
+ return self._options
+
+ @property
+ def project_id(self):
+ if not self._project_id_initialized:
+ self._project_id = self._lookup_project_id()
+ self._project_id_initialized = True
+ return self._project_id
+
+ def _lookup_project_id(self):
+ """Looks up the Firebase project ID associated with an App.
+
+ If a ``projectId`` is specified in app options, it is returned. Then tries to
+ get the project ID from the credential used to initialize the app. If that also fails,
+ attempts to look up the ``GOOGLE_CLOUD_PROJECT`` and ``GCLOUD_PROJECT`` environment
+ variables.
+
+ Returns:
+ str: A project ID string or None.
+ """
+ project_id = self._options.get('projectId')
+ if not project_id:
+ try:
+ project_id = self._credential.project_id
+ except AttributeError:
+ pass
+ if not project_id:
+ project_id = os.environ.get('GOOGLE_CLOUD_PROJECT',
+ os.environ.get('GCLOUD_PROJECT'))
+        # Validate the resolved value, not just the raw option (which was
+        # already validated in __init__).
+        App._validate_project_id(project_id)
+ return project_id
+
+ def _get_service(self, name, initializer):
+ """Returns the service instance identified by the given name.
+
+ Services are functional entities exposed by the Admin SDK (e.g. auth, database). Each
+ service instance is associated with exactly one App. If the named service
+ instance does not exist yet, _get_service() calls the provided initializer function to
+ create the service instance. The created instance will be cached, so that subsequent
+ calls would always fetch it from the cache.
+
+ Args:
+ name: Name of the service to retrieve.
+ initializer: A function that can be used to initialize a service for the first time.
+
+ Returns:
+ object: The specified service instance.
+
+ Raises:
+ ValueError: If the provided name is invalid, or if the App is already deleted.
+ """
+ if not name or not isinstance(name, str):
+ raise ValueError(
+ 'Illegal name argument: "{0}". Name must be a non-empty string.'.format(name))
+ with self._lock:
+ if self._services is None:
+ raise ValueError(
+ 'Service requested from deleted Firebase App: "{0}".'.format(self._name))
+ if name not in self._services:
+ self._services[name] = initializer(self)
+ return self._services[name]
+
+ def _cleanup(self):
+ """Cleans up any services associated with this App.
+
+ Checks whether each service contains a close() method, and calls it if available.
+ This is to be called when an App is being deleted, thus ensuring graceful termination of
+ any services started by the App.
+ """
+ with self._lock:
+ for service in self._services.values():
+ if hasattr(service, 'close') and hasattr(service.close, '__call__'):
+ service.close()
+ self._services = None
diff --git a/venv/Lib/site-packages/firebase_admin/__pycache__/__about__.cpython-36.pyc b/venv/Lib/site-packages/firebase_admin/__pycache__/__about__.cpython-36.pyc
new file mode 100644
index 000000000..bbec15774
Binary files /dev/null and b/venv/Lib/site-packages/firebase_admin/__pycache__/__about__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/firebase_admin/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/firebase_admin/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..fa18ae3b3
Binary files /dev/null and b/venv/Lib/site-packages/firebase_admin/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/firebase_admin/__pycache__/_auth_client.cpython-36.pyc b/venv/Lib/site-packages/firebase_admin/__pycache__/_auth_client.cpython-36.pyc
new file mode 100644
index 000000000..49ee1ef0d
Binary files /dev/null and b/venv/Lib/site-packages/firebase_admin/__pycache__/_auth_client.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/firebase_admin/__pycache__/_auth_providers.cpython-36.pyc b/venv/Lib/site-packages/firebase_admin/__pycache__/_auth_providers.cpython-36.pyc
new file mode 100644
index 000000000..8e54fbc77
Binary files /dev/null and b/venv/Lib/site-packages/firebase_admin/__pycache__/_auth_providers.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/firebase_admin/__pycache__/_auth_utils.cpython-36.pyc b/venv/Lib/site-packages/firebase_admin/__pycache__/_auth_utils.cpython-36.pyc
new file mode 100644
index 000000000..5ef7c81ac
Binary files /dev/null and b/venv/Lib/site-packages/firebase_admin/__pycache__/_auth_utils.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/firebase_admin/__pycache__/_http_client.cpython-36.pyc b/venv/Lib/site-packages/firebase_admin/__pycache__/_http_client.cpython-36.pyc
new file mode 100644
index 000000000..0aaf0c9c8
Binary files /dev/null and b/venv/Lib/site-packages/firebase_admin/__pycache__/_http_client.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/firebase_admin/__pycache__/_messaging_encoder.cpython-36.pyc b/venv/Lib/site-packages/firebase_admin/__pycache__/_messaging_encoder.cpython-36.pyc
new file mode 100644
index 000000000..b8e0beae0
Binary files /dev/null and b/venv/Lib/site-packages/firebase_admin/__pycache__/_messaging_encoder.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/firebase_admin/__pycache__/_messaging_utils.cpython-36.pyc b/venv/Lib/site-packages/firebase_admin/__pycache__/_messaging_utils.cpython-36.pyc
new file mode 100644
index 000000000..33b6f6ac8
Binary files /dev/null and b/venv/Lib/site-packages/firebase_admin/__pycache__/_messaging_utils.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/firebase_admin/__pycache__/_rfc3339.cpython-36.pyc b/venv/Lib/site-packages/firebase_admin/__pycache__/_rfc3339.cpython-36.pyc
new file mode 100644
index 000000000..782f56e2a
Binary files /dev/null and b/venv/Lib/site-packages/firebase_admin/__pycache__/_rfc3339.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/firebase_admin/__pycache__/_sseclient.cpython-36.pyc b/venv/Lib/site-packages/firebase_admin/__pycache__/_sseclient.cpython-36.pyc
new file mode 100644
index 000000000..d8d3be45f
Binary files /dev/null and b/venv/Lib/site-packages/firebase_admin/__pycache__/_sseclient.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/firebase_admin/__pycache__/_token_gen.cpython-36.pyc b/venv/Lib/site-packages/firebase_admin/__pycache__/_token_gen.cpython-36.pyc
new file mode 100644
index 000000000..9a467f0b4
Binary files /dev/null and b/venv/Lib/site-packages/firebase_admin/__pycache__/_token_gen.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/firebase_admin/__pycache__/_user_identifier.cpython-36.pyc b/venv/Lib/site-packages/firebase_admin/__pycache__/_user_identifier.cpython-36.pyc
new file mode 100644
index 000000000..cd7daa362
Binary files /dev/null and b/venv/Lib/site-packages/firebase_admin/__pycache__/_user_identifier.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/firebase_admin/__pycache__/_user_import.cpython-36.pyc b/venv/Lib/site-packages/firebase_admin/__pycache__/_user_import.cpython-36.pyc
new file mode 100644
index 000000000..125689f17
Binary files /dev/null and b/venv/Lib/site-packages/firebase_admin/__pycache__/_user_import.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/firebase_admin/__pycache__/_user_mgt.cpython-36.pyc b/venv/Lib/site-packages/firebase_admin/__pycache__/_user_mgt.cpython-36.pyc
new file mode 100644
index 000000000..29aae9fcb
Binary files /dev/null and b/venv/Lib/site-packages/firebase_admin/__pycache__/_user_mgt.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/firebase_admin/__pycache__/_utils.cpython-36.pyc b/venv/Lib/site-packages/firebase_admin/__pycache__/_utils.cpython-36.pyc
new file mode 100644
index 000000000..f92c27f49
Binary files /dev/null and b/venv/Lib/site-packages/firebase_admin/__pycache__/_utils.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/firebase_admin/__pycache__/auth.cpython-36.pyc b/venv/Lib/site-packages/firebase_admin/__pycache__/auth.cpython-36.pyc
new file mode 100644
index 000000000..f2af078c6
Binary files /dev/null and b/venv/Lib/site-packages/firebase_admin/__pycache__/auth.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/firebase_admin/__pycache__/credentials.cpython-36.pyc b/venv/Lib/site-packages/firebase_admin/__pycache__/credentials.cpython-36.pyc
new file mode 100644
index 000000000..3a8f5a97c
Binary files /dev/null and b/venv/Lib/site-packages/firebase_admin/__pycache__/credentials.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/firebase_admin/__pycache__/db.cpython-36.pyc b/venv/Lib/site-packages/firebase_admin/__pycache__/db.cpython-36.pyc
new file mode 100644
index 000000000..a57f7ed09
Binary files /dev/null and b/venv/Lib/site-packages/firebase_admin/__pycache__/db.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/firebase_admin/__pycache__/exceptions.cpython-36.pyc b/venv/Lib/site-packages/firebase_admin/__pycache__/exceptions.cpython-36.pyc
new file mode 100644
index 000000000..e2e79c95e
Binary files /dev/null and b/venv/Lib/site-packages/firebase_admin/__pycache__/exceptions.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/firebase_admin/__pycache__/firestore.cpython-36.pyc b/venv/Lib/site-packages/firebase_admin/__pycache__/firestore.cpython-36.pyc
new file mode 100644
index 000000000..5f220f580
Binary files /dev/null and b/venv/Lib/site-packages/firebase_admin/__pycache__/firestore.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/firebase_admin/__pycache__/instance_id.cpython-36.pyc b/venv/Lib/site-packages/firebase_admin/__pycache__/instance_id.cpython-36.pyc
new file mode 100644
index 000000000..fffdf8563
Binary files /dev/null and b/venv/Lib/site-packages/firebase_admin/__pycache__/instance_id.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/firebase_admin/__pycache__/messaging.cpython-36.pyc b/venv/Lib/site-packages/firebase_admin/__pycache__/messaging.cpython-36.pyc
new file mode 100644
index 000000000..df3ffe44e
Binary files /dev/null and b/venv/Lib/site-packages/firebase_admin/__pycache__/messaging.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/firebase_admin/__pycache__/ml.cpython-36.pyc b/venv/Lib/site-packages/firebase_admin/__pycache__/ml.cpython-36.pyc
new file mode 100644
index 000000000..3993f9c66
Binary files /dev/null and b/venv/Lib/site-packages/firebase_admin/__pycache__/ml.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/firebase_admin/__pycache__/project_management.cpython-36.pyc b/venv/Lib/site-packages/firebase_admin/__pycache__/project_management.cpython-36.pyc
new file mode 100644
index 000000000..6322d940a
Binary files /dev/null and b/venv/Lib/site-packages/firebase_admin/__pycache__/project_management.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/firebase_admin/__pycache__/storage.cpython-36.pyc b/venv/Lib/site-packages/firebase_admin/__pycache__/storage.cpython-36.pyc
new file mode 100644
index 000000000..bbce402d4
Binary files /dev/null and b/venv/Lib/site-packages/firebase_admin/__pycache__/storage.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/firebase_admin/__pycache__/tenant_mgt.cpython-36.pyc b/venv/Lib/site-packages/firebase_admin/__pycache__/tenant_mgt.cpython-36.pyc
new file mode 100644
index 000000000..2bb274789
Binary files /dev/null and b/venv/Lib/site-packages/firebase_admin/__pycache__/tenant_mgt.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/firebase_admin/_auth_client.py b/venv/Lib/site-packages/firebase_admin/_auth_client.py
new file mode 100644
index 000000000..1c9b37082
--- /dev/null
+++ b/venv/Lib/site-packages/firebase_admin/_auth_client.py
@@ -0,0 +1,703 @@
+# Copyright 2020 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Firebase auth client sub module."""
+
+import time
+
+import firebase_admin
+from firebase_admin import _auth_providers
+from firebase_admin import _auth_utils
+from firebase_admin import _http_client
+from firebase_admin import _token_gen
+from firebase_admin import _user_identifier
+from firebase_admin import _user_import
+from firebase_admin import _user_mgt
+
+
+class Client:
+ """Firebase Authentication client scoped to a specific tenant."""
+
+ def __init__(self, app, tenant_id=None):
+ if not app.project_id:
+ raise ValueError("""A project ID is required to access the auth service.
+ 1. Use a service account credential, or
+ 2. set the project ID explicitly via Firebase App options, or
+ 3. set the project ID via the GOOGLE_CLOUD_PROJECT environment variable.""")
+
+ credential = app.credential.get_credential()
+ version_header = 'Python/Admin/{0}'.format(firebase_admin.__version__)
+ http_client = _http_client.JsonHttpClient(
+ credential=credential, headers={'X-Client-Version': version_header})
+
+ self._tenant_id = tenant_id
+ self._token_generator = _token_gen.TokenGenerator(app, http_client)
+ self._token_verifier = _token_gen.TokenVerifier(app)
+ self._user_manager = _user_mgt.UserManager(http_client, app.project_id, tenant_id)
+ self._provider_manager = _auth_providers.ProviderConfigClient(
+ http_client, app.project_id, tenant_id)
+
+ @property
+ def tenant_id(self):
+ """Tenant ID associated with this client."""
+ return self._tenant_id
+
+ def create_custom_token(self, uid, developer_claims=None):
+ """Builds and signs a Firebase custom auth token.
+
+ Args:
+ uid: ID of the user for whom the token is created.
+ developer_claims: A dictionary of claims to be included in the token
+ (optional).
+
+ Returns:
+ bytes: A token minted from the input parameters.
+
+ Raises:
+ ValueError: If input parameters are invalid.
+ TokenSignError: If an error occurs while signing the token using the remote IAM service.
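+
+        Example (an illustrative sketch; the uid and claim are placeholders)::
+
+            token = client.create_custom_token('some-uid', {'premium': True})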
+ """
+ return self._token_generator.create_custom_token(
+ uid, developer_claims, tenant_id=self.tenant_id)
+
+ def verify_id_token(self, id_token, check_revoked=False):
+ """Verifies the signature and data for the provided JWT.
+
+ Accepts a signed token string, verifies that it is current, was issued
+ to this project, and that it was correctly signed by Google.
+
+ Args:
+ id_token: A string of the encoded JWT.
+ check_revoked: Boolean, If true, checks whether the token has been revoked (optional).
+
+ Returns:
+ dict: A dictionary of key-value pairs parsed from the decoded JWT.
+
+ Raises:
+ ValueError: If ``id_token`` is a not a string or is empty.
+ InvalidIdTokenError: If ``id_token`` is not a valid Firebase ID token.
+ ExpiredIdTokenError: If the specified ID token has expired.
+ RevokedIdTokenError: If ``check_revoked`` is ``True`` and the ID token has been
+ revoked.
+ TenantIdMismatchError: If ``id_token`` belongs to a tenant that is different than
+ this ``Client`` instance.
+ CertificateFetchError: If an error occurs while fetching the public key certificates
+ required to verify the ID token.
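+
+        Example (an illustrative sketch; ``id_token`` would be sent by a
+        client app)::
+
+            claims = client.verify_id_token(id_token)
+            uid = claims['uid']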
+ """
+ if not isinstance(check_revoked, bool):
+ # guard against accidental wrong assignment.
+ raise ValueError('Illegal check_revoked argument. Argument must be of type '
+                             'bool, but given "{0}".'.format(type(check_revoked)))
+
+ verified_claims = self._token_verifier.verify_id_token(id_token)
+ if self.tenant_id:
+ token_tenant_id = verified_claims.get('firebase', {}).get('tenant')
+ if self.tenant_id != token_tenant_id:
+ raise _auth_utils.TenantIdMismatchError(
+ 'Invalid tenant ID: {0}'.format(token_tenant_id))
+
+ if check_revoked:
+ self._check_jwt_revoked(verified_claims, _token_gen.RevokedIdTokenError, 'ID token')
+ return verified_claims
+
+ def revoke_refresh_tokens(self, uid):
+ """Revokes all refresh tokens for an existing user.
+
+        This method updates the user's ``tokens_valid_after_timestamp`` to the current UTC time,
+        in seconds since the epoch. It is important that the server on which this is called has
+        its clock set correctly and synchronized.
+
+ While this revokes all sessions for a specified user and disables any new ID tokens for
+ existing sessions from getting minted, existing ID tokens may remain active until their
+ natural expiration (one hour). To verify that ID tokens are revoked, use
+ ``verify_id_token(idToken, check_revoked=True)``.
+
+ Args:
+ uid: A user ID string.
+
+ Raises:
+ ValueError: If the user ID is None, empty or malformed.
+ FirebaseError: If an error occurs while revoking the refresh token.
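+
+        Example (an illustrative sketch)::
+
+            client.revoke_refresh_tokens(uid)
+            # Revoked sessions can then be rejected with:
+            client.verify_id_token(id_token, check_revoked=True)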
+ """
+ self._user_manager.update_user(uid, valid_since=int(time.time()))
+
+ def get_user(self, uid):
+ """Gets the user data corresponding to the specified user ID.
+
+ Args:
+ uid: A user ID string.
+
+ Returns:
+ UserRecord: A user record instance.
+
+ Raises:
+ ValueError: If the user ID is None, empty or malformed.
+ UserNotFoundError: If the specified user ID does not exist.
+ FirebaseError: If an error occurs while retrieving the user.
+ """
+ response = self._user_manager.get_user(uid=uid)
+ return _user_mgt.UserRecord(response)
+
+ def get_user_by_email(self, email):
+ """Gets the user data corresponding to the specified user email.
+
+ Args:
+ email: A user email address string.
+
+ Returns:
+ UserRecord: A user record instance.
+
+ Raises:
+ ValueError: If the email is None, empty or malformed.
+ UserNotFoundError: If no user exists by the specified email address.
+ FirebaseError: If an error occurs while retrieving the user.
+ """
+ response = self._user_manager.get_user(email=email)
+ return _user_mgt.UserRecord(response)
+
+ def get_user_by_phone_number(self, phone_number):
+ """Gets the user data corresponding to the specified phone number.
+
+ Args:
+ phone_number: A phone number string.
+
+ Returns:
+ UserRecord: A user record instance.
+
+ Raises:
+ ValueError: If the phone number is ``None``, empty or malformed.
+ UserNotFoundError: If no user exists by the specified phone number.
+ FirebaseError: If an error occurs while retrieving the user.
+ """
+ response = self._user_manager.get_user(phone_number=phone_number)
+ return _user_mgt.UserRecord(response)
+
+ def get_users(self, identifiers):
+ """Gets the user data corresponding to the specified identifiers.
+
+ There are no ordering guarantees; in particular, the nth entry in the
+ result list is not guaranteed to correspond to the nth entry in the input
+ parameters list.
+
+ A maximum of 100 identifiers may be supplied. If more than 100
+ identifiers are supplied, this method raises a `ValueError`.
+
+ Args:
+ identifiers (list[Identifier]): A list of ``Identifier`` instances used
+ to indicate which user records should be returned. Must have <= 100
+ entries.
+
+ Returns:
+ GetUsersResult: A ``GetUsersResult`` instance corresponding to the
+ specified identifiers.
+
+ Raises:
+ ValueError: If any of the identifiers are invalid or if more than 100
+ identifiers are specified.
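+
+        Example (an illustrative sketch; assumes these identifier classes are
+        re-exported by the ``auth`` module, and the values are placeholders)::
+
+            from firebase_admin import auth
+
+            result = client.get_users([
+                auth.UidIdentifier('uid1'),
+                auth.EmailIdentifier('user@example.com'),
+            ])
+            for user in result.users:
+                print(user.uid)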
+ """
+ response = self._user_manager.get_users(identifiers=identifiers)
+
+ def _matches(identifier, user_record):
+ if isinstance(identifier, _user_identifier.UidIdentifier):
+ return identifier.uid == user_record.uid
+ if isinstance(identifier, _user_identifier.EmailIdentifier):
+ return identifier.email == user_record.email
+ if isinstance(identifier, _user_identifier.PhoneIdentifier):
+ return identifier.phone_number == user_record.phone_number
+ if isinstance(identifier, _user_identifier.ProviderIdentifier):
+ return next((
+ True
+ for user_info in user_record.provider_data
+ if identifier.provider_id == user_info.provider_id
+ and identifier.provider_uid == user_info.uid
+ ), False)
+ raise TypeError("Unexpected type: {}".format(type(identifier)))
+
+ def _is_user_found(identifier, user_records):
+ return any(_matches(identifier, user_record) for user_record in user_records)
+
+ users = [_user_mgt.UserRecord(user) for user in response]
+ not_found = [
+ identifier for identifier in identifiers if not _is_user_found(identifier, users)]
+
+ return _user_mgt.GetUsersResult(users=users, not_found=not_found)
+
+ def list_users(self, page_token=None, max_results=_user_mgt.MAX_LIST_USERS_RESULTS):
+ """Retrieves a page of user accounts from a Firebase project.
+
+ The ``page_token`` argument governs the starting point of the page. The ``max_results``
+ argument governs the maximum number of user accounts that may be included in the returned
+ page. This function never returns ``None``. If there are no user accounts in the Firebase
+ project, this returns an empty page.
+
+ Args:
+ page_token: A non-empty page token string, which indicates the starting point of the
+ page (optional). Defaults to ``None``, which will retrieve the first page of users.
+ max_results: A positive integer indicating the maximum number of users to include in
+ the returned page (optional). Defaults to 1000, which is also the maximum number
+ allowed.
+
+ Returns:
+ ListUsersPage: A page of user accounts.
+
+ Raises:
+ ValueError: If max_results or page_token are invalid.
+ FirebaseError: If an error occurs while retrieving the user accounts.
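+
+        Example (an illustrative sketch)::
+
+            page = client.list_users()
+            while page:
+                for user in page.users:
+                    print(user.uid)
+                page = page.get_next_page()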
+ """
+ def download(page_token, max_results):
+ return self._user_manager.list_users(page_token, max_results)
+ return _user_mgt.ListUsersPage(download, page_token, max_results)
+
+ def create_user(self, **kwargs): # pylint: disable=differing-param-doc
+ """Creates a new user account with the specified properties.
+
+ Args:
+ kwargs: A series of keyword arguments (optional).
+
+ Keyword Args:
+ uid: User ID to assign to the newly created user (optional).
+ display_name: The user's display name (optional).
+ email: The user's primary email (optional).
+ email_verified: A boolean indicating whether or not the user's primary email is
+ verified (optional).
+ phone_number: The user's primary phone number (optional).
+ photo_url: The user's photo URL (optional).
+ password: The user's raw, unhashed password. (optional).
+ disabled: A boolean indicating whether or not the user account is disabled (optional).
+
+ Returns:
+ UserRecord: A UserRecord instance for the newly created user.
+
+ Raises:
+ ValueError: If the specified user properties are invalid.
+ FirebaseError: If an error occurs while creating the user account.
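+
+        Example (an illustrative sketch; the values are placeholders)::
+
+            user = client.create_user(
+                email='user@example.com',
+                password='aPlaceholderPassword',
+                display_name='Example User')
+            print(user.uid)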
+ """
+ uid = self._user_manager.create_user(**kwargs)
+ return self.get_user(uid=uid)
+
+ def update_user(self, uid, **kwargs): # pylint: disable=differing-param-doc
+ """Updates an existing user account with the specified properties.
+
+ Args:
+ uid: A user ID string.
+ kwargs: A series of keyword arguments (optional).
+
+ Keyword Args:
+ display_name: The user's display name (optional). Can be removed by explicitly passing
+ ``auth.DELETE_ATTRIBUTE``.
+ email: The user's primary email (optional).
+ email_verified: A boolean indicating whether or not the user's primary email is
+ verified (optional).
+ phone_number: The user's primary phone number (optional). Can be removed by explicitly
+ passing ``auth.DELETE_ATTRIBUTE``.
+ photo_url: The user's photo URL (optional). Can be removed by explicitly passing
+ ``auth.DELETE_ATTRIBUTE``.
+ password: The user's raw, unhashed password. (optional).
+ disabled: A boolean indicating whether or not the user account is disabled (optional).
+            custom_claims: A dictionary or a JSON string containing the custom claims to be set on
+ the user account (optional). To remove all custom claims, pass
+ ``auth.DELETE_ATTRIBUTE``.
+ valid_since: An integer signifying the seconds since the epoch (optional). This field
+ is set by ``revoke_refresh_tokens`` and it is discouraged to set this field
+ directly.
+
+ Returns:
+ UserRecord: An updated UserRecord instance for the user.
+
+ Raises:
+ ValueError: If the specified user ID or properties are invalid.
+ FirebaseError: If an error occurs while updating the user account.
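+
+        Example (an illustrative sketch; the values are placeholders)::
+
+            user = client.update_user(uid, display_name='New Name', email_verified=True)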
+ """
+ self._user_manager.update_user(uid, **kwargs)
+ return self.get_user(uid=uid)
+
+ def set_custom_user_claims(self, uid, custom_claims):
+ """Sets additional claims on an existing user account.
+
+ Custom claims set via this function can be used to define user roles and privilege levels.
+ These claims propagate to all the devices where the user is already signed in (after token
+ expiration or when token refresh is forced), and next time the user signs in. The claims
+ can be accessed via the user's ID token JWT. If a reserved OIDC claim is specified (sub,
+        iat, iss, etc.), an error is thrown. Claims payload must also not be larger than 1000
+ characters when serialized into a JSON string.
+
+ Args:
+ uid: A user ID string.
+ custom_claims: A dictionary or a JSON string of custom claims. Pass None to unset any
+ claims set previously.
+
+ Raises:
+ ValueError: If the specified user ID or the custom claims are invalid.
+ FirebaseError: If an error occurs while updating the user account.
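+
+        Example (an illustrative sketch; the claim is a placeholder)::
+
+            client.set_custom_user_claims(uid, {'admin': True})
+            # After the client refreshes its ID token, the claim is visible:
+            #   client.verify_id_token(id_token).get('admin')  # True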
+ """
+ if custom_claims is None:
+ custom_claims = _user_mgt.DELETE_ATTRIBUTE
+ self._user_manager.update_user(uid, custom_claims=custom_claims)
+
+ def delete_user(self, uid):
+ """Deletes the user identified by the specified user ID.
+
+ Args:
+ uid: A user ID string.
+
+ Raises:
+ ValueError: If the user ID is None, empty or malformed.
+ FirebaseError: If an error occurs while deleting the user account.
+ """
+ self._user_manager.delete_user(uid)
+
+ def delete_users(self, uids):
+ """Deletes the users specified by the given identifiers.
+
+        Deleting a non-existing user does not generate an error (the method is
+        idempotent). Non-existing users are considered to be successfully
+        deleted and are therefore included in the
+        ``DeleteUsersResult.success_count`` value.
+
+        A maximum of 1000 identifiers may be supplied. If more than 1000
+        identifiers are supplied, this method raises a ``ValueError``.
+
+ Args:
+ uids: A list of strings indicating the uids of the users to be deleted.
+ Must have <= 1000 entries.
+
+ Returns:
+ DeleteUsersResult: The total number of successful/failed deletions, as
+ well as the array of errors that correspond to the failed
+ deletions.
+
+ Raises:
+ ValueError: If any of the identifiers are invalid or if more than 1000
+ identifiers are specified.
+ """
+ result = self._user_manager.delete_users(uids, force_delete=True)
+ return _user_mgt.DeleteUsersResult(result, len(uids))
+
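+    # Editor's sketch (illustrative, not part of the upstream module): bulk
+    # deletion through the public ``auth`` module; the uids are placeholders.
+    #
+    #   from firebase_admin import auth
+    #
+    #   result = auth.delete_users(['uid1', 'uid2', 'uid3'])
+    #   print('Successfully deleted:', result.success_count)
+    #   print('Failed:', result.failure_count)
+    #   for err in result.errors:
+    #       print('Index {0}: {1}'.format(err.index, err.reason))
+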
+ def import_users(self, users, hash_alg=None):
+ """Imports the specified list of users into Firebase Auth.
+
+ At most 1000 users can be imported at a time. This operation is optimized for bulk imports
+ and ignores checks on identifier uniqueness, which could result in duplications. The
+ ``hash_alg`` parameter must be specified when importing users with passwords. Refer to the
+ ``UserImportHash`` class for supported hash algorithms.
+
+ Args:
+ users: A list of ``ImportUserRecord`` instances to import. Length of the list must not
+ exceed 1000.
+ hash_alg: A ``UserImportHash`` object (optional). Required when importing users with
+ passwords.
+
+ Returns:
+ UserImportResult: An object summarizing the result of the import operation.
+
+ Raises:
+ ValueError: If the provided arguments are invalid.
+ FirebaseError: If an error occurs while importing users.
+ """
+ result = self._user_manager.import_users(users, hash_alg)
+ return _user_import.UserImportResult(result, len(users))
+
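+    # Editor's sketch (illustrative, not part of the upstream module): importing
+    # users whose passwords were hashed with scrypt. The records and key
+    # material are hypothetical placeholders.
+    #
+    #   from firebase_admin import auth
+    #
+    #   users = [
+    #       auth.ImportUserRecord(uid='user1', email='user1@example.com',
+    #                             password_hash=b'...', password_salt=b'...'),
+    #   ]
+    #   hash_alg = auth.UserImportHash.scrypt(
+    #       key=b'...', salt_separator=b'...', rounds=8, memory_cost=14)
+    #   result = auth.import_users(users, hash_alg=hash_alg)
+    #   print('Imported:', result.success_count, 'Failed:', result.failure_count)
+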
+ def generate_password_reset_link(self, email, action_code_settings=None):
+ """Generates the out-of-band email action link for password reset flows for the specified
+ email address.
+
+ Args:
+ email: The email of the user whose password is to be reset.
+ action_code_settings: ``ActionCodeSettings`` instance (optional). Defines whether
+ the link is to be handled by a mobile app and the additional state information to
+ be passed in the deep link.
+
+ Returns:
+            link: The password reset link created by the API.
+
+        Raises:
+            ValueError: If the provided arguments are invalid.
+            FirebaseError: If an error occurs while generating the link.
+ """
+ return self._user_manager.generate_email_action_link(
+ 'PASSWORD_RESET', email, action_code_settings=action_code_settings)
+
+ def generate_email_verification_link(self, email, action_code_settings=None):
+ """Generates the out-of-band email action link for email verification flows for the
+ specified email address.
+
+ Args:
+ email: The email of the user to be verified.
+ action_code_settings: ``ActionCodeSettings`` instance (optional). Defines whether
+ the link is to be handled by a mobile app and the additional state information to
+ be passed in the deep link.
+
+ Returns:
+            link: The email verification link created by the API.
+
+        Raises:
+            ValueError: If the provided arguments are invalid.
+            FirebaseError: If an error occurs while generating the link.
+ """
+ return self._user_manager.generate_email_action_link(
+ 'VERIFY_EMAIL', email, action_code_settings=action_code_settings)
+
+ def generate_sign_in_with_email_link(self, email, action_code_settings):
+ """Generates the out-of-band email action link for email link sign-in flows, using the
+ action code settings provided.
+
+ Args:
+ email: The email of the user signing in.
+ action_code_settings: ``ActionCodeSettings`` instance. Defines whether
+ the link is to be handled by a mobile app and the additional state information to be
+ passed in the deep link.
+
+ Returns:
+            link: The email sign-in link created by the API.
+
+        Raises:
+            ValueError: If the provided arguments are invalid.
+            FirebaseError: If an error occurs while generating the link.
+ """
+ return self._user_manager.generate_email_action_link(
+ 'EMAIL_SIGNIN', email, action_code_settings=action_code_settings)
+
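+    # Editor's sketch (illustrative, not part of the upstream module): building
+    # an email sign-in link with continue-URL state; the same pattern applies to
+    # the password-reset and verification variants. The URL is a placeholder.
+    #
+    #   from firebase_admin import auth
+    #
+    #   settings = auth.ActionCodeSettings(
+    #       url='https://www.example.com/checkout', handle_code_in_app=True)
+    #   link = auth.generate_sign_in_with_email_link('user@example.com', settings)
+    #   # Deliver the link to the user through your own email delivery pipeline.
+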
+ def get_oidc_provider_config(self, provider_id):
+ """Returns the ``OIDCProviderConfig`` with the given ID.
+
+ Args:
+ provider_id: Provider ID string.
+
+ Returns:
+            OIDCProviderConfig: An OIDC provider config instance.
+
+ Raises:
+ ValueError: If the provider ID is invalid, empty or does not have ``oidc.`` prefix.
+ ConfigurationNotFoundError: If no OIDC provider is available with the given identifier.
+ FirebaseError: If an error occurs while retrieving the OIDC provider.
+ """
+ return self._provider_manager.get_oidc_provider_config(provider_id)
+
+ def create_oidc_provider_config(
+ self, provider_id, client_id, issuer, display_name=None, enabled=None):
+ """Creates a new OIDC provider config from the given parameters.
+
+ OIDC provider support requires Google Cloud's Identity Platform (GCIP). To learn more about
+ GCIP, including pricing and features, see https://cloud.google.com/identity-platform.
+
+ Args:
+ provider_id: Provider ID string. Must have the prefix ``oidc.``.
+ client_id: Client ID of the new config.
+ issuer: Issuer of the new config. Must be a valid URL.
+            display_name: The user-friendly display name of the current configuration (optional).
+ This name is also used as the provider label in the Cloud Console.
+ enabled: A boolean indicating whether the provider configuration is enabled or disabled
+ (optional). A user cannot sign in using a disabled provider.
+
+ Returns:
+ OIDCProviderConfig: The newly created OIDC provider config instance.
+
+ Raises:
+ ValueError: If any of the specified input parameters are invalid.
+ FirebaseError: If an error occurs while creating the new OIDC provider config.
+ """
+ return self._provider_manager.create_oidc_provider_config(
+ provider_id, client_id=client_id, issuer=issuer, display_name=display_name,
+ enabled=enabled)
+
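+    # Editor's sketch (illustrative, not part of the upstream module): the
+    # provider ID, client ID and issuer below are placeholders.
+    #
+    #   from firebase_admin import auth
+    #
+    #   config = auth.create_oidc_provider_config(
+    #       provider_id='oidc.example-provider', client_id='my-client-id',
+    #       issuer='https://oidc.example.com', display_name='Example OIDC',
+    #       enabled=True)
+    #   print(config.provider_id, config.issuer)
+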
+ def update_oidc_provider_config(
+ self, provider_id, client_id=None, issuer=None, display_name=None, enabled=None):
+ """Updates an existing OIDC provider config with the given parameters.
+
+ Args:
+ provider_id: Provider ID string. Must have the prefix ``oidc.``.
+ client_id: Client ID of the new config (optional).
+ issuer: Issuer of the new config (optional). Must be a valid URL.
+            display_name: The user-friendly display name of the current configuration (optional).
+ Pass ``auth.DELETE_ATTRIBUTE`` to delete the current display name.
+ enabled: A boolean indicating whether the provider configuration is enabled or disabled
+ (optional).
+
+ Returns:
+ OIDCProviderConfig: The updated OIDC provider config instance.
+
+ Raises:
+ ValueError: If any of the specified input parameters are invalid.
+ FirebaseError: If an error occurs while updating the OIDC provider config.
+ """
+ return self._provider_manager.update_oidc_provider_config(
+ provider_id, client_id=client_id, issuer=issuer, display_name=display_name,
+ enabled=enabled)
+
+ def delete_oidc_provider_config(self, provider_id):
+ """Deletes the ``OIDCProviderConfig`` with the given ID.
+
+ Args:
+ provider_id: Provider ID string.
+
+ Raises:
+ ValueError: If the provider ID is invalid, empty or does not have ``oidc.`` prefix.
+ ConfigurationNotFoundError: If no OIDC provider is available with the given identifier.
+ FirebaseError: If an error occurs while deleting the OIDC provider.
+ """
+ self._provider_manager.delete_oidc_provider_config(provider_id)
+
+ def list_oidc_provider_configs(
+ self, page_token=None, max_results=_auth_providers.MAX_LIST_CONFIGS_RESULTS):
+ """Retrieves a page of OIDC provider configs from a Firebase project.
+
+ The ``page_token`` argument governs the starting point of the page. The ``max_results``
+ argument governs the maximum number of configs that may be included in the returned
+ page. This function never returns ``None``. If there are no OIDC configs in the Firebase
+ project, this returns an empty page.
+
+ Args:
+            page_token: A non-empty page token string, which indicates the starting point of the
+                page (optional). Defaults to ``None``, which retrieves the first page of configs.
+            max_results: A positive integer indicating the maximum number of configs to include
+                in the returned page (optional). Defaults to 100, which is also the maximum
+                number allowed.
+
+ Returns:
+ ListProviderConfigsPage: A page of OIDC provider config instances.
+
+ Raises:
+ ValueError: If ``max_results`` or ``page_token`` are invalid.
+ FirebaseError: If an error occurs while retrieving the OIDC provider configs.
+ """
+ return self._provider_manager.list_oidc_provider_configs(page_token, max_results)
+
+ def get_saml_provider_config(self, provider_id):
+ """Returns the ``SAMLProviderConfig`` with the given ID.
+
+ Args:
+ provider_id: Provider ID string.
+
+ Returns:
+ SAMLProviderConfig: A SAML provider config instance.
+
+ Raises:
+ ValueError: If the provider ID is invalid, empty or does not have ``saml.`` prefix.
+ ConfigurationNotFoundError: If no SAML provider is available with the given identifier.
+ FirebaseError: If an error occurs while retrieving the SAML provider.
+ """
+ return self._provider_manager.get_saml_provider_config(provider_id)
+
+ def create_saml_provider_config(
+ self, provider_id, idp_entity_id, sso_url, x509_certificates, rp_entity_id,
+ callback_url, display_name=None, enabled=None):
+ """Creates a new SAML provider config from the given parameters.
+
+ SAML provider support requires Google Cloud's Identity Platform (GCIP). To learn more about
+ GCIP, including pricing and features, see https://cloud.google.com/identity-platform.
+
+ Args:
+ provider_id: Provider ID string. Must have the prefix ``saml.``.
+ idp_entity_id: The SAML IdP entity identifier.
+ sso_url: The SAML IdP SSO URL. Must be a valid URL.
+            x509_certificates: The list of SAML IdP X.509 certificates issued by the CA for this
+                provider. Multiple certificates are accepted to prevent outages during IdP key
+                rotation (for example, ADFS rotates every 10 days). When the Auth server receives
+                a SAML response, it matches the response against the certificates on record;
+                otherwise the response is rejected. Developers are expected to manage
+                certificate updates as keys are rotated.
+ rp_entity_id: The SAML relying party (service provider) entity ID. This is defined by
+ the developer but needs to be provided to the SAML IdP.
+ callback_url: Callback URL string. This is fixed and must always be the same as the
+ OAuth redirect URL provisioned by Firebase Auth, unless a custom authDomain is
+ used.
+            display_name: The user-friendly display name of the current configuration (optional).
+ This name is also used as the provider label in the Cloud Console.
+ enabled: A boolean indicating whether the provider configuration is enabled or disabled
+ (optional). A user cannot sign in using a disabled provider.
+
+ Returns:
+ SAMLProviderConfig: The newly created SAML provider config instance.
+
+ Raises:
+ ValueError: If any of the specified input parameters are invalid.
+ FirebaseError: If an error occurs while creating the new SAML provider config.
+ """
+ return self._provider_manager.create_saml_provider_config(
+ provider_id, idp_entity_id=idp_entity_id, sso_url=sso_url,
+ x509_certificates=x509_certificates, rp_entity_id=rp_entity_id,
+ callback_url=callback_url, display_name=display_name, enabled=enabled)
+
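+    # Editor's sketch (illustrative, not part of the upstream module): all
+    # identifiers, URLs and the certificate below are placeholders.
+    #
+    #   from firebase_admin import auth
+    #
+    #   config = auth.create_saml_provider_config(
+    #       provider_id='saml.example-provider',
+    #       idp_entity_id='IDP_ENTITY_ID',
+    #       sso_url='https://example.com/saml/sso/1234/',
+    #       x509_certificates=['-----BEGIN CERTIFICATE-----\n...'],
+    #       rp_entity_id='RP_ENTITY_ID',
+    #       callback_url='https://project-id.firebaseapp.com/__/auth/handler',
+    #       display_name='Example SAML')
+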
+ def update_saml_provider_config(
+ self, provider_id, idp_entity_id=None, sso_url=None, x509_certificates=None,
+ rp_entity_id=None, callback_url=None, display_name=None, enabled=None):
+ """Updates an existing SAML provider config with the given parameters.
+
+ Args:
+ provider_id: Provider ID string. Must have the prefix ``saml.``.
+ idp_entity_id: The SAML IdP entity identifier (optional).
+ sso_url: The SAML IdP SSO URL. Must be a valid URL (optional).
+ x509_certificates: The list of SAML IdP X.509 certificates issued by CA for this
+ provider (optional).
+ rp_entity_id: The SAML relying party entity ID (optional).
+ callback_url: Callback URL string (optional).
+ display_name: The user-friendly display name of the current configuration (optional).
+ Pass ``auth.DELETE_ATTRIBUTE`` to delete the current display name.
+ enabled: A boolean indicating whether the provider configuration is enabled or disabled
+ (optional).
+
+ Returns:
+ SAMLProviderConfig: The updated SAML provider config instance.
+
+ Raises:
+ ValueError: If any of the specified input parameters are invalid.
+ FirebaseError: If an error occurs while updating the SAML provider config.
+ """
+ return self._provider_manager.update_saml_provider_config(
+ provider_id, idp_entity_id=idp_entity_id, sso_url=sso_url,
+ x509_certificates=x509_certificates, rp_entity_id=rp_entity_id,
+ callback_url=callback_url, display_name=display_name, enabled=enabled)
+
+ def delete_saml_provider_config(self, provider_id):
+ """Deletes the ``SAMLProviderConfig`` with the given ID.
+
+ Args:
+ provider_id: Provider ID string.
+
+ Raises:
+ ValueError: If the provider ID is invalid, empty or does not have ``saml.`` prefix.
+ ConfigurationNotFoundError: If no SAML provider is available with the given identifier.
+ FirebaseError: If an error occurs while deleting the SAML provider.
+ """
+ self._provider_manager.delete_saml_provider_config(provider_id)
+
+ def list_saml_provider_configs(
+ self, page_token=None, max_results=_auth_providers.MAX_LIST_CONFIGS_RESULTS):
+ """Retrieves a page of SAML provider configs from a Firebase project.
+
+ The ``page_token`` argument governs the starting point of the page. The ``max_results``
+ argument governs the maximum number of configs that may be included in the returned
+ page. This function never returns ``None``. If there are no SAML configs in the Firebase
+ project, this returns an empty page.
+
+ Args:
+            page_token: A non-empty page token string, which indicates the starting point of the
+                page (optional). Defaults to ``None``, which retrieves the first page of configs.
+            max_results: A positive integer indicating the maximum number of configs to include
+                in the returned page (optional). Defaults to 100, which is also the maximum
+                number allowed.
+
+ Returns:
+ ListProviderConfigsPage: A page of SAML provider config instances.
+
+ Raises:
+ ValueError: If ``max_results`` or ``page_token`` are invalid.
+ FirebaseError: If an error occurs while retrieving the SAML provider configs.
+ """
+ return self._provider_manager.list_saml_provider_configs(page_token, max_results)
+
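+    # Editor's sketch (illustrative, not part of the upstream module): walking
+    # all SAML provider configs one page at a time via the iterator.
+    #
+    #   from firebase_admin import auth
+    #
+    #   page = auth.list_saml_provider_configs()
+    #   for config in page.iterate_all():
+    #       print(config.provider_id, config.enabled)
+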
+ def _check_jwt_revoked(self, verified_claims, exc_type, label):
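+        # The token's issued-at time ('iat', in seconds) is compared against the
+        # user's tokens-valid-after timestamp (in milliseconds) to detect revocation.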
+ user = self.get_user(verified_claims.get('uid'))
+ if verified_claims.get('iat') * 1000 < user.tokens_valid_after_timestamp:
+ raise exc_type('The Firebase {0} has been revoked.'.format(label))
diff --git a/venv/Lib/site-packages/firebase_admin/_auth_providers.py b/venv/Lib/site-packages/firebase_admin/_auth_providers.py
new file mode 100644
index 000000000..46de6fe5f
--- /dev/null
+++ b/venv/Lib/site-packages/firebase_admin/_auth_providers.py
@@ -0,0 +1,390 @@
+# Copyright 2020 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Firebase auth providers management sub module."""
+
+from urllib import parse
+
+import requests
+
+from firebase_admin import _auth_utils
+from firebase_admin import _user_mgt
+
+
+MAX_LIST_CONFIGS_RESULTS = 100
+
+
+class ProviderConfig:
+ """Parent type for all authentication provider config types."""
+
+ def __init__(self, data):
+ self._data = data
+
+ @property
+ def provider_id(self):
+ name = self._data['name']
+ return name.split('/')[-1]
+
+ @property
+ def display_name(self):
+ return self._data.get('displayName')
+
+ @property
+ def enabled(self):
+ return self._data.get('enabled', False)
+
+
+class OIDCProviderConfig(ProviderConfig):
+ """Represents the OIDC auth provider configuration.
+
+ See https://openid.net/specs/openid-connect-core-1_0-final.html.
+ """
+
+ @property
+ def issuer(self):
+ return self._data['issuer']
+
+ @property
+ def client_id(self):
+ return self._data['clientId']
+
+
+class SAMLProviderConfig(ProviderConfig):
+ """Represents he SAML auth provider configuration.
+
+ See http://docs.oasis-open.org/security/saml/Post2.0/sstc-saml-tech-overview-2.0.html.
+ """
+
+ @property
+ def idp_entity_id(self):
+ return self._data.get('idpConfig', {})['idpEntityId']
+
+ @property
+ def sso_url(self):
+ return self._data.get('idpConfig', {})['ssoUrl']
+
+ @property
+ def x509_certificates(self):
+ certs = self._data.get('idpConfig', {})['idpCertificates']
+ return [c['x509Certificate'] for c in certs]
+
+ @property
+ def callback_url(self):
+ return self._data.get('spConfig', {})['callbackUri']
+
+ @property
+ def rp_entity_id(self):
+ return self._data.get('spConfig', {})['spEntityId']
+
+
+class ListProviderConfigsPage:
+ """Represents a page of AuthProviderConfig instances retrieved from a Firebase project.
+
+ Provides methods for traversing the provider configs included in this page, as well as
+ retrieving subsequent pages. The iterator returned by ``iterate_all()`` can be used to iterate
+ through all provider configs in the Firebase project starting from this page.
+ """
+
+ def __init__(self, download, page_token, max_results):
+ self._download = download
+ self._max_results = max_results
+ self._current = download(page_token, max_results)
+
+ @property
+ def provider_configs(self):
+ """A list of ``AuthProviderConfig`` instances available in this page."""
+ raise NotImplementedError
+
+ @property
+ def next_page_token(self):
+ """Page token string for the next page (empty string indicates no more pages)."""
+ return self._current.get('nextPageToken', '')
+
+ @property
+ def has_next_page(self):
+ """A boolean indicating whether more pages are available."""
+ return bool(self.next_page_token)
+
+ def get_next_page(self):
+ """Retrieves the next page of provider configs, if available.
+
+ Returns:
+ ListProviderConfigsPage: Next page of provider configs, or None if this is the last
+ page.
+ """
+ if self.has_next_page:
+ return self.__class__(self._download, self.next_page_token, self._max_results)
+ return None
+
+ def iterate_all(self):
+ """Retrieves an iterator for provider configs.
+
+        The returned iterator iterates through all the provider configs in the Firebase project
+ starting from this page. The iterator will never buffer more than one page of configs
+ in memory at a time.
+
+ Returns:
+ iterator: An iterator of AuthProviderConfig instances.
+ """
+ return _ProviderConfigIterator(self)
+
+
+class _ListOIDCProviderConfigsPage(ListProviderConfigsPage):
+
+ @property
+ def provider_configs(self):
+ return [OIDCProviderConfig(data) for data in self._current.get('oauthIdpConfigs', [])]
+
+
+class _ListSAMLProviderConfigsPage(ListProviderConfigsPage):
+
+ @property
+ def provider_configs(self):
+ return [SAMLProviderConfig(data) for data in self._current.get('inboundSamlConfigs', [])]
+
+
+class _ProviderConfigIterator(_auth_utils.PageIterator):
+
+ @property
+ def items(self):
+ return self._current_page.provider_configs
+
+
+class ProviderConfigClient:
+ """Client for managing Auth provider configurations."""
+
+ PROVIDER_CONFIG_URL = 'https://identitytoolkit.googleapis.com/v2beta1'
+
+ def __init__(self, http_client, project_id, tenant_id=None):
+ self.http_client = http_client
+ self.base_url = '{0}/projects/{1}'.format(self.PROVIDER_CONFIG_URL, project_id)
+ if tenant_id:
+ self.base_url += '/tenants/{0}'.format(tenant_id)
+
+ def get_oidc_provider_config(self, provider_id):
+ _validate_oidc_provider_id(provider_id)
+ body = self._make_request('get', '/oauthIdpConfigs/{0}'.format(provider_id))
+ return OIDCProviderConfig(body)
+
+ def create_oidc_provider_config(
+ self, provider_id, client_id, issuer, display_name=None, enabled=None):
+ """Creates a new OIDC provider config from the given parameters."""
+ _validate_oidc_provider_id(provider_id)
+ req = {
+ 'clientId': _validate_non_empty_string(client_id, 'client_id'),
+ 'issuer': _validate_url(issuer, 'issuer'),
+ }
+ if display_name is not None:
+ req['displayName'] = _auth_utils.validate_string(display_name, 'display_name')
+ if enabled is not None:
+ req['enabled'] = _auth_utils.validate_boolean(enabled, 'enabled')
+
+ params = 'oauthIdpConfigId={0}'.format(provider_id)
+ body = self._make_request('post', '/oauthIdpConfigs', json=req, params=params)
+ return OIDCProviderConfig(body)
+
+ def update_oidc_provider_config(
+ self, provider_id, client_id=None, issuer=None, display_name=None, enabled=None):
+ """Updates an existing OIDC provider config with the given parameters."""
+ _validate_oidc_provider_id(provider_id)
+ req = {}
+ if display_name is not None:
+ if display_name == _user_mgt.DELETE_ATTRIBUTE:
+ req['displayName'] = None
+ else:
+ req['displayName'] = _auth_utils.validate_string(display_name, 'display_name')
+ if enabled is not None:
+ req['enabled'] = _auth_utils.validate_boolean(enabled, 'enabled')
+ if client_id:
+ req['clientId'] = _validate_non_empty_string(client_id, 'client_id')
+ if issuer:
+ req['issuer'] = _validate_url(issuer, 'issuer')
+
+ if not req:
+ raise ValueError('At least one parameter must be specified for update.')
+
+ update_mask = _auth_utils.build_update_mask(req)
+ params = 'updateMask={0}'.format(','.join(update_mask))
+ url = '/oauthIdpConfigs/{0}'.format(provider_id)
+ body = self._make_request('patch', url, json=req, params=params)
+ return OIDCProviderConfig(body)
+
+ def delete_oidc_provider_config(self, provider_id):
+ _validate_oidc_provider_id(provider_id)
+ self._make_request('delete', '/oauthIdpConfigs/{0}'.format(provider_id))
+
+ def list_oidc_provider_configs(self, page_token=None, max_results=MAX_LIST_CONFIGS_RESULTS):
+ return _ListOIDCProviderConfigsPage(
+ self._fetch_oidc_provider_configs, page_token, max_results)
+
+ def _fetch_oidc_provider_configs(self, page_token=None, max_results=MAX_LIST_CONFIGS_RESULTS):
+ return self._fetch_provider_configs('/oauthIdpConfigs', page_token, max_results)
+
+ def get_saml_provider_config(self, provider_id):
+ _validate_saml_provider_id(provider_id)
+ body = self._make_request('get', '/inboundSamlConfigs/{0}'.format(provider_id))
+ return SAMLProviderConfig(body)
+
+ def create_saml_provider_config(
+ self, provider_id, idp_entity_id, sso_url, x509_certificates,
+ rp_entity_id, callback_url, display_name=None, enabled=None):
+ """Creates a new SAML provider config from the given parameters."""
+ _validate_saml_provider_id(provider_id)
+ req = {
+ 'idpConfig': {
+ 'idpEntityId': _validate_non_empty_string(idp_entity_id, 'idp_entity_id'),
+ 'ssoUrl': _validate_url(sso_url, 'sso_url'),
+ 'idpCertificates': _validate_x509_certificates(x509_certificates),
+ },
+ 'spConfig': {
+ 'spEntityId': _validate_non_empty_string(rp_entity_id, 'rp_entity_id'),
+ 'callbackUri': _validate_url(callback_url, 'callback_url'),
+ },
+ }
+ if display_name is not None:
+ req['displayName'] = _auth_utils.validate_string(display_name, 'display_name')
+ if enabled is not None:
+ req['enabled'] = _auth_utils.validate_boolean(enabled, 'enabled')
+
+ params = 'inboundSamlConfigId={0}'.format(provider_id)
+ body = self._make_request('post', '/inboundSamlConfigs', json=req, params=params)
+ return SAMLProviderConfig(body)
+
+ def update_saml_provider_config(
+ self, provider_id, idp_entity_id=None, sso_url=None, x509_certificates=None,
+ rp_entity_id=None, callback_url=None, display_name=None, enabled=None):
+ """Updates an existing SAML provider config with the given parameters."""
+ _validate_saml_provider_id(provider_id)
+ idp_config = {}
+ if idp_entity_id is not None:
+ idp_config['idpEntityId'] = _validate_non_empty_string(idp_entity_id, 'idp_entity_id')
+ if sso_url is not None:
+ idp_config['ssoUrl'] = _validate_url(sso_url, 'sso_url')
+ if x509_certificates is not None:
+ idp_config['idpCertificates'] = _validate_x509_certificates(x509_certificates)
+
+ sp_config = {}
+ if rp_entity_id is not None:
+ sp_config['spEntityId'] = _validate_non_empty_string(rp_entity_id, 'rp_entity_id')
+ if callback_url is not None:
+ sp_config['callbackUri'] = _validate_url(callback_url, 'callback_url')
+
+ req = {}
+ if display_name is not None:
+ if display_name == _user_mgt.DELETE_ATTRIBUTE:
+ req['displayName'] = None
+ else:
+ req['displayName'] = _auth_utils.validate_string(display_name, 'display_name')
+ if enabled is not None:
+ req['enabled'] = _auth_utils.validate_boolean(enabled, 'enabled')
+ if idp_config:
+ req['idpConfig'] = idp_config
+ if sp_config:
+ req['spConfig'] = sp_config
+
+ if not req:
+ raise ValueError('At least one parameter must be specified for update.')
+
+ update_mask = _auth_utils.build_update_mask(req)
+ params = 'updateMask={0}'.format(','.join(update_mask))
+ url = '/inboundSamlConfigs/{0}'.format(provider_id)
+ body = self._make_request('patch', url, json=req, params=params)
+ return SAMLProviderConfig(body)
+
+ def delete_saml_provider_config(self, provider_id):
+ _validate_saml_provider_id(provider_id)
+ self._make_request('delete', '/inboundSamlConfigs/{0}'.format(provider_id))
+
+ def list_saml_provider_configs(self, page_token=None, max_results=MAX_LIST_CONFIGS_RESULTS):
+ return _ListSAMLProviderConfigsPage(
+ self._fetch_saml_provider_configs, page_token, max_results)
+
+ def _fetch_saml_provider_configs(self, page_token=None, max_results=MAX_LIST_CONFIGS_RESULTS):
+ return self._fetch_provider_configs('/inboundSamlConfigs', page_token, max_results)
+
+ def _fetch_provider_configs(self, path, page_token=None, max_results=MAX_LIST_CONFIGS_RESULTS):
+ """Fetches a page of auth provider configs"""
+ if page_token is not None:
+ if not isinstance(page_token, str) or not page_token:
+ raise ValueError('Page token must be a non-empty string.')
+ if not isinstance(max_results, int):
+ raise ValueError('Max results must be an integer.')
+ if max_results < 1 or max_results > MAX_LIST_CONFIGS_RESULTS:
+ raise ValueError(
+ 'Max results must be a positive integer less than or equal to '
+ '{0}.'.format(MAX_LIST_CONFIGS_RESULTS))
+
+ params = 'pageSize={0}'.format(max_results)
+ if page_token:
+ params += '&pageToken={0}'.format(page_token)
+ return self._make_request('get', path, params=params)
+
+ def _make_request(self, method, path, **kwargs):
+ url = '{0}{1}'.format(self.base_url, path)
+ try:
+ return self.http_client.body(method, url, **kwargs)
+ except requests.exceptions.RequestException as error:
+ raise _auth_utils.handle_auth_backend_error(error)
+
+
+def _validate_oidc_provider_id(provider_id):
+ if not isinstance(provider_id, str):
+ raise ValueError(
+ 'Invalid OIDC provider ID: {0}. Provider ID must be a non-empty string.'.format(
+ provider_id))
+ if not provider_id.startswith('oidc.'):
+ raise ValueError('Invalid OIDC provider ID: {0}.'.format(provider_id))
+ return provider_id
+
+
+def _validate_saml_provider_id(provider_id):
+ if not isinstance(provider_id, str):
+ raise ValueError(
+ 'Invalid SAML provider ID: {0}. Provider ID must be a non-empty string.'.format(
+ provider_id))
+ if not provider_id.startswith('saml.'):
+ raise ValueError('Invalid SAML provider ID: {0}.'.format(provider_id))
+ return provider_id
+
+
+def _validate_non_empty_string(value, label):
+ """Validates that the given value is a non-empty string."""
+ if not isinstance(value, str):
+ raise ValueError('Invalid type for {0}: {1}.'.format(label, value))
+ if not value:
+ raise ValueError('{0} must not be empty.'.format(label))
+ return value
+
+
+def _validate_url(url, label):
+ """Validates that the given value is a well-formed URL string."""
+ if not isinstance(url, str) or not url:
+ raise ValueError(
+ 'Invalid photo URL: "{0}". {1} must be a non-empty '
+ 'string.'.format(url, label))
+ try:
+ parsed = parse.urlparse(url)
+ if not parsed.netloc:
+ raise ValueError('Malformed {0}: "{1}".'.format(label, url))
+ return url
+ except Exception:
+ raise ValueError('Malformed {0}: "{1}".'.format(label, url))
+
+
+def _validate_x509_certificates(x509_certificates):
+ if not isinstance(x509_certificates, list) or not x509_certificates:
+ raise ValueError('x509_certificates must be a non-empty list.')
+ if not all([isinstance(cert, str) and cert for cert in x509_certificates]):
+ raise ValueError('x509_certificates must only contain non-empty strings.')
+ return [{'x509Certificate': cert} for cert in x509_certificates]
diff --git a/venv/Lib/site-packages/firebase_admin/_auth_utils.py b/venv/Lib/site-packages/firebase_admin/_auth_utils.py
new file mode 100644
index 000000000..2226675f9
--- /dev/null
+++ b/venv/Lib/site-packages/firebase_admin/_auth_utils.py
@@ -0,0 +1,422 @@
+# Copyright 2018 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Firebase auth utils."""
+
+import json
+import re
+from urllib import parse
+
+from firebase_admin import exceptions
+from firebase_admin import _utils
+
+
+MAX_CLAIMS_PAYLOAD_SIZE = 1000
+RESERVED_CLAIMS = set([
+ 'acr', 'amr', 'at_hash', 'aud', 'auth_time', 'azp', 'cnf', 'c_hash', 'exp', 'iat',
+ 'iss', 'jti', 'nbf', 'nonce', 'sub', 'firebase',
+])
+VALID_EMAIL_ACTION_TYPES = set(['VERIFY_EMAIL', 'EMAIL_SIGNIN', 'PASSWORD_RESET'])
+
+
+class PageIterator:
+ """An iterator that allows iterating over a sequence of items, one at a time.
+
+ This implementation loads a page of items into memory, and iterates on them. When the whole
+ page has been traversed, it loads another page. This class never keeps more than one page
+ of entries in memory.
+ """
+
+ def __init__(self, current_page):
+ if not current_page:
+ raise ValueError('Current page must not be None.')
+ self._current_page = current_page
+ self._index = 0
+
+ def next(self):
+ if self._index == len(self.items):
+ if self._current_page.has_next_page:
+ self._current_page = self._current_page.get_next_page()
+ self._index = 0
+ if self._index < len(self.items):
+ result = self.items[self._index]
+ self._index += 1
+ return result
+ raise StopIteration
+
+ @property
+ def items(self):
+ raise NotImplementedError
+
+ def __next__(self):
+ return self.next()
+
+ def __iter__(self):
+ return self
+
+
+def validate_uid(uid, required=False):
+ if uid is None and not required:
+ return None
+ if not isinstance(uid, str) or not uid or len(uid) > 128:
+ raise ValueError(
+ 'Invalid uid: "{0}". The uid must be a non-empty string with no more than 128 '
+ 'characters.'.format(uid))
+ return uid
+
+def validate_email(email, required=False):
+ if email is None and not required:
+ return None
+ if not isinstance(email, str) or not email:
+ raise ValueError(
+ 'Invalid email: "{0}". Email must be a non-empty string.'.format(email))
+ parts = email.split('@')
+ if len(parts) != 2 or not parts[0] or not parts[1]:
+ raise ValueError('Malformed email address string: "{0}".'.format(email))
+ return email
+
+def validate_phone(phone, required=False):
+ """Validates the specified phone number.
+
+    Phone number validation is very lax here. The backend will enforce E.164 spec compliance and
+    normalize accordingly. Here we only check that the number starts with a + sign and contains
+    at least one alphanumeric character.
+ """
+ if phone is None and not required:
+ return None
+ if not isinstance(phone, str) or not phone:
+ raise ValueError('Invalid phone number: "{0}". Phone number must be a non-empty '
+ 'string.'.format(phone))
+ if not phone.startswith('+') or not re.search('[a-zA-Z0-9]', phone):
+ raise ValueError('Invalid phone number: "{0}". Phone number must be a valid, E.164 '
+ 'compliant identifier.'.format(phone))
+ return phone
+
+def validate_password(password, required=False):
+ if password is None and not required:
+ return None
+ if not isinstance(password, str) or len(password) < 6:
+ raise ValueError(
+ 'Invalid password string. Password must be a string at least 6 characters long.')
+ return password
+
+def validate_bytes(value, label, required=False):
+ if value is None and not required:
+ return None
+ if not isinstance(value, bytes) or not value:
+ raise ValueError('{0} must be a non-empty byte sequence.'.format(label))
+ return value
+
+def validate_display_name(display_name, required=False):
+ if display_name is None and not required:
+ return None
+ if not isinstance(display_name, str) or not display_name:
+ raise ValueError(
+ 'Invalid display name: "{0}". Display name must be a non-empty '
+ 'string.'.format(display_name))
+ return display_name
+
+def validate_provider_id(provider_id, required=True):
+ if provider_id is None and not required:
+ return None
+ if not isinstance(provider_id, str) or not provider_id:
+ raise ValueError(
+ 'Invalid provider ID: "{0}". Provider ID must be a non-empty '
+ 'string.'.format(provider_id))
+ return provider_id
+
+def validate_provider_uid(provider_uid, required=True):
+ if provider_uid is None and not required:
+ return None
+ if not isinstance(provider_uid, str) or not provider_uid:
+ raise ValueError(
+ 'Invalid provider UID: "{0}". Provider UID must be a non-empty '
+ 'string.'.format(provider_uid))
+ return provider_uid
+
+def validate_photo_url(photo_url, required=False):
+ """Parses and validates the given URL string."""
+ if photo_url is None and not required:
+ return None
+ if not isinstance(photo_url, str) or not photo_url:
+ raise ValueError(
+ 'Invalid photo URL: "{0}". Photo URL must be a non-empty '
+ 'string.'.format(photo_url))
+ try:
+ parsed = parse.urlparse(photo_url)
+ if not parsed.netloc:
+ raise ValueError('Malformed photo URL: "{0}".'.format(photo_url))
+ return photo_url
+ except Exception:
+ raise ValueError('Malformed photo URL: "{0}".'.format(photo_url))
+
+def validate_timestamp(timestamp, label, required=False):
+ """Validates the given timestamp value. Timestamps must be positive integers."""
+ if timestamp is None and not required:
+ return None
+ if isinstance(timestamp, bool):
+ raise ValueError('Boolean value specified as timestamp.')
+ try:
+ timestamp_int = int(timestamp)
+ except TypeError:
+ raise ValueError('Invalid type for timestamp value: {0}.'.format(timestamp))
+ else:
+ if timestamp_int != timestamp:
+ raise ValueError('{0} must be a numeric value and a whole number.'.format(label))
+ if timestamp_int <= 0:
+            raise ValueError('{0} timestamp must be a positive integer.'.format(label))
+ return timestamp_int
+
+def validate_int(value, label, low=None, high=None):
+ """Validates that the given value represents an integer.
+
+    There are several ways to represent an integer in Python (e.g. 2, 2.0). This method allows
+ for all such representations except for booleans. Booleans also behave like integers, but
+ always translate to 1 and 0. Passing a boolean to an API that expects integers is most likely
+ a developer error.
+ """
+ if value is None or isinstance(value, bool):
+ raise ValueError('Invalid type for integer value: {0}.'.format(value))
+ try:
+ val_int = int(value)
+ except TypeError:
+ raise ValueError('Invalid type for integer value: {0}.'.format(value))
+ else:
+ if val_int != value:
+ # This will be True for non-numeric values like '2' and non-whole numbers like 2.5.
+ raise ValueError('{0} must be a numeric value and a whole number.'.format(label))
+ if low is not None and val_int < low:
+ raise ValueError('{0} must not be smaller than {1}.'.format(label, low))
+ if high is not None and val_int > high:
+ raise ValueError('{0} must not be larger than {1}.'.format(label, high))
+ return val_int
+
+def validate_string(value, label):
+ """Validates that the given value is a string."""
+ if not isinstance(value, str):
+ raise ValueError('Invalid type for {0}: {1}.'.format(label, value))
+ return value
+
+def validate_boolean(value, label):
+ """Validates that the given value is a boolean."""
+ if not isinstance(value, bool):
+ raise ValueError('Invalid type for {0}: {1}.'.format(label, value))
+ return value
+
+def validate_custom_claims(custom_claims, required=False):
+ """Validates the specified custom claims.
+
+ Custom claims must be specified as a JSON string. The string must not exceed 1000
+ characters, and the parsed JSON payload must not contain reserved JWT claims.
+ """
+ if custom_claims is None and not required:
+ return None
+ claims_str = str(custom_claims)
+ if len(claims_str) > MAX_CLAIMS_PAYLOAD_SIZE:
+ raise ValueError(
+ 'Custom claims payload must not exceed {0} characters.'.format(
+ MAX_CLAIMS_PAYLOAD_SIZE))
+ try:
+ parsed = json.loads(claims_str)
+ except Exception:
+ raise ValueError('Failed to parse custom claims string as JSON.')
+
+ if not isinstance(parsed, dict):
+ raise ValueError('Custom claims must be parseable as a JSON object.')
+ invalid_claims = RESERVED_CLAIMS.intersection(set(parsed.keys()))
+ if len(invalid_claims) > 1:
+ joined = ', '.join(sorted(invalid_claims))
+ raise ValueError('Claims "{0}" are reserved, and must not be set.'.format(joined))
+ if len(invalid_claims) == 1:
+ raise ValueError(
+ 'Claim "{0}" is reserved, and must not be set.'.format(invalid_claims.pop()))
+ return claims_str
+
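+# Editor's sketch (illustrative, not part of the upstream module): the validator
+# accepts a JSON string and rejects reserved claims such as ``sub``.
+#
+#   validate_custom_claims('{"admin": true}')  # returns the string unchanged
+#   validate_custom_claims('{"sub": "uid"}')   # raises ValueError (reserved claim)
+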
+def validate_action_type(action_type):
+ if action_type not in VALID_EMAIL_ACTION_TYPES:
+        raise ValueError(
+            'Invalid action_type: {0}. Valid values are {1}.'.format(
+                action_type, ', '.join(VALID_EMAIL_ACTION_TYPES)))
+ return action_type
+
+def build_update_mask(params):
+ """Creates an update mask list from the given dictionary."""
+ mask = []
+ for key, value in params.items():
+ if isinstance(value, dict):
+ child_mask = build_update_mask(value)
+ for child in child_mask:
+ mask.append('{0}.{1}'.format(key, child))
+ else:
+ mask.append(key)
+
+ return sorted(mask)
+
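+# Editor's sketch (illustrative, not part of the upstream module): a nested
+# request body flattens into a sorted, dot-delimited update mask.
+#
+#   req = {'displayName': 'X', 'idpConfig': {'ssoUrl': 'https://idp.example.com'}}
+#   build_update_mask(req)  # ['displayName', 'idpConfig.ssoUrl']
+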
+
+class UidAlreadyExistsError(exceptions.AlreadyExistsError):
+ """The user with the provided uid already exists."""
+
+ default_message = 'The user with the provided uid already exists'
+
+ def __init__(self, message, cause, http_response):
+ exceptions.AlreadyExistsError.__init__(self, message, cause, http_response)
+
+
+class EmailAlreadyExistsError(exceptions.AlreadyExistsError):
+ """The user with the provided email already exists."""
+
+ default_message = 'The user with the provided email already exists'
+
+ def __init__(self, message, cause, http_response):
+ exceptions.AlreadyExistsError.__init__(self, message, cause, http_response)
+
+
+class InsufficientPermissionError(exceptions.PermissionDeniedError):
+ """The credential used to initialize the SDK lacks required permissions."""
+
+ default_message = ('The credential used to initialize the SDK has insufficient '
+ 'permissions to perform the requested operation. See '
+ 'https://firebase.google.com/docs/admin/setup for details '
+ 'on how to initialize the Admin SDK with appropriate permissions')
+
+ def __init__(self, message, cause, http_response):
+ exceptions.PermissionDeniedError.__init__(self, message, cause, http_response)
+
+
+class InvalidDynamicLinkDomainError(exceptions.InvalidArgumentError):
+ """Dynamic link domain in ActionCodeSettings is not authorized."""
+
+ default_message = 'Dynamic link domain specified in ActionCodeSettings is not authorized'
+
+ def __init__(self, message, cause, http_response):
+ exceptions.InvalidArgumentError.__init__(self, message, cause, http_response)
+
+
+class InvalidIdTokenError(exceptions.InvalidArgumentError):
+ """The provided ID token is not a valid Firebase ID token."""
+
+ default_message = 'The provided ID token is invalid'
+
+ def __init__(self, message, cause=None, http_response=None):
+ exceptions.InvalidArgumentError.__init__(self, message, cause, http_response)
+
+
+class PhoneNumberAlreadyExistsError(exceptions.AlreadyExistsError):
+ """The user with the provided phone number already exists."""
+
+ default_message = 'The user with the provided phone number already exists'
+
+ def __init__(self, message, cause, http_response):
+ exceptions.AlreadyExistsError.__init__(self, message, cause, http_response)
+
+
+class UnexpectedResponseError(exceptions.UnknownError):
+ """Backend service responded with an unexpected or malformed response."""
+
+ def __init__(self, message, cause=None, http_response=None):
+ exceptions.UnknownError.__init__(self, message, cause, http_response)
+
+
+class UserNotFoundError(exceptions.NotFoundError):
+ """No user record found for the specified identifier."""
+
+ default_message = 'No user record found for the given identifier'
+
+ def __init__(self, message, cause=None, http_response=None):
+ exceptions.NotFoundError.__init__(self, message, cause, http_response)
+
+
+class TenantNotFoundError(exceptions.NotFoundError):
+ """No tenant found for the specified identifier."""
+
+ default_message = 'No tenant found for the given identifier'
+
+ def __init__(self, message, cause=None, http_response=None):
+ exceptions.NotFoundError.__init__(self, message, cause, http_response)
+
+
+class TenantIdMismatchError(exceptions.InvalidArgumentError):
+ """Missing or invalid tenant ID field in the given JWT."""
+
+ def __init__(self, message):
+ exceptions.InvalidArgumentError.__init__(self, message)
+
+
+class ConfigurationNotFoundError(exceptions.NotFoundError):
+ """No auth provider found for the specified identifier."""
+
+ default_message = 'No auth provider found for the given identifier'
+
+ def __init__(self, message, cause=None, http_response=None):
+ exceptions.NotFoundError.__init__(self, message, cause, http_response)
+
+
+_CODE_TO_EXC_TYPE = {
+ 'CONFIGURATION_NOT_FOUND': ConfigurationNotFoundError,
+ 'DUPLICATE_EMAIL': EmailAlreadyExistsError,
+ 'DUPLICATE_LOCAL_ID': UidAlreadyExistsError,
+ 'EMAIL_EXISTS': EmailAlreadyExistsError,
+ 'INSUFFICIENT_PERMISSION': InsufficientPermissionError,
+ 'INVALID_DYNAMIC_LINK_DOMAIN': InvalidDynamicLinkDomainError,
+ 'INVALID_ID_TOKEN': InvalidIdTokenError,
+ 'PHONE_NUMBER_EXISTS': PhoneNumberAlreadyExistsError,
+ 'TENANT_NOT_FOUND': TenantNotFoundError,
+ 'USER_NOT_FOUND': UserNotFoundError,
+}
+
+
+def handle_auth_backend_error(error):
+ """Converts a requests error received from the Firebase Auth service into a FirebaseError."""
+ if error.response is None:
+ return _utils.handle_requests_error(error)
+
+ code, custom_message = _parse_error_body(error.response)
+ if not code:
+ msg = 'Unexpected error response: {0}'.format(error.response.content.decode())
+ return _utils.handle_requests_error(error, message=msg)
+
+ exc_type = _CODE_TO_EXC_TYPE.get(code)
+ msg = _build_error_message(code, exc_type, custom_message)
+ if not exc_type:
+ return _utils.handle_requests_error(error, message=msg)
+
+ return exc_type(msg, cause=error, http_response=error.response)
+
+
+def _parse_error_body(response):
+ """Parses the given error response to extract Auth error code and message."""
+ error_dict = {}
+ try:
+ parsed_body = response.json()
+ if isinstance(parsed_body, dict):
+ error_dict = parsed_body.get('error', {})
+ except ValueError:
+ pass
+
+ # Auth error response format: {"error": {"message": "AUTH_ERROR_CODE: Optional text"}}
+ code = error_dict.get('message') if isinstance(error_dict, dict) else None
+ custom_message = None
+ if code:
+ separator = code.find(':')
+ if separator != -1:
+ custom_message = code[separator + 1:].strip()
+ code = code[:separator]
+
+ return code, custom_message
+
+
+def _build_error_message(code, exc_type, custom_message):
+ default_message = exc_type.default_message if (
+ exc_type and hasattr(exc_type, 'default_message')) else 'Error while calling Auth service'
+ ext = ' {0}'.format(custom_message) if custom_message else ''
+ return '{0} ({1}).{2}'.format(default_message, code, ext)
diff --git a/venv/Lib/site-packages/firebase_admin/_http_client.py b/venv/Lib/site-packages/firebase_admin/_http_client.py
new file mode 100644
index 000000000..f6f0d89fa
--- /dev/null
+++ b/venv/Lib/site-packages/firebase_admin/_http_client.py
@@ -0,0 +1,148 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Internal HTTP client module.
+
+ This module provides utilities for making HTTP calls using the requests library.
+ """
+
+from google.auth import transport
+import requests
+from requests.packages.urllib3.util import retry # pylint: disable=import-error
+
+
+_ANY_METHOD = None
+
+# Default retry configuration: Retries once on low-level connection and socket read errors.
+# Retries up to 4 times on HTTP 500 and 503 errors, with exponential backoff. Returns the
+# last response upon exhausting all retries.
+DEFAULT_RETRY_CONFIG = retry.Retry(
+ connect=1, read=1, status=4, status_forcelist=[500, 503], method_whitelist=_ANY_METHOD,
+ raise_on_status=False, backoff_factor=0.5)
+
+
+DEFAULT_TIMEOUT_SECONDS = 120
+
+
+class HttpClient:
+ """Base HTTP client used to make HTTP calls.
+
+ HttpClient maintains an HTTP session, and handles request authentication and retries if
+ necessary.
+ """
+
+ def __init__(
+ self, credential=None, session=None, base_url='', headers=None,
+ retries=DEFAULT_RETRY_CONFIG, timeout=DEFAULT_TIMEOUT_SECONDS):
+ """Creates a new HttpClient instance from the provided arguments.
+
+ If a credential is provided, initializes a new HTTP session authorized with it. If neither
+ a credential nor a session is provided, initializes a new unauthorized session.
+
+ Args:
+ credential: A Google credential that can be used to authenticate requests (optional).
+ session: A custom HTTP session (optional).
+ base_url: A URL prefix to be added to all outgoing requests (optional).
+ headers: A map of headers to be added to all outgoing requests (optional).
+ retries: A urllib retry configuration. Default settings would retry once for low-level
+ connection and socket read errors, and up to 4 times for HTTP 500 and 503 errors.
+ Pass a False value to disable retries (optional).
+ timeout: HTTP timeout in seconds. Defaults to 120 seconds when not specified. Set to
+ None to disable timeouts (optional).
+ """
+ if credential:
+ self._session = transport.requests.AuthorizedSession(credential)
+ elif session:
+ self._session = session
+ else:
+ self._session = requests.Session() # pylint: disable=redefined-variable-type
+
+ if headers:
+ self._session.headers.update(headers)
+ if retries:
+ self._session.mount('http://', requests.adapters.HTTPAdapter(max_retries=retries))
+ self._session.mount('https://', requests.adapters.HTTPAdapter(max_retries=retries))
+ self._base_url = base_url
+ self._timeout = timeout
+
+ @property
+ def session(self):
+ return self._session
+
+ @property
+ def base_url(self):
+ return self._base_url
+
+ @property
+ def timeout(self):
+ return self._timeout
+
+ def parse_body(self, resp):
+ raise NotImplementedError
+
+ def request(self, method, url, **kwargs):
+ """Makes an HTTP call using the Python requests library.
+
+ This is the sole entry point to the requests library. All other helper methods in this
+ class call this method to send HTTP requests out. Refer to
+ http://docs.python-requests.org/en/master/api/ for more information on supported options
+ and features.
+
+ Args:
+ method: HTTP method name as a string (e.g. get, post).
+ url: URL of the remote endpoint.
+ kwargs: An additional set of keyword arguments to be passed into the requests API
+ (e.g. json, params, timeout).
+
+ Returns:
+ Response: An HTTP response object.
+
+ Raises:
+ RequestException: Any requests exceptions encountered while making the HTTP call.
+ """
+ if 'timeout' not in kwargs:
+ kwargs['timeout'] = self.timeout
+ resp = self._session.request(method, self.base_url + url, **kwargs)
+ resp.raise_for_status()
+ return resp
+
+ def headers(self, method, url, **kwargs):
+ resp = self.request(method, url, **kwargs)
+ return resp.headers
+
+ def body_and_response(self, method, url, **kwargs):
+ resp = self.request(method, url, **kwargs)
+ return self.parse_body(resp), resp
+
+ def body(self, method, url, **kwargs):
+ resp = self.request(method, url, **kwargs)
+ return self.parse_body(resp)
+
+ def headers_and_body(self, method, url, **kwargs):
+ resp = self.request(method, url, **kwargs)
+ return resp.headers, self.parse_body(resp)
+
+ def close(self):
+ self._session.close()
+ self._session = None
+
+
+class JsonHttpClient(HttpClient):
+ """An HTTP client that parses response messages as JSON."""
+
+ def __init__(self, **kwargs):
+ HttpClient.__init__(self, **kwargs)
+
+ def parse_body(self, resp):
+ return resp.json()
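+
+
+# Editor's sketch (illustrative, not part of the upstream module): a JSON client
+# with a URL prefix; httpbin.org is a public echo service used purely for
+# illustration.
+#
+#   client = JsonHttpClient(base_url='https://httpbin.org')
+#   data = client.body('get', '/get', params={'q': 'demo'})
+#   print(data['args'])  # {'q': 'demo'}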
diff --git a/venv/Lib/site-packages/firebase_admin/_messaging_encoder.py b/venv/Lib/site-packages/firebase_admin/_messaging_encoder.py
new file mode 100644
index 000000000..48a3dd3cd
--- /dev/null
+++ b/venv/Lib/site-packages/firebase_admin/_messaging_encoder.py
@@ -0,0 +1,696 @@
+# Copyright 2019 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Encoding and validation utils for the messaging (FCM) module."""
+
+import datetime
+import json
+import math
+import numbers
+import re
+
+import firebase_admin._messaging_utils as _messaging_utils
+
+
+class Message:
+ """A message that can be sent via Firebase Cloud Messaging.
+
+ Contains payload information as well as recipient information. In particular, the message must
+    contain exactly one of the token, topic or condition fields.
+
+ Args:
+ data: A dictionary of data fields (optional). All keys and values in the dictionary must be
+ strings.
+ notification: An instance of ``messaging.Notification`` (optional).
+ android: An instance of ``messaging.AndroidConfig`` (optional).
+ webpush: An instance of ``messaging.WebpushConfig`` (optional).
+ apns: An instance of ``messaging.ApnsConfig`` (optional).
+ fcm_options: An instance of ``messaging.FCMOptions`` (optional).
+ token: The registration token of the device to which the message should be sent (optional).
+ topic: Name of the FCM topic to which the message should be sent (optional). Topic name
+ may contain the ``/topics/`` prefix.
+ condition: The FCM condition to which the message should be sent (optional).
+ """
+
+ def __init__(self, data=None, notification=None, android=None, webpush=None, apns=None,
+ fcm_options=None, token=None, topic=None, condition=None):
+ self.data = data
+ self.notification = notification
+ self.android = android
+ self.webpush = webpush
+ self.apns = apns
+ self.fcm_options = fcm_options
+ self.token = token
+ self.topic = topic
+ self.condition = condition
+
+ def __str__(self):
+ return json.dumps(self, cls=MessageEncoder, sort_keys=True)
+
+
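+# Editor's sketch (illustrative, not part of the upstream module): a minimal
+# message addressed to a single device; the registration token is a placeholder.
+#
+#   from firebase_admin import messaging
+#
+#   msg = messaging.Message(
+#       notification=messaging.Notification(title='Alert', body='Vehicle event'),
+#       data={'severity': 'high'},
+#       token='DEVICE_REGISTRATION_TOKEN')
+#   message_id = messaging.send(msg)
+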
+class MulticastMessage:
+ """A message that can be sent to multiple tokens via Firebase Cloud Messaging.
+
+ Args:
+ tokens: A list of registration tokens of targeted devices.
+ data: A dictionary of data fields (optional). All keys and values in the dictionary must be
+ strings.
+ notification: An instance of ``messaging.Notification`` (optional).
+ android: An instance of ``messaging.AndroidConfig`` (optional).
+ webpush: An instance of ``messaging.WebpushConfig`` (optional).
+ apns: An instance of ``messaging.ApnsConfig`` (optional).
+ fcm_options: An instance of ``messaging.FCMOptions`` (optional).
+ """
+ def __init__(self, tokens, data=None, notification=None, android=None, webpush=None, apns=None,
+ fcm_options=None):
+ _Validators.check_string_list('MulticastMessage.tokens', tokens)
+ if len(tokens) > 500:
+ raise ValueError('MulticastMessage.tokens must not contain more than 500 tokens.')
+ self.tokens = tokens
+ self.data = data
+ self.notification = notification
+ self.android = android
+ self.webpush = webpush
+ self.apns = apns
+ self.fcm_options = fcm_options
+
+
+class _Validators:
+ """A collection of data validation utilities.
+
+    Methods provided in this class raise ``ValueError`` if any validation fails.
+ """
+
+ @classmethod
+ def check_string(cls, label, value, non_empty=False):
+ """Checks if the given value is a string."""
+ if value is None:
+ return None
+ if not isinstance(value, str):
+ if non_empty:
+ raise ValueError('{0} must be a non-empty string.'.format(label))
+ raise ValueError('{0} must be a string.'.format(label))
+ if non_empty and not value:
+ raise ValueError('{0} must be a non-empty string.'.format(label))
+ return value
+
+ @classmethod
+ def check_number(cls, label, value):
+ if value is None:
+ return None
+ if not isinstance(value, numbers.Number):
+ raise ValueError('{0} must be a number.'.format(label))
+ return value
+
+ @classmethod
+ def check_string_dict(cls, label, value):
+ """Checks if the given value is a dictionary comprised only of string keys and values."""
+ if value is None or value == {}:
+ return None
+ if not isinstance(value, dict):
+ raise ValueError('{0} must be a dictionary.'.format(label))
+ non_str = [k for k in value if not isinstance(k, str)]
+ if non_str:
+ raise ValueError('{0} must not contain non-string keys.'.format(label))
+ non_str = [v for v in value.values() if not isinstance(v, str)]
+ if non_str:
+ raise ValueError('{0} must not contain non-string values.'.format(label))
+ return value
+
+ @classmethod
+ def check_string_list(cls, label, value):
+ """Checks if the given value is a list comprised only of strings."""
+ if value is None or value == []:
+ return None
+ if not isinstance(value, list):
+ raise ValueError('{0} must be a list of strings.'.format(label))
+ non_str = [k for k in value if not isinstance(k, str)]
+ if non_str:
+ raise ValueError('{0} must not contain non-string values.'.format(label))
+ return value
+
+ @classmethod
+ def check_number_list(cls, label, value):
+ """Checks if the given value is a list comprised only of numbers."""
+ if value is None or value == []:
+ return None
+ if not isinstance(value, list):
+ raise ValueError('{0} must be a list of numbers.'.format(label))
+ non_number = [k for k in value if not isinstance(k, numbers.Number)]
+ if non_number:
+ raise ValueError('{0} must not contain non-number values.'.format(label))
+ return value
+
+ @classmethod
+ def check_analytics_label(cls, label, value):
+ """Checks if the given value is a valid analytics label."""
+ value = _Validators.check_string(label, value)
+ if value is not None and not re.match(r'^[a-zA-Z0-9-_.~%]{1,50}$', value):
+ raise ValueError('Malformed {}.'.format(label))
+ return value
+
+ @classmethod
+ def check_datetime(cls, label, value):
+ """Checks if the given value is a datetime."""
+ if value is None:
+ return None
+ if not isinstance(value, datetime.datetime):
+ raise ValueError('{0} must be a datetime.'.format(label))
+ return value
+
+
+class MessageEncoder(json.JSONEncoder):
+ """A custom ``JSONEncoder`` implementation for serializing Message instances into JSON."""
+
+ @classmethod
+ def remove_null_values(cls, dict_value):
+ return {k: v for k, v in dict_value.items() if v not in [None, [], {}]}
+
+ @classmethod
+ def encode_android(cls, android):
+ """Encodes an ``AndroidConfig`` instance into JSON."""
+ if android is None:
+ return None
+ if not isinstance(android, _messaging_utils.AndroidConfig):
+ raise ValueError('Message.android must be an instance of AndroidConfig class.')
+ result = {
+ 'collapse_key': _Validators.check_string(
+ 'AndroidConfig.collapse_key', android.collapse_key),
+ 'data': _Validators.check_string_dict(
+ 'AndroidConfig.data', android.data),
+ 'notification': cls.encode_android_notification(android.notification),
+ 'priority': _Validators.check_string(
+ 'AndroidConfig.priority', android.priority, non_empty=True),
+ 'restricted_package_name': _Validators.check_string(
+ 'AndroidConfig.restricted_package_name', android.restricted_package_name),
+ 'ttl': cls.encode_ttl(android.ttl),
+ 'fcm_options': cls.encode_android_fcm_options(android.fcm_options),
+ }
+ result = cls.remove_null_values(result)
+ priority = result.get('priority')
+ if priority and priority not in ('high', 'normal'):
+ raise ValueError('AndroidConfig.priority must be "high" or "normal".')
+ return result
+
+ @classmethod
+ def encode_android_fcm_options(cls, fcm_options):
+ """Encodes an ``AndroidFCMOptions`` instance into JSON."""
+ if fcm_options is None:
+ return None
+ if not isinstance(fcm_options, _messaging_utils.AndroidFCMOptions):
+ raise ValueError('AndroidConfig.fcm_options must be an instance of '
+ 'AndroidFCMOptions class.')
+ result = {
+ 'analytics_label': _Validators.check_analytics_label(
+ 'AndroidFCMOptions.analytics_label', fcm_options.analytics_label),
+ }
+ result = cls.remove_null_values(result)
+ return result
+
+ @classmethod
+ def encode_ttl(cls, ttl):
+ """Encodes an ``AndroidConfig`` ``TTL`` duration into a string."""
+ if ttl is None:
+ return None
+ if isinstance(ttl, numbers.Number):
+ ttl = datetime.timedelta(seconds=ttl)
+ if not isinstance(ttl, datetime.timedelta):
+ raise ValueError('AndroidConfig.ttl must be a duration in seconds or an instance of '
+ 'datetime.timedelta.')
+ total_seconds = ttl.total_seconds()
+ if total_seconds < 0:
+ raise ValueError('AndroidConfig.ttl must not be negative.')
+ seconds = int(math.floor(total_seconds))
+ nanos = int((total_seconds - seconds) * 1e9)
+ if nanos:
+ return '{0}.{1}s'.format(seconds, str(nanos).zfill(9))
+ return '{0}s'.format(seconds)
+
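+ # Illustrative examples of the TTL encoding above: whole seconds are
+ # rendered as '<n>s', fractional seconds with nine-digit nanos:
+ #   encode_ttl(123)                         -> '123s'
+ #   encode_ttl(datetime.timedelta(hours=1)) -> '3600s'
+ #   encode_ttl(3.5)                         -> '3.500000000s'
+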
+ @classmethod
+ def encode_milliseconds(cls, label, msec):
+ """Encodes a duration in milliseconds into a string."""
+ if msec is None:
+ return None
+ if isinstance(msec, numbers.Number):
+ msec = datetime.timedelta(milliseconds=msec)
+ if not isinstance(msec, datetime.timedelta):
+ raise ValueError('{0} must be a duration in milliseconds or an instance of '
+ 'datetime.timedelta.'.format(label))
+ total_seconds = msec.total_seconds()
+ if total_seconds < 0:
+ raise ValueError('{0} must not be negative.'.format(label))
+ seconds = int(math.floor(total_seconds))
+ nanos = int((total_seconds - seconds) * 1e9)
+ if nanos:
+ return '{0}.{1}s'.format(seconds, str(nanos).zfill(9))
+ return '{0}s'.format(seconds)
+
+ @classmethod
+ def encode_android_notification(cls, notification):
+ """Encodes an ``AndroidNotification`` instance into JSON."""
+ if notification is None:
+ return None
+ if not isinstance(notification, _messaging_utils.AndroidNotification):
+ raise ValueError('AndroidConfig.notification must be an instance of '
+ 'AndroidNotification class.')
+ result = {
+ 'body': _Validators.check_string(
+ 'AndroidNotification.body', notification.body),
+ 'body_loc_args': _Validators.check_string_list(
+ 'AndroidNotification.body_loc_args', notification.body_loc_args),
+ 'body_loc_key': _Validators.check_string(
+ 'AndroidNotification.body_loc_key', notification.body_loc_key),
+ 'click_action': _Validators.check_string(
+ 'AndroidNotification.click_action', notification.click_action),
+ 'color': _Validators.check_string(
+ 'AndroidNotification.color', notification.color, non_empty=True),
+ 'icon': _Validators.check_string(
+ 'AndroidNotification.icon', notification.icon),
+ 'sound': _Validators.check_string(
+ 'AndroidNotification.sound', notification.sound),
+ 'tag': _Validators.check_string(
+ 'AndroidNotification.tag', notification.tag),
+ 'title': _Validators.check_string(
+ 'AndroidNotification.title', notification.title),
+ 'title_loc_args': _Validators.check_string_list(
+ 'AndroidNotification.title_loc_args', notification.title_loc_args),
+ 'title_loc_key': _Validators.check_string(
+ 'AndroidNotification.title_loc_key', notification.title_loc_key),
+ 'channel_id': _Validators.check_string(
+ 'AndroidNotification.channel_id', notification.channel_id),
+ 'image': _Validators.check_string(
+ 'AndroidNotification.image', notification.image),
+ 'ticker': _Validators.check_string(
+ 'AndroidNotification.ticker', notification.ticker),
+ 'sticky': notification.sticky,
+ 'event_time': _Validators.check_datetime(
+ 'AndroidNotification.event_timestamp', notification.event_timestamp),
+ 'local_only': notification.local_only,
+ 'notification_priority': _Validators.check_string(
+ 'AndroidNotification.priority', notification.priority, non_empty=True),
+ 'vibrate_timings': _Validators.check_number_list(
+ 'AndroidNotification.vibrate_timings_millis', notification.vibrate_timings_millis),
+ 'default_vibrate_timings': notification.default_vibrate_timings,
+ 'default_sound': notification.default_sound,
+ 'default_light_settings': notification.default_light_settings,
+ 'light_settings': cls.encode_light_settings(notification.light_settings),
+ 'visibility': _Validators.check_string(
+ 'AndroidNotification.visibility', notification.visibility, non_empty=True),
+ 'notification_count': _Validators.check_number(
+ 'AndroidNotification.notification_count', notification.notification_count)
+ }
+ result = cls.remove_null_values(result)
+ color = result.get('color')
+ if color and not re.match(r'^#[0-9a-fA-F]{6}$', color):
+ raise ValueError(
+ 'AndroidNotification.color must be in the form #RRGGBB.')
+ if result.get('body_loc_args') and not result.get('body_loc_key'):
+ raise ValueError(
+ 'AndroidNotification.body_loc_key is required when specifying body_loc_args.')
+ if result.get('title_loc_args') and not result.get('title_loc_key'):
+ raise ValueError(
+ 'AndroidNotification.title_loc_key is required when specifying title_loc_args.')
+
+ event_time = result.get('event_time')
+ if event_time:
+ # if the datetime instance is not naive (tzinfo is present), convert to UTC
+ # otherwise (tzinfo is None) assume the datetime instance is already in UTC
+ if event_time.tzinfo is not None:
+ event_time = event_time.astimezone(datetime.timezone.utc)
+ result['event_time'] = event_time.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
+
+ priority = result.get('notification_priority')
+ if priority:
+ if priority not in ('min', 'low', 'default', 'high', 'max'):
+ raise ValueError('AndroidNotification.priority must be "default", "min", "low", '
+ '"high" or "max".')
+ result['notification_priority'] = 'PRIORITY_' + priority.upper()
+
+ visibility = result.get('visibility')
+ if visibility:
+ if visibility not in ('private', 'public', 'secret'):
+ raise ValueError(
+ 'AndroidNotification.visibility must be "private", "public" or "secret".')
+ result['visibility'] = visibility.upper()
+
+ vibrate_timings_millis = result.get('vibrate_timings')
+ if vibrate_timings_millis:
+ vibrate_timing_strings = []
+ for msec in vibrate_timings_millis:
+ formatted_string = cls.encode_milliseconds(
+ 'AndroidNotification.vibrate_timings_millis', msec)
+ vibrate_timing_strings.append(formatted_string)
+ result['vibrate_timings'] = vibrate_timing_strings
+ return result
+
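+ # Illustrative note: after validation, enum-like fields are rewritten to the
+ # wire format, e.g. priority 'high' -> 'PRIORITY_HIGH', visibility 'public'
+ # -> 'PUBLIC', and a vibrate timing of 500 (ms) -> '0.500000000s'.
+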
+ @classmethod
+ def encode_light_settings(cls, light_settings):
+ """Encodes a ``LightSettings`` instance into JSON."""
+ if light_settings is None:
+ return None
+ if not isinstance(light_settings, _messaging_utils.LightSettings):
+ raise ValueError(
+ 'AndroidNotification.light_settings must be an instance of LightSettings class.')
+ result = {
+ 'color': _Validators.check_string(
+ 'LightSettings.color', light_settings.color, non_empty=True),
+ 'light_on_duration': cls.encode_milliseconds(
+ 'LightSettings.light_on_duration_millis', light_settings.light_on_duration_millis),
+ 'light_off_duration': cls.encode_milliseconds(
+ 'LightSettings.light_off_duration_millis',
+ light_settings.light_off_duration_millis),
+ }
+ result = cls.remove_null_values(result)
+ light_on_duration = result.get('light_on_duration')
+ if not light_on_duration:
+ raise ValueError(
+ 'LightSettings.light_on_duration_millis is required.')
+
+ light_off_duration = result.get('light_off_duration')
+ if not light_off_duration:
+ raise ValueError(
+ 'LightSettings.light_off_duration_millis is required.')
+
+ color = result.get('color')
+ if not color:
+ raise ValueError('LightSettings.color is required.')
+ if not re.match(r'^#[0-9a-fA-F]{6}$', color) and not re.match(r'^#[0-9a-fA-F]{8}$', color):
+ raise ValueError(
+ 'LightSettings.color must be in the form #RRGGBB or #RRGGBBAA.')
+ if len(color) == 7:
+ color = (color+'FF')
+ rgba = [int(color[i:i + 2], 16) / 255.0 for i in (1, 3, 5, 7)]
+ result['color'] = {'red': rgba[0], 'green': rgba[1],
+ 'blue': rgba[2], 'alpha': rgba[3]}
+ return result
+
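+ # Illustrative example: a color of '#80FF00' is padded to '#80FF00FF' and
+ # encoded as {'red': 0.502, 'green': 1.0, 'blue': 0.0, 'alpha': 1.0}
+ # (each hex pair divided by 255, shown here rounded).
+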
+ @classmethod
+ def encode_webpush(cls, webpush):
+ """Encodes a ``WebpushConfig`` instance into JSON."""
+ if webpush is None:
+ return None
+ if not isinstance(webpush, _messaging_utils.WebpushConfig):
+ raise ValueError('Message.webpush must be an instance of WebpushConfig class.')
+ result = {
+ 'data': _Validators.check_string_dict(
+ 'WebpushConfig.data', webpush.data),
+ 'headers': _Validators.check_string_dict(
+ 'WebpushConfig.headers', webpush.headers),
+ 'notification': cls.encode_webpush_notification(webpush.notification),
+ 'fcm_options': cls.encode_webpush_fcm_options(webpush.fcm_options),
+ }
+ return cls.remove_null_values(result)
+
+ @classmethod
+ def encode_webpush_notification(cls, notification):
+ """Encodes a ``WebpushNotification`` instance into JSON."""
+ if notification is None:
+ return None
+ if not isinstance(notification, _messaging_utils.WebpushNotification):
+ raise ValueError('WebpushConfig.notification must be an instance of '
+ 'WebpushNotification class.')
+ result = {
+ 'actions': cls.encode_webpush_notification_actions(notification.actions),
+ 'badge': _Validators.check_string(
+ 'WebpushNotification.badge', notification.badge),
+ 'body': _Validators.check_string(
+ 'WebpushNotification.body', notification.body),
+ 'data': notification.data,
+ 'dir': _Validators.check_string(
+ 'WebpushNotification.direction', notification.direction),
+ 'icon': _Validators.check_string(
+ 'WebpushNotification.icon', notification.icon),
+ 'image': _Validators.check_string(
+ 'WebpushNotification.image', notification.image),
+ 'lang': _Validators.check_string(
+ 'WebpushNotification.language', notification.language),
+ 'renotify': notification.renotify,
+ 'requireInteraction': notification.require_interaction,
+ 'silent': notification.silent,
+ 'tag': _Validators.check_string(
+ 'WebpushNotification.tag', notification.tag),
+ 'timestamp': _Validators.check_number(
+ 'WebpushNotification.timestamp_millis', notification.timestamp_millis),
+ 'title': _Validators.check_string(
+ 'WebpushNotification.title', notification.title),
+ 'vibrate': notification.vibrate,
+ }
+ direction = result.get('dir')
+ if direction and direction not in ('auto', 'ltr', 'rtl'):
+ raise ValueError('WebpushNotification.direction must be "auto", "ltr" or "rtl".')
+ if notification.custom_data is not None:
+ if not isinstance(notification.custom_data, dict):
+ raise ValueError('WebpushNotification.custom_data must be a dict.')
+ for key, value in notification.custom_data.items():
+ if key in result:
+ raise ValueError(
+ 'Multiple specifications for {0} in WebpushNotification.'.format(key))
+ result[key] = value
+ return cls.remove_null_values(result)
+
+ @classmethod
+ def encode_webpush_notification_actions(cls, actions):
+ """Encodes a list of ``WebpushNotificationActions`` into JSON."""
+ if actions is None:
+ return None
+ if not isinstance(actions, list):
+ raise ValueError('WebpushConfig.notification.actions must be a list of '
+ 'WebpushNotificationAction instances.')
+ results = []
+ for action in actions:
+ if not isinstance(action, _messaging_utils.WebpushNotificationAction):
+ raise ValueError('WebpushConfig.notification.actions must be a list of '
+ 'WebpushNotificationAction instances.')
+ result = {
+ 'action': _Validators.check_string(
+ 'WebpushNotificationAction.action', action.action),
+ 'title': _Validators.check_string(
+ 'WebpushNotificationAction.title', action.title),
+ 'icon': _Validators.check_string(
+ 'WebpushNotificationAction.icon', action.icon),
+ }
+ results.append(cls.remove_null_values(result))
+ return results
+
+ @classmethod
+ def encode_webpush_fcm_options(cls, options):
+ """Encodes a ``WebpushFCMOptions`` instance into JSON."""
+ if options is None:
+ return None
+ result = {
+ 'link': _Validators.check_string('WebpushConfig.fcm_options.link', options.link),
+ }
+ result = cls.remove_null_values(result)
+ link = result.get('link')
+ if link is not None and not link.startswith('https://'):
+ raise ValueError('WebpushFCMOptions.link must be an HTTPS URL.')
+ return result
+
+ @classmethod
+ def encode_apns(cls, apns):
+ """Encodes an ``APNSConfig`` instance into JSON."""
+ if apns is None:
+ return None
+ if not isinstance(apns, _messaging_utils.APNSConfig):
+ raise ValueError('Message.apns must be an instance of APNSConfig class.')
+ result = {
+ 'headers': _Validators.check_string_dict(
+ 'APNSConfig.headers', apns.headers),
+ 'payload': cls.encode_apns_payload(apns.payload),
+ 'fcm_options': cls.encode_apns_fcm_options(apns.fcm_options),
+ }
+ return cls.remove_null_values(result)
+
+ @classmethod
+ def encode_apns_payload(cls, payload):
+ """Encodes an ``APNSPayload`` instance into JSON."""
+ if payload is None:
+ return None
+ if not isinstance(payload, _messaging_utils.APNSPayload):
+ raise ValueError('APNSConfig.payload must be an instance of APNSPayload class.')
+ result = {
+ 'aps': cls.encode_aps(payload.aps)
+ }
+ for key, value in payload.custom_data.items():
+ result[key] = value
+ return cls.remove_null_values(result)
+
+ @classmethod
+ def encode_apns_fcm_options(cls, fcm_options):
+ """Encodes an ``APNSFCMOptions`` instance into JSON."""
+ if fcm_options is None:
+ return None
+ if not isinstance(fcm_options, _messaging_utils.APNSFCMOptions):
+ raise ValueError('APNSConfig.fcm_options must be an instance of APNSFCMOptions class.')
+ result = {
+ 'analytics_label': _Validators.check_analytics_label(
+ 'APNSFCMOptions.analytics_label', fcm_options.analytics_label),
+ 'image': _Validators.check_string('APNSFCMOptions.image', fcm_options.image)
+ }
+ result = cls.remove_null_values(result)
+ return result
+
+ @classmethod
+ def encode_aps(cls, aps):
+ """Encodes an ``Aps`` instance into JSON."""
+ if not isinstance(aps, _messaging_utils.Aps):
+ raise ValueError('APNSPayload.aps must be an instance of Aps class.')
+ result = {
+ 'alert': cls.encode_aps_alert(aps.alert),
+ 'badge': _Validators.check_number('Aps.badge', aps.badge),
+ 'sound': cls.encode_aps_sound(aps.sound),
+ 'category': _Validators.check_string('Aps.category', aps.category),
+ 'thread-id': _Validators.check_string('Aps.thread_id', aps.thread_id),
+ }
+ if aps.content_available is True:
+ result['content-available'] = 1
+ if aps.mutable_content is True:
+ result['mutable-content'] = 1
+ if aps.custom_data is not None:
+ if not isinstance(aps.custom_data, dict):
+ raise ValueError('Aps.custom_data must be a dict.')
+ for key, val in aps.custom_data.items():
+ _Validators.check_string('Aps.custom_data key', key)
+ if key in result:
+ raise ValueError('Multiple specifications for {0} in Aps.'.format(key))
+ result[key] = val
+ return cls.remove_null_values(result)
+
+ @classmethod
+ def encode_aps_sound(cls, sound):
+ """Encodes an APNs sound configuration into JSON."""
+ if sound is None:
+ return None
+ if sound and isinstance(sound, str):
+ return sound
+ if not isinstance(sound, _messaging_utils.CriticalSound):
+ raise ValueError(
+ 'Aps.sound must be a non-empty string or an instance of CriticalSound class.')
+ result = {
+ 'name': _Validators.check_string('CriticalSound.name', sound.name, non_empty=True),
+ 'volume': _Validators.check_number('CriticalSound.volume', sound.volume),
+ }
+ if sound.critical:
+ result['critical'] = 1
+ if not result['name']:
+ raise ValueError('CriticalSound.name must be a non-empty string.')
+ volume = result['volume']
+ if volume is not None and (volume < 0 or volume > 1):
+ raise ValueError('CriticalSound.volume must be in the interval [0,1].')
+ return cls.remove_null_values(result)
+
+ @classmethod
+ def encode_aps_alert(cls, alert):
+ """Encodes an ``ApsAlert`` instance into JSON."""
+ if alert is None:
+ return None
+ if isinstance(alert, str):
+ return alert
+ if not isinstance(alert, _messaging_utils.ApsAlert):
+ raise ValueError('Aps.alert must be a string or an instance of ApsAlert class.')
+ result = {
+ 'title': _Validators.check_string('ApsAlert.title', alert.title),
+ 'subtitle': _Validators.check_string('ApsAlert.subtitle', alert.subtitle),
+ 'body': _Validators.check_string('ApsAlert.body', alert.body),
+ 'title-loc-key': _Validators.check_string(
+ 'ApsAlert.title_loc_key', alert.title_loc_key),
+ 'title-loc-args': _Validators.check_string_list(
+ 'ApsAlert.title_loc_args', alert.title_loc_args),
+ 'loc-key': _Validators.check_string(
+ 'ApsAlert.loc_key', alert.loc_key),
+ 'loc-args': _Validators.check_string_list(
+ 'ApsAlert.loc_args', alert.loc_args),
+ 'action-loc-key': _Validators.check_string(
+ 'ApsAlert.action_loc_key', alert.action_loc_key),
+ 'launch-image': _Validators.check_string(
+ 'ApsAlert.launch_image', alert.launch_image),
+ }
+ if result.get('loc-args') and not result.get('loc-key'):
+ raise ValueError(
+ 'ApsAlert.loc_key is required when specifying loc_args.')
+ if result.get('title-loc-args') and not result.get('title-loc-key'):
+ raise ValueError(
+ 'ApsAlert.title_loc_key is required when specifying title_loc_args.')
+ if alert.custom_data is not None:
+ if not isinstance(alert.custom_data, dict):
+ raise ValueError('ApsAlert.custom_data must be a dict.')
+ for key, val in alert.custom_data.items():
+ _Validators.check_string('ApsAlert.custom_data key', key)
+ # allow specifying key override because Apple could update API so that key
+ # could have unexpected value type
+ result[key] = val
+ return cls.remove_null_values(result)
+
+ @classmethod
+ def encode_notification(cls, notification):
+ """Encodes a ``Notification`` instance into JSON."""
+ if notification is None:
+ return None
+ if not isinstance(notification, _messaging_utils.Notification):
+ raise ValueError('Message.notification must be an instance of Notification class.')
+ result = {
+ 'body': _Validators.check_string('Notification.body', notification.body),
+ 'title': _Validators.check_string('Notification.title', notification.title),
+ 'image': _Validators.check_string('Notification.image', notification.image)
+ }
+ return cls.remove_null_values(result)
+
+ @classmethod
+ def sanitize_topic_name(cls, topic):
+ """Removes the /topics/ prefix from the topic name, if present."""
+ if not topic:
+ return None
+ prefix = '/topics/'
+ if topic.startswith(prefix):
+ topic = topic[len(prefix):]
+ # Checks for illegal characters and empty string.
+ if not re.match(r'^[a-zA-Z0-9-_\.~%]+$', topic):
+ raise ValueError('Malformed topic name.')
+ return topic
+
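+ # Illustrative examples:
+ #   sanitize_topic_name('/topics/news') -> 'news'
+ #   sanitize_topic_name('news')         -> 'news'
+ #   sanitize_topic_name('bad topic')    -> ValueError (space is illegal)
+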
+ def default(self, o): # pylint: disable=method-hidden
+ if not isinstance(o, Message):
+ return json.JSONEncoder.default(self, o)
+ result = {
+ 'android': MessageEncoder.encode_android(o.android),
+ 'apns': MessageEncoder.encode_apns(o.apns),
+ 'condition': _Validators.check_string(
+ 'Message.condition', o.condition, non_empty=True),
+ 'data': _Validators.check_string_dict('Message.data', o.data),
+ 'notification': MessageEncoder.encode_notification(o.notification),
+ 'token': _Validators.check_string('Message.token', o.token, non_empty=True),
+ 'topic': _Validators.check_string('Message.topic', o.topic, non_empty=True),
+ 'webpush': MessageEncoder.encode_webpush(o.webpush),
+ 'fcm_options': MessageEncoder.encode_fcm_options(o.fcm_options),
+ }
+ result['topic'] = MessageEncoder.sanitize_topic_name(result.get('topic'))
+ result = MessageEncoder.remove_null_values(result)
+ target_count = sum([t in result for t in ['token', 'topic', 'condition']])
+ if target_count != 1:
+ raise ValueError('Exactly one of token, topic or condition must be specified.')
+ return result
+
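+ # Illustrative note: a Message must name exactly one target, e.g.
+ # Message(token='...'), Message(topic='news') or
+ # Message(condition="'a' in topics && 'b' in topics"); supplying none, or
+ # more than one, fails the target_count check above.
+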
+ @classmethod
+ def encode_fcm_options(cls, fcm_options):
+ """Encodes an ``FCMOptions`` instance into JSON."""
+ if fcm_options is None:
+ return None
+ if not isinstance(fcm_options, _messaging_utils.FCMOptions):
+ raise ValueError('Message.fcm_options must be an instance of FCMOptions class.')
+ result = {
+ 'analytics_label': _Validators.check_analytics_label(
+ 'FCMOptions.analytics_label', fcm_options.analytics_label),
+ }
+ result = cls.remove_null_values(result)
+ return result
diff --git a/venv/Lib/site-packages/firebase_admin/_messaging_utils.py b/venv/Lib/site-packages/firebase_admin/_messaging_utils.py
new file mode 100644
index 000000000..d25ba5520
--- /dev/null
+++ b/venv/Lib/site-packages/firebase_admin/_messaging_utils.py
@@ -0,0 +1,494 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Types and utilities used by the messaging (FCM) module."""
+
+from firebase_admin import exceptions
+
+
+class Notification:
+ """A notification that can be included in a message.
+
+ Args:
+ title: Title of the notification (optional).
+ body: Body of the notification (optional).
+ image: Image URL of the notification (optional).
+ """
+
+ def __init__(self, title=None, body=None, image=None):
+ self.title = title
+ self.body = body
+ self.image = image
+
+
+class AndroidConfig:
+ """Android-specific options that can be included in a message.
+
+ Args:
+ collapse_key: Collapse key string for the message (optional). This is an identifier for a
+ group of messages that can be collapsed, so that only the last message is sent when
+ delivery can be resumed. A maximum of 4 different collapse keys may be active at a
+ given time.
+ priority: Priority of the message (optional). Must be one of ``high`` or ``normal``.
+ ttl: The time-to-live duration of the message (optional). This can be specified
+ as a numeric seconds value or a ``datetime.timedelta`` instance.
+ restricted_package_name: The package name of the application where the registration tokens
+ must match in order to receive the message (optional).
+ data: A dictionary of data fields (optional). All keys and values in the dictionary must be
+ strings. When specified, overrides any data fields set via ``Message.data``.
+ notification: A ``messaging.AndroidNotification`` to be included in the message (optional).
+ fcm_options: A ``messaging.AndroidFCMOptions`` to be included in the message (optional).
+ """
+
+ def __init__(self, collapse_key=None, priority=None, ttl=None, restricted_package_name=None,
+ data=None, notification=None, fcm_options=None):
+ self.collapse_key = collapse_key
+ self.priority = priority
+ self.ttl = ttl
+ self.restricted_package_name = restricted_package_name
+ self.data = data
+ self.notification = notification
+ self.fcm_options = fcm_options
+
+
+class AndroidNotification:
+ """Android-specific notification parameters.
+
+ Args:
+ title: Title of the notification (optional). If specified, overrides the title set via
+ ``messaging.Notification``.
+ body: Body of the notification (optional). If specified, overrides the body set via
+ ``messaging.Notification``.
+ icon: Icon of the notification (optional).
+ color: Color of the notification icon expressed in ``#rrggbb`` form (optional).
+ sound: Sound to be played when the device receives the notification (optional). This is
+ usually the file name of the sound resource.
+ tag: Tag of the notification (optional). This is an identifier used to replace existing
+ notifications in the notification drawer. If not specified, each request creates a new
+ notification.
+ click_action: The action associated with a user click on the notification (optional). If
+ specified, an activity with a matching intent filter is launched when a user clicks on
+ the notification.
+ body_loc_key: Key of the body string in the app's string resources to use to localize the
+ body text (optional).
+ body_loc_args: A list of resource keys that will be used in place of the format specifiers
+ in ``body_loc_key`` (optional).
+ title_loc_key: Key of the title string in the app's string resources to use to localize the
+ title text (optional).
+ title_loc_args: A list of resource keys that will be used in place of the format specifiers
+ in ``title_loc_key`` (optional).
+ channel_id: The notification's channel ID (optional).
+ image: Image URL of the notification (optional).
+ ticker: Sets the ``ticker`` text, which is sent to accessibility services. Prior to API
+ level 21 (Lollipop), sets the text that is displayed in the status bar when the
+ notification first arrives (optional).
+ sticky: When set to ``False`` or unset, the notification is automatically dismissed when the
+ user clicks it in the panel. When set to ``True``, the notification persists even when
+ the user clicks it (optional).
+ event_timestamp: For notifications that inform users about events with an absolute time
+ reference, sets the time that the event in the notification occurred as a
+ ``datetime.datetime`` instance. If the ``datetime.datetime`` instance is naive, it
+ defaults to be in the UTC timezone. Notifications in the panel are sorted by this time
+ (optional).
+ local_only: Sets whether or not this notification is relevant only to the current device.
+ Some notifications can be bridged to other devices for remote display, such as a Wear OS
+ watch. This hint can be set to recommend this notification not be bridged (optional).
+ See Wear OS guides:
+ https://developer.android.com/training/wearables/notifications/bridger#existing-method-of-preventing-bridging
+ priority: Sets the relative priority for this notification. Low-priority notifications may
+ be hidden from the user in certain situations. Note this priority differs from
+ ``AndroidMessagePriority``. This priority is processed by the client after the message
+ has been delivered. Whereas ``AndroidMessagePriority`` is an FCM concept that controls
+ when the message is delivered (optional). Must be one of ``default``, ``min``, ``low``,
+ ``high`` or ``max``.
+ vibrate_timings_millis: Sets the vibration pattern to use. Pass in an array of milliseconds
+ to turn the vibrator on or off. The first value indicates the duration to wait before
+ turning the vibrator on. The next value indicates the duration to keep the vibrator on.
+ Subsequent values alternate between duration to turn the vibrator off and to turn the
+ vibrator on. If ``vibrate_timings`` is set and ``default_vibrate_timings`` is set to
+ ``True``, the default value is used instead of the user-specified ``vibrate_timings``.
+ default_vibrate_timings: If set to ``True``, use the Android framework's default vibrate
+ pattern for the notification (optional). Default values are specified in ``config.xml``
+ https://android.googlesource.com/platform/frameworks/base/+/master/core/res/res/values/config.xml.
+ If ``default_vibrate_timings`` is set to ``True`` and ``vibrate_timings`` is also set,
+ the default value is used instead of the user-specified ``vibrate_timings``.
+ default_sound: If set to ``True``, use the Android framework's default sound for the
+ notification (optional). Default values are specified in ``config.xml``
+ https://android.googlesource.com/platform/frameworks/base/+/master/core/res/res/values/config.xml
+ light_settings: Settings to control the notification's LED blinking rate and color if LED is
+ available on the device. The total blinking time is controlled by the OS (optional).
+ default_light_settings: If set to ``True``, use the Android framework's default LED light
+ settings for the notification. Default values are specified in ``config.xml``
+ https://android.googlesource.com/platform/frameworks/base/+/master/core/res/res/values/config.xml.
+ If ``default_light_settings`` is set to ``True`` and ``light_settings`` is also set, the
+ user-specified ``light_settings`` is used instead of the default value.
+ visibility: Sets the visibility of the notification. Must be either ``private``, ``public``,
+ or ``secret``. If unspecified, default to ``private``.
+ notification_count: Sets the number of items this notification represents. May be displayed
+ as a badge count for Launchers that support badging. See ``NotificationBadge``
+ https://developer.android.com/training/notify-user/badges. For example, this might be
+ useful if you're using just one notification to represent multiple new messages but you
+ want the count here to represent the number of total new messages. If zero or
+ unspecified, systems that support badging use the default, which is to increment a
+ number displayed on the long-press menu each time a new notification arrives.
+ """
+
+ def __init__(self, title=None, body=None, icon=None, color=None, sound=None, tag=None,
+ click_action=None, body_loc_key=None, body_loc_args=None, title_loc_key=None,
+ title_loc_args=None, channel_id=None, image=None, ticker=None, sticky=None,
+ event_timestamp=None, local_only=None, priority=None, vibrate_timings_millis=None,
+ default_vibrate_timings=None, default_sound=None, light_settings=None,
+ default_light_settings=None, visibility=None, notification_count=None):
+ self.title = title
+ self.body = body
+ self.icon = icon
+ self.color = color
+ self.sound = sound
+ self.tag = tag
+ self.click_action = click_action
+ self.body_loc_key = body_loc_key
+ self.body_loc_args = body_loc_args
+ self.title_loc_key = title_loc_key
+ self.title_loc_args = title_loc_args
+ self.channel_id = channel_id
+ self.image = image
+ self.ticker = ticker
+ self.sticky = sticky
+ self.event_timestamp = event_timestamp
+ self.local_only = local_only
+ self.priority = priority
+ self.vibrate_timings_millis = vibrate_timings_millis
+ self.default_vibrate_timings = default_vibrate_timings
+ self.default_sound = default_sound
+ self.light_settings = light_settings
+ self.default_light_settings = default_light_settings
+ self.visibility = visibility
+ self.notification_count = notification_count
+
+
+class LightSettings:
+ """Represents settings to control notification LED that can be included in a
+ ``messaging.AndroidNotification``.
+
+ Args:
+ color: Sets the color of the LED in ``#rrggbb`` or ``#rrggbbaa`` format.
+ light_on_duration_millis: Along with ``light_off_duration``, defines the blink rate of LED
+ flashes.
+ light_off_duration_millis: Along with ``light_on_duration``, defines the blink rate of LED
+ flashes.
+ """
+ def __init__(self, color, light_on_duration_millis,
+ light_off_duration_millis):
+ self.color = color
+ self.light_on_duration_millis = light_on_duration_millis
+ self.light_off_duration_millis = light_off_duration_millis
+
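+# Illustrative usage sketch (hypothetical values, not part of the SDK):
+#   config = AndroidConfig(
+#       priority='high', ttl=3600,
+#       notification=AndroidNotification(
+#           title='Alert', body='Vehicle moved', color='#FF0000',
+#           light_settings=LightSettings('#00FF00', 300, 1000)))
+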
+
+class AndroidFCMOptions:
+ """Options for features provided by the FCM SDK for Android.
+
+ Args:
+ analytics_label: A label to associate with the message's analytics data
+ (optional).
+ """
+
+ def __init__(self, analytics_label=None):
+ self.analytics_label = analytics_label
+
+
+class WebpushConfig:
+ """Webpush-specific options that can be included in a message.
+
+ Args:
+ headers: A dictionary of headers (optional). Refer `Webpush Specification`_ for supported
+ headers.
+ data: A dictionary of data fields (optional). All keys and values in the dictionary must be
+ strings. When specified, overrides any data fields set via ``Message.data``.
+ notification: A ``messaging.WebpushNotification`` to be included in the message (optional).
+ fcm_options: A ``messaging.WebpushFCMOptions`` instance to be included in the message
+ (optional).
+
+ .. _Webpush Specification: https://tools.ietf.org/html/rfc8030#section-5
+ """
+
+ def __init__(self, headers=None, data=None, notification=None, fcm_options=None):
+ self.headers = headers
+ self.data = data
+ self.notification = notification
+ self.fcm_options = fcm_options
+
+
+class WebpushNotificationAction:
+ """An action available to the users when the notification is presented.
+
+ Args:
+ action: Action string.
+ title: Title string.
+ icon: Icon URL for the action (optional).
+ """
+
+ def __init__(self, action, title, icon=None):
+ self.action = action
+ self.title = title
+ self.icon = icon
+
+
+class WebpushNotification:
+ """Webpush-specific notification parameters.
+
+ Refer to the `Notification Reference`_ for more information.
+
+ Args:
+ title: Title of the notification (optional). If specified, overrides the title set via
+ ``messaging.Notification``.
+ body: Body of the notification (optional). If specified, overrides the body set via
+ ``messaging.Notification``.
+ icon: Icon URL of the notification (optional).
+ actions: A list of ``messaging.WebpushNotificationAction`` instances (optional).
+ badge: URL of the image used to represent the notification when there is
+ not enough space to display the notification itself (optional).
+ data: Any arbitrary JSON data that should be associated with the notification (optional).
+ direction: The direction in which to display the notification (optional). Must be either
+ 'auto', 'ltr' or 'rtl'.
+ image: The URL of an image to be displayed in the notification (optional).
+ language: Notification language (optional).
+ renotify: A boolean indicating whether the user should be notified after a new
+ notification replaces an old one (optional).
+ require_interaction: A boolean indicating whether a notification should remain active
+ until the user clicks or dismisses it, rather than closing automatically (optional).
+ silent: ``True`` to indicate that the notification should be silent (optional).
+ tag: An identifying tag on the notification (optional).
+ timestamp_millis: A timestamp value in milliseconds on the notification (optional).
+ vibrate: A vibration pattern for the device's vibration hardware to emit when the
+ notification fires (optional). The pattern is specified as an integer array.
+ custom_data: A dict of custom key-value pairs to be included in the notification
+ (optional)
+
+ .. _Notification Reference: https://developer.mozilla.org/en-US/docs/Web/API\
+ /notification/Notification
+ """
+
+ def __init__(self, title=None, body=None, icon=None, actions=None, badge=None, data=None,
+ direction=None, image=None, language=None, renotify=None,
+ require_interaction=None, silent=None, tag=None, timestamp_millis=None,
+ vibrate=None, custom_data=None):
+ self.title = title
+ self.body = body
+ self.icon = icon
+ self.actions = actions
+ self.badge = badge
+ self.data = data
+ self.direction = direction
+ self.image = image
+ self.language = language
+ self.renotify = renotify
+ self.require_interaction = require_interaction
+ self.silent = silent
+ self.tag = tag
+ self.timestamp_millis = timestamp_millis
+ self.vibrate = vibrate
+ self.custom_data = custom_data
+
+
+class WebpushFCMOptions:
+ """Options for features provided by the FCM SDK for Web.
+
+ Args:
+ link: The link to open when the user clicks on the notification. Must be an HTTPS URL
+ (optional).
+ """
+
+ def __init__(self, link=None):
+ self.link = link
+
+
+class APNSConfig:
+ """APNS-specific options that can be included in a message.
+
+ Refer to `APNS Documentation`_ for more information.
+
+ Args:
+ headers: A dictionary of headers (optional).
+ payload: A ``messaging.APNSPayload`` to be included in the message (optional).
+ fcm_options: A ``messaging.APNSFCMOptions`` instance to be included in the message
+ (optional).
+
+ .. _APNS Documentation: https://developer.apple.com/library/content/documentation\
+ /NetworkingInternet/Conceptual/RemoteNotificationsPG/CommunicatingwithAPNs.html
+ """
+
+ def __init__(self, headers=None, payload=None, fcm_options=None):
+ self.headers = headers
+ self.payload = payload
+ self.fcm_options = fcm_options
+
+
+class APNSPayload:
+ """Payload of an APNS message.
+
+ Args:
+ aps: A ``messaging.Aps`` instance to be included in the payload.
+ kwargs: Arbitrary keyword arguments to be included as custom fields in the payload
+ (optional).
+ """
+
+ def __init__(self, aps, **kwargs):
+ self.aps = aps
+ self.custom_data = kwargs
+
+
+class Aps:
+ """Aps dictionary to be included in an APNS payload.
+
+ Args:
+ alert: A string or a ``messaging.ApsAlert`` instance (optional).
+ badge: A number representing the badge to be displayed with the message (optional).
+ sound: Name of the sound file to be played with the message or a
+ ``messaging.CriticalSound`` instance (optional).
+ content_available: A boolean indicating whether to configure a background update
+ notification (optional).
+ category: String identifier representing the message type (optional).
+ thread_id: An app-specific string identifier for grouping messages (optional).
+ mutable_content: A boolean indicating whether to support mutating notifications at
+ the client using app extensions (optional).
+ custom_data: A dict of custom key-value pairs to be included in the Aps dictionary
+ (optional).
+ """
+
+ def __init__(self, alert=None, badge=None, sound=None, content_available=None, category=None,
+ thread_id=None, mutable_content=None, custom_data=None):
+ self.alert = alert
+ self.badge = badge
+ self.sound = sound
+ self.content_available = content_available
+ self.category = category
+ self.thread_id = thread_id
+ self.mutable_content = mutable_content
+ self.custom_data = custom_data
+
+
+class CriticalSound:
+ """Critical alert sound configuration that can be included in ``messaging.Aps``.
+
+ Args:
+ name: The name of a sound file in your app's main bundle or in the ``Library/Sounds``
+ folder of your app's container directory. Specify the string ``default`` to play the
+ system sound.
+ critical: Set to ``True`` to set the critical alert flag on the sound configuration
+ (optional).
+ volume: The volume for the critical alert's sound. Must be a value between 0.0 (silent)
+ and 1.0 (full volume) (optional).
+ """
+
+ def __init__(self, name, critical=None, volume=None):
+ self.name = name
+ self.critical = critical
+ self.volume = volume
+
+
+class ApsAlert:
+ """An alert that can be included in ``messaging.Aps``.
+
+ Args:
+ title: Title of the alert (optional). If specified, overrides the title set via
+ ``messaging.Notification``.
+ subtitle: Subtitle of the alert (optional).
+ body: Body of the alert (optional). If specified, overrides the body set via
+ ``messaging.Notification``.
+ loc_key: Key of the body string in the app's string resources to use to localize the
+ body text (optional).
+ loc_args: A list of resource keys that will be used in place of the format specifiers
+ in ``loc_key`` (optional).
+ title_loc_key: Key of the title string in the app's string resources to use to localize the
+ title text (optional).
+ title_loc_args: A list of resource keys that will be used in place of the format specifiers
+ in ``title_loc_key`` (optional).
+ action_loc_key: Key of the text in the app's string resources to use to localize the
+ action button text (optional).
+ launch_image: The name of the launch image file to display when the app is
+ opened from the notification (optional).
+ custom_data: A dict of custom key-value pairs to be included in the ApsAlert dictionary
+ (optional)
+ """
+
+ def __init__(self, title=None, subtitle=None, body=None, loc_key=None, loc_args=None,
+ title_loc_key=None, title_loc_args=None, action_loc_key=None, launch_image=None,
+ custom_data=None):
+ self.title = title
+ self.subtitle = subtitle
+ self.body = body
+ self.loc_key = loc_key
+ self.loc_args = loc_args
+ self.title_loc_key = title_loc_key
+ self.title_loc_args = title_loc_args
+ self.action_loc_key = action_loc_key
+ self.launch_image = launch_image
+ self.custom_data = custom_data
+
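+# Illustrative usage sketch (hypothetical values, not part of the SDK):
+#   payload = APNSPayload(
+#       aps=Aps(alert=ApsAlert(title='Hi', body='Hello'), badge=1,
+#               sound=CriticalSound('default', critical=True, volume=0.8)),
+#       customKey='custom value')
+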
+
+class APNSFCMOptions:
+ """Options for features provided by the FCM SDK for iOS.
+
+ Args:
+ analytics_label: A label to associate with the message's analytics data
+ (optional).
+ image: contains the URL of an image that is going to be displayed in a notification
+ (optional).
+ """
+
+ def __init__(self, analytics_label=None, image=None):
+ self.analytics_label = analytics_label
+ self.image = image
+
+
+class FCMOptions:
+ """Options for features provided by SDK.
+
+ Args:
+ analytics_label: contains additional options to use across all platforms (optional).
+ """
+
+ def __init__(self, analytics_label=None):
+ self.analytics_label = analytics_label
+
+
+class ThirdPartyAuthError(exceptions.UnauthenticatedError):
+ """APNs certificate or web push auth key was invalid or missing."""
+
+ def __init__(self, message, cause=None, http_response=None):
+ exceptions.UnauthenticatedError.__init__(self, message, cause, http_response)
+
+
+class QuotaExceededError(exceptions.ResourceExhaustedError):
+ """Sending limit exceeded for the message target."""
+
+ def __init__(self, message, cause=None, http_response=None):
+ exceptions.ResourceExhaustedError.__init__(self, message, cause, http_response)
+
+
+class SenderIdMismatchError(exceptions.PermissionDeniedError):
+ """The authenticated sender ID is different from the sender ID for the registration token."""
+
+ def __init__(self, message, cause=None, http_response=None):
+ exceptions.PermissionDeniedError.__init__(self, message, cause, http_response)
+
+
+class UnregisteredError(exceptions.NotFoundError):
+ """App instance was unregistered from FCM.
+
+ This usually means that the token used is no longer valid and a new one must be used."""
+
+ def __init__(self, message, cause=None, http_response=None):
+ exceptions.NotFoundError.__init__(self, message, cause, http_response)
diff --git a/venv/Lib/site-packages/firebase_admin/_rfc3339.py b/venv/Lib/site-packages/firebase_admin/_rfc3339.py
new file mode 100644
index 000000000..2c720bdd1
--- /dev/null
+++ b/venv/Lib/site-packages/firebase_admin/_rfc3339.py
@@ -0,0 +1,87 @@
+# Copyright 2020 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Parse RFC3339 date strings"""
+
+from datetime import datetime, timezone
+import re
+
+def parse_to_epoch(datestr):
+ """Parse an RFC3339 date string and return the number of seconds since the
+ epoch (as a float).
+
+ In particular, this method is meant to parse the strings returned by the
+ JSON mapping of protobuf google.protobuf.timestamp.Timestamp instances:
+ https://github.com/protocolbuffers/protobuf/blob/4cf5bfee9546101d98754d23ff378ff718ba8438/src/google/protobuf/timestamp.proto#L99
+
+ This method has microsecond precision; nanoseconds will be truncated.
+
+ Args:
+ datestr: A string in RFC3339 format.
+ Returns:
+ Float: The number of seconds since the Unix epoch.
+ Raises:
+ ValueError: Raised if the `datestr` is not a valid RFC3339 date string.
+ """
+ return _parse_to_datetime(datestr).timestamp()
+
+
+def _parse_to_datetime(datestr):
+ """Parse an RFC3339 date string and return a python datetime instance.
+
+ Args:
+ datestr: A string in RFC3339 format.
+ Returns:
+ datetime: The corresponding `datetime` (with timezone information).
+ Raises:
+ ValueError: Raised if the `datestr` is not a valid RFC3339 date string.
+ """
+ # If more than 6 digits appear in the fractional seconds position, truncate
+ # to just the most significant 6. (i.e. we only have microsecond precision;
+ # nanos are truncated.)
+ datestr_modified = re.sub(r'(\.\d{6})\d*', r'\1', datestr)
+
+ # This format is the one we actually expect to occur from our backend. The
+ # others are only present because the spec says we *should* accept them.
+ try:
+ return datetime.strptime(
+ datestr_modified, '%Y-%m-%dT%H:%M:%S.%fZ'
+ ).replace(tzinfo=timezone.utc)
+ except ValueError:
+ pass
+
+ try:
+ return datetime.strptime(
+ datestr_modified, '%Y-%m-%dT%H:%M:%SZ'
+ ).replace(tzinfo=timezone.utc)
+ except ValueError:
+ pass
+
+ # Note: %z parses timezone offsets, but requires the timezone offset *not*
+ # include a separating ':'. As of python 3.7, this was relaxed.
+ # TODO(rsgowman): Once python3.7 becomes our floor, we can drop the regex
+ # replacement.
+ datestr_modified = re.sub(r'(\d\d):(\d\d)$', r'\1\2', datestr_modified)
+
+ try:
+ return datetime.strptime(datestr_modified, '%Y-%m-%dT%H:%M:%S.%f%z')
+ except ValueError:
+ pass
+
+ try:
+ return datetime.strptime(datestr_modified, '%Y-%m-%dT%H:%M:%S%z')
+ except ValueError:
+ pass
+
+ raise ValueError('time data {0} does not match RFC3339 format'.format(datestr))
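+
+# Illustrative examples (hypothetical inputs):
+#   parse_to_epoch('1970-01-01T00:00:00Z')      -> 0.0
+#   parse_to_epoch('1970-01-01T00:00:01.5Z')    -> 1.5
+#   parse_to_epoch('1970-01-01T01:00:00+01:00') -> 0.0
+#   parse_to_epoch('not-a-date')                -> ValueError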
diff --git a/venv/Lib/site-packages/firebase_admin/_sseclient.py b/venv/Lib/site-packages/firebase_admin/_sseclient.py
new file mode 100644
index 000000000..6585dfc80
--- /dev/null
+++ b/venv/Lib/site-packages/firebase_admin/_sseclient.py
@@ -0,0 +1,208 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""SSEClient module to stream realtime updates from the Firebase Database.
+
+Based on a similar implementation from Pyrebase.
+"""
+
+import re
+import time
+import warnings
+
+from google.auth import transport
+import requests
+
+
+# Technically, we should support streams that mix line endings. This regex,
+# however, assumes that a system will provide consistent line endings.
+end_of_field = re.compile(r'\r\n\r\n|\r\r|\n\n')
+
+
+class KeepAuthSession(transport.requests.AuthorizedSession):
+ """A session that does not drop authentication on redirects between domains."""
+
+ def __init__(self, credential):
+ super(KeepAuthSession, self).__init__(credential)
+
+ def rebuild_auth(self, prepared_request, response):
+ pass
+
+
+class _EventBuffer:
+ """A helper class for buffering and parsing raw SSE data."""
+
+ def __init__(self):
+ self._buffer = []
+ self._tail = ''
+
+ def append(self, char):
+ self._buffer.append(char)
+ self._tail += char
+ self._tail = self._tail[-4:]
+
+ def truncate(self):
+ head, sep, _ = self.buffer_string.rpartition('\n')
+ rem = head + sep
+ self._buffer = list(rem)
+ self._tail = rem[-4:]
+
+ @property
+ def is_end_of_field(self):
+ last_two_chars = self._tail[-2:]
+ return last_two_chars == '\n\n' or last_two_chars == '\r\r' or self._tail == '\r\n\r\n'
+
+ @property
+ def buffer_string(self):
+ return ''.join(self._buffer)
+
+
+class SSEClient:
+ """SSE client implementation."""
+
+ def __init__(self, url, session, retry=3000, **kwargs):
+ """Initializes the SSEClient.
+
+ Args:
+ url: The remote url to connect to.
+ session: The requests session.
+ retry: The retry interval in milliseconds (optional).
+ **kwargs: Extra kwargs that will be sent to ``requests.get()`` (optional).
+ """
+ self.url = url
+ self.session = session
+ self.retry = retry
+ self.requests_kwargs = kwargs
+ self.should_connect = True
+ self.last_id = None
+ self.buf = u'' # Keep data here as it streams in
+
+ headers = self.requests_kwargs.get('headers', {})
+ # The SSE spec requires making requests with Cache-Control: no-cache
+ headers['Cache-Control'] = 'no-cache'
+ # The 'Accept' header is not required, but explicit > implicit
+ headers['Accept'] = 'text/event-stream'
+ self.requests_kwargs['headers'] = headers
+ self._connect()
+
+ def close(self):
+ """Closes the SSEClient instance."""
+ self.should_connect = False
+ self.retry = 0
+ self.resp.close()
+
+ def _connect(self):
+ """Connects to the server using requests."""
+ if self.should_connect:
+ if self.last_id:
+ self.requests_kwargs['headers']['Last-Event-ID'] = self.last_id
+ self.resp = self.session.get(self.url, stream=True, **self.requests_kwargs)
+ self.resp_iterator = self.resp.iter_content(decode_unicode=True)
+ self.resp.raise_for_status()
+ else:
+ raise StopIteration()
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ if not re.search(end_of_field, self.buf):
+ temp_buffer = _EventBuffer()
+ while not temp_buffer.is_end_of_field:
+ try:
+ nextchar = next(self.resp_iterator)
+ temp_buffer.append(nextchar)
+ except (StopIteration, requests.RequestException):
+ time.sleep(self.retry / 1000.0)
+ self._connect()
+ # The SSE spec only supports resuming from a whole message, so
+ # if we have half a message we should throw it out.
+ temp_buffer.truncate()
+ continue
+ self.buf = temp_buffer.buffer_string
+
+ split = re.split(end_of_field, self.buf)
+ head = split[0]
+ self.buf = '\n\n'.join(split[1:])
+ event = Event.parse(head)
+
+ if event.data == 'credential is no longer valid':
+ self._connect()
+ return None
+ if event.data == 'null':
+ return None
+
+ # If the server requests a specific retry delay, we need to honor it.
+ if event.retry:
+ self.retry = event.retry
+
+ # last_id should only be set if included in the message. It's not
+ # forgotten if a message omits it.
+ if event.event_id:
+ self.last_id = event.event_id
+ return event
+
+ def next(self):
+ return self.__next__()
+
+
+class Event:
+ """Event represents the events fired by SSE."""
+
+ sse_line_pattern = re.compile('(?P<name>[^:]*):?( ?(?P<value>.*))?')
+
+ def __init__(self, data='', event_type='message', event_id=None, retry=None):
+ self.data = data
+ self.event_type = event_type
+ self.event_id = event_id
+ self.retry = retry
+
+ @classmethod
+ def parse(cls, raw):
+ """Given a possibly-multiline string representing an SSE message, parses it
+ and returns an Event object.
+
+ Args:
+ raw: the raw data to parse.
+
+ Returns:
+ Event: A new ``Event`` with the parameters initialized.
+ """
+ event = cls()
+ for line in raw.split('\n'):
+ match = cls.sse_line_pattern.match(line)
+ if match is None:
+ # Malformed line. Discard but warn.
+ warnings.warn('Invalid SSE line: "%s"' % line, SyntaxWarning)
+ continue
+
+ name = match.groupdict()['name']
+ value = match.groupdict()['value']
+ if name == '':
+ # line began with a ":", so is a comment. Ignore
+ continue
+ if name == 'data':
+ # If we already have some data, then join to it with a newline.
+ # Else this is it.
+ if event.data:
+ event.data = '%s\n%s' % (event.data, value)
+ else:
+ event.data = value
+ elif name == 'event':
+ event.event_type = value
+ elif name == 'id':
+ event.event_id = value
+ elif name == 'retry':
+ event.retry = int(value)
+ return event
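+
+# Illustrative example (hypothetical raw message):
+#   e = Event.parse('id: 42\nevent: put\ndata: {"path": "/", "data": 1}')
+#   e.event_id -> '42', e.event_type -> 'put', e.data -> '{"path": "/", "data": 1}'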
diff --git a/venv/Lib/site-packages/firebase_admin/_token_gen.py b/venv/Lib/site-packages/firebase_admin/_token_gen.py
new file mode 100644
index 000000000..18a8008c7
--- /dev/null
+++ b/venv/Lib/site-packages/firebase_admin/_token_gen.py
@@ -0,0 +1,401 @@
+# Copyright 2018 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Firebase token minting and validation sub module."""
+
+import datetime
+import time
+
+import cachecontrol
+import requests
+from google.auth import credentials
+from google.auth import iam
+from google.auth import jwt
+from google.auth import transport
+import google.auth.exceptions
+import google.oauth2.id_token
+import google.oauth2.service_account
+
+from firebase_admin import exceptions
+from firebase_admin import _auth_utils
+
+
+# ID token constants
+ID_TOKEN_ISSUER_PREFIX = 'https://securetoken.google.com/'
+ID_TOKEN_CERT_URI = ('https://www.googleapis.com/robot/v1/metadata/x509/'
+ 'securetoken@system.gserviceaccount.com')
+
+# Session cookie constants
+COOKIE_ISSUER_PREFIX = 'https://session.firebase.google.com/'
+COOKIE_CERT_URI = 'https://www.googleapis.com/identitytoolkit/v3/relyingparty/publicKeys'
+MIN_SESSION_COOKIE_DURATION_SECONDS = int(datetime.timedelta(minutes=5).total_seconds())
+MAX_SESSION_COOKIE_DURATION_SECONDS = int(datetime.timedelta(days=14).total_seconds())
+
+# Custom token constants
+MAX_TOKEN_LIFETIME_SECONDS = int(datetime.timedelta(hours=1).total_seconds())
+FIREBASE_AUDIENCE = ('https://identitytoolkit.googleapis.com/google.'
+ 'identity.identitytoolkit.v1.IdentityToolkit')
+RESERVED_CLAIMS = set([
+ 'acr', 'amr', 'at_hash', 'aud', 'auth_time', 'azp', 'cnf', 'c_hash',
+ 'exp', 'firebase', 'iat', 'iss', 'jti', 'nbf', 'nonce', 'sub'
+])
+METADATA_SERVICE_URL = ('http://metadata.google.internal/computeMetadata/v1/instance/'
+ 'service-accounts/default/email')
+
+
+class _SigningProvider:
+ """Stores a reference to a google.auth.crypto.Signer."""
+
+ def __init__(self, signer, signer_email):
+ self._signer = signer
+ self._signer_email = signer_email
+
+ @property
+ def signer(self):
+ return self._signer
+
+ @property
+ def signer_email(self):
+ return self._signer_email
+
+ @classmethod
+ def from_credential(cls, google_cred):
+ return _SigningProvider(google_cred.signer, google_cred.signer_email)
+
+ @classmethod
+ def from_iam(cls, request, google_cred, service_account):
+ signer = iam.Signer(request, google_cred, service_account)
+ return _SigningProvider(signer, service_account)
+
+
+class TokenGenerator:
+ """Generates custom tokens and session cookies."""
+
+ ID_TOOLKIT_URL = 'https://identitytoolkit.googleapis.com/v1'
+
+ def __init__(self, app, http_client):
+ self.app = app
+ self.http_client = http_client
+ self.request = transport.requests.Request()
+ self.base_url = '{0}/projects/{1}'.format(self.ID_TOOLKIT_URL, app.project_id)
+ self._signing_provider = None
+
+ def _init_signing_provider(self):
+ """Initializes a signing provider by following the go/firebase-admin-sign protocol."""
+ # If the SDK was initialized with a service account, use it to sign bytes.
+ google_cred = self.app.credential.get_credential()
+ if isinstance(google_cred, google.oauth2.service_account.Credentials):
+ return _SigningProvider.from_credential(google_cred)
+
+ # If the SDK was initialized with a service account email, use it with the IAM service
+ # to sign bytes.
+ service_account = self.app.options.get('serviceAccountId')
+ if service_account:
+ return _SigningProvider.from_iam(self.request, google_cred, service_account)
+
+ # If the SDK was initialized with some other credential type that supports signing
+ # (e.g. GAE credentials), use it to sign bytes.
+ if isinstance(google_cred, credentials.Signing):
+ return _SigningProvider.from_credential(google_cred)
+
+ # Attempt to discover a service account email from the local Metadata service. Use it
+ # with the IAM service to sign bytes.
+ resp = self.request(url=METADATA_SERVICE_URL, headers={'Metadata-Flavor': 'Google'})
+ if resp.status != 200:
+ raise ValueError(
+ 'Failed to contact the local metadata service: {0}.'.format(resp.data.decode()))
+ service_account = resp.data.decode()
+ return _SigningProvider.from_iam(self.request, google_cred, service_account)
+
+ @property
+ def signing_provider(self):
+ """Initializes and returns the SigningProvider instance to be used."""
+ if not self._signing_provider:
+ try:
+ self._signing_provider = self._init_signing_provider()
+ except Exception as error:
+ url = 'https://firebase.google.com/docs/auth/admin/create-custom-tokens'
+ raise ValueError(
+ 'Failed to determine service account: {0}. Make sure to initialize the SDK '
+ 'with service account credentials or specify a service account ID with '
+ 'iam.serviceAccounts.signBlob permission. Please refer to {1} for more '
+ 'details on creating custom tokens.'.format(error, url))
+ return self._signing_provider
+
+ def create_custom_token(self, uid, developer_claims=None, tenant_id=None):
+ """Builds and signs a Firebase custom auth token."""
+ if developer_claims is not None:
+ if not isinstance(developer_claims, dict):
+ raise ValueError('developer_claims must be a dictionary')
+
+ disallowed_keys = set(developer_claims.keys()) & RESERVED_CLAIMS
+ if disallowed_keys:
+ if len(disallowed_keys) > 1:
+ error_message = ('Developer claims {0} are reserved and '
+ 'cannot be specified.'.format(
+ ', '.join(disallowed_keys)))
+ else:
+ error_message = ('Developer claim {0} is reserved and '
+ 'cannot be specified.'.format(
+ ', '.join(disallowed_keys)))
+ raise ValueError(error_message)
+
+ if not uid or not isinstance(uid, str) or len(uid) > 128:
+ raise ValueError('uid must be a string between 1 and 128 characters.')
+
+ signing_provider = self.signing_provider
+ now = int(time.time())
+ payload = {
+ 'iss': signing_provider.signer_email,
+ 'sub': signing_provider.signer_email,
+ 'aud': FIREBASE_AUDIENCE,
+ 'uid': uid,
+ 'iat': now,
+ 'exp': now + MAX_TOKEN_LIFETIME_SECONDS,
+ }
+ if tenant_id:
+ payload['tenant_id'] = tenant_id
+
+ if developer_claims is not None:
+ payload['claims'] = developer_claims
+ try:
+ return jwt.encode(signing_provider.signer, payload)
+ except google.auth.exceptions.TransportError as error:
+ msg = 'Failed to sign custom token. {0}'.format(error)
+ raise TokenSignError(msg, error)
+
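+    # Illustrative usage (a sketch, not part of this module): custom tokens are
+    # normally minted through the public ``firebase_admin.auth`` API, which
+    # delegates to TokenGenerator.create_custom_token() above.
+    #
+    #   from firebase_admin import auth
+    #
+    #   token = auth.create_custom_token('some-uid', {'premium_account': True})
+    #   # `token` is a signed JWT that a client SDK exchanges for an ID token.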
+
+ def create_session_cookie(self, id_token, expires_in):
+ """Creates a session cookie from the provided ID token."""
+ id_token = id_token.decode('utf-8') if isinstance(id_token, bytes) else id_token
+ if not isinstance(id_token, str) or not id_token:
+ raise ValueError(
+ 'Illegal ID token provided: {0}. ID token must be a non-empty '
+ 'string.'.format(id_token))
+
+ if isinstance(expires_in, datetime.timedelta):
+ expires_in = int(expires_in.total_seconds())
+ if isinstance(expires_in, bool) or not isinstance(expires_in, int):
+ raise ValueError('Illegal expiry duration: {0}.'.format(expires_in))
+ if expires_in < MIN_SESSION_COOKIE_DURATION_SECONDS:
+ raise ValueError('Illegal expiry duration: {0}. Duration must be at least {1} '
+ 'seconds.'.format(expires_in, MIN_SESSION_COOKIE_DURATION_SECONDS))
+ if expires_in > MAX_SESSION_COOKIE_DURATION_SECONDS:
+ raise ValueError('Illegal expiry duration: {0}. Duration must be at most {1} '
+ 'seconds.'.format(expires_in, MAX_SESSION_COOKIE_DURATION_SECONDS))
+
+ url = '{0}:createSessionCookie'.format(self.base_url)
+ payload = {
+ 'idToken': id_token,
+ 'validDuration': expires_in,
+ }
+ try:
+ body, http_resp = self.http_client.body_and_response('post', url, json=payload)
+ except requests.exceptions.RequestException as error:
+ raise _auth_utils.handle_auth_backend_error(error)
+ else:
+ if not body or not body.get('sessionCookie'):
+ raise _auth_utils.UnexpectedResponseError(
+ 'Failed to create session cookie.', http_response=http_resp)
+ return body.get('sessionCookie')
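+
+    # Illustrative usage (a sketch; `id_token` is a hypothetical value obtained
+    # from a signed-in client):
+    #
+    #   import datetime
+    #   from firebase_admin import auth
+    #
+    #   expires_in = datetime.timedelta(days=5)
+    #   cookie = auth.create_session_cookie(id_token, expires_in=expires_in)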
+
+
+class TokenVerifier:
+ """Verifies ID tokens and session cookies."""
+
+ def __init__(self, app):
+ session = cachecontrol.CacheControl(requests.Session())
+ self.request = transport.requests.Request(session=session)
+ self.id_token_verifier = _JWTVerifier(
+ project_id=app.project_id, short_name='ID token',
+ operation='verify_id_token()',
+ doc_url='https://firebase.google.com/docs/auth/admin/verify-id-tokens',
+ cert_url=ID_TOKEN_CERT_URI,
+ issuer=ID_TOKEN_ISSUER_PREFIX,
+ invalid_token_error=_auth_utils.InvalidIdTokenError,
+ expired_token_error=ExpiredIdTokenError)
+ self.cookie_verifier = _JWTVerifier(
+ project_id=app.project_id, short_name='session cookie',
+ operation='verify_session_cookie()',
+ doc_url='https://firebase.google.com/docs/auth/admin/verify-id-tokens',
+ cert_url=COOKIE_CERT_URI,
+ issuer=COOKIE_ISSUER_PREFIX,
+ invalid_token_error=InvalidSessionCookieError,
+ expired_token_error=ExpiredSessionCookieError)
+
+ def verify_id_token(self, id_token):
+ return self.id_token_verifier.verify(id_token, self.request)
+
+ def verify_session_cookie(self, cookie):
+ return self.cookie_verifier.verify(cookie, self.request)
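+
+    # Illustrative usage (a sketch; both helpers are exposed via the public
+    # ``firebase_admin.auth`` API, which wraps this class):
+    #
+    #   from firebase_admin import auth
+    #
+    #   decoded = auth.verify_id_token(id_token)  # id_token from a client SDK
+    #   uid = decoded['uid']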
+
+
+class _JWTVerifier:
+ """Verifies Firebase JWTs (ID tokens or session cookies)."""
+
+ def __init__(self, **kwargs):
+ self.project_id = kwargs.pop('project_id')
+ self.short_name = kwargs.pop('short_name')
+ self.operation = kwargs.pop('operation')
+ self.url = kwargs.pop('doc_url')
+ self.cert_url = kwargs.pop('cert_url')
+ self.issuer = kwargs.pop('issuer')
+ if self.short_name[0].lower() in 'aeiou':
+ self.articled_short_name = 'an {0}'.format(self.short_name)
+ else:
+ self.articled_short_name = 'a {0}'.format(self.short_name)
+ self._invalid_token_error = kwargs.pop('invalid_token_error')
+ self._expired_token_error = kwargs.pop('expired_token_error')
+
+ def verify(self, token, request):
+ """Verifies the signature and data for the provided JWT."""
+ token = token.encode('utf-8') if isinstance(token, str) else token
+ if not isinstance(token, bytes) or not token:
+ raise ValueError(
+ 'Illegal {0} provided: {1}. {0} must be a non-empty '
+ 'string.'.format(self.short_name, token))
+
+ if not self.project_id:
+ raise ValueError(
+ 'Failed to ascertain project ID from the credential or the environment. Project '
+ 'ID is required to call {0}. Initialize the app with a credentials.Certificate '
+ 'or set your Firebase project ID as an app option. Alternatively set the '
+ 'GOOGLE_CLOUD_PROJECT environment variable.'.format(self.operation))
+
+ header, payload = self._decode_unverified(token)
+ issuer = payload.get('iss')
+ audience = payload.get('aud')
+ subject = payload.get('sub')
+ expected_issuer = self.issuer + self.project_id
+
+ project_id_match_msg = (
+ 'Make sure the {0} comes from the same Firebase project as the service account used '
+ 'to authenticate this SDK.'.format(self.short_name))
+ verify_id_token_msg = (
+ 'See {0} for details on how to retrieve {1}.'.format(self.url, self.short_name))
+
+ error_message = None
+ if audience == FIREBASE_AUDIENCE:
+ error_message = (
+ '{0} expects {1}, but was given a custom '
+ 'token.'.format(self.operation, self.articled_short_name))
+ elif not header.get('kid'):
+            if (header.get('alg') == 'HS256' and payload.get('v') == 0
+                    and 'uid' in payload.get('d', {})):
+ error_message = (
+ '{0} expects {1}, but was given a legacy custom '
+ 'token.'.format(self.operation, self.articled_short_name))
+ else:
+ error_message = 'Firebase {0} has no "kid" claim.'.format(self.short_name)
+ elif header.get('alg') != 'RS256':
+ error_message = (
+ 'Firebase {0} has incorrect algorithm. Expected "RS256" but got '
+ '"{1}". {2}'.format(self.short_name, header.get('alg'), verify_id_token_msg))
+ elif audience != self.project_id:
+ error_message = (
+ 'Firebase {0} has incorrect "aud" (audience) claim. Expected "{1}" but '
+ 'got "{2}". {3} {4}'.format(self.short_name, self.project_id, audience,
+ project_id_match_msg, verify_id_token_msg))
+ elif issuer != expected_issuer:
+ error_message = (
+ 'Firebase {0} has incorrect "iss" (issuer) claim. Expected "{1}" but '
+ 'got "{2}". {3} {4}'.format(self.short_name, expected_issuer, issuer,
+ project_id_match_msg, verify_id_token_msg))
+ elif subject is None or not isinstance(subject, str):
+ error_message = (
+ 'Firebase {0} has no "sub" (subject) claim. '
+ '{1}'.format(self.short_name, verify_id_token_msg))
+ elif not subject:
+ error_message = (
+ 'Firebase {0} has an empty string "sub" (subject) claim. '
+ '{1}'.format(self.short_name, verify_id_token_msg))
+ elif len(subject) > 128:
+ error_message = (
+ 'Firebase {0} has a "sub" (subject) claim longer than 128 characters. '
+ '{1}'.format(self.short_name, verify_id_token_msg))
+
+ if error_message:
+ raise self._invalid_token_error(error_message)
+
+ try:
+ verified_claims = google.oauth2.id_token.verify_token(
+ token,
+ request=request,
+ audience=self.project_id,
+ certs_url=self.cert_url)
+ verified_claims['uid'] = verified_claims['sub']
+ return verified_claims
+ except google.auth.exceptions.TransportError as error:
+ raise CertificateFetchError(str(error), cause=error)
+ except ValueError as error:
+ if 'Token expired' in str(error):
+ raise self._expired_token_error(str(error), cause=error)
+ raise self._invalid_token_error(str(error), cause=error)
+
+ def _decode_unverified(self, token):
+ try:
+ header = jwt.decode_header(token)
+ payload = jwt.decode(token, verify=False)
+ return header, payload
+ except ValueError as error:
+ raise self._invalid_token_error(str(error), cause=error)
+
+
+class TokenSignError(exceptions.UnknownError):
+ """Unexpected error while signing a Firebase custom token."""
+
+ def __init__(self, message, cause):
+ exceptions.UnknownError.__init__(self, message, cause)
+
+
+class CertificateFetchError(exceptions.UnknownError):
+ """Failed to fetch some public key certificates required to verify a token."""
+
+ def __init__(self, message, cause):
+ exceptions.UnknownError.__init__(self, message, cause)
+
+
+class ExpiredIdTokenError(_auth_utils.InvalidIdTokenError):
+ """The provided ID token is expired."""
+
+ def __init__(self, message, cause):
+ _auth_utils.InvalidIdTokenError.__init__(self, message, cause)
+
+
+class RevokedIdTokenError(_auth_utils.InvalidIdTokenError):
+ """The provided ID token has been revoked."""
+
+ def __init__(self, message):
+ _auth_utils.InvalidIdTokenError.__init__(self, message)
+
+
+class InvalidSessionCookieError(exceptions.InvalidArgumentError):
+ """The provided string is not a valid Firebase session cookie."""
+
+ def __init__(self, message, cause=None):
+ exceptions.InvalidArgumentError.__init__(self, message, cause)
+
+
+class ExpiredSessionCookieError(InvalidSessionCookieError):
+ """The provided session cookie is expired."""
+
+ def __init__(self, message, cause):
+ InvalidSessionCookieError.__init__(self, message, cause)
+
+
+class RevokedSessionCookieError(InvalidSessionCookieError):
+ """The provided session cookie has been revoked."""
+
+ def __init__(self, message):
+ InvalidSessionCookieError.__init__(self, message)
diff --git a/venv/Lib/site-packages/firebase_admin/_user_identifier.py b/venv/Lib/site-packages/firebase_admin/_user_identifier.py
new file mode 100644
index 000000000..85a224e0b
--- /dev/null
+++ b/venv/Lib/site-packages/firebase_admin/_user_identifier.py
@@ -0,0 +1,103 @@
+# Copyright 2020 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Classes to uniquely identify a user."""
+
+from firebase_admin import _auth_utils
+
+class UserIdentifier:
+ """Identifies a user to be looked up."""
+
+
+class UidIdentifier(UserIdentifier):
+ """Used for looking up an account by uid.
+
+ See ``auth.get_user()``.
+ """
+
+ def __init__(self, uid):
+ """Constructs a new `UidIdentifier` object.
+
+ Args:
+ uid: A user ID string.
+ """
+ self._uid = _auth_utils.validate_uid(uid, required=True)
+
+ @property
+ def uid(self):
+ return self._uid
+
+
+class EmailIdentifier(UserIdentifier):
+ """Used for looking up an account by email.
+
+ See ``auth.get_user()``.
+ """
+
+ def __init__(self, email):
+ """Constructs a new `EmailIdentifier` object.
+
+ Args:
+ email: A user email address string.
+ """
+ self._email = _auth_utils.validate_email(email, required=True)
+
+ @property
+ def email(self):
+ return self._email
+
+
+class PhoneIdentifier(UserIdentifier):
+ """Used for looking up an account by phone number.
+
+ See ``auth.get_user()``.
+ """
+
+ def __init__(self, phone_number):
+ """Constructs a new `PhoneIdentifier` object.
+
+ Args:
+ phone_number: A phone number string.
+ """
+ self._phone_number = _auth_utils.validate_phone(phone_number, required=True)
+
+ @property
+ def phone_number(self):
+ return self._phone_number
+
+
+class ProviderIdentifier(UserIdentifier):
+ """Used for looking up an account by provider.
+
+ See ``auth.get_user()``.
+ """
+
+ def __init__(self, provider_id, provider_uid):
+ """Constructs a new `ProviderIdentifier` object.
+
+ Args:
+ provider_id: A provider ID string.
+ provider_uid: A provider UID string.
+ """
+ self._provider_id = _auth_utils.validate_provider_id(provider_id, required=True)
+ self._provider_uid = _auth_utils.validate_provider_uid(
+ provider_uid, required=True)
+
+ @property
+ def provider_id(self):
+ return self._provider_id
+
+ @property
+ def provider_uid(self):
+ return self._provider_uid
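+
+
+# Illustrative usage (a sketch): these identifier types are consumed by the
+# public ``auth.get_users()`` API.
+#
+#   from firebase_admin import auth
+#
+#   result = auth.get_users([
+#       auth.UidIdentifier('uid1'),
+#       auth.EmailIdentifier('user2@example.com'),
+#       auth.PhoneIdentifier('+15555550003'),
+#       auth.ProviderIdentifier(provider_id='google.com', provider_uid='uid4'),
+#   ])
+#   for user in result.users:
+#       print('found user:', user.uid)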
diff --git a/venv/Lib/site-packages/firebase_admin/_user_import.py b/venv/Lib/site-packages/firebase_admin/_user_import.py
new file mode 100644
index 000000000..7834b232a
--- /dev/null
+++ b/venv/Lib/site-packages/firebase_admin/_user_import.py
@@ -0,0 +1,520 @@
+# Copyright 2018 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Firebase user import sub module."""
+
+import base64
+import json
+
+from firebase_admin import _auth_utils
+
+
+def b64_encode(bytes_value):
+ return base64.urlsafe_b64encode(bytes_value).decode()
+
+
+class UserProvider:
+ """Represents a user identity provider that can be associated with a Firebase user.
+
+ One or more providers can be specified in an ``ImportUserRecord`` when importing users via
+ ``auth.import_users()``.
+
+ Args:
+ uid: User's unique ID assigned by the identity provider.
+ provider_id: ID of the identity provider. This can be a short domain name or the identifier
+ of an OpenID identity provider.
+ email: User's email address (optional).
+ display_name: User's display name (optional).
+ photo_url: User's photo URL (optional).
+ """
+
+ def __init__(self, uid, provider_id, email=None, display_name=None, photo_url=None):
+ self.uid = uid
+ self.provider_id = provider_id
+ self.email = email
+ self.display_name = display_name
+ self.photo_url = photo_url
+
+ @property
+ def uid(self):
+ return self._uid
+
+ @uid.setter
+ def uid(self, uid):
+ self._uid = _auth_utils.validate_uid(uid, required=True)
+
+ @property
+ def provider_id(self):
+ return self._provider_id
+
+ @provider_id.setter
+ def provider_id(self, provider_id):
+ self._provider_id = _auth_utils.validate_provider_id(provider_id, required=True)
+
+ @property
+ def email(self):
+ return self._email
+
+ @email.setter
+ def email(self, email):
+ self._email = _auth_utils.validate_email(email)
+
+ @property
+ def display_name(self):
+ return self._display_name
+
+ @display_name.setter
+ def display_name(self, display_name):
+ self._display_name = _auth_utils.validate_display_name(display_name)
+
+ @property
+ def photo_url(self):
+ return self._photo_url
+
+ @photo_url.setter
+ def photo_url(self, photo_url):
+ self._photo_url = _auth_utils.validate_photo_url(photo_url)
+
+ def to_dict(self):
+ payload = {
+ 'rawId': self.uid,
+ 'providerId': self.provider_id,
+ 'displayName': self.display_name,
+ 'email': self.email,
+ 'photoUrl': self.photo_url,
+ }
+ return {k: v for k, v in payload.items() if v is not None}
+
+
+class ImportUserRecord:
+ """Represents a user account to be imported to Firebase Auth.
+
+ Must specify the ``uid`` field at a minimum. A sequence of ``ImportUserRecord`` objects can be
+ passed to the ``auth.import_users()`` function, in order to import those users into Firebase
+ Auth in bulk. If the ``password_hash`` is set on a user, a hash configuration must be
+ specified when calling ``import_users()``.
+
+ Args:
+ uid: User's unique ID. Must be a non-empty string not longer than 128 characters.
+ email: User's email address (optional).
+ email_verified: A boolean indicating whether the user's email has been verified (optional).
+ display_name: User's display name (optional).
+ phone_number: User's phone number (optional).
+ photo_url: User's photo URL (optional).
+ disabled: A boolean indicating whether this user account has been disabled (optional).
+ user_metadata: An ``auth.UserMetadata`` instance with additional user metadata (optional).
+ provider_data: A list of ``auth.UserProvider`` instances (optional).
+ custom_claims: A ``dict`` of custom claims to be set on the user account (optional).
+ password_hash: User's password hash as a ``bytes`` sequence (optional).
+ password_salt: User's password salt as a ``bytes`` sequence (optional).
+
+ Raises:
+ ValueError: If provided arguments are invalid.
+ """
+
+ def __init__(self, uid, email=None, email_verified=None, display_name=None, phone_number=None,
+ photo_url=None, disabled=None, user_metadata=None, provider_data=None,
+ custom_claims=None, password_hash=None, password_salt=None):
+ self.uid = uid
+ self.email = email
+ self.display_name = display_name
+ self.phone_number = phone_number
+ self.photo_url = photo_url
+ self.password_hash = password_hash
+ self.password_salt = password_salt
+ self.email_verified = email_verified
+ self.disabled = disabled
+ self.user_metadata = user_metadata
+ self.provider_data = provider_data
+ self.custom_claims = custom_claims
+
+ @property
+ def uid(self):
+ return self._uid
+
+ @uid.setter
+ def uid(self, uid):
+ self._uid = _auth_utils.validate_uid(uid, required=True)
+
+ @property
+ def email(self):
+ return self._email
+
+ @email.setter
+ def email(self, email):
+ self._email = _auth_utils.validate_email(email)
+
+ @property
+ def display_name(self):
+ return self._display_name
+
+ @display_name.setter
+ def display_name(self, display_name):
+ self._display_name = _auth_utils.validate_display_name(display_name)
+
+ @property
+ def phone_number(self):
+ return self._phone_number
+
+ @phone_number.setter
+ def phone_number(self, phone_number):
+ self._phone_number = _auth_utils.validate_phone(phone_number)
+
+ @property
+ def photo_url(self):
+ return self._photo_url
+
+ @photo_url.setter
+ def photo_url(self, photo_url):
+ self._photo_url = _auth_utils.validate_photo_url(photo_url)
+
+ @property
+ def password_hash(self):
+ return self._password_hash
+
+ @password_hash.setter
+ def password_hash(self, password_hash):
+ self._password_hash = _auth_utils.validate_bytes(password_hash, 'password_hash')
+
+ @property
+ def password_salt(self):
+ return self._password_salt
+
+ @password_salt.setter
+ def password_salt(self, password_salt):
+ self._password_salt = _auth_utils.validate_bytes(password_salt, 'password_salt')
+
+ @property
+ def user_metadata(self):
+ return self._user_metadata
+
+ @user_metadata.setter
+ def user_metadata(self, user_metadata):
+ created_at = user_metadata.creation_timestamp if user_metadata is not None else None
+ last_login_at = user_metadata.last_sign_in_timestamp if user_metadata is not None else None
+ self._created_at = _auth_utils.validate_timestamp(created_at, 'creation_timestamp')
+ self._last_login_at = _auth_utils.validate_timestamp(
+ last_login_at, 'last_sign_in_timestamp')
+ self._user_metadata = user_metadata
+
+ @property
+ def provider_data(self):
+ return self._provider_data
+
+ @provider_data.setter
+ def provider_data(self, provider_data):
+ if provider_data is not None:
+ try:
+ if any([not isinstance(p, UserProvider) for p in provider_data]):
+ raise ValueError('One or more provider data instances are invalid.')
+ except TypeError:
+ raise ValueError('provider_data must be iterable.')
+ self._provider_data = provider_data
+
+ @property
+ def custom_claims(self):
+ return self._custom_claims
+
+ @custom_claims.setter
+ def custom_claims(self, custom_claims):
+ json_claims = json.dumps(custom_claims) if isinstance(
+ custom_claims, dict) else custom_claims
+ self._custom_claims_str = _auth_utils.validate_custom_claims(json_claims)
+ self._custom_claims = custom_claims
+
+ def to_dict(self):
+ """Returns a dict representation of the user. For internal use only."""
+ payload = {
+ 'localId': self.uid,
+ 'email': self.email,
+ 'displayName': self.display_name,
+ 'phoneNumber': self.phone_number,
+ 'photoUrl': self.photo_url,
+ 'emailVerified': (bool(self.email_verified)
+ if self.email_verified is not None else None),
+ 'disabled': bool(self.disabled) if self.disabled is not None else None,
+ 'customAttributes': self._custom_claims_str,
+ 'createdAt': self._created_at,
+ 'lastLoginAt': self._last_login_at,
+ 'passwordHash': b64_encode(self.password_hash) if self.password_hash else None,
+ 'salt': b64_encode(self.password_salt) if self.password_salt else None,
+ }
+ if self.provider_data:
+ payload['providerUserInfo'] = [p.to_dict() for p in self.provider_data]
+ return {k: v for k, v in payload.items() if v is not None}
+
+
+class UserImportHash:
+ """Represents a hash algorithm used to hash user passwords.
+
+ An instance of this class must be specified when importing users with passwords via the
+ ``auth.import_users()`` API. Use one of the provided class methods to obtain new
+ instances when required. Refer to `documentation`_ for more details.
+
+ .. _documentation: https://firebase.google.com/docs/auth/admin/import-users
+ """
+
+ def __init__(self, name, data=None):
+ self._name = name
+ self._data = data
+
+ def to_dict(self):
+ payload = {'hashAlgorithm': self._name}
+ if self._data:
+ payload.update(self._data)
+ return payload
+
+ @classmethod
+ def _hmac(cls, name, key):
+ data = {
+ 'signerKey': b64_encode(_auth_utils.validate_bytes(key, 'key', required=True))
+ }
+ return UserImportHash(name, data)
+
+ @classmethod
+ def hmac_sha512(cls, key):
+ """Creates a new HMAC SHA512 algorithm instance.
+
+ Args:
+ key: Signer key as a byte sequence.
+
+ Returns:
+ UserImportHash: A new ``UserImportHash``.
+ """
+ return cls._hmac('HMAC_SHA512', key)
+
+ @classmethod
+ def hmac_sha256(cls, key):
+ """Creates a new HMAC SHA256 algorithm instance.
+
+ Args:
+ key: Signer key as a byte sequence.
+
+ Returns:
+ UserImportHash: A new ``UserImportHash``.
+ """
+ return cls._hmac('HMAC_SHA256', key)
+
+ @classmethod
+ def hmac_sha1(cls, key):
+ """Creates a new HMAC SHA1 algorithm instance.
+
+ Args:
+ key: Signer key as a byte sequence.
+
+ Returns:
+ UserImportHash: A new ``UserImportHash``.
+ """
+ return cls._hmac('HMAC_SHA1', key)
+
+ @classmethod
+ def hmac_md5(cls, key):
+ """Creates a new HMAC MD5 algorithm instance.
+
+ Args:
+ key: Signer key as a byte sequence.
+
+ Returns:
+ UserImportHash: A new ``UserImportHash``.
+ """
+ return cls._hmac('HMAC_MD5', key)
+
+ @classmethod
+ def md5(cls, rounds):
+ """Creates a new MD5 algorithm instance.
+
+ Args:
+ rounds: Number of rounds. Must be an integer between 0 and 8192.
+
+ Returns:
+ UserImportHash: A new ``UserImportHash``.
+ """
+ return UserImportHash(
+ 'MD5',
+ {'rounds': _auth_utils.validate_int(rounds, 'rounds', 0, 8192)})
+
+ @classmethod
+ def sha1(cls, rounds):
+ """Creates a new SHA1 algorithm instance.
+
+ Args:
+ rounds: Number of rounds. Must be an integer between 1 and 8192.
+
+ Returns:
+ UserImportHash: A new ``UserImportHash``.
+ """
+ return UserImportHash(
+ 'SHA1',
+ {'rounds': _auth_utils.validate_int(rounds, 'rounds', 1, 8192)})
+
+ @classmethod
+ def sha256(cls, rounds):
+ """Creates a new SHA256 algorithm instance.
+
+ Args:
+ rounds: Number of rounds. Must be an integer between 1 and 8192.
+
+ Returns:
+ UserImportHash: A new ``UserImportHash``.
+ """
+ return UserImportHash(
+ 'SHA256',
+ {'rounds': _auth_utils.validate_int(rounds, 'rounds', 1, 8192)})
+
+ @classmethod
+ def sha512(cls, rounds):
+ """Creates a new SHA512 algorithm instance.
+
+ Args:
+ rounds: Number of rounds. Must be an integer between 1 and 8192.
+
+ Returns:
+ UserImportHash: A new ``UserImportHash``.
+ """
+ return UserImportHash(
+ 'SHA512',
+ {'rounds': _auth_utils.validate_int(rounds, 'rounds', 1, 8192)})
+
+ @classmethod
+ def pbkdf_sha1(cls, rounds):
+ """Creates a new PBKDF SHA1 algorithm instance.
+
+ Args:
+ rounds: Number of rounds. Must be an integer between 0 and 120000.
+
+ Returns:
+ UserImportHash: A new ``UserImportHash``.
+ """
+ return UserImportHash(
+ 'PBKDF_SHA1',
+ {'rounds': _auth_utils.validate_int(rounds, 'rounds', 0, 120000)})
+
+ @classmethod
+ def pbkdf2_sha256(cls, rounds):
+ """Creates a new PBKDF2 SHA256 algorithm instance.
+
+ Args:
+ rounds: Number of rounds. Must be an integer between 0 and 120000.
+
+ Returns:
+ UserImportHash: A new ``UserImportHash``.
+ """
+ return UserImportHash(
+ 'PBKDF2_SHA256',
+ {'rounds': _auth_utils.validate_int(rounds, 'rounds', 0, 120000)})
+
+ @classmethod
+ def scrypt(cls, key, rounds, memory_cost, salt_separator=None):
+ """Creates a new Scrypt algorithm instance.
+
+        This is the modified Scrypt algorithm used by Firebase Auth. See the
+        ``standard_scrypt()`` method for the standard Scrypt algorithm.
+
+ Args:
+ key: Signer key as a byte sequence.
+ rounds: Number of rounds. Must be an integer between 1 and 8.
+ memory_cost: Memory cost as an integer between 1 and 14.
+ salt_separator: Salt separator as a byte sequence (optional).
+
+ Returns:
+ UserImportHash: A new ``UserImportHash``.
+ """
+ data = {
+ 'signerKey': b64_encode(_auth_utils.validate_bytes(key, 'key', required=True)),
+ 'rounds': _auth_utils.validate_int(rounds, 'rounds', 1, 8),
+ 'memoryCost': _auth_utils.validate_int(memory_cost, 'memory_cost', 1, 14),
+ }
+ if salt_separator:
+ data['saltSeparator'] = b64_encode(_auth_utils.validate_bytes(
+ salt_separator, 'salt_separator'))
+ return UserImportHash('SCRYPT', data)
+
+ @classmethod
+ def bcrypt(cls):
+ """Creates a new Bcrypt algorithm instance.
+
+ Returns:
+ UserImportHash: A new ``UserImportHash``.
+ """
+ return UserImportHash('BCRYPT')
+
+ @classmethod
+ def standard_scrypt(cls, memory_cost, parallelization, block_size, derived_key_length):
+ """Creates a new standard Scrypt algorithm instance.
+
+ Args:
+            memory_cost: Memory cost as a non-negative integer.
+ parallelization: Parallelization as a non-negative integer.
+ block_size: Block size as a non-negative integer.
+ derived_key_length: Derived key length as a non-negative integer.
+
+ Returns:
+ UserImportHash: A new ``UserImportHash``.
+ """
+ data = {
+ 'memoryCost': _auth_utils.validate_int(memory_cost, 'memory_cost', low=0),
+ 'parallelization': _auth_utils.validate_int(parallelization, 'parallelization', low=0),
+ 'blockSize': _auth_utils.validate_int(block_size, 'block_size', low=0),
+ 'dkLen': _auth_utils.validate_int(derived_key_length, 'derived_key_length', low=0),
+ }
+ return UserImportHash('STANDARD_SCRYPT', data)
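+
+    # Illustrative usage (a sketch; the key, hash, and salt bytes are
+    # hypothetical placeholders): importing users with pre-hashed passwords
+    # through the public ``auth.import_users()`` API.
+    #
+    #   from firebase_admin import auth
+    #
+    #   users = [
+    #       auth.ImportUserRecord(
+    #           uid='user1',
+    #           email='user1@example.com',
+    #           password_hash=b'<hash-bytes>',
+    #           password_salt=b'<salt-bytes>'),
+    #   ]
+    #   hash_alg = auth.UserImportHash.hmac_sha256(key=b'<signer-key>')
+    #   result = auth.import_users(users, hash_alg=hash_alg)
+    #   print('imported:', result.success_count, 'failed:', result.failure_count)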
+
+
+class ErrorInfo:
+ """Represents an error encountered while performing a batch operation such
+ as importing users or deleting multiple user accounts.
+ """
+    # TODO(rsgowman): This class used to be specific to importing users (hence
+    # its home in _user_import.py). It's now also used by bulk deletion of
+    # users. Move this to a more common location.
+
+ def __init__(self, error):
+ self._index = error['index']
+ self._reason = error['message']
+
+ @property
+ def index(self):
+ return self._index
+
+ @property
+ def reason(self):
+ return self._reason
+
+
+class UserImportResult:
+ """Represents the result of a bulk user import operation.
+
+ See ``auth.import_users()`` API for more details.
+ """
+
+ def __init__(self, result, total):
+ errors = result.get('error', [])
+ self._success_count = total - len(errors)
+ self._failure_count = len(errors)
+ self._errors = [ErrorInfo(err) for err in errors]
+
+ @property
+ def success_count(self):
+ """Returns the number of users successfully imported."""
+ return self._success_count
+
+ @property
+ def failure_count(self):
+ """Returns the number of users that failed to be imported."""
+ return self._failure_count
+
+ @property
+ def errors(self):
+ """Returns a list of ``auth.ErrorInfo`` instances describing the errors encountered."""
+ return self._errors
diff --git a/venv/Lib/site-packages/firebase_admin/_user_mgt.py b/venv/Lib/site-packages/firebase_admin/_user_mgt.py
new file mode 100644
index 000000000..1d97dd504
--- /dev/null
+++ b/venv/Lib/site-packages/firebase_admin/_user_mgt.py
@@ -0,0 +1,846 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Firebase user management sub module."""
+
+import base64
+from collections import defaultdict
+import json
+from urllib import parse
+
+import requests
+
+from firebase_admin import _auth_utils
+from firebase_admin import _rfc3339
+from firebase_admin import _user_identifier
+from firebase_admin import _user_import
+from firebase_admin._user_import import ErrorInfo
+
+
+MAX_LIST_USERS_RESULTS = 1000
+MAX_IMPORT_USERS_SIZE = 1000
+B64_REDACTED = base64.b64encode(b'REDACTED')
+
+
+class Sentinel:
+
+ def __init__(self, description):
+ self.description = description
+
+
+DELETE_ATTRIBUTE = Sentinel('Value used to delete an attribute from a user profile')
+
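+# Illustrative usage (a sketch): the sentinel above is exposed publicly as
+# ``auth.DELETE_ATTRIBUTE`` and can be passed to ``auth.update_user()`` to
+# clear an existing attribute.
+#
+#   from firebase_admin import auth
+#
+#   auth.update_user('some-uid', display_name=auth.DELETE_ATTRIBUTE,
+#                    photo_url=auth.DELETE_ATTRIBUTE)
+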
+
+class UserMetadata:
+ """Contains additional metadata associated with a user account."""
+
+ def __init__(self, creation_timestamp=None, last_sign_in_timestamp=None,
+ last_refresh_timestamp=None):
+ self._creation_timestamp = _auth_utils.validate_timestamp(
+ creation_timestamp, 'creation_timestamp')
+ self._last_sign_in_timestamp = _auth_utils.validate_timestamp(
+ last_sign_in_timestamp, 'last_sign_in_timestamp')
+ self._last_refresh_timestamp = _auth_utils.validate_timestamp(
+ last_refresh_timestamp, 'last_refresh_timestamp')
+
+ @property
+ def creation_timestamp(self):
+ """ Creation timestamp in milliseconds since the epoch.
+
+ Returns:
+ integer: The user creation timestamp in milliseconds since the epoch.
+ """
+ return self._creation_timestamp
+
+ @property
+ def last_sign_in_timestamp(self):
+ """ Last sign in timestamp in milliseconds since the epoch.
+
+ Returns:
+ integer: The last sign in timestamp in milliseconds since the epoch.
+ """
+ return self._last_sign_in_timestamp
+
+ @property
+ def last_refresh_timestamp(self):
+ """The time at which the user was last active (ID token refreshed).
+
+ Returns:
+ integer: Milliseconds since epoch timestamp, or `None` if the user was
+ never active.
+ """
+ return self._last_refresh_timestamp
+
+
+class UserInfo:
+ """A collection of standard profile information for a user.
+
+ Used to expose profile information returned by an identity provider.
+ """
+
+ @property
+ def uid(self):
+ """Returns the user ID of this user."""
+ raise NotImplementedError
+
+ @property
+ def display_name(self):
+ """Returns the display name of this user."""
+ raise NotImplementedError
+
+ @property
+ def email(self):
+ """Returns the email address associated with this user."""
+ raise NotImplementedError
+
+ @property
+ def phone_number(self):
+ """Returns the phone number associated with this user."""
+ raise NotImplementedError
+
+ @property
+ def photo_url(self):
+ """Returns the photo URL of this user."""
+ raise NotImplementedError
+
+ @property
+ def provider_id(self):
+ """Returns the ID of the identity provider.
+
+ This can be a short domain name (e.g. google.com), or the identity of an OpenID
+ identity provider.
+ """
+ raise NotImplementedError
+
+
+class UserRecord(UserInfo):
+ """Contains metadata associated with a Firebase user account."""
+
+ def __init__(self, data):
+ super(UserRecord, self).__init__()
+ if not isinstance(data, dict):
+ raise ValueError('Invalid data argument: {0}. Must be a dictionary.'.format(data))
+ if not data.get('localId'):
+ raise ValueError('User ID must not be None or empty.')
+ self._data = data
+
+ @property
+ def uid(self):
+ """Returns the user ID of this user.
+
+ Returns:
+ string: A user ID string. This value is never None or empty.
+ """
+ return self._data.get('localId')
+
+ @property
+ def display_name(self):
+ """Returns the display name of this user.
+
+ Returns:
+ string: A display name string or None.
+ """
+ return self._data.get('displayName')
+
+ @property
+ def email(self):
+ """Returns the email address associated with this user.
+
+ Returns:
+ string: An email address string or None.
+ """
+ return self._data.get('email')
+
+ @property
+ def phone_number(self):
+ """Returns the phone number associated with this user.
+
+ Returns:
+ string: A phone number string or None.
+ """
+ return self._data.get('phoneNumber')
+
+ @property
+ def photo_url(self):
+ """Returns the photo URL of this user.
+
+ Returns:
+ string: A URL string or None.
+ """
+ return self._data.get('photoUrl')
+
+ @property
+ def provider_id(self):
+ """Returns the provider ID of this user.
+
+ Returns:
+ string: A constant provider ID value.
+ """
+ return 'firebase'
+
+ @property
+ def email_verified(self):
+ """Returns whether the email address of this user has been verified.
+
+ Returns:
+ bool: True if the email has been verified, and False otherwise.
+ """
+ return bool(self._data.get('emailVerified'))
+
+ @property
+ def disabled(self):
+ """Returns whether this user account is disabled.
+
+ Returns:
+ bool: True if the user account is disabled, and False otherwise.
+ """
+ return bool(self._data.get('disabled'))
+
+ @property
+ def tokens_valid_after_timestamp(self):
+ """Returns the time, in milliseconds since the epoch, before which tokens are invalid.
+
+ Note: this is truncated to 1 second accuracy.
+
+ Returns:
+ int: Timestamp in milliseconds since the epoch, truncated to the second.
+ All tokens issued before that time are considered revoked.
+ """
+ valid_since = self._data.get('validSince')
+ if valid_since is not None:
+ return 1000 * int(valid_since)
+ return 0
+
+ @property
+ def user_metadata(self):
+ """Returns additional metadata associated with this user.
+
+ Returns:
+ UserMetadata: A UserMetadata instance. Does not return None.
+ """
+ def _int_or_none(key):
+ if key in self._data:
+ return int(self._data[key])
+ return None
+ last_refresh_at_millis = None
+ last_refresh_at_rfc3339 = self._data.get('lastRefreshAt', None)
+ if last_refresh_at_rfc3339:
+ last_refresh_at_millis = int(_rfc3339.parse_to_epoch(last_refresh_at_rfc3339) * 1000)
+ return UserMetadata(
+ _int_or_none('createdAt'), _int_or_none('lastLoginAt'), last_refresh_at_millis)
+
+ @property
+ def provider_data(self):
+ """Returns a list of UserInfo instances.
+
+ Each object represents an identity from an identity provider that is linked to this user.
+
+ Returns:
+ list: A list of UserInfo objects, which may be empty.
+ """
+ providers = self._data.get('providerUserInfo', [])
+ return [ProviderUserInfo(entry) for entry in providers]
+
+ @property
+ def custom_claims(self):
+ """Returns any custom claims set on this user account.
+
+ Returns:
+ dict: A dictionary of claims or None.
+ """
+ claims = self._data.get('customAttributes')
+ if claims:
+ parsed = json.loads(claims)
+ if parsed != {}:
+ return parsed
+ return None
+
+ @property
+ def tenant_id(self):
+ """Returns the tenant ID of this user.
+
+ Returns:
+ string: A tenant ID string or None.
+ """
+ return self._data.get('tenantId')
+
+
+class ExportedUserRecord(UserRecord):
+ """Contains metadata associated with a user including password hash and salt."""
+
+ @property
+ def password_hash(self):
+ """The user's password hash as a base64-encoded string.
+
+ If the Firebase Auth hashing algorithm (SCRYPT) was used to create the user account, this
+ is the base64-encoded password hash of the user. If a different hashing algorithm was
+ used to create this user, as is typical when migrating from another Auth system, this
+ is an empty string. If no password is set, or if the service account doesn't have permission
+ to read the password, then this is ``None``.
+ """
+ password_hash = self._data.get('passwordHash')
+
+ # If the password hash is redacted (probably due to missing permissions) then clear it out,
+ # similar to how the salt is returned. (Otherwise, it *looks* like a b64-encoded hash is
+ # present, which is confusing.)
+ if password_hash == B64_REDACTED:
+ return None
+ return password_hash
+
+ @property
+ def password_salt(self):
+ """The user's password salt as a base64-encoded string.
+
+ If the Firebase Auth hashing algorithm (SCRYPT) was used to create the user account, this
+ is the base64-encoded password salt of the user. If a different hashing algorithm was
+ used to create this user, as is typical when migrating from another Auth system, this is
+ an empty string. If no password is set, or if the service account doesn't have permission to
+ read the password, then this is ``None``.
+ """
+ return self._data.get('salt')
+
+
+class GetUsersResult:
+ """Represents the result of the ``auth.get_users()`` API."""
+
+ def __init__(self, users, not_found):
+ """Constructs a `GetUsersResult` object.
+
+ Args:
+ users: List of `UserRecord` instances.
+ not_found: List of `UserIdentifier` instances.
+ """
+ self._users = users
+ self._not_found = not_found
+
+ @property
+ def users(self):
+ """Set of `UserRecord` instances, corresponding to the set of users
+ that were requested. Only users that were found are listed here. The
+ result set is unordered.
+ """
+ return self._users
+
+ @property
+ def not_found(self):
+ """Set of `UserIdentifier` instances that were requested, but not
+ found.
+ """
+ return self._not_found
+
+
+class ListUsersPage:
+ """Represents a page of user records exported from a Firebase project.
+
+ Provides methods for traversing the user accounts included in this page, as well as retrieving
+ subsequent pages of users. The iterator returned by ``iterate_all()`` can be used to iterate
+ through all users in the Firebase project starting from this page.
+ """
+
+ def __init__(self, download, page_token, max_results):
+ self._download = download
+ self._max_results = max_results
+ self._current = download(page_token, max_results)
+
+ @property
+ def users(self):
+ """A list of ``ExportedUserRecord`` instances available in this page."""
+ return [ExportedUserRecord(user) for user in self._current.get('users', [])]
+
+ @property
+ def next_page_token(self):
+ """Page token string for the next page (empty string indicates no more pages)."""
+ return self._current.get('nextPageToken', '')
+
+ @property
+ def has_next_page(self):
+ """A boolean indicating whether more pages are available."""
+ return bool(self.next_page_token)
+
+ def get_next_page(self):
+ """Retrieves the next page of user accounts, if available.
+
+ Returns:
+ ListUsersPage: Next page of users, or None if this is the last page.
+ """
+ if self.has_next_page:
+ return ListUsersPage(self._download, self.next_page_token, self._max_results)
+ return None
+
+ def iterate_all(self):
+ """Retrieves an iterator for user accounts.
+
+ Returned iterator will iterate through all the user accounts in the Firebase project
+ starting from this page. The iterator will never buffer more than one page of users
+ in memory at a time.
+
+ Returns:
+ iterator: An iterator of ExportedUserRecord instances.
+ """
+ return _UserIterator(self)
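+
+    # Illustrative usage (a sketch): pages are normally obtained via the public
+    # ``auth.list_users()`` API, which returns a ListUsersPage.
+    #
+    #   from firebase_admin import auth
+    #
+    #   page = auth.list_users()
+    #   for user in page.iterate_all():
+    #       print('user:', user.uid)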
+
+
+class DeleteUsersResult:
+ """Represents the result of the ``auth.delete_users()`` API."""
+
+ def __init__(self, result, total):
+ """Constructs a `DeleteUsersResult` object.
+
+ Args:
+ result: The proto response, wrapped in a
+ `BatchDeleteAccountsResponse` instance.
+ total: Total integer number of deletion attempts.
+ """
+ errors = result.errors
+ self._success_count = total - len(errors)
+ self._failure_count = len(errors)
+ self._errors = errors
+
+ @property
+ def success_count(self):
+ """Returns the number of users that were deleted successfully (possibly
+ zero).
+
+ Users that did not exist prior to calling `delete_users()` are
+ considered to be successfully deleted.
+ """
+ return self._success_count
+
+ @property
+ def failure_count(self):
+ """Returns the number of users that failed to be deleted (possibly
+ zero).
+ """
+ return self._failure_count
+
+ @property
+ def errors(self):
+ """A list of `auth.ErrorInfo` instances describing the errors that
+ were encountered during the deletion. Length of this list is equal to
+ `failure_count`.
+ """
+ return self._errors
+
+
+class BatchDeleteAccountsResponse:
+ """Represents the results of a `delete_users()` call."""
+
+ def __init__(self, errors=None):
+ """Constructs a `BatchDeleteAccountsResponse` instance, corresponding to
+ the JSON representing the `BatchDeleteAccountsResponse` proto.
+
+ Args:
+ errors: List of dictionaries, with each dictionary representing an
+ `ErrorInfo` instance as returned by the server. `None` implies
+ an empty list.
+ """
+ self.errors = [ErrorInfo(err) for err in errors] if errors else []
+
+
+class ProviderUserInfo(UserInfo):
+ """Contains metadata regarding how a user is known by a particular identity provider."""
+
+ def __init__(self, data):
+ super(ProviderUserInfo, self).__init__()
+ if not isinstance(data, dict):
+ raise ValueError('Invalid data argument: {0}. Must be a dictionary.'.format(data))
+ if not data.get('rawId'):
+ raise ValueError('User ID must not be None or empty.')
+ self._data = data
+
+ @property
+ def uid(self):
+ return self._data.get('rawId')
+
+ @property
+ def display_name(self):
+ return self._data.get('displayName')
+
+ @property
+ def email(self):
+ return self._data.get('email')
+
+ @property
+ def phone_number(self):
+ return self._data.get('phoneNumber')
+
+ @property
+ def photo_url(self):
+ return self._data.get('photoUrl')
+
+ @property
+ def provider_id(self):
+ return self._data.get('providerId')
+
+
+class ActionCodeSettings:
+ """Contains required continue/state URL with optional Android and iOS settings.
+ Used when invoking the email action link generation APIs.
+ """
+
+ def __init__(self, url, handle_code_in_app=None, dynamic_link_domain=None, ios_bundle_id=None,
+ android_package_name=None, android_install_app=None, android_minimum_version=None):
+ self.url = url
+ self.handle_code_in_app = handle_code_in_app
+ self.dynamic_link_domain = dynamic_link_domain
+ self.ios_bundle_id = ios_bundle_id
+ self.android_package_name = android_package_name
+ self.android_install_app = android_install_app
+ self.android_minimum_version = android_minimum_version
+
+
+def encode_action_code_settings(settings):
+    """Validates the provided action code settings for email link generation and
+    populates the REST API parameters.
+
+    settings - ``ActionCodeSettings`` object provided to be encoded
+    returns - dict of parameters to be passed for link generation.
+ """
+
+ parameters = {}
+ # url
+ if not settings.url:
+ raise ValueError("Dynamic action links url is mandatory")
+
+ try:
+ parsed = parse.urlparse(settings.url)
+ if not parsed.netloc:
+ raise ValueError('Malformed dynamic action links url: "{0}".'.format(settings.url))
+ parameters['continueUrl'] = settings.url
+ except Exception:
+ raise ValueError('Malformed dynamic action links url: "{0}".'.format(settings.url))
+
+ # handle_code_in_app
+ if settings.handle_code_in_app is not None:
+ if not isinstance(settings.handle_code_in_app, bool):
+ raise ValueError('Invalid value provided for handle_code_in_app: {0}'
+ .format(settings.handle_code_in_app))
+ parameters['canHandleCodeInApp'] = settings.handle_code_in_app
+
+ # dynamic_link_domain
+ if settings.dynamic_link_domain is not None:
+ if not isinstance(settings.dynamic_link_domain, str):
+ raise ValueError('Invalid value provided for dynamic_link_domain: {0}'
+ .format(settings.dynamic_link_domain))
+ parameters['dynamicLinkDomain'] = settings.dynamic_link_domain
+
+ # ios_bundle_id
+ if settings.ios_bundle_id is not None:
+ if not isinstance(settings.ios_bundle_id, str):
+ raise ValueError('Invalid value provided for ios_bundle_id: {0}'
+ .format(settings.ios_bundle_id))
+ parameters['iosBundleId'] = settings.ios_bundle_id
+
+ # android_* attributes
+ if (settings.android_minimum_version or settings.android_install_app) \
+ and not settings.android_package_name:
+ raise ValueError("Android package name is required when specifying other Android settings")
+
+ if settings.android_package_name is not None:
+ if not isinstance(settings.android_package_name, str):
+ raise ValueError('Invalid value provided for android_package_name: {0}'
+ .format(settings.android_package_name))
+ parameters['androidPackageName'] = settings.android_package_name
+
+ if settings.android_minimum_version is not None:
+ if not isinstance(settings.android_minimum_version, str):
+ raise ValueError('Invalid value provided for android_minimum_version: {0}'
+ .format(settings.android_minimum_version))
+ parameters['androidMinimumVersion'] = settings.android_minimum_version
+
+ if settings.android_install_app is not None:
+ if not isinstance(settings.android_install_app, bool):
+ raise ValueError('Invalid value provided for android_install_app: {0}'
+ .format(settings.android_install_app))
+ parameters['androidInstallApp'] = settings.android_install_app
+
+ return parameters
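+
+
+# Illustrative usage (a sketch; the URL and package names are hypothetical):
+# ``ActionCodeSettings`` instances are passed to the public email action link
+# APIs such as ``auth.generate_password_reset_link()``.
+#
+#   from firebase_admin import auth
+#
+#   settings = auth.ActionCodeSettings(
+#       url='https://www.example.com/checkout',
+#       handle_code_in_app=True,
+#       ios_bundle_id='com.example.ios',
+#       android_package_name='com.example.android',
+#       android_install_app=True,
+#       android_minimum_version='12')
+#   link = auth.generate_password_reset_link('user@example.com', settings)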
+
+
+class UserManager:
+ """Provides methods for interacting with the Google Identity Toolkit."""
+
+ ID_TOOLKIT_URL = 'https://identitytoolkit.googleapis.com/v1'
+
+ def __init__(self, http_client, project_id, tenant_id=None):
+ self.http_client = http_client
+ self.base_url = '{0}/projects/{1}'.format(self.ID_TOOLKIT_URL, project_id)
+ if tenant_id:
+ self.base_url += '/tenants/{0}'.format(tenant_id)
+
+ def get_user(self, **kwargs):
+ """Gets the user data corresponding to the provided key."""
+ if 'uid' in kwargs:
+ key, key_type = kwargs.pop('uid'), 'user ID'
+ payload = {'localId' : [_auth_utils.validate_uid(key, required=True)]}
+ elif 'email' in kwargs:
+ key, key_type = kwargs.pop('email'), 'email'
+ payload = {'email' : [_auth_utils.validate_email(key, required=True)]}
+ elif 'phone_number' in kwargs:
+ key, key_type = kwargs.pop('phone_number'), 'phone number'
+ payload = {'phoneNumber' : [_auth_utils.validate_phone(key, required=True)]}
+ else:
+ raise TypeError('Unsupported keyword arguments: {0}.'.format(kwargs))
+
+ body, http_resp = self._make_request('post', '/accounts:lookup', json=payload)
+ if not body or not body.get('users'):
+ raise _auth_utils.UserNotFoundError(
+ 'No user record found for the provided {0}: {1}.'.format(key_type, key),
+ http_response=http_resp)
+ return body['users'][0]
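+
+    # Illustrative usage (a sketch): single-user lookups go through the public
+    # ``firebase_admin.auth`` helpers, which call get_user() above.
+    #
+    #   from firebase_admin import auth
+    #
+    #   user = auth.get_user('some-uid')
+    #   user = auth.get_user_by_email('user@example.com')
+    #   user = auth.get_user_by_phone_number('+15555550001')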
+
+ def get_users(self, identifiers):
+ """Looks up multiple users by their identifiers (uid, email, etc.)
+
+ Args:
+ identifiers: UserIdentifier[]: The identifiers indicating the user
+ to be looked up. Must have <= 100 entries.
+
+ Returns:
+ list[dict[string, string]]: List of dicts representing the JSON
+ `UserInfo` responses from the server.
+
+ Raises:
+ ValueError: If any of the identifiers are invalid or if more than
+ 100 identifiers are specified.
+ UnexpectedResponseError: If the backend server responds with an
+ unexpected message.
+ """
+ if not identifiers:
+ return []
+ if len(identifiers) > 100:
+ raise ValueError('`identifiers` parameter must have <= 100 entries.')
+
+ payload = defaultdict(list)
+ for identifier in identifiers:
+ if isinstance(identifier, _user_identifier.UidIdentifier):
+ payload['localId'].append(identifier.uid)
+ elif isinstance(identifier, _user_identifier.EmailIdentifier):
+ payload['email'].append(identifier.email)
+ elif isinstance(identifier, _user_identifier.PhoneIdentifier):
+ payload['phoneNumber'].append(identifier.phone_number)
+ elif isinstance(identifier, _user_identifier.ProviderIdentifier):
+ payload['federatedUserId'].append({
+ 'providerId': identifier.provider_id,
+ 'rawId': identifier.provider_uid
+ })
+ else:
+ raise ValueError(
+ 'Invalid entry in "identifiers" list. Unsupported type: {}'
+ .format(type(identifier)))
+
+ body, http_resp = self._make_request(
+ 'post', '/accounts:lookup', json=payload)
+ if not http_resp.ok:
+ raise _auth_utils.UnexpectedResponseError(
+ 'Failed to get users.', http_response=http_resp)
+ return body.get('users', [])
+
+ def list_users(self, page_token=None, max_results=MAX_LIST_USERS_RESULTS):
+ """Retrieves a batch of users."""
+ if page_token is not None:
+ if not isinstance(page_token, str) or not page_token:
+ raise ValueError('Page token must be a non-empty string.')
+ if not isinstance(max_results, int):
+ raise ValueError('Max results must be an integer.')
+ if max_results < 1 or max_results > MAX_LIST_USERS_RESULTS:
+ raise ValueError(
+                'Max results must be a positive integer not greater than '
+ '{0}.'.format(MAX_LIST_USERS_RESULTS))
+
+ payload = {'maxResults': max_results}
+ if page_token:
+ payload['nextPageToken'] = page_token
+ body, _ = self._make_request('get', '/accounts:batchGet', params=payload)
+ return body
+
+ def create_user(self, uid=None, display_name=None, email=None, phone_number=None,
+ photo_url=None, password=None, disabled=None, email_verified=None):
+ """Creates a new user account with the specified properties."""
+ payload = {
+ 'localId': _auth_utils.validate_uid(uid),
+ 'displayName': _auth_utils.validate_display_name(display_name),
+ 'email': _auth_utils.validate_email(email),
+ 'phoneNumber': _auth_utils.validate_phone(phone_number),
+ 'photoUrl': _auth_utils.validate_photo_url(photo_url),
+ 'password': _auth_utils.validate_password(password),
+ 'emailVerified': bool(email_verified) if email_verified is not None else None,
+ 'disabled': bool(disabled) if disabled is not None else None,
+ }
+ payload = {k: v for k, v in payload.items() if v is not None}
+ body, http_resp = self._make_request('post', '/accounts', json=payload)
+ if not body or not body.get('localId'):
+ raise _auth_utils.UnexpectedResponseError(
+ 'Failed to create new user.', http_response=http_resp)
+ return body.get('localId')
+
+ def update_user(self, uid, display_name=None, email=None, phone_number=None,
+ photo_url=None, password=None, disabled=None, email_verified=None,
+ valid_since=None, custom_claims=None):
+        """Updates an existing user account with the specified properties."""
+ payload = {
+ 'localId': _auth_utils.validate_uid(uid, required=True),
+ 'email': _auth_utils.validate_email(email),
+ 'password': _auth_utils.validate_password(password),
+ 'validSince': _auth_utils.validate_timestamp(valid_since, 'valid_since'),
+ 'emailVerified': bool(email_verified) if email_verified is not None else None,
+ 'disableUser': bool(disabled) if disabled is not None else None,
+ }
+
+ remove = []
+ if display_name is not None:
+ if display_name is DELETE_ATTRIBUTE:
+ remove.append('DISPLAY_NAME')
+ else:
+ payload['displayName'] = _auth_utils.validate_display_name(display_name)
+ if photo_url is not None:
+ if photo_url is DELETE_ATTRIBUTE:
+ remove.append('PHOTO_URL')
+ else:
+ payload['photoUrl'] = _auth_utils.validate_photo_url(photo_url)
+ if remove:
+ payload['deleteAttribute'] = remove
+
+ if phone_number is not None:
+ if phone_number is DELETE_ATTRIBUTE:
+ payload['deleteProvider'] = ['phone']
+ else:
+ payload['phoneNumber'] = _auth_utils.validate_phone(phone_number)
+
+ if custom_claims is not None:
+ if custom_claims is DELETE_ATTRIBUTE:
+ custom_claims = {}
+ json_claims = json.dumps(custom_claims) if isinstance(
+ custom_claims, dict) else custom_claims
+ payload['customAttributes'] = _auth_utils.validate_custom_claims(json_claims)
+
+ payload = {k: v for k, v in payload.items() if v is not None}
+ body, http_resp = self._make_request('post', '/accounts:update', json=payload)
+ if not body or not body.get('localId'):
+ raise _auth_utils.UnexpectedResponseError(
+ 'Failed to update user: {0}.'.format(uid), http_response=http_resp)
+ return body.get('localId')
+
+ def delete_user(self, uid):
+ """Deletes the user identified by the specified user ID."""
+ _auth_utils.validate_uid(uid, required=True)
+ body, http_resp = self._make_request('post', '/accounts:delete', json={'localId' : uid})
+ if not body or not body.get('kind'):
+ raise _auth_utils.UnexpectedResponseError(
+ 'Failed to delete user: {0}.'.format(uid), http_response=http_resp)
+
+ def delete_users(self, uids, force_delete=False):
+ """Deletes the users identified by the specified user ids.
+
+ Args:
+ uids: A list of strings indicating the uids of the users to be deleted.
+ Must have <= 1000 entries.
+ force_delete: Optional parameter that indicates if users should be
+ deleted, even if they're not disabled. Defaults to False.
+
+ Returns:
+ BatchDeleteAccountsResponse: Server's proto response, wrapped in a
+ python object.
+
+ Raises:
+ ValueError: If any of the identifiers are invalid or if more than 1000
+ identifiers are specified.
+ UnexpectedResponseError: If the backend server responds with an
+ unexpected message.
+ """
+ if not uids:
+ return BatchDeleteAccountsResponse()
+
+ if len(uids) > 1000:
+            raise ValueError("`uids` parameter must have <= 1000 entries.")
+ for uid in uids:
+ _auth_utils.validate_uid(uid, required=True)
+
+ body, http_resp = self._make_request('post', '/accounts:batchDelete',
+ json={'localIds': uids, 'force': force_delete})
+ if not isinstance(body, dict):
+ raise _auth_utils.UnexpectedResponseError(
+ 'Unexpected response from server while attempting to delete users.',
+ http_response=http_resp)
+ return BatchDeleteAccountsResponse(body.get('errors', []))
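+
+    # Illustrative usage (a sketch): bulk deletion is normally invoked through
+    # the public ``auth.delete_users()`` API, which wraps this method.
+    #
+    #   from firebase_admin import auth
+    #
+    #   result = auth.delete_users(['uid1', 'uid2', 'uid3'])
+    #   print('deleted:', result.success_count, 'failed:', result.failure_count)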
+
+ def import_users(self, users, hash_alg=None):
+ """Imports the given list of users to Firebase Auth."""
+ try:
+ if not users or len(users) > MAX_IMPORT_USERS_SIZE:
+ raise ValueError(
+ 'Users must be a non-empty list with no more than {0} elements.'.format(
+ MAX_IMPORT_USERS_SIZE))
+ if any([not isinstance(u, _user_import.ImportUserRecord) for u in users]):
+ raise ValueError('One or more user objects are invalid.')
+ except TypeError:
+ raise ValueError('users must be iterable')
+
+ payload = {'users': [u.to_dict() for u in users]}
+ if any(['passwordHash' in u for u in payload['users']]):
+ if not isinstance(hash_alg, _user_import.UserImportHash):
+ raise ValueError('A UserImportHash is required to import users with passwords.')
+ payload.update(hash_alg.to_dict())
+ body, http_resp = self._make_request('post', '/accounts:batchCreate', json=payload)
+ if not isinstance(body, dict):
+ raise _auth_utils.UnexpectedResponseError(
+ 'Failed to import users.', http_response=http_resp)
+ return body
+
+ def generate_email_action_link(self, action_type, email, action_code_settings=None):
+        """Fetches the email action link for the specified action type.
+
+ Args:
+ action_type: String. Valid values ['VERIFY_EMAIL', 'EMAIL_SIGNIN', 'PASSWORD_RESET']
+ email: Email of the user for which the action is performed
+ action_code_settings: ``ActionCodeSettings`` object or dict (optional). Defines whether
+ the link is to be handled by a mobile app and the additional state information to be
+ passed in the deep link, etc.
+ Returns:
+ link_url: action url to be emailed to the user
+
+ Raises:
+ UnexpectedResponseError: If the backend server responds with an unexpected message
+ FirebaseError: If an error occurs while generating the link
+ ValueError: If the provided arguments are invalid
+ """
+ payload = {
+ 'requestType': _auth_utils.validate_action_type(action_type),
+ 'email': _auth_utils.validate_email(email),
+ 'returnOobLink': True
+ }
+
+ if action_code_settings:
+ payload.update(encode_action_code_settings(action_code_settings))
+
+ body, http_resp = self._make_request('post', '/accounts:sendOobCode', json=payload)
+ if not body or not body.get('oobLink'):
+ raise _auth_utils.UnexpectedResponseError(
+ 'Failed to generate email action link.', http_response=http_resp)
+ return body.get('oobLink')
+
+ def _make_request(self, method, path, **kwargs):
+ url = '{0}{1}'.format(self.base_url, path)
+ try:
+ return self.http_client.body_and_response(method, url, **kwargs)
+ except requests.exceptions.RequestException as error:
+ raise _auth_utils.handle_auth_backend_error(error)
+
+
+class _UserIterator(_auth_utils.PageIterator):
+
+ @property
+ def items(self):
+ return self._current_page.users
diff --git a/venv/Lib/site-packages/firebase_admin/_utils.py b/venv/Lib/site-packages/firebase_admin/_utils.py
new file mode 100644
index 000000000..a5fc8d022
--- /dev/null
+++ b/venv/Lib/site-packages/firebase_admin/_utils.py
@@ -0,0 +1,341 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Internal utilities common to all modules."""
+
+import io
+import json
+import socket
+
+import googleapiclient
+import httplib2
+import requests
+
+import firebase_admin
+from firebase_admin import exceptions
+
+
+_ERROR_CODE_TO_EXCEPTION_TYPE = {
+ exceptions.INVALID_ARGUMENT: exceptions.InvalidArgumentError,
+ exceptions.FAILED_PRECONDITION: exceptions.FailedPreconditionError,
+ exceptions.OUT_OF_RANGE: exceptions.OutOfRangeError,
+ exceptions.UNAUTHENTICATED: exceptions.UnauthenticatedError,
+ exceptions.PERMISSION_DENIED: exceptions.PermissionDeniedError,
+ exceptions.NOT_FOUND: exceptions.NotFoundError,
+ exceptions.ABORTED: exceptions.AbortedError,
+ exceptions.ALREADY_EXISTS: exceptions.AlreadyExistsError,
+ exceptions.CONFLICT: exceptions.ConflictError,
+ exceptions.RESOURCE_EXHAUSTED: exceptions.ResourceExhaustedError,
+ exceptions.CANCELLED: exceptions.CancelledError,
+ exceptions.DATA_LOSS: exceptions.DataLossError,
+ exceptions.UNKNOWN: exceptions.UnknownError,
+ exceptions.INTERNAL: exceptions.InternalError,
+ exceptions.UNAVAILABLE: exceptions.UnavailableError,
+ exceptions.DEADLINE_EXCEEDED: exceptions.DeadlineExceededError,
+}
+
+
+_HTTP_STATUS_TO_ERROR_CODE = {
+ 400: exceptions.INVALID_ARGUMENT,
+ 401: exceptions.UNAUTHENTICATED,
+ 403: exceptions.PERMISSION_DENIED,
+ 404: exceptions.NOT_FOUND,
+ 409: exceptions.CONFLICT,
+ 412: exceptions.FAILED_PRECONDITION,
+ 429: exceptions.RESOURCE_EXHAUSTED,
+ 500: exceptions.INTERNAL,
+ 503: exceptions.UNAVAILABLE,
+}
+
+
+# See https://github.com/googleapis/googleapis/blob/master/google/rpc/code.proto
+_RPC_CODE_TO_ERROR_CODE = {
+ 1: exceptions.CANCELLED,
+ 2: exceptions.UNKNOWN,
+ 3: exceptions.INVALID_ARGUMENT,
+ 4: exceptions.DEADLINE_EXCEEDED,
+ 5: exceptions.NOT_FOUND,
+ 6: exceptions.ALREADY_EXISTS,
+ 7: exceptions.PERMISSION_DENIED,
+ 8: exceptions.RESOURCE_EXHAUSTED,
+ 9: exceptions.FAILED_PRECONDITION,
+ 10: exceptions.ABORTED,
+ 11: exceptions.OUT_OF_RANGE,
+ 13: exceptions.INTERNAL,
+ 14: exceptions.UNAVAILABLE,
+ 15: exceptions.DATA_LOSS,
+ 16: exceptions.UNAUTHENTICATED,
+}
+
+
+def _get_initialized_app(app):
+ """Returns a reference to an initialized App instance."""
+ if app is None:
+ return firebase_admin.get_app()
+
+ if isinstance(app, firebase_admin.App):
+ initialized_app = firebase_admin.get_app(app.name)
+ if app is not initialized_app:
+ raise ValueError('Illegal app argument. App instance not '
+ 'initialized via the firebase module.')
+ return app
+
+ raise ValueError('Illegal app argument. Argument must be of type '
+ 'firebase_admin.App, but given "{0}".'.format(type(app)))
+
+
+def get_app_service(app, name, initializer):
+ app = _get_initialized_app(app)
+ return app._get_service(name, initializer) # pylint: disable=protected-access
+
+
+def handle_platform_error_from_requests(error, handle_func=None):
+ """Constructs a ``FirebaseError`` from the given requests error.
+
+ This can be used to handle errors returned by Google Cloud Platform (GCP) APIs.
+
+ Args:
+ error: An error raised by the requests module while making an HTTP call to a GCP API.
+ handle_func: A function that can be used to handle platform errors in a custom way. When
+ specified, this function will be called with three arguments. It has the same
+ signature as ``_handle_func_requests``, but may return ``None``.
+
+ Returns:
+ FirebaseError: A ``FirebaseError`` that can be raised to the user code.
+ """
+ if error.response is None:
+ return handle_requests_error(error)
+
+ response = error.response
+ content = response.content.decode()
+ status_code = response.status_code
+ error_dict, message = _parse_platform_error(content, status_code)
+ exc = None
+ if handle_func:
+ exc = handle_func(error, message, error_dict)
+
+ return exc if exc else _handle_func_requests(error, message, error_dict)
+
+
+def handle_operation_error(error):
+ """Constructs a ``FirebaseError`` from the given operation error.
+
+ Args:
+ error: An error returned by a long running operation.
+
+ Returns:
+ FirebaseError: A ``FirebaseError`` that can be raised to the user code.
+ """
+ if not isinstance(error, dict):
+ return exceptions.UnknownError(
+ message='Unknown error while making a remote service call: {0}'.format(error),
+ cause=error)
+
+ rpc_code = error.get('code')
+ message = error.get('message')
+ error_code = _rpc_code_to_error_code(rpc_code)
+ err_type = _error_code_to_exception_type(error_code)
+ return err_type(message=message)
+
+
+def _handle_func_requests(error, message, error_dict):
+ """Constructs a ``FirebaseError`` from the given GCP error.
+
+ Args:
+ error: An error raised by the requests module while making an HTTP call.
+ message: A message to be included in the resulting ``FirebaseError``.
+ error_dict: Parsed GCP error response.
+
+ Returns:
+ FirebaseError: A ``FirebaseError`` that can be raised to the user code or None.
+ """
+ code = error_dict.get('status')
+ return handle_requests_error(error, message, code)
+
+
+def handle_requests_error(error, message=None, code=None):
+ """Constructs a ``FirebaseError`` from the given requests error.
+
+ This method is agnostic of the remote service that produced the error, whether it is a GCP
+ service or otherwise. Therefore, this method does not attempt to parse the error response in
+ any way.
+
+ Args:
+ error: An error raised by the requests module while making an HTTP call.
+ message: A message to be included in the resulting ``FirebaseError`` (optional). If not
+ specified the string representation of the ``error`` argument is used as the message.
+ code: A GCP error code that will be used to determine the resulting error type (optional).
+ If not specified the HTTP status code on the error response is used to determine a
+ suitable error code.
+
+ Returns:
+ FirebaseError: A ``FirebaseError`` that can be raised to the user code.
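+
+ Example (a sketch of the mapping behaviour, using only this module's imports):
+
+ >>> err = handle_requests_error(requests.exceptions.Timeout('timed out'))
+ >>> isinstance(err, exceptions.DeadlineExceededError)
+ True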
+ """
+ if isinstance(error, requests.exceptions.Timeout):
+ return exceptions.DeadlineExceededError(
+ message='Timed out while making an API call: {0}'.format(error),
+ cause=error)
+ if isinstance(error, requests.exceptions.ConnectionError):
+ return exceptions.UnavailableError(
+ message='Failed to establish a connection: {0}'.format(error),
+ cause=error)
+ if error.response is None:
+ return exceptions.UnknownError(
+ message='Unknown error while making a remote service call: {0}'.format(error),
+ cause=error)
+
+ if not code:
+ code = _http_status_to_error_code(error.response.status_code)
+ if not message:
+ message = str(error)
+
+ err_type = _error_code_to_exception_type(code)
+ return err_type(message=message, cause=error, http_response=error.response)
+
+
+def handle_platform_error_from_googleapiclient(error, handle_func=None):
+ """Constructs a ``FirebaseError`` from the given googleapiclient error.
+
+ This can be used to handle errors returned by Google Cloud Platform (GCP) APIs.
+
+ Args:
+ error: An error raised by the googleapiclient while making an HTTP call to a GCP API.
+ handle_func: A function that can be used to handle platform errors in a custom way. When
+ specified, this function will be called with three arguments. It has the same
+ signature as ``_handle_func_googleapiclient``, but may return ``None``.
+
+ Returns:
+ FirebaseError: A ``FirebaseError`` that can be raised to the user code.
+ """
+ if not isinstance(error, googleapiclient.errors.HttpError):
+ return handle_googleapiclient_error(error)
+
+ content = error.content.decode()
+ status_code = error.resp.status
+ error_dict, message = _parse_platform_error(content, status_code)
+ http_response = _http_response_from_googleapiclient_error(error)
+ exc = None
+ if handle_func:
+ exc = handle_func(error, message, error_dict, http_response)
+
+ return exc if exc else _handle_func_googleapiclient(error, message, error_dict, http_response)
+
+
+def _handle_func_googleapiclient(error, message, error_dict, http_response):
+ """Constructs a ``FirebaseError`` from the given GCP error.
+
+ Args:
+ error: An error raised by the googleapiclient module while making an HTTP call.
+ message: A message to be included in the resulting ``FirebaseError``.
+ error_dict: Parsed GCP error response.
+ http_response: A requests HTTP response object to associate with the exception.
+
+ Returns:
+ FirebaseError: A ``FirebaseError`` that can be raised to the user code or None.
+ """
+ code = error_dict.get('status')
+ return handle_googleapiclient_error(error, message, code, http_response)
+
+
+def handle_googleapiclient_error(error, message=None, code=None, http_response=None):
+ """Constructs a ``FirebaseError`` from the given googleapiclient error.
+
+ This method is agnostic of the remote service that produced the error, whether it is a GCP
+ service or otherwise. Therefore, this method does not attempt to parse the error response in
+ any way.
+
+ Args:
+ error: An error raised by the googleapiclient module while making an HTTP call.
+ message: A message to be included in the resulting ``FirebaseError`` (optional). If not
+ specified the string representation of the ``error`` argument is used as the message.
+ code: A GCP error code that will be used to determine the resulting error type (optional).
+ If not specified the HTTP status code on the error response is used to determine a
+ suitable error code.
+ http_response: A requests HTTP response object to associate with the exception (optional).
+ If not specified, one will be created from the ``error``.
+
+ Returns:
+ FirebaseError: A ``FirebaseError`` that can be raised to the user code.
+ """
+ if isinstance(error, socket.timeout) or (
+ isinstance(error, socket.error) and 'timed out' in str(error)):
+ return exceptions.DeadlineExceededError(
+ message='Timed out while making an API call: {0}'.format(error),
+ cause=error)
+ if isinstance(error, httplib2.ServerNotFoundError):
+ return exceptions.UnavailableError(
+ message='Failed to establish a connection: {0}'.format(error),
+ cause=error)
+ if not isinstance(error, googleapiclient.errors.HttpError):
+ return exceptions.UnknownError(
+ message='Unknown error while making a remote service call: {0}'.format(error),
+ cause=error)
+
+ if not code:
+ code = _http_status_to_error_code(error.resp.status)
+ if not message:
+ message = str(error)
+ if not http_response:
+ http_response = _http_response_from_googleapiclient_error(error)
+
+ err_type = _error_code_to_exception_type(code)
+ return err_type(message=message, cause=error, http_response=http_response)
+
+
+def _http_response_from_googleapiclient_error(error):
+ """Creates a requests HTTP Response object from the given googleapiclient error."""
+ resp = requests.models.Response()
+ resp.raw = io.BytesIO(error.content)
+ resp.status_code = error.resp.status
+ return resp
+
+
+def _http_status_to_error_code(status):
+ """Maps an HTTP status to a platform error code."""
+ return _HTTP_STATUS_TO_ERROR_CODE.get(status, exceptions.UNKNOWN)
+
+def _rpc_code_to_error_code(rpc_code):
+ """Maps an RPC code to a platform error code."""
+ return _RPC_CODE_TO_ERROR_CODE.get(rpc_code, exceptions.UNKNOWN)
+
+def _error_code_to_exception_type(code):
+ """Maps a platform error code to an exception type."""
+ return _ERROR_CODE_TO_EXCEPTION_TYPE.get(code, exceptions.UnknownError)
+
+
+def _parse_platform_error(content, status_code):
+ """Parses an HTTP error response from a Google Cloud Platform API and extracts the error code
+ and message fields.
+
+ Args:
+ content: Decoded content of the response body.
+ status_code: HTTP status code.
+
+ Returns:
+ tuple: A tuple containing error code and message.
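+
+ Example (a sketch with a hypothetical error payload):
+
+ >>> _parse_platform_error(
+ ... '{"error": {"status": "NOT_FOUND", "message": "No such user"}}', 404)
+ ({'status': 'NOT_FOUND', 'message': 'No such user'}, 'No such user')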
+ """
+ data = {}
+ try:
+ parsed_body = json.loads(content)
+ if isinstance(parsed_body, dict):
+ data = parsed_body
+ except ValueError:
+ pass
+
+ error_dict = data.get('error', {})
+ msg = error_dict.get('message')
+ if not msg:
+ msg = 'Unexpected HTTP response with status: {0}; body: {1}'.format(status_code, content)
+ return error_dict, msg
diff --git a/venv/Lib/site-packages/firebase_admin/auth.py b/venv/Lib/site-packages/firebase_admin/auth.py
new file mode 100644
index 000000000..5154bb495
--- /dev/null
+++ b/venv/Lib/site-packages/firebase_admin/auth.py
@@ -0,0 +1,883 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Firebase Authentication module.
+
+This module contains functions for minting and verifying JWTs used for
+authenticating against Firebase services. It also provides functions for
+creating and managing user accounts in Firebase projects.
+"""
+
+from firebase_admin import _auth_client
+from firebase_admin import _auth_providers
+from firebase_admin import _auth_utils
+from firebase_admin import _user_identifier
+from firebase_admin import _token_gen
+from firebase_admin import _user_import
+from firebase_admin import _user_mgt
+from firebase_admin import _utils
+
+
+_AUTH_ATTRIBUTE = '_auth'
+
+
+__all__ = [
+ 'ActionCodeSettings',
+ 'CertificateFetchError',
+ 'Client',
+ 'ConfigurationNotFoundError',
+ 'DELETE_ATTRIBUTE',
+ 'EmailAlreadyExistsError',
+ 'ErrorInfo',
+ 'ExpiredIdTokenError',
+ 'ExpiredSessionCookieError',
+ 'ExportedUserRecord',
+ 'DeleteUsersResult',
+ 'GetUsersResult',
+ 'ImportUserRecord',
+ 'InsufficientPermissionError',
+ 'InvalidDynamicLinkDomainError',
+ 'InvalidIdTokenError',
+ 'InvalidSessionCookieError',
+ 'ListProviderConfigsPage',
+ 'ListUsersPage',
+ 'OIDCProviderConfig',
+ 'PhoneNumberAlreadyExistsError',
+ 'ProviderConfig',
+ 'RevokedIdTokenError',
+ 'RevokedSessionCookieError',
+ 'SAMLProviderConfig',
+ 'TokenSignError',
+ 'UidAlreadyExistsError',
+ 'UnexpectedResponseError',
+ 'UserImportHash',
+ 'UserImportResult',
+ 'UserInfo',
+ 'UserMetadata',
+ 'UserNotFoundError',
+ 'UserProvider',
+ 'UserRecord',
+
+ 'UserIdentifier',
+ 'UidIdentifier',
+ 'EmailIdentifier',
+ 'PhoneIdentifier',
+ 'ProviderIdentifier',
+
+ 'create_custom_token',
+ 'create_oidc_provider_config',
+ 'create_saml_provider_config',
+ 'create_session_cookie',
+ 'create_user',
+ 'delete_oidc_provider_config',
+ 'delete_saml_provider_config',
+ 'delete_user',
+ 'delete_users',
+ 'generate_email_verification_link',
+ 'generate_password_reset_link',
+ 'generate_sign_in_with_email_link',
+ 'get_oidc_provider_config',
+ 'get_saml_provider_config',
+ 'get_user',
+ 'get_user_by_email',
+ 'get_user_by_phone_number',
+ 'get_users',
+ 'import_users',
+ 'list_oidc_provider_configs',
+ 'list_saml_provider_configs',
+ 'list_users',
+ 'revoke_refresh_tokens',
+ 'set_custom_user_claims',
+ 'update_oidc_provider_config',
+ 'update_saml_provider_config',
+ 'update_user',
+ 'verify_id_token',
+ 'verify_session_cookie',
+]
+
+ActionCodeSettings = _user_mgt.ActionCodeSettings
+CertificateFetchError = _token_gen.CertificateFetchError
+Client = _auth_client.Client
+ConfigurationNotFoundError = _auth_utils.ConfigurationNotFoundError
+DELETE_ATTRIBUTE = _user_mgt.DELETE_ATTRIBUTE
+DeleteUsersResult = _user_mgt.DeleteUsersResult
+EmailAlreadyExistsError = _auth_utils.EmailAlreadyExistsError
+ErrorInfo = _user_import.ErrorInfo
+ExpiredIdTokenError = _token_gen.ExpiredIdTokenError
+ExpiredSessionCookieError = _token_gen.ExpiredSessionCookieError
+ExportedUserRecord = _user_mgt.ExportedUserRecord
+GetUsersResult = _user_mgt.GetUsersResult
+ImportUserRecord = _user_import.ImportUserRecord
+InsufficientPermissionError = _auth_utils.InsufficientPermissionError
+InvalidDynamicLinkDomainError = _auth_utils.InvalidDynamicLinkDomainError
+InvalidIdTokenError = _auth_utils.InvalidIdTokenError
+InvalidSessionCookieError = _token_gen.InvalidSessionCookieError
+ListProviderConfigsPage = _auth_providers.ListProviderConfigsPage
+ListUsersPage = _user_mgt.ListUsersPage
+OIDCProviderConfig = _auth_providers.OIDCProviderConfig
+PhoneNumberAlreadyExistsError = _auth_utils.PhoneNumberAlreadyExistsError
+ProviderConfig = _auth_providers.ProviderConfig
+RevokedIdTokenError = _token_gen.RevokedIdTokenError
+RevokedSessionCookieError = _token_gen.RevokedSessionCookieError
+SAMLProviderConfig = _auth_providers.SAMLProviderConfig
+TokenSignError = _token_gen.TokenSignError
+UidAlreadyExistsError = _auth_utils.UidAlreadyExistsError
+UnexpectedResponseError = _auth_utils.UnexpectedResponseError
+UserImportHash = _user_import.UserImportHash
+UserImportResult = _user_import.UserImportResult
+UserInfo = _user_mgt.UserInfo
+UserMetadata = _user_mgt.UserMetadata
+UserNotFoundError = _auth_utils.UserNotFoundError
+UserProvider = _user_import.UserProvider
+UserRecord = _user_mgt.UserRecord
+
+UserIdentifier = _user_identifier.UserIdentifier
+UidIdentifier = _user_identifier.UidIdentifier
+EmailIdentifier = _user_identifier.EmailIdentifier
+PhoneIdentifier = _user_identifier.PhoneIdentifier
+ProviderIdentifier = _user_identifier.ProviderIdentifier
+
+
+def _get_client(app):
+ """Returns a client instance for an App.
+
+ If the App already has a client associated with it, simply returns
+ it. Otherwise creates a new client, and adds it to the App before
+ returning it.
+
+ Args:
+ app: A Firebase App instance (or ``None`` to use the default App).
+
+ Returns:
+ Client: A client for the specified App instance.
+
+ Raises:
+ ValueError: If the app argument is invalid.
+ """
+ return _utils.get_app_service(app, _AUTH_ATTRIBUTE, Client)
+
+
+def create_custom_token(uid, developer_claims=None, app=None):
+ """Builds and signs a Firebase custom auth token.
+
+ Args:
+ uid: ID of the user for whom the token is created.
+ developer_claims: A dictionary of claims to be included in the token
+ (optional).
+ app: An App instance (optional).
+
+ Returns:
+ bytes: A token minted from the input parameters.
+
+ Raises:
+ ValueError: If input parameters are invalid.
+ TokenSignError: If an error occurs while signing the token using the remote IAM service.
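+
+ Example (a minimal sketch; assumes the default app is initialized and
+ 'some-uid' is a placeholder user ID):
+
+ >>> token = auth.create_custom_token('some-uid', {'premium': True})
+ >>> isinstance(token, bytes)
+ True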
+ """
+ client = _get_client(app)
+ return client.create_custom_token(uid, developer_claims)
+
+
+def verify_id_token(id_token, app=None, check_revoked=False):
+ """Verifies the signature and data for the provided JWT.
+
+ Accepts a signed token string, and verifies that it is current, was issued
+ to this project, and was correctly signed by Google.
+
+ Args:
+ id_token: A string of the encoded JWT.
+ app: An App instance (optional).
+ check_revoked: Boolean, if true, checks whether the token has been revoked (optional).
+
+ Returns:
+ dict: A dictionary of key-value pairs parsed from the decoded JWT.
+
+ Raises:
+ ValueError: If ``id_token`` is a not a string or is empty.
+ InvalidIdTokenError: If ``id_token`` is not a valid Firebase ID token.
+ ExpiredIdTokenError: If the specified ID token has expired.
+ RevokedIdTokenError: If ``check_revoked`` is ``True`` and the ID token has been revoked.
+ CertificateFetchError: If an error occurs while fetching the public key certificates
+ required to verify the ID token.
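+
+ Example (a minimal sketch; ``id_token`` is assumed to be a token string
+ sent up by a client app):
+
+ >>> decoded = auth.verify_id_token(id_token)
+ >>> uid = decoded['uid']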
+ """
+ client = _get_client(app)
+ return client.verify_id_token(id_token, check_revoked=check_revoked)
+
+
+def create_session_cookie(id_token, expires_in, app=None):
+ """Creates a new Firebase session cookie from the given ID token and options.
+
+ The returned JWT can be set as a server-side session cookie with a custom cookie policy.
+
+ Args:
+ id_token: The Firebase ID token to exchange for a session cookie.
+ expires_in: Duration until the cookie is expired. This can be specified
+ as a numeric seconds value or a ``datetime.timedelta`` instance.
+ app: An App instance (optional).
+
+ Returns:
+ bytes: A session cookie generated from the input parameters.
+
+ Raises:
+ ValueError: If input parameters are invalid.
+ FirebaseError: If an error occurs while creating the cookie.
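+
+ Example (a minimal sketch; ``id_token`` is assumed to come from a freshly
+ signed-in client, and ``datetime`` to be imported):
+
+ >>> expires_in = datetime.timedelta(days=5)
+ >>> cookie = auth.create_session_cookie(id_token, expires_in=expires_in)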
+ """
+ client = _get_client(app)
+ # pylint: disable=protected-access
+ return client._token_generator.create_session_cookie(id_token, expires_in)
+
+
+def verify_session_cookie(session_cookie, check_revoked=False, app=None):
+ """Verifies a Firebase session cookie.
+
+ Accepts a session cookie string, and verifies that it is current, was issued
+ to this project, and was correctly signed by Google.
+
+ Args:
+ session_cookie: A session cookie string to verify.
+ check_revoked: Boolean, if true, checks whether the cookie has been revoked (optional).
+ app: An App instance (optional).
+
+ Returns:
+ dict: A dictionary of key-value pairs parsed from the decoded JWT.
+
+ Raises:
+ ValueError: If ``session_cookie`` is a not a string or is empty.
+ InvalidSessionCookieError: If ``session_cookie`` is not a valid Firebase session cookie.
+ ExpiredSessionCookieError: If the specified session cookie has expired.
+ RevokedSessionCookieError: If ``check_revoked`` is ``True`` and the cookie has been revoked.
+ CertificateFetchError: If an error occurs while fetching the public key certificates
+ required to verify the session cookie.
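+
+ Example (a minimal sketch; ``session_cookie`` is assumed to be the cookie
+ value read off an incoming request):
+
+ >>> claims = auth.verify_session_cookie(session_cookie, check_revoked=True)
+ >>> uid = claims['uid']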
+ """
+ client = _get_client(app)
+ # pylint: disable=protected-access
+ verified_claims = client._token_verifier.verify_session_cookie(session_cookie)
+ if check_revoked:
+ client._check_jwt_revoked(verified_claims, RevokedSessionCookieError, 'session cookie')
+ return verified_claims
+
+
+def revoke_refresh_tokens(uid, app=None):
+ """Revokes all refresh tokens for an existing user.
+
+ This function updates the user's ``tokens_valid_after_timestamp`` to the current UTC time,
+ in seconds since the epoch. It is important that the server on which this is called has its
+ clock set correctly and synchronized.
+
+ While this revokes all sessions for a specified user and disables any new ID tokens for
+ existing sessions from getting minted, existing ID tokens may remain active until their
+ natural expiration (one hour). To verify that ID tokens are revoked, use
+ ``verify_id_token(idToken, check_revoked=True)``.
+
+ Args:
+ uid: A user ID string.
+ app: An App instance (optional).
+
+ Raises:
+ ValueError: If the user ID is None, empty or malformed.
+ FirebaseError: If an error occurs while revoking the refresh token.
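+
+ Example (a minimal sketch; ``uid`` is assumed to refer to an existing user):
+
+ >>> auth.revoke_refresh_tokens(uid)
+ >>> user = auth.get_user(uid)
+ >>> # tokens_valid_after_timestamp is expressed in milliseconds.
+ >>> revoked_at_seconds = user.tokens_valid_after_timestamp / 1000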
+ """
+ client = _get_client(app)
+ client.revoke_refresh_tokens(uid)
+
+
+def get_user(uid, app=None):
+ """Gets the user data corresponding to the specified user ID.
+
+ Args:
+ uid: A user ID string.
+ app: An App instance (optional).
+
+ Returns:
+ UserRecord: A user record instance.
+
+ Raises:
+ ValueError: If the user ID is None, empty or malformed.
+ UserNotFoundError: If the specified user ID does not exist.
+ FirebaseError: If an error occurs while retrieving the user.
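+
+ Example (a minimal sketch; ``uid`` is assumed to refer to an existing user):
+
+ >>> user = auth.get_user(uid)
+ >>> print('Fetched user:', user.uid, user.email)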
+ """
+ client = _get_client(app)
+ return client.get_user(uid=uid)
+
+
+def get_user_by_email(email, app=None):
+ """Gets the user data corresponding to the specified user email.
+
+ Args:
+ email: A user email address string.
+ app: An App instance (optional).
+
+ Returns:
+ UserRecord: A user record instance.
+
+ Raises:
+ ValueError: If the email is None, empty or malformed.
+ UserNotFoundError: If no user exists by the specified email address.
+ FirebaseError: If an error occurs while retrieving the user.
+ """
+ client = _get_client(app)
+ return client.get_user_by_email(email=email)
+
+
+def get_user_by_phone_number(phone_number, app=None):
+ """Gets the user data corresponding to the specified phone number.
+
+ Args:
+ phone_number: A phone number string.
+ app: An App instance (optional).
+
+ Returns:
+ UserRecord: A user record instance.
+
+ Raises:
+ ValueError: If the phone number is None, empty or malformed.
+ UserNotFoundError: If no user exists by the specified phone number.
+ FirebaseError: If an error occurs while retrieving the user.
+ """
+ client = _get_client(app)
+ return client.get_user_by_phone_number(phone_number=phone_number)
+
+
+def get_users(identifiers, app=None):
+ """Gets the user data corresponding to the specified identifiers.
+
+ There are no ordering guarantees; in particular, the nth entry in the
+ result list is not guaranteed to correspond to the nth entry in the input
+ parameters list.
+
+ A maximum of 100 identifiers may be supplied. If more than 100
+ identifiers are supplied, this method raises a ``ValueError``.
+
+ Args:
+ identifiers (list[UserIdentifier]): A list of ``UserIdentifier``
+ instances used to indicate which user records should be returned.
+ Must have <= 100 entries.
+ app: An App instance (optional).
+
+ Returns:
+ GetUsersResult: A ``GetUsersResult`` instance corresponding to the
+ specified identifiers.
+
+ Raises:
+ ValueError: If any of the identifiers are invalid or if more than 100
+ identifiers are specified.
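+
+ Example (a minimal sketch with placeholder identifiers):
+
+ >>> result = auth.get_users([
+ ... auth.UidIdentifier('uid1'),
+ ... auth.EmailIdentifier('user@example.com')])
+ >>> found = [user.uid for user in result.users]
+ >>> missing = result.not_found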
+ """
+ client = _get_client(app)
+ return client.get_users(identifiers)
+
+
+def list_users(page_token=None, max_results=_user_mgt.MAX_LIST_USERS_RESULTS, app=None):
+ """Retrieves a page of user accounts from a Firebase project.
+
+ The ``page_token`` argument governs the starting point of the page. The ``max_results``
+ argument governs the maximum number of user accounts that may be included in the returned page.
+ This function never returns None. If there are no user accounts in the Firebase project, this
+ returns an empty page.
+
+ Args:
+ page_token: A non-empty page token string, which indicates the starting point of the page
+ (optional). Defaults to ``None``, which will retrieve the first page of users.
+ max_results: A positive integer indicating the maximum number of users to include in the
+ returned page (optional). Defaults to 1000, which is also the maximum number allowed.
+ app: An App instance (optional).
+
+ Returns:
+ ListUsersPage: A page of user accounts.
+
+ Raises:
+ ValueError: If ``max_results`` or ``page_token`` are invalid.
+ FirebaseError: If an error occurs while retrieving the user accounts.
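+
+ Example (a minimal sketch that walks every account, page by page; assumes
+ the default app is initialized):
+
+ >>> page = auth.list_users()
+ >>> for user in page.iterate_all():
+ ... print(user.uid)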
+ """
+ client = _get_client(app)
+ return client.list_users(page_token=page_token, max_results=max_results)
+
+
+def create_user(**kwargs): # pylint: disable=differing-param-doc
+ """Creates a new user account with the specified properties.
+
+ Args:
+ kwargs: A series of keyword arguments (optional).
+
+ Keyword Args:
+ uid: User ID to assign to the newly created user (optional).
+ display_name: The user's display name (optional).
+ email: The user's primary email (optional).
+ email_verified: A boolean indicating whether or not the user's primary email is
+ verified (optional).
+ phone_number: The user's primary phone number (optional).
+ photo_url: The user's photo URL (optional).
+ password: The user's raw, unhashed password (optional).
+ disabled: A boolean indicating whether or not the user account is disabled (optional).
+ app: An App instance (optional).
+
+ Returns:
+ UserRecord: A user record instance for the newly created user.
+
+ Raises:
+ ValueError: If the specified user properties are invalid.
+ FirebaseError: If an error occurs while creating the user account.
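+
+ Example (a minimal sketch with placeholder values):
+
+ >>> user = auth.create_user(
+ ... email='user@example.com', password='aSecretPassword',
+ ... display_name='Example User')
+ >>> print('Created user:', user.uid)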
+ """
+ app = kwargs.pop('app', None)
+ client = _get_client(app)
+ return client.create_user(**kwargs)
+
+
+def update_user(uid, **kwargs): # pylint: disable=differing-param-doc
+ """Updates an existing user account with the specified properties.
+
+ Args:
+ uid: A user ID string.
+ kwargs: A series of keyword arguments (optional).
+
+ Keyword Args:
+ display_name: The user's display name (optional). Can be removed by explicitly passing
+ ``auth.DELETE_ATTRIBUTE``.
+ email: The user's primary email (optional).
+ email_verified: A boolean indicating whether or not the user's primary email is
+ verified (optional).
+ phone_number: The user's primary phone number (optional). Can be removed by explicitly
+ passing ``auth.DELETE_ATTRIBUTE``.
+ photo_url: The user's photo URL (optional). Can be removed by explicitly passing
+ ``auth.DELETE_ATTRIBUTE``.
+ password: The user's raw, unhashed password (optional).
+ disabled: A boolean indicating whether or not the user account is disabled (optional).
+ custom_claims: A dictionary or a JSON string containing the custom claims to be set on the
+ user account (optional). To remove all custom claims, pass ``auth.DELETE_ATTRIBUTE``.
+ valid_since: An integer signifying the seconds since the epoch (optional). This field is
+ set by ``revoke_refresh_tokens`` and it is discouraged to set this field directly.
+ app: An App instance (optional).
+
+ Returns:
+ UserRecord: An updated user record instance for the user.
+
+ Raises:
+ ValueError: If the specified user ID or properties are invalid.
+ FirebaseError: If an error occurs while updating the user account.
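+
+ Example (a minimal sketch; ``uid`` is assumed to refer to an existing user,
+ and ``auth.DELETE_ATTRIBUTE`` removes a previously set property):
+
+ >>> user = auth.update_user(
+ ... uid, display_name='New Name', photo_url=auth.DELETE_ATTRIBUTE)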
+ """
+ app = kwargs.pop('app', None)
+ client = _get_client(app)
+ return client.update_user(uid, **kwargs)
+
+
+def set_custom_user_claims(uid, custom_claims, app=None):
+ """Sets additional claims on an existing user account.
+
+ Custom claims set via this function can be used to define user roles and privilege levels.
+ These claims propagate to all the devices where the user is already signed in (after token
+ expiration or when token refresh is forced), and next time the user signs in. The claims
+ can be accessed via the user's ID token JWT. If a reserved OIDC claim is specified (sub, iat,
+ iss, etc.), an error is thrown. The claims payload must also not be larger than 1000 characters
+ when serialized into a JSON string.
+
+ Args:
+ uid: A user ID string.
+ custom_claims: A dictionary or a JSON string of custom claims. Pass None to unset any
+ claims set previously.
+ app: An App instance (optional).
+
+ Raises:
+ ValueError: If the specified user ID or the custom claims are invalid.
+ FirebaseError: If an error occurs while updating the user account.
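+
+ Example (a minimal sketch granting a hypothetical admin role; the claim
+ appears in the user's ID token once it is refreshed):
+
+ >>> auth.set_custom_user_claims(uid, {'admin': True})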
+ """
+ client = _get_client(app)
+ client.set_custom_user_claims(uid, custom_claims=custom_claims)
+
+
+def delete_user(uid, app=None):
+ """Deletes the user identified by the specified user ID.
+
+ Args:
+ uid: A user ID string.
+ app: An App instance (optional).
+
+ Raises:
+ ValueError: If the user ID is None, empty or malformed.
+ FirebaseError: If an error occurs while deleting the user account.
+ """
+ client = _get_client(app)
+ client.delete_user(uid)
+
+
+def delete_users(uids, app=None):
+ """Deletes the users specified by the given identifiers.
+
+ Deleting a non-existing user does not generate an error (the method is
+ idempotent). Non-existing users are considered to be successfully deleted
+ and are therefore included in the ``DeleteUsersResult.success_count`` value.
+
+ A maximum of 1000 identifiers may be supplied. If more than 1000
+ identifiers are supplied, this method raises a ``ValueError``.
+
+ Args:
+ uids: A list of strings indicating the uids of the users to be deleted.
+ Must have <= 1000 entries.
+ app: An App instance (optional).
+
+ Returns:
+ DeleteUsersResult: The total number of successful/failed deletions, as
+ well as the array of errors that correspond to the failed deletions.
+
+ Raises:
+ ValueError: If any of the identifiers are invalid or if more than 1000
+ identifiers are specified.
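+
+ Example (a minimal sketch with placeholder UIDs):
+
+ >>> result = auth.delete_users(['uid1', 'uid2'])
+ >>> print('{0} succeeded, {1} failed'.format(
+ ... result.success_count, result.failure_count))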
+ """
+ client = _get_client(app)
+ return client.delete_users(uids)
+
+
+def import_users(users, hash_alg=None, app=None):
+ """Imports the specified list of users into Firebase Auth.
+
+ At most 1000 users can be imported at a time. This operation is optimized for bulk imports and
+ will ignore checks on identifier uniqueness which could result in duplications. The
+ ``hash_alg`` parameter must be specified when importing users with passwords. Refer to the
+ ``UserImportHash`` class for supported hash algorithms.
+
+ Args:
+ users: A list of ``ImportUserRecord`` instances to import. Length of the list must not
+ exceed 1000.
+ hash_alg: A ``UserImportHash`` object (optional). Required when importing users with
+ passwords.
+ app: An App instance (optional).
+
+ Returns:
+ UserImportResult: An object summarizing the result of the import operation.
+
+ Raises:
+ ValueError: If the provided arguments are invalid.
+ FirebaseError: If an error occurs while importing users.
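+
+ Example (a minimal sketch importing two password-less records; a
+ ``UserImportHash`` would be required if password hashes were included):
+
+ >>> records = [
+ ... auth.ImportUserRecord(uid='user1', email='user1@example.com'),
+ ... auth.ImportUserRecord(uid='user2', email='user2@example.com')]
+ >>> result = auth.import_users(records)
+ >>> print('{0} imported, {1} failed'.format(
+ ... result.success_count, result.failure_count))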
+ """
+ client = _get_client(app)
+ return client.import_users(users, hash_alg)
+
+
+def generate_password_reset_link(email, action_code_settings=None, app=None):
+ """Generates the out-of-band email action link for password reset flows for the specified email
+ address.
+
+ Args:
+ email: The email of the user whose password is to be reset.
+ action_code_settings: ``ActionCodeSettings`` instance (optional). Defines whether
+ the link is to be handled by a mobile app and the additional state information to be
+ passed in the deep link.
+ app: An App instance (optional).
+
+ Returns:
+ link: The password reset link created by the API.
+
+ Raises:
+ ValueError: If the provided arguments are invalid
+ FirebaseError: If an error occurs while generating the link
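+
+ Example (a minimal sketch with a placeholder address; the returned link is
+ typically delivered through a custom email pipeline):
+
+ >>> link = auth.generate_password_reset_link('user@example.com')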
+ """
+ client = _get_client(app)
+ return client.generate_password_reset_link(email, action_code_settings=action_code_settings)
+
+
+def generate_email_verification_link(email, action_code_settings=None, app=None):
+ """Generates the out-of-band email action link for email verification flows for the specified
+ email address.
+
+ Args:
+ email: The email of the user to be verified.
+ action_code_settings: ``ActionCodeSettings`` instance (optional). Defines whether
+ the link is to be handled by a mobile app and the additional state information to be
+ passed in the deep link.
+ app: An App instance (optional).
+
+ Returns:
+ link: The email verification link created by the API.
+
+ Raises:
+ ValueError: If the provided arguments are invalid
+ FirebaseError: If an error occurs while generating the link
+ """
+ client = _get_client(app)
+ return client.generate_email_verification_link(
+ email, action_code_settings=action_code_settings)
+
+
+def generate_sign_in_with_email_link(email, action_code_settings, app=None):
+ """Generates the out-of-band email action link for email link sign-in flows, using the action
+ code settings provided.
+
+ Args:
+ email: The email of the user signing in.
+ action_code_settings: ``ActionCodeSettings`` instance. Defines whether
+ the link is to be handled by a mobile app and the additional state information to be
+ passed in the deep link.
+ app: An App instance (optional).
+
+ Returns:
+ link: The email sign-in link created by the API.
+
+ Raises:
+ ValueError: If the provided arguments are invalid
+ FirebaseError: If an error occurs while generating the link
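+
+ Example (a minimal sketch with placeholder settings):
+
+ >>> settings = auth.ActionCodeSettings(
+ ... url='https://www.example.com/finishSignIn', handle_code_in_app=True)
+ >>> link = auth.generate_sign_in_with_email_link('user@example.com', settings)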
+ """
+ client = _get_client(app)
+ return client.generate_sign_in_with_email_link(
+ email, action_code_settings=action_code_settings)
+
+
+def get_oidc_provider_config(provider_id, app=None):
+ """Returns the ``OIDCProviderConfig`` with the given ID.
+
+ Args:
+ provider_id: Provider ID string.
+ app: An App instance (optional).
+
+ Returns:
+ OIDCProviderConfig: An OIDC provider config instance.
+
+ Raises:
+ ValueError: If the provider ID is invalid, empty or does not have ``oidc.`` prefix.
+ ConfigurationNotFoundError: If no OIDC provider is available with the given identifier.
+ FirebaseError: If an error occurs while retrieving the OIDC provider.
+ """
+ client = _get_client(app)
+ return client.get_oidc_provider_config(provider_id)
+
+
+def create_oidc_provider_config(
+ provider_id, client_id, issuer, display_name=None, enabled=None, app=None):
+ """Creates a new OIDC provider config from the given parameters.
+
+ OIDC provider support requires Google Cloud's Identity Platform (GCIP). To learn more about
+ GCIP, including pricing and features, see https://cloud.google.com/identity-platform.
+
+ Args:
+ provider_id: Provider ID string. Must have the prefix ``oidc.``.
+ client_id: Client ID of the new config.
+ issuer: Issuer of the new config. Must be a valid URL.
+ display_name: The user-friendly display name for the current configuration (optional).
+ This name is also used as the provider label in the Cloud Console.
+ enabled: A boolean indicating whether the provider configuration is enabled or disabled
+ (optional). A user cannot sign in using a disabled provider.
+ app: An App instance (optional).
+
+ Returns:
+ OIDCProviderConfig: The newly created OIDC provider config instance.
+
+ Raises:
+ ValueError: If any of the specified input parameters are invalid.
+ FirebaseError: If an error occurs while creating the new OIDC provider config.
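+
+ Example (a minimal sketch with placeholder values; requires a GCIP-enabled
+ project):
+
+ >>> config = auth.create_oidc_provider_config(
+ ... 'oidc.example-provider', client_id='some-client-id',
+ ... issuer='https://oidc.example.com', display_name='Example OIDC')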
+ """
+ client = _get_client(app)
+ return client.create_oidc_provider_config(
+ provider_id, client_id=client_id, issuer=issuer, display_name=display_name,
+ enabled=enabled)
+
+
+def update_oidc_provider_config(
+ provider_id, client_id=None, issuer=None, display_name=None, enabled=None, app=None):
+ """Updates an existing OIDC provider config with the given parameters.
+
+ Args:
+ provider_id: Provider ID string. Must have the prefix ``oidc.``.
+ client_id: Client ID of the new config (optional).
+ issuer: Issuer of the new config (optional). Must be a valid URL.
+ display_name: The user-friendly display name of the current configuration (optional).
+ Pass ``auth.DELETE_ATTRIBUTE`` to delete the current display name.
+ enabled: A boolean indicating whether the provider configuration is enabled or disabled
+ (optional).
+ app: An App instance (optional).
+
+ Returns:
+ OIDCProviderConfig: The updated OIDC provider config instance.
+
+ Raises:
+ ValueError: If any of the specified input parameters are invalid.
+ FirebaseError: If an error occurs while updating the OIDC provider config.
+ """
+ client = _get_client(app)
+ return client.update_oidc_provider_config(
+ provider_id, client_id=client_id, issuer=issuer, display_name=display_name,
+ enabled=enabled)
+
+
+def delete_oidc_provider_config(provider_id, app=None):
+ """Deletes the ``OIDCProviderConfig`` with the given ID.
+
+ Args:
+ provider_id: Provider ID string.
+ app: An App instance (optional).
+
+ Raises:
+ ValueError: If the provider ID is invalid, empty or does not have ``oidc.`` prefix.
+ ConfigurationNotFoundError: If no OIDC provider is available with the given identifier.
+ FirebaseError: If an error occurs while deleting the OIDC provider.
+ """
+ client = _get_client(app)
+ client.delete_oidc_provider_config(provider_id)
+
+
+def list_oidc_provider_configs(
+ page_token=None, max_results=_auth_providers.MAX_LIST_CONFIGS_RESULTS, app=None):
+ """Retrieves a page of OIDC provider configs from a Firebase project.
+
+ The ``page_token`` argument governs the starting point of the page. The ``max_results``
+ argument governs the maximum number of configs that may be included in the returned
+ page. This function never returns ``None``. If there are no OIDC configs in the Firebase
+ project, this returns an empty page.
+
+ Args:
+ page_token: A non-empty page token string, which indicates the starting point of the
+ page (optional). Defaults to ``None``, which will retrieve the first page of configs.
+ max_results: A positive integer indicating the maximum number of configs to include in
+ the returned page (optional). Defaults to 100, which is also the maximum number
+ allowed.
+ app: An App instance (optional).
+
+ Returns:
+ ListProviderConfigsPage: A page of OIDC provider config instances.
+
+ Raises:
+ ValueError: If ``max_results`` or ``page_token`` are invalid.
+ FirebaseError: If an error occurs while retrieving the OIDC provider configs.
+ """
+ client = _get_client(app)
+ return client.list_oidc_provider_configs(page_token, max_results)
+
+
+def get_saml_provider_config(provider_id, app=None):
+ """Returns the ``SAMLProviderConfig`` with the given ID.
+
+ Args:
+ provider_id: Provider ID string.
+ app: An App instance (optional).
+
+ Returns:
+ SAMLProviderConfig: A SAML provider config instance.
+
+ Raises:
+ ValueError: If the provider ID is invalid, empty or does not have ``saml.`` prefix.
+ ConfigurationNotFoundError: If no SAML provider is available with the given identifier.
+ FirebaseError: If an error occurs while retrieving the SAML provider.
+ """
+ client = _get_client(app)
+ return client.get_saml_provider_config(provider_id)
+
+
+def create_saml_provider_config(
+ provider_id, idp_entity_id, sso_url, x509_certificates, rp_entity_id, callback_url,
+ display_name=None, enabled=None, app=None):
+ """Creates a new SAML provider config from the given parameters.
+
+ SAML provider support requires Google Cloud's Identity Platform (GCIP). To learn more about
+ GCIP, including pricing and features, see https://cloud.google.com/identity-platform.
+
+ Args:
+ provider_id: Provider ID string. Must have the prefix ``saml.``.
+ idp_entity_id: The SAML IdP entity identifier.
+ sso_url: The SAML IdP SSO URL. Must be a valid URL.
+ x509_certificates: The list of SAML IdP X.509 certificates issued by CA for this provider.
+ Multiple certificates are accepted to prevent outages during IdP key rotation (for
+ example ADFS rotates every 10 days). When the Auth server receives a SAML response, it
+ will match the SAML response with the certificate on record. Otherwise the response is
+ rejected. Developers are expected to manage the certificate updates as keys are
+ rotated.
+ rp_entity_id: The SAML relying party (service provider) entity ID. This is defined by the
+ developer but needs to be provided to the SAML IdP.
+ callback_url: Callback URL string. This is fixed and must always be the same as the OAuth
+ redirect URL provisioned by Firebase Auth, unless a custom authDomain is used.
+ display_name: The user-friendly display name for the current configuration (optional). This
+ name is also used as the provider label in the Cloud Console.
+ enabled: A boolean indicating whether the provider configuration is enabled or disabled
+ (optional). A user cannot sign in using a disabled provider.
+ app: An App instance (optional).
+
+ Returns:
+ SAMLProviderConfig: The newly created SAML provider config instance.
+
+ Raises:
+ ValueError: If any of the specified input parameters are invalid.
+ FirebaseError: If an error occurs while creating the new SAML provider config.
+ """
+ client = _get_client(app)
+ return client.create_saml_provider_config(
+ provider_id, idp_entity_id=idp_entity_id, sso_url=sso_url,
+ x509_certificates=x509_certificates, rp_entity_id=rp_entity_id, callback_url=callback_url,
+ display_name=display_name, enabled=enabled)
+
+
+def update_saml_provider_config(
+ provider_id, idp_entity_id=None, sso_url=None, x509_certificates=None,
+ rp_entity_id=None, callback_url=None, display_name=None, enabled=None, app=None):
+ """Updates an existing SAML provider config with the given parameters.
+
+ Args:
+ provider_id: Provider ID string. Must have the prefix ``saml.``.
+ idp_entity_id: The SAML IdP entity identifier (optional).
+ sso_url: The SAML IdP SSO URL. Must be a valid URL (optional).
+ x509_certificates: The list of SAML IdP X.509 certificates issued by CA for this
+ provider (optional).
+ rp_entity_id: The SAML relying party entity ID (optional).
+ callback_url: Callback URL string (optional).
+ display_name: The user-friendly display name of the current configuration (optional).
+ Pass ``auth.DELETE_ATTRIBUTE`` to delete the current display name.
+ enabled: A boolean indicating whether the provider configuration is enabled or disabled
+ (optional).
+ app: An App instance (optional).
+
+ Returns:
+ SAMLProviderConfig: The updated SAML provider config instance.
+
+ Raises:
+ ValueError: If any of the specified input parameters are invalid.
+ FirebaseError: If an error occurs while updating the SAML provider config.
+ """
+ client = _get_client(app)
+ return client.update_saml_provider_config(
+ provider_id, idp_entity_id=idp_entity_id, sso_url=sso_url,
+ x509_certificates=x509_certificates, rp_entity_id=rp_entity_id,
+ callback_url=callback_url, display_name=display_name, enabled=enabled)
+
+
+def delete_saml_provider_config(provider_id, app=None):
+ """Deletes the ``SAMLProviderConfig`` with the given ID.
+
+ Args:
+ provider_id: Provider ID string.
+ app: An App instance (optional).
+
+ Raises:
+ ValueError: If the provider ID is invalid, empty or does not have ``saml.`` prefix.
+ ConfigurationNotFoundError: If no SAML provider is available with the given identifier.
+ FirebaseError: If an error occurs while deleting the SAML provider.
+ """
+ client = _get_client(app)
+ client.delete_saml_provider_config(provider_id)
+
+
+def list_saml_provider_configs(
+ page_token=None, max_results=_auth_providers.MAX_LIST_CONFIGS_RESULTS, app=None):
+ """Retrieves a page of SAML provider configs from a Firebase project.
+
+ The ``page_token`` argument governs the starting point of the page. The ``max_results``
+ argument governs the maximum number of configs that may be included in the returned
+ page. This function never returns ``None``. If there are no SAML configs in the Firebase
+ project, this returns an empty page.
+
+ Args:
+ page_token: A non-empty page token string, which indicates the starting point of the
+ page (optional). Defaults to ``None``, which will retrieve the first page of configs.
+ max_results: A positive integer indicating the maximum number of configs to include in
+ the returned page (optional). Defaults to 100, which is also the maximum number
+ allowed.
+ app: An App instance (optional).
+
+ Returns:
+ ListProviderConfigsPage: A page of SAML provider config instances.
+
+ Raises:
+ ValueError: If ``max_results`` or ``page_token`` are invalid.
+ FirebaseError: If an error occurs while retrieving the SAML provider configs.
+ """
+ client = _get_client(app)
+ return client.list_saml_provider_configs(page_token, max_results)
diff --git a/venv/Lib/site-packages/firebase_admin/credentials.py b/venv/Lib/site-packages/firebase_admin/credentials.py
new file mode 100644
index 000000000..8f9c504f0
--- /dev/null
+++ b/venv/Lib/site-packages/firebase_admin/credentials.py
@@ -0,0 +1,214 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Firebase credentials module."""
+import collections
+import json
+
+import google.auth
+from google.auth.transport import requests
+from google.oauth2 import credentials
+from google.oauth2 import service_account
+
+
+_request = requests.Request()
+_scopes = [
+ 'https://www.googleapis.com/auth/cloud-platform',
+ 'https://www.googleapis.com/auth/datastore',
+ 'https://www.googleapis.com/auth/devstorage.read_write',
+ 'https://www.googleapis.com/auth/firebase',
+ 'https://www.googleapis.com/auth/identitytoolkit',
+ 'https://www.googleapis.com/auth/userinfo.email'
+]
+
+AccessTokenInfo = collections.namedtuple('AccessTokenInfo', ['access_token', 'expiry'])
+"""Data included in an OAuth2 access token.
+
+Contains the access token string and the expiry time. The expiry time is exposed as a
+``datetime`` value.
+"""
+
+
+class Base:
+ """Provides OAuth2 access tokens for accessing Firebase services."""
+
+ def get_access_token(self):
+ """Fetches a Google OAuth2 access token using this credential instance.
+
+ Returns:
+ AccessTokenInfo: An access token obtained using the credential.
+ """
+ google_cred = self.get_credential()
+ google_cred.refresh(_request)
+ return AccessTokenInfo(google_cred.token, google_cred.expiry)
+
+ def get_credential(self):
+ """Returns the Google credential instance used for authentication."""
+ raise NotImplementedError
+
+
+class Certificate(Base):
+ """A credential initialized from a JSON certificate keyfile."""
+
+ _CREDENTIAL_TYPE = 'service_account'
+
+ def __init__(self, cert):
+ """Initializes a credential from a Google service account certificate.
+
+ Service account certificates can be downloaded as JSON files from the Firebase console.
+ To instantiate a credential from a certificate file, either specify the file path or a
+ dict representing the parsed contents of the file.
+
+ Args:
+ cert: Path to a certificate file or a dict representing the contents of a certificate.
+
+ Raises:
+ IOError: If the specified certificate file doesn't exist or cannot be read.
+ ValueError: If the specified certificate is invalid.
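+
+ Example (a minimal sketch with a placeholder key file path; assumes
+ ``firebase_admin`` and this module are imported):
+
+ >>> cred = credentials.Certificate('path/to/serviceAccountKey.json')
+ >>> app = firebase_admin.initialize_app(cred)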
+ """
+ super(Certificate, self).__init__()
+ if isinstance(cert, str):
+ with open(cert) as json_file:
+ json_data = json.load(json_file)
+ elif isinstance(cert, dict):
+ json_data = cert
+ else:
+ raise ValueError(
+ 'Invalid certificate argument: "{0}". Certificate argument must be a file path, '
+ 'or a dict containing the parsed file contents.'.format(cert))
+
+ if json_data.get('type') != self._CREDENTIAL_TYPE:
+ raise ValueError('Invalid service account certificate. Certificate must contain a '
+ '"type" field set to "{0}".'.format(self._CREDENTIAL_TYPE))
+ try:
+ self._g_credential = service_account.Credentials.from_service_account_info(
+ json_data, scopes=_scopes)
+ except ValueError as error:
+ raise ValueError('Failed to initialize a certificate credential. '
+ 'Caused by: "{0}"'.format(error))
+
+ @property
+ def project_id(self):
+ return self._g_credential.project_id
+
+ @property
+ def signer(self):
+ return self._g_credential.signer
+
+ @property
+ def service_account_email(self):
+ return self._g_credential.service_account_email
+
+ def get_credential(self):
+ """Returns the underlying Google credential.
+
+ Returns:
+ google.auth.credentials.Credentials: A Google Auth credential instance."""
+ return self._g_credential
+
+
+class ApplicationDefault(Base):
+ """A Google Application Default credential."""
+
+ def __init__(self):
+ """Creates an instance that will use Application Default credentials.
+
+ The credentials will be lazily initialized when get_credential() or
+ project_id() is called. See those methods for possible errors raised.
+ """
+ super(ApplicationDefault, self).__init__()
+ self._g_credential = None # Will be lazily-loaded via _load_credential().
+
+ def get_credential(self):
+ """Returns the underlying Google credential.
+
+ Raises:
+ google.auth.exceptions.DefaultCredentialsError: If Application Default
+ credentials cannot be initialized in the current environment.
+ Returns:
+ google.auth.credentials.Credentials: A Google Auth credential instance."""
+ self._load_credential()
+ return self._g_credential
+
+ @property
+ def project_id(self):
+ """Returns the project_id from the underlying Google credential.
+
+ Raises:
+ google.auth.exceptions.DefaultCredentialsError: If Application Default
+ credentials cannot be initialized in the current environment.
+ Returns:
+ str: The project id."""
+ self._load_credential()
+ return self._project_id
+
+ def _load_credential(self):
+ if not self._g_credential:
+ self._g_credential, self._project_id = google.auth.default(scopes=_scopes)
+
+
+class RefreshToken(Base):
+ """A credential initialized from an existing refresh token."""
+
+ _CREDENTIAL_TYPE = 'authorized_user'
+
+ def __init__(self, refresh_token):
+ """Initializes a credential from a refresh token JSON file.
+
+ The JSON must consist of client_id, client_secret and refresh_token fields. Refresh
+ token files are typically created and managed by the gcloud SDK. To instantiate
+ a credential from a refresh token file, either specify the file path or a dict
+ representing the parsed contents of the file.
+
+ Args:
+ refresh_token: Path to a refresh token file or a dict representing the contents of a
+ refresh token file.
+
+ Raises:
+ IOError: If the specified file doesn't exist or cannot be read.
+ ValueError: If the refresh token configuration is invalid.
+ """
+ super(RefreshToken, self).__init__()
+ if isinstance(refresh_token, str):
+ with open(refresh_token) as json_file:
+ json_data = json.load(json_file)
+ elif isinstance(refresh_token, dict):
+ json_data = refresh_token
+ else:
+ raise ValueError(
+ 'Invalid refresh token argument: "{0}". Refresh token argument must be a file '
+ 'path, or a dict containing the parsed file contents.'.format(refresh_token))
+
+ if json_data.get('type') != self._CREDENTIAL_TYPE:
+ raise ValueError('Invalid refresh token configuration. JSON must contain a '
+ '"type" field set to "{0}".'.format(self._CREDENTIAL_TYPE))
+ self._g_credential = credentials.Credentials.from_authorized_user_info(json_data, _scopes)
+
+ @property
+ def client_id(self):
+ return self._g_credential.client_id
+
+ @property
+ def client_secret(self):
+ return self._g_credential.client_secret
+
+ @property
+ def refresh_token(self):
+ return self._g_credential.refresh_token
+
+ def get_credential(self):
+ """Returns the underlying Google credential.
+
+ Returns:
+ google.auth.credentials.Credentials: A Google Auth credential instance."""
+ return self._g_credential
diff --git a/venv/Lib/site-packages/firebase_admin/db.py b/venv/Lib/site-packages/firebase_admin/db.py
new file mode 100644
index 000000000..be2b9c917
--- /dev/null
+++ b/venv/Lib/site-packages/firebase_admin/db.py
@@ -0,0 +1,991 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Firebase Realtime Database module.
+
+This module contains functions and classes that facilitate interacting with the Firebase Realtime
+Database. It supports basic data manipulation operations, as well as complex queries such as
+limit queries and range queries. However, it does not support realtime update notifications. This
+module uses the Firebase REST API underneath.
+"""
+
+import collections
+import json
+import os
+import sys
+import threading
+from urllib import parse
+
+import google.auth
+import requests
+
+import firebase_admin
+from firebase_admin import exceptions
+from firebase_admin import _http_client
+from firebase_admin import _sseclient
+from firebase_admin import _utils
+
+
+_DB_ATTRIBUTE = '_database'
+_INVALID_PATH_CHARACTERS = '[].?#$'
+_RESERVED_FILTERS = ('$key', '$value', '$priority')
+_USER_AGENT = 'Firebase/HTTP/{0}/{1}.{2}/AdminPython'.format(
+ firebase_admin.__version__, sys.version_info.major, sys.version_info.minor)
+_TRANSACTION_MAX_RETRIES = 25
+_EMULATOR_HOST_ENV_VAR = 'FIREBASE_DATABASE_EMULATOR_HOST'
+
+
+def reference(path='/', app=None, url=None):
+ """Returns a database ``Reference`` representing the node at the specified path.
+
+ If no path is specified, this function returns a ``Reference`` that represents the database
+ root. By default, the returned References provide access to the Firebase Database specified at
+ app initialization. To connect to a different database instance in the same Firebase project,
+ specify the ``url`` parameter.
+
+ Args:
+ path: Path to a node in the Firebase realtime database (optional).
+ app: An App instance (optional).
+ url: Base URL of the Firebase Database instance (optional). When specified, takes
+ precedence over the ``databaseURL`` option set at app initialization.
+
+ Returns:
+ Reference: A newly initialized Reference.
+
+ Raises:
+ ValueError: If the specified path or app is invalid.
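+
+ Example (a minimal sketch; assumes the app was initialized with a
+ ``databaseURL`` and that the path below is a placeholder):
+
+ >>> ref = db.reference('users/alice')
+ >>> ref.set({'name': 'Alice'})
+ >>> ref.get()
+ {'name': 'Alice'}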
+ """
+ service = _utils.get_app_service(app, _DB_ATTRIBUTE, _DatabaseService)
+ client = service.get_client(url)
+ return Reference(client=client, path=path)
+
+def _parse_path(path):
+ """Parses a path string into a set of segments."""
+ if not isinstance(path, str):
+ raise ValueError('Invalid path: "{0}". Path must be a string.'.format(path))
+ if any(ch in path for ch in _INVALID_PATH_CHARACTERS):
+ raise ValueError(
+ 'Invalid path: "{0}". Path contains illegal characters.'.format(path))
+ return [seg for seg in path.split('/') if seg]
+
+
+class Event:
+ """Represents a realtime update event received from the database."""
+
+ def __init__(self, sse_event):
+ self._sse_event = sse_event
+ self._data = json.loads(sse_event.data)
+
+ @property
+ def data(self):
+ """Parsed JSON data of this event."""
+ return self._data['data']
+
+ @property
+ def path(self):
+ """Path of the database reference that triggered this event."""
+ return self._data['path']
+
+ @property
+ def event_type(self):
+ """Event type string (put, patch)."""
+ return self._sse_event.event_type
+
+
+class ListenerRegistration:
+ """Represents the addition of an event listener to a database reference."""
+
+ def __init__(self, callback, sse):
+ """Initializes a new listener with given parameters.
+
+ This is an internal API. Use the ``db.Reference.listen()`` method to start a
+ new listener.
+
+ Args:
+ callback: The callback function to fire in case of event.
+ sse: A transport session to make requests with.
+ """
+ self._callback = callback
+ self._sse = sse
+ self._thread = threading.Thread(target=self._start_listen)
+ self._thread.start()
+
+ def _start_listen(self):
+ # iterate the sse client's generator
+ for sse_event in self._sse:
+ # only inject data events
+ if sse_event:
+ self._callback(Event(sse_event))
+
+ def close(self):
+ """Stops the event listener represented by this registration
+
+ This closes the SSE HTTP connection, and joins the background thread.
+ """
+ self._sse.close()
+ self._thread.join()
+
+
+class Reference:
+ """Reference represents a node in the Firebase realtime database."""
+
+ def __init__(self, **kwargs):
+ """Creates a new Reference using the provided parameters.
+
+ This method is for internal use only. Use db.reference() to obtain an instance of
+ Reference.
+ """
+ self._client = kwargs.get('client')
+ if 'segments' in kwargs:
+ self._segments = kwargs.get('segments')
+ else:
+ self._segments = _parse_path(kwargs.get('path'))
+ self._pathurl = '/' + '/'.join(self._segments)
+
+ @property
+ def key(self):
+ if self._segments:
+ return self._segments[-1]
+ return None
+
+ @property
+ def path(self):
+ return self._pathurl
+
+ @property
+ def parent(self):
+ if self._segments:
+ return Reference(client=self._client, segments=self._segments[:-1])
+ return None
+
+ def child(self, path):
+ """Returns a Reference to the specified child node.
+
+ The path may point to an immediate child of the current Reference, or a deeply nested
+ child. Child paths must not begin with '/'.
+
+ Args:
+ path: Path to the child node.
+
+ Returns:
+ Reference: A database Reference representing the specified child node.
+
+ Raises:
+ ValueError: If the child path is not a string, not well-formed or begins with '/'.
+ """
+ if not path or not isinstance(path, str):
+ raise ValueError(
+ 'Invalid path argument: "{0}". Path must be a non-empty string.'.format(path))
+ if path.startswith('/'):
+ raise ValueError(
+ 'Invalid path argument: "{0}". Child path must not start with "/"'.format(path))
+ full_path = self._pathurl + '/' + path
+ return Reference(client=self._client, path=full_path)
+
+ def get(self, etag=False, shallow=False):
+ """Returns the value, and optionally the ETag, at the current location of the database.
+
+ Args:
+ etag: A boolean indicating whether the Etag value should be returned or not (optional).
+ shallow: A boolean indicating whether to execute a shallow read (optional). Shallow
+ reads do not retrieve the child nodes of the current database location. Cannot be
+ set to True if ``etag`` is also set to True.
+
+ Returns:
+ object: If etag is False, returns the decoded JSON value of the current database location.
+ If etag is True, returns a 2-tuple consisting of the decoded JSON value and the Etag
+ associated with the current database location.
+
+ Raises:
+ ValueError: If both ``etag`` and ``shallow`` are set to True.
+ FirebaseError: If an error occurs while communicating with the remote database server.
+ """
+ if etag:
+ if shallow:
+ raise ValueError('etag and shallow cannot both be set to True.')
+ headers, data = self._client.headers_and_body(
+ 'get', self._add_suffix(), headers={'X-Firebase-ETag' : 'true'})
+ return data, headers.get('ETag')
+
+ params = 'shallow=true' if shallow else None
+ return self._client.body('get', self._add_suffix(), params=params)
+
+ def get_if_changed(self, etag):
+ """Gets data in this location only if the specified ETag does not match.
+
+ Args:
+ etag: The ETag value to be checked against the ETag of the current location.
+
+ Returns:
+ tuple: A 3-tuple consisting of a boolean, a decoded JSON value and an ETag. If the ETag
+ specified by the caller did not match, the boolean value will be True and the JSON
+ and ETag values will reflect the corresponding values in the database. If the ETag
+ matched, the boolean value will be False and the other elements of the tuple will be
+ None.
+
+ Raises:
+ ValueError: If the ETag is not a string.
+ FirebaseError: If an error occurs while communicating with the remote database server.
+ """
+ if not isinstance(etag, str):
+ raise ValueError('ETag must be a string.')
+
+ resp = self._client.request('get', self._add_suffix(), headers={'if-none-match': etag})
+ if resp.status_code == 304:
+ return False, None, None
+
+ return True, resp.json(), resp.headers.get('ETag')
+
+ def set(self, value):
+ """Sets the data at this location to the given value.
+
+ The value must be JSON-serializable and not None.
+
+ Args:
+ value: JSON-serializable value to be set at this location.
+
+ Raises:
+ ValueError: If the provided value is None.
+ TypeError: If the value is not JSON-serializable.
+ FirebaseError: If an error occurs while communicating with the remote database server.
+ """
+ if value is None:
+ raise ValueError('Value must not be None.')
+ self._client.request('put', self._add_suffix(), json=value, params='print=silent')
+
+ def set_if_unchanged(self, expected_etag, value):
+ """Conditonally sets the data at this location to the given value.
+
+ Sets the data at this location to the given value only if ``expected_etag`` is same as the
+ ETag value in the database.
+
+ Args:
+ expected_etag: Value of ETag we want to check.
+ value: JSON-serializable value to be set at this location.
+
+ Returns:
+ tuple: A 3-tuple consisting of a boolean, a decoded JSON value and an ETag. The boolean
+ indicates whether the set operation was successful or not. The decoded JSON and the
+ ETag correspond to the latest value in this database location.
+
+ Raises:
+ ValueError: If the value is None, or if expected_etag is not a string.
+ FirebaseError: If an error occurs while communicating with the remote database server.
+ """
+ # pylint: disable=missing-raises-doc
+ if not isinstance(expected_etag, str):
+ raise ValueError('Expected ETag must be a string.')
+ if value is None:
+ raise ValueError('Value must not be None.')
+
+ try:
+ headers = self._client.headers(
+ 'put', self._add_suffix(), json=value, headers={'if-match': expected_etag})
+ return True, value, headers.get('ETag')
+ except exceptions.FailedPreconditionError as error:
+ http_response = error.http_response
+ if http_response is not None and 'ETag' in http_response.headers:
+ etag = http_response.headers['ETag']
+ snapshot = http_response.json()
+ return False, snapshot, etag
+
+ raise error
+
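+ # Illustrative sketch (not part of the SDK): an ETag-based read-modify-write
+ # cycle built from get(etag=True) and set_if_unchanged(). The path is
+ # hypothetical.
+ #
+ #     ref = db.reference('/Counters/visits')
+ #     value, etag = ref.get(etag=True)
+ #     success, current, new_etag = ref.set_if_unchanged(etag, (value or 0) + 1)
+ #     if not success:
+ #         pass  # another client wrote first; 'current' holds the latest value
+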
+ def push(self, value=''):
+ """Creates a new child node.
+
+ The optional value argument can be used to provide an initial value for the child node. If
+ no value is provided, the child node will have an empty string as the default value.
+
+ Args:
+ value: JSON-serializable initial value for the child node (optional).
+
+ Returns:
+ Reference: A Reference representing the newly created child node.
+
+ Raises:
+ ValueError: If the value is None.
+ TypeError: If the value is not JSON-serializable.
+ FirebaseError: If an error occurs while communicating with the remote database server.
+ """
+ if value is None:
+ raise ValueError('Value must not be None.')
+ output = self._client.body('post', self._add_suffix(), json=value)
+ push_id = output.get('name')
+ return self.child(push_id)
+
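+ # Illustrative sketch (not part of the SDK): push() generates a unique child
+ # key, so it is the usual way to append to a list-like node. The '/Events'
+ # path and payload are hypothetical.
+ #
+ #     events = db.reference('/Events')
+ #     new_ref = events.push({'type': 'door_open'})
+ #     print(new_ref.key)    # the server-generated push ID
+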
+ def update(self, value):
+ """Updates the specified child keys of this Reference to the provided values.
+
+ Args:
+ value: A dictionary containing the child keys to update, and their new values.
+
+ Raises:
+ ValueError: If value is empty or not a dictionary.
+ FirebaseError: If an error occurs while communicating with the remote database server.
+ """
+ if not value or not isinstance(value, dict):
+ raise ValueError('Value argument must be a non-empty dictionary.')
+ if None in value.keys():
+ raise ValueError('Dictionary must not contain None keys.')
+ self._client.request('patch', self._add_suffix(), json=value, params='print=silent')
+
+ def delete(self):
+ """Deletes this node from the database.
+
+ Raises:
+ FirebaseError: If an error occurs while communicating with the remote database server.
+ """
+ self._client.request('delete', self._add_suffix())
+
+ def listen(self, callback):
+ """Registers the ``callback`` function to receive realtime updates.
+
+ The specified callback function will get invoked with ``db.Event`` objects for each
+ realtime update received from the database. It will also get called whenever the SDK
+ reconnects to the server due to network issues or credential expiration. In general,
+ the OAuth2 credentials used to authorize connections to the server expire every hour.
+ Therefore clients should expect the ``callback`` to fire at least once every hour, even if
+ there are no updates in the database.
+
+ This API is based on the event streaming support available in the Firebase REST API. Each
+ call to ``listen()`` starts a new HTTP connection and a background thread. This is an
+ experimental feature. It currently does not honor the auth overrides and timeout settings.
+ Cannot be used in thread-constrained environments like Google App Engine.
+
+ Args:
+ callback: A function to be called when a data change is detected.
+
+ Returns:
+ ListenerRegistration: An object that can be used to stop the event listener.
+
+ Raises:
+ FirebaseError: If an error occurs while starting the initial HTTP connection.
+ """
+ return self._listen_with_session(callback)
+
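+ # Illustrative sketch (not part of the SDK): the callback receives db.Event
+ # objects, and close() stops the stream. The '/Alerts' path is hypothetical.
+ #
+ #     def on_change(event):
+ #         print(event.event_type, event.path, event.data)
+ #
+ #     registration = db.reference('/Alerts').listen(on_change)
+ #     # ... do other work ...
+ #     registration.close()    # close the SSE connection and join the thread
+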
+ def transaction(self, transaction_update):
+ """Atomically modifies the data at this location.
+
+ Unlike a normal ``set()``, which just overwrites the data regardless of its previous state,
+ ``transaction()`` is used to modify the existing value to a new value, ensuring there are
+ no conflicts with other clients simultaneously writing to the same location.
+
+ This is accomplished by passing an update function which is used to transform the current
+ value of this reference into a new value. If another client writes to this location before
+ the new value is successfully saved, the update function is called again with the new
+ current value, and the write will be retried. In case of repeated failures, this method
+ will retry the transaction up to 25 times before giving up and raising a
+ TransactionAbortedError. The update function may also force an early abort by raising an
+ exception instead of returning a value.
+
+ Args:
+ transaction_update: A function which will be passed the current data stored at this
+ location. The function should return the new value it would like written. If
+ an exception is raised, the transaction will be aborted, and the data at this
+ location will not be modified. The exceptions raised by this function are
+ propagated to the caller of the transaction method.
+
+ Returns:
+ object: New value of the current database Reference (only if the transaction commits).
+
+ Raises:
+ TransactionAbortedError: If the transaction aborts after exhausting all retry attempts.
+ ValueError: If transaction_update is not a function.
+ """
+ if not callable(transaction_update):
+ raise ValueError('transaction_update must be a function.')
+
+ tries = 0
+ data, etag = self.get(etag=True)
+ while tries < _TRANSACTION_MAX_RETRIES:
+ new_data = transaction_update(data)
+ success, data, etag = self.set_if_unchanged(etag, new_data)
+ if success:
+ return new_data
+ tries += 1
+
+ raise TransactionAbortedError('Transaction aborted after failed retries.')
+
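+ # Illustrative sketch (not part of the SDK): the update function receives the
+ # current value (possibly None for a new node) and returns the value to write.
+ # The '/Counters/logins' path is hypothetical.
+ #
+ #     def increment(current):
+ #         return (current or 0) + 1
+ #
+ #     new_value = db.reference('/Counters/logins').transaction(increment)
+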
+ def order_by_child(self, path):
+ """Returns a Query that orders data by child values.
+
+ Returned Query can be used to set additional parameters, and execute complex database
+ queries (e.g. limit queries, range queries).
+
+ Args:
+ path: Path to a valid child of the current Reference.
+
+ Returns:
+ Query: A database Query instance.
+
+ Raises:
+ ValueError: If the child path is not a string, not well-formed or None.
+ """
+ if path in _RESERVED_FILTERS:
+ raise ValueError('Illegal child path: {0}'.format(path))
+ return Query(order_by=path, client=self._client, pathurl=self._add_suffix())
+
+ def order_by_key(self):
+ """Creates a Query that orderes data by key.
+
+ Returned Query can be used to set additional parameters, and execute complex database
+ queries (e.g. limit queries, range queries).
+
+ Returns:
+ Query: A database Query instance.
+ """
+ return Query(order_by='$key', client=self._client, pathurl=self._add_suffix())
+
+ def order_by_value(self):
+ """Creates a Query that orderes data by value.
+
+ Returned Query can be used to set additional parameters, and execute complex database
+ queries (e.g. limit queries, range queries).
+
+ Returns:
+ Query: A database Query instance.
+ """
+ return Query(order_by='$value', client=self._client, pathurl=self._add_suffix())
+
+ def _add_suffix(self, suffix='.json'):
+ return self._pathurl + suffix
+
+ def _listen_with_session(self, callback, session=None):
+ url = self._client.base_url + self._add_suffix()
+ if not session:
+ session = self._client.create_listener_session()
+
+ try:
+ sse = _sseclient.SSEClient(url, session)
+ return ListenerRegistration(callback, sse)
+ except requests.exceptions.RequestException as error:
+ raise _Client.handle_rtdb_error(error)
+
+
+class Query:
+ """Represents a complex query that can be executed on a Reference.
+
+ Complex queries can consist of up to 2 components: a required ordering constraint, and an
+ optional filtering constraint. At the server, data is first sorted according to the given
+ ordering constraint (e.g. order by child). Then the filtering constraint (e.g. limit, range)
+ is applied on the sorted data to produce the final result. Despite the ordering constraint,
+ the final result is returned by the server as an unordered collection. Therefore the Query
+ interface performs another round of sorting on the client side before returning the results
+ to the caller. These client-side sorted results are returned to the user as a Python
+ OrderedDict.
+ """
+
+ def __init__(self, **kwargs):
+ order_by = kwargs.pop('order_by')
+ if not order_by or not isinstance(order_by, str):
+ raise ValueError('order_by field must be a non-empty string')
+ if order_by not in _RESERVED_FILTERS:
+ if order_by.startswith('/'):
+ raise ValueError('Invalid path argument: "{0}". Child path must not start '
+ 'with "/"'.format(order_by))
+ segments = _parse_path(order_by)
+ order_by = '/'.join(segments)
+ self._client = kwargs.pop('client')
+ self._pathurl = kwargs.pop('pathurl')
+ self._order_by = order_by
+ self._params = {'orderBy' : json.dumps(order_by)}
+ if kwargs:
+ raise ValueError('Unexpected keyword arguments: {0}'.format(kwargs))
+
+ def limit_to_first(self, limit):
+ """Creates a query with limit, and anchors it to the start of the window.
+
+ Args:
+ limit: The maximum number of child nodes to return.
+
+ Returns:
+ Query: The updated Query instance.
+
+ Raises:
+ ValueError: If the limit is not a non-negative integer, or limit_to_last() was called previously.
+ """
+ if not isinstance(limit, int) or limit < 0:
+ raise ValueError('Limit must be a non-negative integer.')
+ if 'limitToLast' in self._params:
+ raise ValueError('Cannot set both first and last limits.')
+ self._params['limitToFirst'] = limit
+ return self
+
+ def limit_to_last(self, limit):
+ """Creates a query with limit, and anchors it to the end of the window.
+
+ Args:
+ limit: The maximum number of child nodes to return.
+
+ Returns:
+ Query: The updated Query instance.
+
+ Raises:
+ ValueError: If the limit is not a non-negative integer, or limit_to_first() was called previously.
+ """
+ if not isinstance(limit, int) or limit < 0:
+ raise ValueError('Limit must be a non-negative integer.')
+ if 'limitToFirst' in self._params:
+ raise ValueError('Cannot set both first and last limits.')
+ self._params['limitToLast'] = limit
+ return self
+
+ def start_at(self, start):
+ """Sets the lower bound for a range query.
+
+ The Query will only return child nodes with a value greater than or equal to the specified
+ value.
+
+ Args:
+ start: JSON-serializable value to start at, inclusive.
+
+ Returns:
+ Query: The updated Query instance.
+
+ Raises:
+ ValueError: If the value is ``None``.
+ """
+ if start is None:
+ raise ValueError('Start value must not be None.')
+ self._params['startAt'] = json.dumps(start)
+ return self
+
+ def end_at(self, end):
+ """Sets the upper bound for a range query.
+
+ The Query will only return child nodes with a value less than or equal to the specified
+ value.
+
+ Args:
+ end: JSON-serializable value to end at, inclusive.
+
+ Returns:
+ Query: The updated Query instance.
+
+ Raises:
+ ValueError: If the value is ``None``.
+ """
+ if end is None:
+ raise ValueError('End value must not be None.')
+ self._params['endAt'] = json.dumps(end)
+ return self
+
+ def equal_to(self, value):
+ """Sets an equals constraint on the Query.
+
+ The Query will only return child nodes whose value is equal to the specified value.
+
+ Args:
+ value: JSON-serializable value to query for.
+
+ Returns:
+ Query: The updated Query instance.
+
+ Raises:
+ ValueError: If the value is ``None``.
+ """
+ if value is None:
+ raise ValueError('Equal to value must not be None.')
+ self._params['equalTo'] = json.dumps(value)
+ return self
+
+ @property
+ def _querystr(self):
+ params = []
+ for key in sorted(self._params):
+ params.append('{0}={1}'.format(key, self._params[key]))
+ return '&'.join(params)
+
+ def get(self):
+ """Executes this Query and returns the results.
+
+ The results will be returned as a sorted list or an OrderedDict.
+
+ Returns:
+ object: Decoded JSON result of the Query.
+
+ Raises:
+ FirebaseError: If an error occurs while communicating with the remote database server.
+ """
+ result = self._client.body('get', self._pathurl, params=self._querystr)
+ if isinstance(result, (dict, list)) and self._order_by != '$priority':
+ return _Sorter(result, self._order_by).get()
+ return result
+
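+# Illustrative sketch (not part of the SDK): combining an ordering constraint
+# with a limit filter. The '/Players' path and 'score' child key are
+# hypothetical.
+#
+#     top_five = db.reference('/Players').order_by_child('score').limit_to_last(5).get()
+#     for key, value in top_five.items():    # OrderedDict, sorted client-side
+#         print(key, value)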
+
+class TransactionAbortedError(exceptions.AbortedError):
+ """A transaction was aborted aftr exceeding the maximum number of retries."""
+
+ def __init__(self, message):
+ exceptions.AbortedError.__init__(self, message)
+
+
+class _Sorter:
+ """Helper class for sorting query results."""
+
+ def __init__(self, results, order_by):
+ if isinstance(results, dict):
+ self.dict_input = True
+ entries = [_SortEntry(k, v, order_by) for k, v in results.items()]
+ elif isinstance(results, list):
+ self.dict_input = False
+ entries = [_SortEntry(k, v, order_by) for k, v in enumerate(results)]
+ else:
+ raise ValueError('Sorting not supported for "{0}" object.'.format(type(results)))
+ self.sort_entries = sorted(entries)
+
+ def get(self):
+ if self.dict_input:
+ return collections.OrderedDict([(e.key, e.value) for e in self.sort_entries])
+
+ return [e.value for e in self.sort_entries]
+
+
+class _SortEntry:
+ """A wrapper that is capable of sorting items in a dictionary."""
+
+ _type_none = 0
+ _type_bool_false = 1
+ _type_bool_true = 2
+ _type_numeric = 3
+ _type_string = 4
+ _type_object = 5
+
+ def __init__(self, key, value, order_by):
+ self._key = key
+ self._value = value
+ if order_by in ('$key', '$priority'):
+ self._index = key
+ elif order_by == '$value':
+ self._index = value
+ else:
+ self._index = _SortEntry._extract_child(value, order_by)
+ self._index_type = _SortEntry._get_index_type(self._index)
+
+ @property
+ def key(self):
+ return self._key
+
+ @property
+ def index(self):
+ return self._index
+
+ @property
+ def index_type(self):
+ return self._index_type
+
+ @property
+ def value(self):
+ return self._value
+
+ @classmethod
+ def _get_index_type(cls, index):
+ """Assigns an integer code to the type of the index.
+
+ The index type determines how differently typed values are sorted. This ordering is based
+ on https://firebase.google.com/docs/database/rest/retrieve-data#section-rest-ordered-data
+ """
+ if index is None:
+ return cls._type_none
+ if isinstance(index, bool) and not index:
+ return cls._type_bool_false
+ if isinstance(index, bool) and index:
+ return cls._type_bool_true
+ if isinstance(index, (int, float)):
+ return cls._type_numeric
+ if isinstance(index, str):
+ return cls._type_string
+
+ return cls._type_object
+
+ @classmethod
+ def _extract_child(cls, value, path):
+ segments = path.split('/')
+ current = value
+ for segment in segments:
+ if isinstance(current, dict):
+ current = current.get(segment)
+ else:
+ return None
+ return current
+
+ def _compare(self, other):
+ """Compares two _SortEntry instances.
+
+ If the indices have the same numeric or string type, compare them directly. Ties are
+ broken by comparing the keys. If the indices have the same type, but are neither numeric
+ nor string, compare the keys. In all other cases compare based on the ordering provided
+ by index types.
+ """
+ self_key, other_key = self.index_type, other.index_type
+ if self_key == other_key:
+ if self_key in (self._type_numeric, self._type_string) and self.index != other.index:
+ self_key, other_key = self.index, other.index
+ else:
+ self_key, other_key = self.key, other.key
+
+ if self_key < other_key:
+ return -1
+ if self_key > other_key:
+ return 1
+
+ return 0
+
+ def __lt__(self, other):
+ return self._compare(other) < 0
+
+ def __le__(self, other):
+ return self._compare(other) <= 0
+
+ def __gt__(self, other):
+ return self._compare(other) > 0
+
+ def __ge__(self, other):
+ return self._compare(other) >= 0
+
+ def __eq__(self, other):
+ return self._compare(other) == 0
+
+
+class _DatabaseService:
+ """Service that maintains a collection of database clients."""
+
+ _DEFAULT_AUTH_OVERRIDE = '_admin_'
+
+ def __init__(self, app):
+ self._credential = app.credential
+ db_url = app.options.get('databaseURL')
+ if db_url:
+ _DatabaseService._parse_db_url(db_url) # Just for validation.
+ self._db_url = db_url
+ else:
+ self._db_url = None
+ auth_override = _DatabaseService._get_auth_override(app)
+ if auth_override not in (self._DEFAULT_AUTH_OVERRIDE, {}):
+ self._auth_override = json.dumps(auth_override, separators=(',', ':'))
+ else:
+ self._auth_override = None
+ self._timeout = app.options.get('httpTimeout', _http_client.DEFAULT_TIMEOUT_SECONDS)
+ self._clients = {}
+
+ emulator_host = os.environ.get(_EMULATOR_HOST_ENV_VAR)
+ if emulator_host:
+ if '//' in emulator_host:
+ raise ValueError(
+ 'Invalid {0}: "{1}". It must follow format "host:port".'.format(
+ _EMULATOR_HOST_ENV_VAR, emulator_host))
+ self._emulator_host = emulator_host
+ else:
+ self._emulator_host = None
+
+ def get_client(self, db_url=None):
+ """Creates a client based on the db_url. Clients may be cached."""
+ if db_url is None:
+ db_url = self._db_url
+
+ base_url, namespace = _DatabaseService._parse_db_url(db_url, self._emulator_host)
+ if base_url == 'https://{0}.firebaseio.com'.format(namespace):
+ # Production base_url. No need to specify namespace in query params.
+ params = {}
+ credential = self._credential.get_credential()
+ else:
+ # Emulator base_url. Use fake credentials and specify ?ns=foo in query params.
+ credential = _EmulatorAdminCredentials()
+ params = {'ns': namespace}
+ if self._auth_override:
+ params['auth_variable_override'] = self._auth_override
+
+ client_cache_key = (base_url, json.dumps(params, sort_keys=True))
+ if client_cache_key not in self._clients:
+ client = _Client(credential, base_url, self._timeout, params)
+ self._clients[client_cache_key] = client
+ return self._clients[client_cache_key]
+
+ @classmethod
+ def _parse_db_url(cls, url, emulator_host=None):
+ """Parses (base_url, namespace) from a database URL.
+
+ The input can be either a production URL (https://foo-bar.firebaseio.com/)
+ or an Emulator URL (http://localhost:8080/?ns=foo-bar). In case of Emulator
+ URL, the namespace is extracted from the query param ns. The resulting
+ base_url never includes query params.
+
+ If url is a production URL and emulator_host is specified, the resulting
+ base URL will use emulator_host instead. emulator_host is ignored
+ if url is already an emulator URL.
+ """
+ if not url or not isinstance(url, str):
+ raise ValueError(
+ 'Invalid database URL: "{0}". Database URL must be a non-empty '
+ 'URL string.'.format(url))
+ parsed_url = parse.urlparse(url)
+ if parsed_url.netloc.endswith('.firebaseio.com'):
+ return cls._parse_production_url(parsed_url, emulator_host)
+
+ return cls._parse_emulator_url(parsed_url)
+
+ @classmethod
+ def _parse_production_url(cls, parsed_url, emulator_host):
+ """Parses production URL like https://foo-bar.firebaseio.com/"""
+ if parsed_url.scheme != 'https':
+ raise ValueError(
+ 'Invalid database URL scheme: "{0}". Database URL must be an HTTPS URL.'.format(
+ parsed_url.scheme))
+ namespace = parsed_url.netloc.split('.')[0]
+ if not namespace:
+ raise ValueError(
+ 'Invalid database URL: "{0}". Database URL must be a valid URL to a '
+ 'Firebase Realtime Database instance.'.format(parsed_url.geturl()))
+
+ if emulator_host:
+ base_url = 'http://{0}'.format(emulator_host)
+ else:
+ base_url = 'https://{0}'.format(parsed_url.netloc)
+ return base_url, namespace
+
+ @classmethod
+ def _parse_emulator_url(cls, parsed_url):
+ """Parses emulator URL like http://localhost:8080/?ns=foo-bar"""
+ query_ns = parse.parse_qs(parsed_url.query).get('ns')
+ if parsed_url.scheme != 'http' or (not query_ns or len(query_ns) != 1 or not query_ns[0]):
+ raise ValueError(
+ 'Invalid database URL: "{0}". Database URL must be a valid URL to a '
+ 'Firebase Realtime Database instance.'.format(parsed_url.geturl()))
+
+ namespace = query_ns[0]
+ base_url = '{0}://{1}'.format(parsed_url.scheme, parsed_url.netloc)
+ return base_url, namespace
+
+ @classmethod
+ def _get_auth_override(cls, app):
+ auth_override = app.options.get('databaseAuthVariableOverride', cls._DEFAULT_AUTH_OVERRIDE)
+ if auth_override == cls._DEFAULT_AUTH_OVERRIDE or auth_override is None:
+ return auth_override
+ if not isinstance(auth_override, dict):
+ raise ValueError('Invalid databaseAuthVariableOverride option: "{0}". Override '
+ 'value must be a dict or None.'.format(auth_override))
+
+ return auth_override
+
+ def close(self):
+ for value in self._clients.values():
+ value.close()
+ self._clients = {}
+
+
+class _Client(_http_client.JsonHttpClient):
+ """HTTP client used to make REST calls.
+
+ _Client maintains an HTTP session, and handles authenticating HTTP requests along with
+ marshalling and unmarshalling of JSON data.
+ """
+
+ def __init__(self, credential, base_url, timeout, params=None):
+ """Creates a new _Client from the given parameters.
+
+ This exists primarily to enable testing. For regular use, obtain _Client instances
+ through the _DatabaseService class.
+
+ Args:
+ credential: A Google credential that can be used to authenticate requests.
+ base_url: A URL prefix to be added to all outgoing requests. This is typically the
+ Firebase Realtime Database URL.
+ timeout: HTTP request timeout in seconds. If set to None connections will never
+ timeout, which is the default behavior of the underlying requests library.
+ params: Dict of query parameters to add to all outgoing requests.
+ """
+ super().__init__(
+ credential=credential, base_url=base_url,
+ timeout=timeout, headers={'User-Agent': _USER_AGENT})
+ self.credential = credential
+ self.params = params if params else {}
+
+ def request(self, method, url, **kwargs):
+ """Makes an HTTP call using the Python requests library.
+
+ Extends the request() method of the parent JsonHttpClient class. Handles default
+ params like auth overrides, and low-level exceptions.
+
+ Args:
+ method: HTTP method name as a string (e.g. get, post).
+ url: URL path of the remote endpoint. This will be appended to the server's base URL.
+ kwargs: An additional set of keyword arguments to be passed into requests API
+ (e.g. json, params).
+
+ Returns:
+ Response: An HTTP response object.
+
+ Raises:
+ FirebaseError: If an error occurs while making the HTTP call.
+ """
+ query = '&'.join('{0}={1}'.format(key, self.params[key]) for key in self.params)
+ extra_params = kwargs.get('params')
+ if extra_params:
+ if query:
+ query = extra_params + '&' + query
+ else:
+ query = extra_params
+ kwargs['params'] = query
+
+ try:
+ return super(_Client, self).request(method, url, **kwargs)
+ except requests.exceptions.RequestException as error:
+ raise _Client.handle_rtdb_error(error)
+
+ def create_listener_session(self):
+ return _sseclient.KeepAuthSession(self.credential)
+
+ @classmethod
+ def handle_rtdb_error(cls, error):
+ """Converts an error encountered while calling RTDB into a FirebaseError."""
+ if error.response is None:
+ return _utils.handle_requests_error(error)
+
+ message = cls._extract_error_message(error.response)
+ return _utils.handle_requests_error(error, message=message)
+
+ @classmethod
+ def _extract_error_message(cls, response):
+ """Extracts an error message from an error response.
+
+ If the server has sent a JSON response with an 'error' field, which is the typical
+ behavior of the Realtime Database REST API, parses the response to retrieve the error
+ message. If the server has sent a non-JSON response, returns the full response
+ as the error message.
+ """
+ message = None
+ try:
+ # RTDB error format: {"error": "text message"}
+ data = response.json()
+ if isinstance(data, dict):
+ message = data.get('error')
+ except ValueError:
+ pass
+
+ if not message:
+ message = 'Unexpected response from database: {0}'.format(response.content.decode())
+
+ return message
+
+# Temporarily disable the lint rule. For more information see:
+# https://github.com/googleapis/google-auth-library-python/pull/561
+# pylint: disable=abstract-method
+class _EmulatorAdminCredentials(google.auth.credentials.Credentials):
+ def __init__(self):
+ google.auth.credentials.Credentials.__init__(self)
+ self.token = 'owner'
+
+ def refresh(self, request):
+ pass
diff --git a/venv/Lib/site-packages/firebase_admin/exceptions.py b/venv/Lib/site-packages/firebase_admin/exceptions.py
new file mode 100644
index 000000000..06504225f
--- /dev/null
+++ b/venv/Lib/site-packages/firebase_admin/exceptions.py
@@ -0,0 +1,237 @@
+# Copyright 2019 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Firebase Exceptions module.
+
+This module defines the base types for exceptions and the platform-wide error codes as outlined in
+https://cloud.google.com/apis/design/errors.
+
+:class:`FirebaseError` is the parent class of all exceptions raised by the Admin SDK. It contains
+the ``code``, ``http_response`` and ``cause`` properties common to all Firebase exception types.
+Each exception also carries a message that outlines what went wrong. This can be logged for
+audit or debugging purposes.
+
+When calling an Admin SDK API, developers can catch the parent ``FirebaseError`` and
+inspect its ``code`` to implement fine-grained error handling. Alternatively, developers can
+catch one or more subtypes of ``FirebaseError``. Under normal conditions, any given API can raise
+only a small subset of the available exception subtypes. However, the SDK also exposes rare error
+conditions like connection timeouts and other I/O errors as instances of ``FirebaseError``.
+Therefore it is always a good idea to have a handler specified for ``FirebaseError``, after all the
+subtype error handlers.
+"""
+
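+# Illustrative sketch (not part of the SDK): catching a specific subtype first
+# and falling back to the FirebaseError base class, as recommended above.
+#
+#     from firebase_admin import db, exceptions
+#
+#     try:
+#         snapshot = db.reference('/Users').get()
+#     except exceptions.UnavailableError:
+#         pass  # transient outage; retry with backoff
+#     except exceptions.FirebaseError as error:
+#         print(error.code, error.cause, error.http_response)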
+
+#: Error code for ``InvalidArgumentError`` type.
+INVALID_ARGUMENT = 'INVALID_ARGUMENT'
+
+#: Error code for ``FailedPreconditionError`` type.
+FAILED_PRECONDITION = 'FAILED_PRECONDITION'
+
+#: Error code for ``OutOfRangeError`` type.
+OUT_OF_RANGE = 'OUT_OF_RANGE'
+
+#: Error code for ``UnauthenticatedError`` type.
+UNAUTHENTICATED = 'UNAUTHENTICATED'
+
+#: Error code for ``PermissionDeniedError`` type.
+PERMISSION_DENIED = 'PERMISSION_DENIED'
+
+#: Error code for ``NotFoundError`` type.
+NOT_FOUND = 'NOT_FOUND'
+
+#: Error code for ``ConflictError`` type.
+CONFLICT = 'CONFLICT'
+
+#: Error code for ``AbortedError`` type.
+ABORTED = 'ABORTED'
+
+#: Error code for ``AlreadyExistsError`` type.
+ALREADY_EXISTS = 'ALREADY_EXISTS'
+
+#: Error code for ``ResourceExhaustedError`` type.
+RESOURCE_EXHAUSTED = 'RESOURCE_EXHAUSTED'
+
+#: Error code for ``CancelledError`` type.
+CANCELLED = 'CANCELLED'
+
+#: Error code for ``DataLossError`` type.
+DATA_LOSS = 'DATA_LOSS'
+
+#: Error code for ``UnknownError`` type.
+UNKNOWN = 'UNKNOWN'
+
+#: Error code for ``InternalError`` type.
+INTERNAL = 'INTERNAL'
+
+#: Error code for ``UnavailableError`` type.
+UNAVAILABLE = 'UNAVAILABLE'
+
+#: Error code for ``DeadlineExceededError`` type.
+DEADLINE_EXCEEDED = 'DEADLINE_EXCEEDED'
+
+
+class FirebaseError(Exception):
+ """Base class for all errors raised by the Admin SDK.
+
+ Args:
+ code: A string error code that represents the type of the exception. Possible error
+ codes are defined in https://cloud.google.com/apis/design/errors#handling_errors.
+ message: A human-readable error message string.
+ cause: The exception that caused this error (optional).
+ http_response: If this error was caused by an HTTP error response, this property is
+ set to the ``requests.Response`` object that represents the HTTP response (optional).
+ See https://2.python-requests.org/en/master/api/#requests.Response for details of
+ this object.
+ """
+
+ def __init__(self, code, message, cause=None, http_response=None):
+ Exception.__init__(self, message)
+ self._code = code
+ self._cause = cause
+ self._http_response = http_response
+
+ @property
+ def code(self):
+ return self._code
+
+ @property
+ def cause(self):
+ return self._cause
+
+ @property
+ def http_response(self):
+ return self._http_response
+
+
+class InvalidArgumentError(FirebaseError):
+ """Client specified an invalid argument."""
+
+ def __init__(self, message, cause=None, http_response=None):
+ FirebaseError.__init__(self, INVALID_ARGUMENT, message, cause, http_response)
+
+
+class FailedPreconditionError(FirebaseError):
+ """Request can not be executed in the current system state, such as deleting a non-empty
+ directory."""
+
+ def __init__(self, message, cause=None, http_response=None):
+ FirebaseError.__init__(self, FAILED_PRECONDITION, message, cause, http_response)
+
+
+class OutOfRangeError(FirebaseError):
+ """Client specified an invalid range."""
+
+ def __init__(self, message, cause=None, http_response=None):
+ FirebaseError.__init__(self, OUT_OF_RANGE, message, cause, http_response)
+
+
+class UnauthenticatedError(FirebaseError):
+ """Request not authenticated due to missing, invalid, or expired OAuth token."""
+
+ def __init__(self, message, cause=None, http_response=None):
+ FirebaseError.__init__(self, UNAUTHENTICATED, message, cause, http_response)
+
+
+class PermissionDeniedError(FirebaseError):
+ """Client does not have sufficient permission.
+
+ This can happen because the OAuth token does not have the right scopes, the client doesn't
+ have permission, or the API has not been enabled for the client project.
+ """
+
+ def __init__(self, message, cause=None, http_response=None):
+ FirebaseError.__init__(self, PERMISSION_DENIED, message, cause, http_response)
+
+
+class NotFoundError(FirebaseError):
+ """A specified resource is not found, or the request is rejected by undisclosed reasons, such
+ as whitelisting."""
+
+ def __init__(self, message, cause=None, http_response=None):
+ FirebaseError.__init__(self, NOT_FOUND, message, cause, http_response)
+
+
+class ConflictError(FirebaseError):
+ """Concurrency conflict, such as read-modify-write conflict."""
+
+ def __init__(self, message, cause=None, http_response=None):
+ FirebaseError.__init__(self, CONFLICT, message, cause, http_response)
+
+
+class AbortedError(FirebaseError):
+ """Concurrency conflict, such as read-modify-write conflict."""
+
+ def __init__(self, message, cause=None, http_response=None):
+ FirebaseError.__init__(self, ABORTED, message, cause, http_response)
+
+
+class AlreadyExistsError(FirebaseError):
+ """The resource that a client tried to create already exists."""
+
+ def __init__(self, message, cause=None, http_response=None):
+ FirebaseError.__init__(self, ALREADY_EXISTS, message, cause, http_response)
+
+
+class ResourceExhaustedError(FirebaseError):
+ """Either out of resource quota or reaching rate limiting."""
+
+ def __init__(self, message, cause=None, http_response=None):
+ FirebaseError.__init__(self, RESOURCE_EXHAUSTED, message, cause, http_response)
+
+
+class CancelledError(FirebaseError):
+ """Request cancelled by the client."""
+
+ def __init__(self, message, cause=None, http_response=None):
+ FirebaseError.__init__(self, CANCELLED, message, cause, http_response)
+
+
+class DataLossError(FirebaseError):
+ """Unrecoverable data loss or data corruption."""
+
+ def __init__(self, message, cause=None, http_response=None):
+ FirebaseError.__init__(self, DATA_LOSS, message, cause, http_response)
+
+
+class UnknownError(FirebaseError):
+ """Unknown server error."""
+
+ def __init__(self, message, cause=None, http_response=None):
+ FirebaseError.__init__(self, UNKNOWN, message, cause, http_response)
+
+
+class InternalError(FirebaseError):
+ """Internal server error."""
+
+ def __init__(self, message, cause=None, http_response=None):
+ FirebaseError.__init__(self, INTERNAL, message, cause, http_response)
+
+
+class UnavailableError(FirebaseError):
+ """Service unavailable. Typically the server is down."""
+
+ def __init__(self, message, cause=None, http_response=None):
+ FirebaseError.__init__(self, UNAVAILABLE, message, cause, http_response)
+
+
+class DeadlineExceededError(FirebaseError):
+ """Request deadline exceeded.
+
+ This will happen only if the caller sets a deadline that is shorter than the method's
+ default deadline (i.e. requested deadline is not enough for the server to process the
+ request) and the request did not finish within the deadline.
+ """
+
+ def __init__(self, message, cause=None, http_response=None):
+ FirebaseError.__init__(self, DEADLINE_EXCEEDED, message, cause, http_response)
diff --git a/venv/Lib/site-packages/firebase_admin/firestore.py b/venv/Lib/site-packages/firebase_admin/firestore.py
new file mode 100644
index 000000000..32c9897d5
--- /dev/null
+++ b/venv/Lib/site-packages/firebase_admin/firestore.py
@@ -0,0 +1,76 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Cloud Firestore module.
+
+This module contains utilities for accessing the Google Cloud Firestore databases associated with
+Firebase apps. This requires the ``google-cloud-firestore`` Python module.
+"""
+
+try:
+ from google.cloud import firestore # pylint: disable=import-error,no-name-in-module
+ existing = globals().keys()
+ for key, value in firestore.__dict__.items():
+ if not key.startswith('_') and key not in existing:
+ globals()[key] = value
+except ImportError:
+ raise ImportError('Failed to import the Cloud Firestore library for Python. Make sure '
+ 'to install the "google-cloud-firestore" module.')
+
+from firebase_admin import _utils
+
+
+_FIRESTORE_ATTRIBUTE = '_firestore'
+
+
+def client(app=None):
+ """Returns a client that can be used to interact with Google Cloud Firestore.
+
+ Args:
+ app: An App instance (optional).
+
+ Returns:
+ google.cloud.firestore.Firestore: A `Firestore Client`_.
+
+ Raises:
+ ValueError: If a project ID is not specified either via options, credentials or
+ environment variables, or if the specified project ID is not a valid string.
+
+ .. _Firestore Client: https://googlecloudplatform.github.io/google-cloud-python/latest\
+ /firestore/client.html
+ """
+ fs_client = _utils.get_app_service(app, _FIRESTORE_ATTRIBUTE, _FirestoreClient.from_app)
+ return fs_client.get()
+
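+# Illustrative sketch (not part of the SDK): the returned object is a regular
+# google.cloud.firestore client. The collection and document names below are
+# hypothetical.
+#
+#     from firebase_admin import firestore
+#
+#     fs = firestore.client()
+#     fs.collection('vehicles').document('plate-34XYZ').set({'stolen': False})
+#     doc = fs.collection('vehicles').document('plate-34XYZ').get()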
+
+class _FirestoreClient:
+ """Holds a Google Cloud Firestore client instance."""
+
+ def __init__(self, credentials, project):
+ self._client = firestore.Client(credentials=credentials, project=project)
+
+ def get(self):
+ return self._client
+
+ @classmethod
+ def from_app(cls, app):
+ """Creates a new _FirestoreClient for the specified app."""
+ credentials = app.credential.get_credential()
+ project = app.project_id
+ if not project:
+ raise ValueError(
+ 'Project ID is required to access Firestore. Either set the projectId option, '
+ 'or use service account credentials. Alternatively, set the GOOGLE_CLOUD_PROJECT '
+ 'environment variable.')
+ return _FirestoreClient(credentials, project)
diff --git a/venv/Lib/site-packages/firebase_admin/instance_id.py b/venv/Lib/site-packages/firebase_admin/instance_id.py
new file mode 100644
index 000000000..604158d9c
--- /dev/null
+++ b/venv/Lib/site-packages/firebase_admin/instance_id.py
@@ -0,0 +1,99 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Firebase Instance ID module.
+
+This module enables deleting instance IDs associated with Firebase projects.
+"""
+
+import requests
+
+from firebase_admin import _http_client
+from firebase_admin import _utils
+
+
+_IID_SERVICE_URL = 'https://console.firebase.google.com/v1/'
+_IID_ATTRIBUTE = '_iid'
+
+
+def _get_iid_service(app):
+ return _utils.get_app_service(app, _IID_ATTRIBUTE, _InstanceIdService)
+
+
+def delete_instance_id(instance_id, app=None):
+ """Deletes the specified instance ID and the associated data from Firebase.
+
+ Note that Google Analytics for Firebase uses its own form of Instance ID to
+ keep track of analytics data. Therefore deleting a regular Instance ID does
+ not delete Analytics data. See `Delete an Instance ID`_ for more information.
+
+ Args:
+ instance_id: A non-empty instance ID string.
+ app: An App instance (optional).
+
+ Raises:
+ InstanceIdError: If an error occurs while invoking the backend instance ID service.
+ ValueError: If the specified instance ID or app is invalid.
+
+ .. _Delete an Instance ID: https://firebase.google.com/support/privacy\
+ /manage-iids#delete_an_instance_id
+ """
+ _get_iid_service(app).delete_instance_id(instance_id)
+
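+# Illustrative sketch (not part of the SDK): the instance ID below is a
+# placeholder value.
+#
+#     from firebase_admin import instance_id
+#
+#     instance_id.delete_instance_id('EXAMPLE_INSTANCE_ID')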
+
+class _InstanceIdService:
+ """Provides methods for interacting with the remote instance ID service."""
+
+ error_codes = {
+ 400: 'Malformed instance ID argument.',
+ 401: 'Request not authorized.',
+ 403: 'Project does not match instance ID or the client does not have '
+ 'sufficient privileges.',
+ 404: 'Failed to find the instance ID.',
+ 409: 'Already deleted.',
+ 429: 'Request throttled by the backend server.',
+ 500: 'Internal server error.',
+ 503: 'Backend servers are over capacity. Try again later.'
+ }
+
+ def __init__(self, app):
+ project_id = app.project_id
+ if not project_id:
+ raise ValueError(
+ 'Project ID is required to access Instance ID service. Either set the projectId '
+ 'option, or use service account credentials. Alternatively, set the '
+ 'GOOGLE_CLOUD_PROJECT environment variable.')
+ self._project_id = project_id
+ self._client = _http_client.JsonHttpClient(
+ credential=app.credential.get_credential(), base_url=_IID_SERVICE_URL)
+
+ def delete_instance_id(self, instance_id):
+ if not isinstance(instance_id, str) or not instance_id:
+ raise ValueError('Instance ID must be a non-empty string.')
+ path = 'project/{0}/instanceId/{1}'.format(self._project_id, instance_id)
+ try:
+ self._client.request('delete', path)
+ except requests.exceptions.RequestException as error:
+ msg = self._extract_message(instance_id, error)
+ raise _utils.handle_requests_error(error, msg)
+
+ def _extract_message(self, instance_id, error):
+ if error.response is None:
+ return None
+ status = error.response.status_code
+ msg = self.error_codes.get(status)
+ if msg:
+ return 'Instance ID "{0}": {1}'.format(instance_id, msg)
+
+ return 'Instance ID "{0}": {1}'.format(instance_id, error)
diff --git a/venv/Lib/site-packages/firebase_admin/messaging.py b/venv/Lib/site-packages/firebase_admin/messaging.py
new file mode 100644
index 000000000..217cf0a56
--- /dev/null
+++ b/venv/Lib/site-packages/firebase_admin/messaging.py
@@ -0,0 +1,495 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Firebase Cloud Messaging module."""
+
+import json
+
+import googleapiclient
+from googleapiclient import http
+from googleapiclient import _auth
+import requests
+
+import firebase_admin
+from firebase_admin import _http_client
+from firebase_admin import _messaging_encoder
+from firebase_admin import _messaging_utils
+from firebase_admin import _utils
+
+
+_MESSAGING_ATTRIBUTE = '_messaging'
+
+
+__all__ = [
+ 'AndroidConfig',
+ 'AndroidFCMOptions',
+ 'AndroidNotification',
+ 'APNSConfig',
+ 'APNSFCMOptions',
+ 'APNSPayload',
+ 'Aps',
+ 'ApsAlert',
+ 'BatchResponse',
+ 'CriticalSound',
+ 'ErrorInfo',
+ 'FCMOptions',
+ 'LightSettings',
+ 'Message',
+ 'MulticastMessage',
+ 'Notification',
+ 'QuotaExceededError',
+ 'SenderIdMismatchError',
+ 'SendResponse',
+ 'ThirdPartyAuthError',
+ 'TopicManagementResponse',
+ 'UnregisteredError',
+ 'WebpushConfig',
+ 'WebpushFCMOptions',
+ 'WebpushNotification',
+ 'WebpushNotificationAction',
+
+ 'send',
+ 'send_all',
+ 'send_multicast',
+ 'subscribe_to_topic',
+ 'unsubscribe_from_topic',
+]
+
+
+AndroidConfig = _messaging_utils.AndroidConfig
+AndroidFCMOptions = _messaging_utils.AndroidFCMOptions
+AndroidNotification = _messaging_utils.AndroidNotification
+APNSConfig = _messaging_utils.APNSConfig
+APNSFCMOptions = _messaging_utils.APNSFCMOptions
+APNSPayload = _messaging_utils.APNSPayload
+Aps = _messaging_utils.Aps
+ApsAlert = _messaging_utils.ApsAlert
+CriticalSound = _messaging_utils.CriticalSound
+FCMOptions = _messaging_utils.FCMOptions
+LightSettings = _messaging_utils.LightSettings
+Message = _messaging_encoder.Message
+MulticastMessage = _messaging_encoder.MulticastMessage
+Notification = _messaging_utils.Notification
+WebpushConfig = _messaging_utils.WebpushConfig
+WebpushFCMOptions = _messaging_utils.WebpushFCMOptions
+WebpushNotification = _messaging_utils.WebpushNotification
+WebpushNotificationAction = _messaging_utils.WebpushNotificationAction
+
+QuotaExceededError = _messaging_utils.QuotaExceededError
+SenderIdMismatchError = _messaging_utils.SenderIdMismatchError
+ThirdPartyAuthError = _messaging_utils.ThirdPartyAuthError
+UnregisteredError = _messaging_utils.UnregisteredError
+
+
+def _get_messaging_service(app):
+ return _utils.get_app_service(app, _MESSAGING_ATTRIBUTE, _MessagingService)
+
+def send(message, dry_run=False, app=None):
+ """Sends the given message via Firebase Cloud Messaging (FCM).
+
+ If the ``dry_run`` mode is enabled, the message will not actually be delivered to the
+ recipients. Instead FCM performs all the usual validations, and emulates the send operation.
+
+ Args:
+ message: An instance of ``messaging.Message``.
+ dry_run: A boolean indicating whether to run the operation in dry run mode (optional).
+ app: An App instance (optional).
+
+ Returns:
+ string: A message ID string that uniquely identifies the sent message.
+
+ Raises:
+ FirebaseError: If an error occurs while sending the message to the FCM service.
+ ValueError: If the input arguments are invalid.
+ """
+ return _get_messaging_service(app).send(message, dry_run)
+
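+# Illustrative sketch (not part of the SDK): sending one notification to a
+# single device. The registration token is a placeholder.
+#
+#     from firebase_admin import messaging
+#
+#     message = messaging.Message(
+#         notification=messaging.Notification(title='Alert', body='Vehicle moved'),
+#         token='DEVICE_REGISTRATION_TOKEN',
+#     )
+#     message_id = messaging.send(message)
+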
+def send_all(messages, dry_run=False, app=None):
+ """Sends the given list of messages via Firebase Cloud Messaging as a single batch.
+
+ If the ``dry_run`` mode is enabled, the messages will not actually be delivered to the
+ recipients. Instead FCM performs all the usual validations, and emulates the send operation.
+
+ Args:
+ messages: A list of ``messaging.Message`` instances.
+ dry_run: A boolean indicating whether to run the operation in dry run mode (optional).
+ app: An App instance (optional).
+
+ Returns:
+ BatchResponse: A ``messaging.BatchResponse`` instance.
+
+ Raises:
+ FirebaseError: If an error occurs while sending the message to the FCM service.
+ ValueError: If the input arguments are invalid.
+ """
+ return _get_messaging_service(app).send_all(messages, dry_run)
+
+def send_multicast(multicast_message, dry_run=False, app=None):
+ """Sends the given mutlicast message to all tokens via Firebase Cloud Messaging (FCM).
+
+ If the ``dry_run`` mode is enabled, the message will not actually be delivered to the
+ recipients. Instead FCM performs all the usual validations, and emulates the send operation.
+
+ Args:
+ multicast_message: An instance of ``messaging.MulticastMessage``.
+ dry_run: A boolean indicating whether to run the operation in dry run mode (optional).
+ app: An App instance (optional).
+
+ Returns:
+ BatchResponse: A ``messaging.BatchResponse`` instance.
+
+ Raises:
+ FirebaseError: If an error occurs while sending the message to the FCM service.
+ ValueError: If the input arguments are invalid.
+ """
+ if not isinstance(multicast_message, MulticastMessage):
+ raise ValueError('Message must be an instance of messaging.MulticastMessage class.')
+ messages = [Message(
+ data=multicast_message.data,
+ notification=multicast_message.notification,
+ android=multicast_message.android,
+ webpush=multicast_message.webpush,
+ apns=multicast_message.apns,
+ fcm_options=multicast_message.fcm_options,
+ token=token
+ ) for token in multicast_message.tokens]
+ return _get_messaging_service(app).send_all(messages, dry_run)
+
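+# Illustrative sketch (not part of the SDK): fanning the same payload out to a
+# list of device tokens; the constructor arguments mirror Message, plus a
+# 'tokens' list (see the attribute usage above). Tokens are placeholders.
+#
+#     multicast = messaging.MulticastMessage(
+#         data={'event': 'theft_suspected'},
+#         tokens=['TOKEN_1', 'TOKEN_2'],
+#     )
+#     batch = messaging.send_multicast(multicast)
+#     print(batch.success_count, batch.failure_count)
+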
+def subscribe_to_topic(tokens, topic, app=None):
+ """Subscribes a list of registration tokens to an FCM topic.
+
+ Args:
+ tokens: A non-empty list of device registration tokens. List may not have more than 1000
+ elements.
+ topic: Name of the topic to subscribe to. May contain the ``/topics/`` prefix.
+ app: An App instance (optional).
+
+ Returns:
+ TopicManagementResponse: A ``TopicManagementResponse`` instance.
+
+ Raises:
+ FirebaseError: If an error occurs while communicating with instance ID service.
+ ValueError: If the input arguments are invalid.
+ """
+ return _get_messaging_service(app).make_topic_management_request(
+ tokens, topic, 'iid/v1:batchAdd')
+
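+# Illustrative sketch (not part of the SDK): topic management returns a
+# TopicManagementResponse with per-token error details. Tokens are placeholders.
+#
+#     response = messaging.subscribe_to_topic(['TOKEN_1', 'TOKEN_2'], 'alerts')
+#     for err in response.errors:
+#         print(err.index, err.reason)
+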
+def unsubscribe_from_topic(tokens, topic, app=None):
+ """Unsubscribes a list of registration tokens from an FCM topic.
+
+ Args:
+ tokens: A non-empty list of device registration tokens. List may not have more than 1000
+ elements.
+ topic: Name of the topic to unsubscribe from. May contain the ``/topics/`` prefix.
+ app: An App instance (optional).
+
+ Returns:
+ TopicManagementResponse: A ``TopicManagementResponse`` instance.
+
+ Raises:
+ FirebaseError: If an error occurs while communicating with instance ID service.
+ ValueError: If the input arguments are invalid.
+ """
+ return _get_messaging_service(app).make_topic_management_request(
+ tokens, topic, 'iid/v1:batchRemove')
+
+
+class ErrorInfo:
+ """An error encountered when performing a topic management operation."""
+
+ def __init__(self, index, reason):
+ self._index = index
+ self._reason = reason
+
+ @property
+ def index(self):
+ """Index of the registration token to which this error is related to."""
+ return self._index
+
+ @property
+ def reason(self):
+ """String describing the nature of the error."""
+ return self._reason
+
+
+class TopicManagementResponse:
+ """The response received from a topic management operation."""
+
+ def __init__(self, resp):
+ if not isinstance(resp, dict) or 'results' not in resp:
+ raise ValueError('Unexpected topic management response: {0}.'.format(resp))
+ self._success_count = 0
+ self._failure_count = 0
+ self._errors = []
+ for index, result in enumerate(resp['results']):
+ if 'error' in result:
+ self._failure_count += 1
+ self._errors.append(ErrorInfo(index, result['error']))
+ else:
+ self._success_count += 1
+
+ @property
+ def success_count(self):
+ """Number of tokens that were successfully subscribed or unsubscribed."""
+ return self._success_count
+
+ @property
+ def failure_count(self):
+ """Number of tokens that could not be subscribed or unsubscribed due to errors."""
+ return self._failure_count
+
+ @property
+ def errors(self):
+ """A list of ``messaging.ErrorInfo`` objects (possibly empty)."""
+ return self._errors
+
+
+class BatchResponse:
+ """The response received from a batch request to the FCM API."""
+
+ def __init__(self, responses):
+ self._responses = responses
+ self._success_count = len([resp for resp in responses if resp.success])
+
+ @property
+ def responses(self):
+ """A list of ``messaging.SendResponse`` objects (possibly empty)."""
+ return self._responses
+
+ @property
+ def success_count(self):
+ return self._success_count
+
+ @property
+ def failure_count(self):
+ return len(self.responses) - self.success_count
+
+
+class SendResponse:
+ """The response received from an individual batched request to the FCM API."""
+
+ def __init__(self, resp, exception):
+ self._exception = exception
+ self._message_id = None
+ if resp:
+ self._message_id = resp.get('name', None)
+
+ @property
+ def message_id(self):
+ """A message ID string that uniquely identifies the message."""
+ return self._message_id
+
+ @property
+ def success(self):
+ """A boolean indicating if the request was successful."""
+ return self._message_id is not None and not self._exception
+
+ @property
+ def exception(self):
+ """A ``FirebaseError`` if an error occurs while sending the message to the FCM service."""
+ return self._exception
+
+
+class _MessagingService:
+ """Service class that implements Firebase Cloud Messaging (FCM) functionality."""
+
+ FCM_URL = 'https://fcm.googleapis.com/v1/projects/{0}/messages:send'
+ FCM_BATCH_URL = 'https://fcm.googleapis.com/batch'
+ IID_URL = 'https://iid.googleapis.com'
+ IID_HEADERS = {'access_token_auth': 'true'}
+ JSON_ENCODER = _messaging_encoder.MessageEncoder()
+
+ FCM_ERROR_TYPES = {
+ 'APNS_AUTH_ERROR': ThirdPartyAuthError,
+ 'QUOTA_EXCEEDED': QuotaExceededError,
+ 'SENDER_ID_MISMATCH': SenderIdMismatchError,
+ 'THIRD_PARTY_AUTH_ERROR': ThirdPartyAuthError,
+ 'UNREGISTERED': UnregisteredError,
+ }
+
+ def __init__(self, app):
+ project_id = app.project_id
+ if not project_id:
+ raise ValueError(
+ 'Project ID is required to access Cloud Messaging service. Either set the '
+ 'projectId option, or use service account credentials. Alternatively, set the '
+ 'GOOGLE_CLOUD_PROJECT environment variable.')
+ self._fcm_url = _MessagingService.FCM_URL.format(project_id)
+ self._fcm_headers = {
+ 'X-GOOG-API-FORMAT-VERSION': '2',
+ 'X-FIREBASE-CLIENT': 'fire-admin-python/{0}'.format(firebase_admin.__version__),
+ }
+ timeout = app.options.get('httpTimeout', _http_client.DEFAULT_TIMEOUT_SECONDS)
+ self._client = _http_client.JsonHttpClient(
+ credential=app.credential.get_credential(), timeout=timeout)
+ self._transport = _auth.authorized_http(app.credential.get_credential())
+
+ @classmethod
+ def encode_message(cls, message):
+ if not isinstance(message, Message):
+ raise ValueError('Message must be an instance of messaging.Message class.')
+ return cls.JSON_ENCODER.default(message)
+
+ def send(self, message, dry_run=False):
+ """Sends the given message to FCM via the FCM v1 API."""
+ data = self._message_data(message, dry_run)
+ try:
+ resp = self._client.body(
+ 'post',
+ url=self._fcm_url,
+ headers=self._fcm_headers,
+ json=data
+ )
+ except requests.exceptions.RequestException as error:
+ raise self._handle_fcm_error(error)
+ else:
+ return resp['name']
+
+ def send_all(self, messages, dry_run=False):
+ """Sends the given messages to FCM via the batch API."""
+ if not isinstance(messages, list):
+ raise ValueError('messages must be a list of messaging.Message instances.')
+ if len(messages) > 500:
+ raise ValueError('messages must not contain more than 500 elements.')
+
+ responses = []
+
+ def batch_callback(_, response, error):
+ exception = None
+ if error:
+ exception = self._handle_batch_error(error)
+ send_response = SendResponse(response, exception)
+ responses.append(send_response)
+
+ batch = http.BatchHttpRequest(
+ callback=batch_callback, batch_uri=_MessagingService.FCM_BATCH_URL)
+ for message in messages:
+ body = json.dumps(self._message_data(message, dry_run))
+ req = http.HttpRequest(
+ http=self._transport,
+ postproc=self._postproc,
+ uri=self._fcm_url,
+ method='POST',
+ body=body,
+ headers=self._fcm_headers
+ )
+ batch.add(req)
+
+ try:
+ batch.execute()
+ except googleapiclient.http.HttpError as error:
+ raise self._handle_batch_error(error)
+ else:
+ return BatchResponse(responses)
+
+ def make_topic_management_request(self, tokens, topic, operation):
+ """Invokes the IID service for topic management functionality."""
+ if isinstance(tokens, str):
+ tokens = [tokens]
+ if not isinstance(tokens, list) or not tokens:
+ raise ValueError('Tokens must be a string or a non-empty list of strings.')
+ invalid_str = [t for t in tokens if not isinstance(t, str) or not t]
+ if invalid_str:
+ raise ValueError('Tokens must be non-empty strings.')
+
+ if not isinstance(topic, str) or not topic:
+ raise ValueError('Topic must be a non-empty string.')
+ if not topic.startswith('/topics/'):
+ topic = '/topics/{0}'.format(topic)
+ data = {
+ 'to': topic,
+ 'registration_tokens': tokens,
+ }
+ url = '{0}/{1}'.format(_MessagingService.IID_URL, operation)
+ try:
+ resp = self._client.body(
+ 'post',
+ url=url,
+ json=data,
+ headers=_MessagingService.IID_HEADERS
+ )
+ except requests.exceptions.RequestException as error:
+ raise self._handle_iid_error(error)
+ else:
+ return TopicManagementResponse(resp)
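
Note how bare topic names are normalized above: a leading `/topics/` prefix is added when absent, so both call forms below address the same topic (a sketch, with a made-up token):

```python
from firebase_admin import messaging

# Equivalent calls: the service prefixes bare names with '/topics/'.
messaging.unsubscribe_from_topic(['token-1'], 'news')
messaging.unsubscribe_from_topic(['token-1'], '/topics/news')
```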
+
+ def _message_data(self, message, dry_run):
+ data = {'message': _MessagingService.encode_message(message)}
+ if dry_run:
+ data['validate_only'] = True
+ return data
+
+ def _postproc(self, _, body):
+ """Handle response from batch API request."""
+ # This only gets called for 2xx responses.
+ return json.loads(body.decode())
+
+ def _handle_fcm_error(self, error):
+ """Handles errors received from the FCM API."""
+ return _utils.handle_platform_error_from_requests(
+ error, _MessagingService._build_fcm_error_requests)
+
+ def _handle_iid_error(self, error):
+ """Handles errors received from the Instance ID API."""
+ if error.response is None:
+ raise _utils.handle_requests_error(error)
+
+ data = {}
+ try:
+ parsed_body = error.response.json()
+ if isinstance(parsed_body, dict):
+ data = parsed_body
+ except ValueError:
+ pass
+
+ # IID error response format: {"error": "ErrorCode"}
+ code = data.get('error')
+ msg = None
+ if code:
+ msg = 'Error while calling the IID service: {0}'.format(code)
+ else:
+ msg = 'Unexpected HTTP response with status: {0}; body: {1}'.format(
+ error.response.status_code, error.response.content.decode())
+
+ return _utils.handle_requests_error(error, msg)
+
+ def _handle_batch_error(self, error):
+ """Handles errors received from the googleapiclient while making batch requests."""
+ return _utils.handle_platform_error_from_googleapiclient(
+ error, _MessagingService._build_fcm_error_googleapiclient)
+
+ @classmethod
+ def _build_fcm_error_requests(cls, error, message, error_dict):
+ """Parses an error response from the FCM API and creates a FCM-specific exception if
+ appropriate."""
+ exc_type = cls._build_fcm_error(error_dict)
+ return exc_type(message, cause=error, http_response=error.response) if exc_type else None
+
+ @classmethod
+ def _build_fcm_error_googleapiclient(cls, error, message, error_dict, http_response):
+ """Parses an error response from the FCM API and creates a FCM-specific exception if
+ appropriate."""
+ exc_type = cls._build_fcm_error(error_dict)
+ return exc_type(message, cause=error, http_response=http_response) if exc_type else None
+
+ @classmethod
+ def _build_fcm_error(cls, error_dict):
+ if not error_dict:
+ return None
+ fcm_code = None
+ for detail in error_dict.get('details', []):
+ if detail.get('@type') == 'type.googleapis.com/google.firebase.fcm.v1.FcmError':
+ fcm_code = detail.get('errorCode')
+ break
+ return _MessagingService.FCM_ERROR_TYPES.get(fcm_code)
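
The `details` list inspected by `_build_fcm_error` comes from the FCM v1 error payload. The following illustrative payload (values made up) would map to `UnregisteredError` via `FCM_ERROR_TYPES`:

```python
# Illustrative FCM v1 error payload; only the FcmError detail matters here.
error_dict = {
    'status': 'NOT_FOUND',
    'message': 'Requested entity was not found.',
    'details': [{
        '@type': 'type.googleapis.com/google.firebase.fcm.v1.FcmError',
        'errorCode': 'UNREGISTERED',
    }],
}
# _MessagingService._build_fcm_error(error_dict) returns UnregisteredError.
```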
diff --git a/venv/Lib/site-packages/firebase_admin/ml.py b/venv/Lib/site-packages/firebase_admin/ml.py
new file mode 100644
index 000000000..bcc4b9390
--- /dev/null
+++ b/venv/Lib/site-packages/firebase_admin/ml.py
@@ -0,0 +1,983 @@
+# Copyright 2019 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Firebase ML module.
+
+This module contains functions for creating, updating, getting, listing,
+deleting, publishing and unpublishing Firebase ML models.
+"""
+
+
+import datetime
+import re
+import time
+import os
+from urllib import parse
+
+import requests
+
+import firebase_admin
+from firebase_admin import _http_client
+from firebase_admin import _utils
+from firebase_admin import exceptions
+
+# pylint: disable=import-error,no-name-in-module
+try:
+ from firebase_admin import storage
+ _GCS_ENABLED = True
+except ImportError:
+ _GCS_ENABLED = False
+
+# pylint: disable=import-error,no-name-in-module
+try:
+ import tensorflow as tf
+ _TF_ENABLED = True
+except ImportError:
+ _TF_ENABLED = False
+
+_ML_ATTRIBUTE = '_ml'
+_MAX_PAGE_SIZE = 100
+_MODEL_ID_PATTERN = re.compile(r'^[A-Za-z0-9_-]{1,60}$')
+_DISPLAY_NAME_PATTERN = re.compile(r'^[A-Za-z0-9_-]{1,32}$')
+_TAG_PATTERN = re.compile(r'^[A-Za-z0-9_-]{1,32}$')
+_GCS_TFLITE_URI_PATTERN = re.compile(
+ r'^gs://(?P<bucket_name>[a-z0-9_.-]{3,63})/(?P<blob_name>.+)$')
+_AUTO_ML_MODEL_PATTERN = re.compile(
+ r'^projects/(?P<project_id>[a-z0-9-]{6,30})/locations/(?P<location_id>[^/]+)/' +
+ r'models/(?P<model_id>[A-Za-z0-9]+)$')
+_RESOURCE_NAME_PATTERN = re.compile(
+ r'^projects/(?P<project_id>[a-z0-9-]{6,30})/models/(?P<model_id>[A-Za-z0-9_-]{1,60})$')
+_OPERATION_NAME_PATTERN = re.compile(
+ r'^projects/(?P<project_id>[a-z0-9-]{6,30})/operations/[^/]+$')
+
+
+def _get_ml_service(app):
+ """ Returns an _MLService instance for an App.
+
+ Args:
+ app: A Firebase App instance (or None to use the default App).
+
+ Returns:
+ _MLService: An _MLService for the specified App instance.
+
+ Raises:
+ ValueError: If the app argument is invalid.
+ """
+ return _utils.get_app_service(app, _ML_ATTRIBUTE, _MLService)
+
+
+def create_model(model, app=None):
+ """Creates a model in the current Firebase project.
+
+ Args:
+ model: An ml.Model to create.
+ app: A Firebase app instance (or None to use the default app).
+
+ Returns:
+ Model: The model that was created in Firebase ML.
+ """
+ ml_service = _get_ml_service(app)
+ return Model.from_dict(ml_service.create_model(model), app=app)
+
+
+def update_model(model, app=None):
+ """Updates a model's metadata or model file.
+
+ Args:
+ model: The ml.Model to update.
+ app: A Firebase app instance (or None to use the default app).
+
+ Returns:
+ Model: The updated model.
+ """
+ ml_service = _get_ml_service(app)
+ return Model.from_dict(ml_service.update_model(model), app=app)
+
+
+def publish_model(model_id, app=None):
+ """Publishes a Firebase ML model.
+
+ A published model can be downloaded to client apps.
+
+ Args:
+ model_id: The id of the model to publish.
+ app: A Firebase app instance (or None to use the default app).
+
+ Returns:
+ Model: The published model.
+ """
+ ml_service = _get_ml_service(app)
+ return Model.from_dict(ml_service.set_published(model_id, publish=True), app=app)
+
+
+def unpublish_model(model_id, app=None):
+ """Unpublishes a Firebase ML model.
+
+ Args:
+ model_id: The id of the model to unpublish.
+ app: A Firebase app instance (or None to use the default app).
+
+ Returns:
+ Model: The unpublished model.
+ """
+ ml_service = _get_ml_service(app)
+ return Model.from_dict(ml_service.set_published(model_id, publish=False), app=app)
+
+
+def get_model(model_id, app=None):
+ """Gets the model specified by the given ID.
+
+ Args:
+ model_id: The id of the model to get.
+ app: A Firebase app instance (or None to use the default app).
+
+ Returns:
+ Model: The requested model.
+ """
+ ml_service = _get_ml_service(app)
+ return Model.from_dict(ml_service.get_model(model_id), app=app)
+
+
+def list_models(list_filter=None, page_size=None, page_token=None, app=None):
+ """Lists the current project's models.
+
+ Args:
+ list_filter: A list filter string such as ``tags:'tag_1'``. None will return all models.
+ page_size: A number between 1 and 100 inclusive that specifies the maximum
+ number of models to return per page. None for default.
+ page_token: A next page token returned from a previous page of results. None
+ for first page of results.
+ app: A Firebase app instance (or None to use the default app).
+
+ Returns:
+ ListModelsPage: A (filtered) list of models.
+ """
+ ml_service = _get_ml_service(app)
+ return ListModelsPage(
+ ml_service.list_models, list_filter, page_size, page_token, app=app)
+
+
+def delete_model(model_id, app=None):
+ """Deletes a model from the current project.
+
+ Args:
+ model_id: The id of the model you wish to delete.
+ app: A Firebase app instance (or None to use the default app).
+ """
+ ml_service = _get_ml_service(app)
+ ml_service.delete_model(model_id)
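
Taken together, the module-level functions above support a create-and-publish workflow. A minimal sketch, assuming a local `model.tflite` file, default credentials, and a default Storage bucket configured on the app:

```python
from firebase_admin import ml

# Upload the local file to the default bucket and wrap it as a model source.
source = ml.TFLiteGCSModelSource.from_tflite_model_file('model.tflite')
model = ml.Model(
    display_name='my_model',                    # hypothetical name
    tags=['example'],
    model_format=ml.TFLiteFormat(model_source=source))

created = ml.create_model(model)
created.wait_for_unlocked(max_time_seconds=60)  # wait for server-side validation
published = ml.publish_model(created.model_id)
print(published.published)                      # True once clients can download it
```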
+
+
+class Model:
+ """A Firebase ML Model object.
+
+ Args:
+ display_name: The display name of your model - used to identify your model in code.
+ tags: Optional list of strings associated with your model. Can be used in list queries.
+ model_format: A ModelFormat subclass (e.g. TFLiteFormat) that specifies the model details.
+ """
+ def __init__(self, display_name=None, tags=None, model_format=None):
+ self._app = None # Only needed for wait_for_unlocked()
+ self._data = {}
+ self._model_format = None
+
+ if display_name is not None:
+ self.display_name = display_name
+ if tags is not None:
+ self.tags = tags
+ if model_format is not None:
+ self.model_format = model_format
+
+ @classmethod
+ def from_dict(cls, data, app=None):
+ """Create an instance of the object from a dict."""
+ data_copy = dict(data)
+ tflite_format = None
+ tflite_format_data = data_copy.pop('tfliteModel', None)
+ data_copy.pop('@type', None) # Returned by Operations. (Not needed)
+ if tflite_format_data:
+ tflite_format = TFLiteFormat.from_dict(tflite_format_data)
+ model = Model(model_format=tflite_format)
+ model._data = data_copy # pylint: disable=protected-access
+ model._app = app # pylint: disable=protected-access
+ return model
+
+ def _update_from_dict(self, data):
+ copy = Model.from_dict(data)
+ self.model_format = copy.model_format
+ self._data = copy._data # pylint: disable=protected-access
+
+ def __eq__(self, other):
+ if isinstance(other, self.__class__):
+ # pylint: disable=protected-access
+ return self._data == other._data and self._model_format == other._model_format
+ return False
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ @property
+ def model_id(self):
+ """The model's ID, unique to the project."""
+ if not self._data.get('name'):
+ return None
+ _, model_id = _validate_and_parse_name(self._data.get('name'))
+ return model_id
+
+ @property
+ def display_name(self):
+ """The model's display name, used to refer to the model in code and in
+ the Firebase console."""
+ return self._data.get('displayName')
+
+ @display_name.setter
+ def display_name(self, display_name):
+ self._data['displayName'] = _validate_display_name(display_name)
+ return self
+
+ @staticmethod
+ def _convert_to_millis(date_string):
+ if not date_string:
+ return None
+ format_str = '%Y-%m-%dT%H:%M:%S.%fZ'
+ epoch = datetime.datetime.utcfromtimestamp(0)
+ datetime_object = datetime.datetime.strptime(date_string, format_str)
+ millis = int((datetime_object - epoch).total_seconds() * 1000)
+ return millis
+
+ @property
+ def create_time(self):
+ """The time the model was created."""
+ return Model._convert_to_millis(self._data.get('createTime', None))
+
+ @property
+ def update_time(self):
+ """The time the model was last updated."""
+ return Model._convert_to_millis(self._data.get('updateTime', None))
+
+ @property
+ def validation_error(self):
+ """Validation error message."""
+ return self._data.get('state', {}).get('validationError', {}).get('message')
+
+ @property
+ def published(self):
+ """True if the model is published and available for clients to
+ download."""
+ return bool(self._data.get('state', {}).get('published'))
+
+ @property
+ def etag(self):
+ """The entity tag (ETag) of the model resource."""
+ return self._data.get('etag')
+
+ @property
+ def model_hash(self):
+ """SHA256 hash of the model binary."""
+ return self._data.get('modelHash')
+
+ @property
+ def tags(self):
+ """Tag strings, used for filtering query results."""
+ return self._data.get('tags')
+
+ @tags.setter
+ def tags(self, tags):
+ self._data['tags'] = _validate_tags(tags)
+ return self
+
+ @property
+ def locked(self):
+ """True if the Model object is locked by an active operation."""
+ return bool(self._data.get('activeOperations') and
+ len(self._data.get('activeOperations')) > 0)
+
+ def wait_for_unlocked(self, max_time_seconds=None):
+ """Waits for the model to be unlocked. (All active operations complete)
+
+ Args:
+ max_time_seconds: The maximum number of seconds to wait for the model to unlock.
+ (None for no limit)
+
+ Raises:
+ exceptions.DeadlineExceeded: If max_time_seconds passed and the model is still locked.
+ """
+ if not self.locked:
+ return
+ ml_service = _get_ml_service(self._app)
+ op_name = self._data.get('activeOperations')[0].get('name')
+ model_dict = ml_service.handle_operation(
+ ml_service.get_operation(op_name),
+ wait_for_operation=True,
+ max_time_seconds=max_time_seconds)
+ self._update_from_dict(model_dict)
+
+ @property
+ def model_format(self):
+ """The model's ``ModelFormat`` object, which represents the model's
+ format and storage location."""
+ return self._model_format
+
+ @model_format.setter
+ def model_format(self, model_format):
+ if model_format is not None:
+ _validate_model_format(model_format)
+ self._model_format = model_format # Can be None
+ return self
+
+ def as_dict(self, for_upload=False):
+ """Returns a serializable representation of the object."""
+ copy = dict(self._data)
+ if self._model_format:
+ copy.update(self._model_format.as_dict(for_upload=for_upload))
+ return copy
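
`create_time` and `update_time` are returned as epoch milliseconds via the internal `_convert_to_millis` helper. A quick worked check of the conversion (one day after the epoch is exactly 86,400,000 ms):

```python
from firebase_admin import ml

# _convert_to_millis is an internal helper; shown here only to illustrate
# the epoch-millisecond convention used by create_time/update_time.
assert ml.Model._convert_to_millis('1970-01-02T00:00:00.000Z') == 86400000
```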
+
+
+class ModelFormat:
+ """Abstract base class representing a Model Format such as TFLite."""
+ def as_dict(self, for_upload=False):
+ """Returns a serializable representation of the object."""
+ raise NotImplementedError
+
+
+class TFLiteFormat(ModelFormat):
+ """Model format representing a TFLite model.
+
+ Args:
+ model_source: A TFLiteModelSource subclass specifying the details of the model source.
+ """
+ def __init__(self, model_source=None):
+ self._data = {}
+ self._model_source = None
+
+ if model_source is not None:
+ self.model_source = model_source
+
+ @classmethod
+ def from_dict(cls, data):
+ """Create an instance of the object from a dict."""
+ data_copy = dict(data)
+ tflite_format = TFLiteFormat(model_source=cls._init_model_source(data_copy))
+ tflite_format._data = data_copy # pylint: disable=protected-access
+ return tflite_format
+
+ def __eq__(self, other):
+ if isinstance(other, self.__class__):
+ # pylint: disable=protected-access
+ return self._data == other._data and self._model_source == other._model_source
+ return False
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ @staticmethod
+ def _init_model_source(data):
+ gcs_tflite_uri = data.pop('gcsTfliteUri', None)
+ if gcs_tflite_uri:
+ return TFLiteGCSModelSource(gcs_tflite_uri=gcs_tflite_uri)
+ auto_ml_model = data.pop('automlModel', None)
+ if auto_ml_model:
+ return TFLiteAutoMlSource(auto_ml_model=auto_ml_model)
+ return None
+
+ @property
+ def model_source(self):
+ """The TF Lite model's location."""
+ return self._model_source
+
+ @model_source.setter
+ def model_source(self, model_source):
+ if model_source is not None:
+ if not isinstance(model_source, TFLiteModelSource):
+ raise TypeError('Model source must be a TFLiteModelSource object.')
+ self._model_source = model_source # Can be None
+
+ @property
+ def size_bytes(self):
+ """The size in bytes of the TF Lite model."""
+ return self._data.get('sizeBytes')
+
+ def as_dict(self, for_upload=False):
+ """Returns a serializable representation of the object."""
+ copy = dict(self._data)
+ if self._model_source:
+ copy.update(self._model_source.as_dict(for_upload=for_upload))
+ return {'tfliteModel': copy}
+
+
+class TFLiteModelSource:
+ """Abstract base class representing a model source for TFLite format models."""
+ def as_dict(self, for_upload=False):
+ """Returns a serializable representation of the object."""
+ raise NotImplementedError
+
+
+class _CloudStorageClient:
+ """Cloud Storage helper class"""
+
+ GCS_URI = 'gs://{0}/{1}'
+ BLOB_NAME = 'Firebase/ML/Models/{0}'
+
+ @staticmethod
+ def _assert_gcs_enabled():
+ if not _GCS_ENABLED:
+ raise ImportError('Failed to import the Cloud Storage library for Python. Make sure '
+ 'to install the "google-cloud-storage" module.')
+
+ @staticmethod
+ def _parse_gcs_tflite_uri(uri):
+ # GCS Bucket naming rules are complex. The regex is not comprehensive.
+ # See https://cloud.google.com/storage/docs/naming for full details.
+ matcher = _GCS_TFLITE_URI_PATTERN.match(uri)
+ if not matcher:
+ raise ValueError('GCS TFLite URI format is invalid.')
+ return matcher.group('bucket_name'), matcher.group('blob_name')
+
+ @staticmethod
+ def upload(bucket_name, model_file_name, app):
+ """Upload a model file to the specified Storage bucket."""
+ _CloudStorageClient._assert_gcs_enabled()
+
+ file_name = os.path.basename(model_file_name)
+ bucket = storage.bucket(bucket_name, app=app)
+ blob_name = _CloudStorageClient.BLOB_NAME.format(file_name)
+ blob = bucket.blob(blob_name)
+ blob.upload_from_filename(model_file_name)
+ return _CloudStorageClient.GCS_URI.format(bucket.name, blob_name)
+
+ @staticmethod
+ def sign_uri(gcs_tflite_uri, app):
+ """Makes the gcs_tflite_uri readable for GET for 10 minutes via signed_uri."""
+ _CloudStorageClient._assert_gcs_enabled()
+ bucket_name, blob_name = _CloudStorageClient._parse_gcs_tflite_uri(gcs_tflite_uri)
+ bucket = storage.bucket(bucket_name, app=app)
+ blob = bucket.blob(blob_name)
+ return blob.generate_signed_url(
+ version='v4',
+ expiration=datetime.timedelta(minutes=10),
+ method='GET'
+ )
+
+
+class TFLiteGCSModelSource(TFLiteModelSource):
+ """TFLite model source representing a tflite model file stored in GCS."""
+
+ _STORAGE_CLIENT = _CloudStorageClient()
+
+ def __init__(self, gcs_tflite_uri, app=None):
+ self._app = app
+ self._gcs_tflite_uri = _validate_gcs_tflite_uri(gcs_tflite_uri)
+
+ def __eq__(self, other):
+ if isinstance(other, self.__class__):
+ return self._gcs_tflite_uri == other._gcs_tflite_uri # pylint: disable=protected-access
+ return False
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ @classmethod
+ def from_tflite_model_file(cls, model_file_name, bucket_name=None, app=None):
+ """Uploads the model file to an existing Google Cloud Storage bucket.
+
+ Args:
+ model_file_name: The name of the model file.
+ bucket_name: The name of an existing bucket. None to use the default bucket configured
+ in the app.
+ app: A Firebase app instance (or None to use the default app).
+
+ Returns:
+ TFLiteGCSModelSource: The source created from the model_file
+
+ Raises:
+ ImportError: If the Cloud Storage Library has not been installed.
+ """
+ gcs_uri = TFLiteGCSModelSource._STORAGE_CLIENT.upload(bucket_name, model_file_name, app)
+ return TFLiteGCSModelSource(gcs_tflite_uri=gcs_uri, app=app)
+
+ @staticmethod
+ def _assert_tf_enabled():
+ if not _TF_ENABLED:
+ raise ImportError('Failed to import the tensorflow library for Python. Make sure '
+ 'to install the tensorflow module.')
+ if not tf.version.VERSION.startswith('1.') and not tf.version.VERSION.startswith('2.'):
+ raise ImportError('Expected tensorflow version 1.x or 2.x, but found {0}'
+ .format(tf.version.VERSION))
+
+ @staticmethod
+ def _tf_convert_from_saved_model(saved_model_dir):
+ # Same for both v1.x and v2.x
+ converter = tf.lite.TFLiteConverter.from_saved_model(saved_model_dir)
+ return converter.convert()
+
+ @staticmethod
+ def _tf_convert_from_keras_model(keras_model):
+ """Converts the given Keras model into a TF Lite model."""
+ # Version 1.x conversion function takes a model file. Version 2.x takes the model itself.
+ if tf.version.VERSION.startswith('1.'):
+ keras_file = 'firebase_keras_model.h5'
+ tf.keras.models.save_model(keras_model, keras_file)
+ converter = tf.lite.TFLiteConverter.from_keras_model_file(keras_file)
+ else:
+ converter = tf.lite.TFLiteConverter.from_keras_model(keras_model)
+
+ return converter.convert()
+
+ @classmethod
+ def from_saved_model(cls, saved_model_dir, model_file_name='firebase_ml_model.tflite',
+ bucket_name=None, app=None):
+ """Creates a Tensor Flow Lite model from the saved model, and uploads the model to GCS.
+
+ Args:
+ saved_model_dir: The saved model directory.
+ model_file_name: The name that the tflite model will be saved as in Cloud Storage.
+ bucket_name: The name of an existing bucket. None to use the default bucket configured
+ in the app.
+ app: Optional. A Firebase app instance (or None to use the default app)
+
+ Returns:
+ TFLiteGCSModelSource: The source created from the saved_model_dir
+
+ Raises:
+ ImportError: If the TensorFlow or Cloud Storage libraries have not been installed.
+ """
+ TFLiteGCSModelSource._assert_tf_enabled()
+ tflite_model = TFLiteGCSModelSource._tf_convert_from_saved_model(saved_model_dir)
+ with open(model_file_name, 'wb') as model_file:
+ model_file.write(tflite_model)
+ return TFLiteGCSModelSource.from_tflite_model_file(model_file_name, bucket_name, app)
+
+ @classmethod
+ def from_keras_model(cls, keras_model, model_file_name='firebase_ml_model.tflite',
+ bucket_name=None, app=None):
+ """Creates a Tensor Flow Lite model from the keras model, and uploads the model to GCS.
+
+ Args:
+ keras_model: A tf.keras model.
+ model_file_name: The name that the tflite model will be saved as in Cloud Storage.
+ bucket_name: The name of an existing bucket. None to use the default bucket configured
+ in the app.
+ app: Optional. A Firebase app instance (or None to use the default app)
+
+ Returns:
+ TFLiteGCSModelSource: The source created from the keras_model
+
+ Raises:
+ ImportError: If the TensorFlow or Cloud Storage libraries have not been installed.
+ """
+ TFLiteGCSModelSource._assert_tf_enabled()
+ tflite_model = TFLiteGCSModelSource._tf_convert_from_keras_model(keras_model)
+ with open(model_file_name, 'wb') as model_file:
+ model_file.write(tflite_model)
+ return TFLiteGCSModelSource.from_tflite_model_file(model_file_name, bucket_name, app)
+
+ @property
+ def gcs_tflite_uri(self):
+ """URI of the model file in Cloud Storage."""
+ return self._gcs_tflite_uri
+
+ @gcs_tflite_uri.setter
+ def gcs_tflite_uri(self, gcs_tflite_uri):
+ self._gcs_tflite_uri = _validate_gcs_tflite_uri(gcs_tflite_uri)
+
+ def _get_signed_gcs_tflite_uri(self):
+ """Signs the GCS uri, so the model file can be uploaded to Firebase ML and verified."""
+ return TFLiteGCSModelSource._STORAGE_CLIENT.sign_uri(self._gcs_tflite_uri, self._app)
+
+ def as_dict(self, for_upload=False):
+ """Returns a serializable representation of the object."""
+ if for_upload:
+ return {'gcsTfliteUri': self._get_signed_gcs_tflite_uri()}
+
+ return {'gcsTfliteUri': self._gcs_tflite_uri}
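
A sketch of the Keras path above, assuming TensorFlow 2.x and the `google-cloud-storage` dependency are installed, and that `keras_model` is a hypothetical compiled `tf.keras` model:

```python
from firebase_admin import ml

source = ml.TFLiteGCSModelSource.from_keras_model(
    keras_model, model_file_name='my_model.tflite')
# The file is uploaded under Firebase/ML/Models/ in the default bucket.
print(source.gcs_tflite_uri)
```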
+
+
+class TFLiteAutoMlSource(TFLiteModelSource):
+ """TFLite model source representing a tflite model created with AutoML."""
+
+ def __init__(self, auto_ml_model, app=None):
+ self._app = app
+ self.auto_ml_model = auto_ml_model
+
+ def __eq__(self, other):
+ if isinstance(other, self.__class__):
+ return self.auto_ml_model == other.auto_ml_model
+ return False
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ @property
+ def auto_ml_model(self):
+ """Resource name of the model, created by the AutoML API or Cloud console."""
+ return self._auto_ml_model
+
+ @auto_ml_model.setter
+ def auto_ml_model(self, auto_ml_model):
+ self._auto_ml_model = _validate_auto_ml_model(auto_ml_model)
+
+ def as_dict(self, for_upload=False):
+ """Returns a serializable representation of the object."""
+ # Upload is irrelevant for auto_ml models
+ return {'automlModel': self._auto_ml_model}
+
+
+class ListModelsPage:
+ """Represents a page of models in a Firebase project.
+
+ Provides methods for traversing the models included in this page, as well as
+ retrieving subsequent pages of models. The iterator returned by
+ ``iterate_all()`` can be used to iterate through all the models in the
+ Firebase project starting from this page.
+ """
+ def __init__(self, list_models_func, list_filter, page_size, page_token, app):
+ self._list_models_func = list_models_func
+ self._list_filter = list_filter
+ self._page_size = page_size
+ self._page_token = page_token
+ self._app = app
+ self._list_response = list_models_func(list_filter, page_size, page_token)
+
+ @property
+ def models(self):
+ """A list of Models from this page."""
+ return [
+ Model.from_dict(model, app=self._app) for model in self._list_response.get('models', [])
+ ]
+
+ @property
+ def list_filter(self):
+ """The filter string used to filter the models."""
+ return self._list_filter
+
+ @property
+ def next_page_token(self):
+ """Token identifying the next page of results."""
+ return self._list_response.get('nextPageToken', '')
+
+ @property
+ def has_next_page(self):
+ """True if more pages are available."""
+ return bool(self.next_page_token)
+
+ def get_next_page(self):
+ """Retrieves the next page of models if available.
+
+ Returns:
+ ListModelsPage: Next page of models, or None if this is the last page.
+ """
+ if self.has_next_page:
+ return ListModelsPage(
+ self._list_models_func,
+ self._list_filter,
+ self._page_size,
+ self.next_page_token,
+ self._app)
+ return None
+
+ def iterate_all(self):
+ """Retrieves an iterator for Models.
+
+ Returned iterator will iterate through all the models in the Firebase
+ project starting from this page. The iterator will never buffer more than
+ one page of models in memory at a time.
+
+ Returns:
+ iterator: An iterator of Model instances.
+ """
+ return _ModelIterator(self)
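
A sketch of paging through models with the iterator above; the tag value is hypothetical, and the filter syntax follows the ``tags:'tag_1'`` form documented for `list_models()`:

```python
from firebase_admin import ml

page = ml.list_models(list_filter="tags:'face_detector'", page_size=50)
for model in page.iterate_all():
    # At most one page (here, 50 models) is held in memory at a time.
    print(model.display_name, model.published)
```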
+
+
+class _ModelIterator:
+ """An iterator that allows iterating over models, one at a time.
+
+ This implementation loads a page of models into memory, and iterates on them.
+ When the whole page has been traversed, it loads another page. This class
+ never keeps more than one page of entries in memory.
+ """
+ def __init__(self, current_page):
+ if not isinstance(current_page, ListModelsPage):
+ raise TypeError('Current page must be a ListModelsPage')
+ self._current_page = current_page
+ self._index = 0
+
+ def next(self):
+ if self._index == len(self._current_page.models):
+ if self._current_page.has_next_page:
+ self._current_page = self._current_page.get_next_page()
+ self._index = 0
+ if self._index < len(self._current_page.models):
+ result = self._current_page.models[self._index]
+ self._index += 1
+ return result
+ raise StopIteration
+
+ def __next__(self):
+ return self.next()
+
+ def __iter__(self):
+ return self
+
+
+def _validate_and_parse_name(name):
+ # The resource name is added automatically from API call responses.
+ # The only way it could be invalid is if someone tries to
+ # create a model from a dictionary manually and does it incorrectly.
+ matcher = _RESOURCE_NAME_PATTERN.match(name)
+ if not matcher:
+ raise ValueError('Model resource name format is invalid.')
+ return matcher.group('project_id'), matcher.group('model_id')
+
+
+def _validate_model(model, update_mask=None):
+ if not isinstance(model, Model):
+ raise TypeError('Model must be an ml.Model.')
+ if update_mask is None and not model.display_name:
+ raise ValueError('Model must have a display name.')
+
+
+def _validate_model_id(model_id):
+ if not _MODEL_ID_PATTERN.match(model_id):
+ raise ValueError('Model ID format is invalid.')
+
+
+def _validate_operation_name(op_name):
+ if not _OPERATION_NAME_PATTERN.match(op_name):
+ raise ValueError('Operation name format is invalid.')
+ return op_name
+
+
+def _validate_display_name(display_name):
+ if not _DISPLAY_NAME_PATTERN.match(display_name):
+ raise ValueError('Display name format is invalid.')
+ return display_name
+
+
+def _validate_tags(tags):
+ if not isinstance(tags, list) or not \
+ all(isinstance(tag, str) for tag in tags):
+ raise TypeError('Tags must be a list of strings.')
+ if not all(_TAG_PATTERN.match(tag) for tag in tags):
+ raise ValueError('Tag format is invalid.')
+ return tags
+
+
+def _validate_gcs_tflite_uri(uri):
+ # GCS Bucket naming rules are complex. The regex is not comprehensive.
+ # See https://cloud.google.com/storage/docs/naming for full details.
+ if not _GCS_TFLITE_URI_PATTERN.match(uri):
+ raise ValueError('GCS TFLite URI format is invalid.')
+ return uri
+
+def _validate_auto_ml_model(model):
+ if not _AUTO_ML_MODEL_PATTERN.match(model):
+ raise ValueError('Model resource name format is invalid.')
+ return model
+
+
+def _validate_model_format(model_format):
+ if not isinstance(model_format, ModelFormat):
+ raise TypeError('Model format must be a ModelFormat object.')
+ return model_format
+
+
+def _validate_list_filter(list_filter):
+ if list_filter is not None:
+ if not isinstance(list_filter, str):
+ raise TypeError('List filter must be a string or None.')
+
+
+def _validate_page_size(page_size):
+ if page_size is not None:
+ if type(page_size) is not int: # pylint: disable=unidiomatic-typecheck
+ # Specifically type() to disallow boolean which is a subtype of int
+ raise TypeError('Page size must be a number or None.')
+ if page_size < 1 or page_size > _MAX_PAGE_SIZE:
+ raise ValueError('Page size must be a positive integer between '
+ '1 and {0}'.format(_MAX_PAGE_SIZE))
+
+
+def _validate_page_token(page_token):
+ if page_token is not None:
+ if not isinstance(page_token, str):
+ raise TypeError('Page token must be a string or None.')
+
+
+class _MLService:
+ """Firebase ML service."""
+
+ PROJECT_URL = 'https://firebaseml.googleapis.com/v1beta2/projects/{0}/'
+ OPERATION_URL = 'https://firebaseml.googleapis.com/v1beta2/'
+ POLL_EXPONENTIAL_BACKOFF_FACTOR = 1.5
+ POLL_BASE_WAIT_TIME_SECONDS = 3
+
+ def __init__(self, app):
+ self._project_id = app.project_id
+ if not self._project_id:
+ raise ValueError(
+ 'Project ID is required to access ML service. Either set the '
+ 'projectId option, or use service account credentials.')
+ self._project_url = _MLService.PROJECT_URL.format(self._project_id)
+ ml_headers = {
+ 'X-FIREBASE-CLIENT': 'fire-admin-python/{0}'.format(firebase_admin.__version__),
+ }
+ self._client = _http_client.JsonHttpClient(
+ credential=app.credential.get_credential(),
+ headers=ml_headers,
+ base_url=self._project_url)
+ self._operation_client = _http_client.JsonHttpClient(
+ credential=app.credential.get_credential(),
+ headers=ml_headers,
+ base_url=_MLService.OPERATION_URL)
+
+ def get_operation(self, op_name):
+ _validate_operation_name(op_name)
+ try:
+ return self._operation_client.body('get', url=op_name)
+ except requests.exceptions.RequestException as error:
+ raise _utils.handle_platform_error_from_requests(error)
+
+ def _exponential_backoff(self, current_attempt, stop_time):
+ """Sleeps for the appropriate amount of time. Or throws deadline exceeded."""
+ delay_factor = pow(_MLService.POLL_EXPONENTIAL_BACKOFF_FACTOR, current_attempt)
+ wait_time_seconds = delay_factor * _MLService.POLL_BASE_WAIT_TIME_SECONDS
+
+ if stop_time is not None:
+ max_seconds_left = (stop_time - datetime.datetime.now()).total_seconds()
+ if max_seconds_left < 1: # allow a bit of time for rpc
+ raise exceptions.DeadlineExceededError('Polling max time exceeded.')
+ wait_time_seconds = min(wait_time_seconds, max_seconds_left - 1)
+ time.sleep(wait_time_seconds)
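
With `POLL_EXPONENTIAL_BACKOFF_FACTOR = 1.5` and a 3-second base, successive polls wait 3.0, 4.5, 6.75, 10.125, ... seconds, capped by any `stop_time`:

```python
# Worked values for the backoff schedule above.
waits = [3 * 1.5 ** attempt for attempt in range(4)]
assert waits == [3.0, 4.5, 6.75, 10.125]
```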
+
+ def handle_operation(self, operation, wait_for_operation=False, max_time_seconds=None):
+ """Handles long running operations.
+
+ Args:
+ operation: The operation to handle.
+ wait_for_operation: Whether to poll until the operation completes.
+ If no polling is requested, a locked model will be returned instead.
+ max_time_seconds: The maximum seconds to try polling for operation complete.
+ (None for no limit)
+
+ Returns:
+ dict: A dictionary of the returned model properties.
+
+ Raises:
+ TypeError: if the operation is not a dictionary.
+ ValueError: If the operation is malformed.
+ UnknownError: If the server responds with an unexpected response.
+ DeadlineExceededError: If polling exceeds max_time_seconds before the operation completes.
+ """
+ if not isinstance(operation, dict):
+ raise TypeError('Operation must be a dictionary.')
+
+ if operation.get('done'):
+ # Operations which are immediately done don't have an operation name
+ if operation.get('response'):
+ return operation.get('response')
+ if operation.get('error'):
+ raise _utils.handle_operation_error(operation.get('error'))
+ raise exceptions.UnknownError(message='Internal Error: Malformed Operation.')
+
+ op_name = _validate_operation_name(operation.get('name'))
+ metadata = operation.get('metadata', {})
+ metadata_type = metadata.get('@type', '')
+ if not metadata_type.endswith('ModelOperationMetadata'):
+ raise TypeError('Unknown type of operation metadata.')
+ _, model_id = _validate_and_parse_name(metadata.get('name'))
+ current_attempt = 0
+ start_time = datetime.datetime.now()
+ stop_time = (None if max_time_seconds is None else
+ start_time + datetime.timedelta(seconds=max_time_seconds))
+ while wait_for_operation and not operation.get('done'):
+ # We just got this operation. Wait before getting another
+ # so we don't exceed the GetOperation maximum request rate.
+ self._exponential_backoff(current_attempt, stop_time)
+ operation = self.get_operation(op_name)
+ current_attempt += 1
+
+ if operation.get('done'):
+ if operation.get('response'):
+ return operation.get('response')
+ if operation.get('error'):
+ raise _utils.handle_operation_error(operation.get('error'))
+
+ # If the operation is not complete or timed out, return a (locked) model instead
+ return get_model(model_id).as_dict()
+
+
+ def create_model(self, model):
+ _validate_model(model)
+ try:
+ return self.handle_operation(
+ self._client.body('post', url='models', json=model.as_dict(for_upload=True)))
+ except requests.exceptions.RequestException as error:
+ raise _utils.handle_platform_error_from_requests(error)
+
+ def update_model(self, model, update_mask=None):
+ _validate_model(model, update_mask)
+ path = 'models/{0}'.format(model.model_id)
+ if update_mask is not None:
+ path = path + '?updateMask={0}'.format(update_mask)
+ try:
+ return self.handle_operation(
+ self._client.body('patch', url=path, json=model.as_dict(for_upload=True)))
+ except requests.exceptions.RequestException as error:
+ raise _utils.handle_platform_error_from_requests(error)
+
+ def set_published(self, model_id, publish):
+ _validate_model_id(model_id)
+ model_name = 'projects/{0}/models/{1}'.format(self._project_id, model_id)
+ model = Model.from_dict({
+ 'name': model_name,
+ 'state': {
+ 'published': publish
+ }
+ })
+ return self.update_model(model, update_mask='state.published')
+
+ def get_model(self, model_id):
+ _validate_model_id(model_id)
+ try:
+ return self._client.body('get', url='models/{0}'.format(model_id))
+ except requests.exceptions.RequestException as error:
+ raise _utils.handle_platform_error_from_requests(error)
+
+ def list_models(self, list_filter, page_size, page_token):
+ """ lists Firebase ML models."""
+ _validate_list_filter(list_filter)
+ _validate_page_size(page_size)
+ _validate_page_token(page_token)
+ params = {}
+ if list_filter:
+ params['filter'] = list_filter
+ if page_size:
+ params['page_size'] = page_size
+ if page_token:
+ params['page_token'] = page_token
+ path = 'models'
+ if params:
+ param_str = parse.urlencode(sorted(params.items()), True)
+ path = path + '?' + param_str
+ try:
+ return self._client.body('get', url=path)
+ except requests.exceptions.RequestException as error:
+ raise _utils.handle_platform_error_from_requests(error)
+
+ def delete_model(self, model_id):
+ _validate_model_id(model_id)
+ try:
+ self._client.body('delete', url='models/{0}'.format(model_id))
+ except requests.exceptions.RequestException as error:
+ raise _utils.handle_platform_error_from_requests(error)
diff --git a/venv/Lib/site-packages/firebase_admin/project_management.py b/venv/Lib/site-packages/firebase_admin/project_management.py
new file mode 100644
index 000000000..ed292b80f
--- /dev/null
+++ b/venv/Lib/site-packages/firebase_admin/project_management.py
@@ -0,0 +1,664 @@
+# Copyright 2018 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Firebase Project Management module.
+
+This module enables management of resources in Firebase projects, such as Android and iOS apps.
+"""
+
+import base64
+import re
+import time
+
+import requests
+
+import firebase_admin
+from firebase_admin import exceptions
+from firebase_admin import _http_client
+from firebase_admin import _utils
+
+
+_PROJECT_MANAGEMENT_ATTRIBUTE = '_project_management'
+
+
+def _get_project_management_service(app):
+ return _utils.get_app_service(app, _PROJECT_MANAGEMENT_ATTRIBUTE, _ProjectManagementService)
+
+
+def android_app(app_id, app=None):
+ """Obtains a reference to an Android app in the associated Firebase project.
+
+ Args:
+ app_id: The app ID that identifies this Android app.
+ app: An App instance (optional).
+
+ Returns:
+ AndroidApp: An ``AndroidApp`` instance.
+ """
+ return AndroidApp(app_id=app_id, service=_get_project_management_service(app))
+
+
+def ios_app(app_id, app=None):
+ """Obtains a reference to an iOS app in the associated Firebase project.
+
+ Args:
+ app_id: The app ID that identifies this iOS app.
+ app: An App instance (optional).
+
+ Returns:
+ IOSApp: An ``IOSApp`` instance.
+ """
+ return IOSApp(app_id=app_id, service=_get_project_management_service(app))
+
+
+def list_android_apps(app=None):
+ """Lists all Android apps in the associated Firebase project.
+
+ Args:
+ app: An App instance (optional).
+
+ Returns:
+ list: a list of ``AndroidApp`` instances referring to each Android app in the Firebase
+ project.
+ """
+ return _get_project_management_service(app).list_android_apps()
+
+
+def list_ios_apps(app=None):
+ """Lists all iOS apps in the associated Firebase project.
+
+ Args:
+ app: An App instance (optional).
+
+ Returns:
+ list: a list of ``IOSApp`` instances referring to each iOS app in the Firebase project.
+ """
+ return _get_project_management_service(app).list_ios_apps()
+
+
+def create_android_app(package_name, display_name=None, app=None):
+ """Creates a new Android app in the associated Firebase project.
+
+ Args:
+ package_name: The package name of the Android app to be created.
+ display_name: A nickname for this Android app (optional).
+ app: An App instance (optional).
+
+ Returns:
+ AndroidApp: An ``AndroidApp`` instance that is a reference to the newly created app.
+ """
+ return _get_project_management_service(app).create_android_app(package_name, display_name)
+
+
+def create_ios_app(bundle_id, display_name=None, app=None):
+ """Creates a new iOS app in the associated Firebase project.
+
+ Args:
+ bundle_id: The bundle ID of the iOS app to be created.
+ display_name: A nickname for this iOS app (optional).
+ app: An App instance (optional).
+
+ Returns:
+ IOSApp: An ``IOSApp`` instance that is a reference to the newly created app.
+ """
+ return _get_project_management_service(app).create_ios_app(bundle_id, display_name)
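
A sketch of the app-creation flow these helpers expose, with a made-up package name; `get_config()` returns the contents of the app's `google-services.json`:

```python
from firebase_admin import project_management

android = project_management.create_android_app(
    'com.example.myapp', display_name='My App')  # hypothetical identifiers
config_json = android.get_config()               # google-services.json contents
print(android.app_id)
```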
+
+
+def _check_is_string_or_none(obj, field_name):
+ if obj is None or isinstance(obj, str):
+ return obj
+ raise ValueError('{0} must be a string.'.format(field_name))
+
+
+def _check_is_nonempty_string(obj, field_name):
+ if isinstance(obj, str) and obj:
+ return obj
+ raise ValueError('{0} must be a non-empty string.'.format(field_name))
+
+
+def _check_is_nonempty_string_or_none(obj, field_name):
+ if obj is None:
+ return None
+ return _check_is_nonempty_string(obj, field_name)
+
+
+def _check_not_none(obj, field_name):
+ if obj is None:
+ raise ValueError('{0} cannot be None.'.format(field_name))
+ return obj
+
+
+class AndroidApp:
+ """A reference to an Android app within a Firebase project.
+
+ Note: Unless otherwise specified, all methods defined in this class make an RPC.
+
+ Please use the module-level function ``android_app(app_id)`` to obtain instances of this class
+ instead of instantiating it directly.
+ """
+
+ def __init__(self, app_id, service):
+ self._app_id = app_id
+ self._service = service
+
+ @property
+ def app_id(self):
+ """Returns the app ID of the Android app to which this instance refers.
+
+ Note: This method does not make an RPC.
+
+ Returns:
+ string: The app ID of the Android app to which this instance refers.
+ """
+ return self._app_id
+
+ def get_metadata(self):
+ """Retrieves detailed information about this Android app.
+
+ Returns:
+ AndroidAppMetadata: An ``AndroidAppMetadata`` instance.
+
+ Raises:
+ FirebaseError: If an error occurs while communicating with the Firebase Project
+ Management Service.
+ """
+ return self._service.get_android_app_metadata(self._app_id)
+
+ def set_display_name(self, new_display_name):
+ """Updates the display name attribute of this Android app to the one given.
+
+ Args:
+ new_display_name: The new display name for this Android app.
+
+ Returns:
+ NoneType: None.
+
+ Raises:
+ FirebaseError: If an error occurs while communicating with the Firebase Project
+ Management Service.
+ """
+ return self._service.set_android_app_display_name(self._app_id, new_display_name)
+
+ def get_config(self):
+ """Retrieves the configuration artifact associated with this Android app."""
+ return self._service.get_android_app_config(self._app_id)
+
+ def get_sha_certificates(self):
+ """Retrieves the entire list of SHA certificates associated with this Android app.
+
+ Returns:
+ list: A list of ``SHACertificate`` instances.
+
+ Raises:
+ FirebaseError: If an error occurs while communicating with the Firebase Project
+ Management Service.
+ """
+ return self._service.get_sha_certificates(self._app_id)
+
+ def add_sha_certificate(self, certificate_to_add):
+ """Adds a SHA certificate to this Android app.
+
+ Args:
+ certificate_to_add: The SHA certificate to add.
+
+ Returns:
+ NoneType: None.
+
+ Raises:
+ FirebaseError: If an error occurs while communicating with the Firebase Project
+ Management Service. (For example, if the certificate_to_add already exists.)
+ """
+ return self._service.add_sha_certificate(self._app_id, certificate_to_add)
+
+ def delete_sha_certificate(self, certificate_to_delete):
+ """Removes a SHA certificate from this Android app.
+
+ Args:
+ certificate_to_delete: The SHA certificate to delete.
+
+ Returns:
+ NoneType: None.
+
+ Raises:
+ FirebaseError: If an error occurs while communicating with the Firebase Project
+ Management Service. (For example, if the certificate_to_delete is not found.)
+ """
+ return self._service.delete_sha_certificate(certificate_to_delete)
+
+
+class IOSApp:
+ """A reference to an iOS app within a Firebase project.
+
+ Note: Unless otherwise specified, all methods defined in this class make an RPC.
+
+ Please use the module-level function ``ios_app(app_id)`` to obtain instances of this class
+ instead of instantiating it directly.
+ """
+
+ def __init__(self, app_id, service):
+ self._app_id = app_id
+ self._service = service
+
+ @property
+ def app_id(self):
+ """Returns the app ID of the iOS app to which this instance refers.
+
+ Note: This method does not make an RPC.
+
+ Returns:
+ string: The app ID of the iOS app to which this instance refers.
+ """
+ return self._app_id
+
+ def get_metadata(self):
+ """Retrieves detailed information about this iOS app.
+
+ Returns:
+ IOSAppMetadata: An ``IOSAppMetadata`` instance.
+
+ Raises:
+ FirebaseError: If an error occurs while communicating with the Firebase Project
+ Management Service.
+ """
+ return self._service.get_ios_app_metadata(self._app_id)
+
+ def set_display_name(self, new_display_name):
+ """Updates the display name attribute of this iOS app to the one given.
+
+ Args:
+ new_display_name: The new display name for this iOS app.
+
+ Returns:
+ NoneType: None.
+
+ Raises:
+ FirebaseError: If an error occurs while communicating with the Firebase Project
+ Management Service.
+ """
+ return self._service.set_ios_app_display_name(self._app_id, new_display_name)
+
+ def get_config(self):
+ """Retrieves the configuration artifact associated with this iOS app."""
+ return self._service.get_ios_app_config(self._app_id)
+
+
+class _AppMetadata:
+ """Detailed information about a Firebase Android or iOS app."""
+
+ def __init__(self, name, app_id, display_name, project_id):
+ # _name is the fully qualified resource name of this Android or iOS app; currently it is not
+ # exposed to client code.
+ self._name = _check_is_nonempty_string(name, 'name')
+ self._app_id = _check_is_nonempty_string(app_id, 'app_id')
+ self._display_name = _check_is_string_or_none(display_name, 'display_name')
+ self._project_id = _check_is_nonempty_string(project_id, 'project_id')
+
+ @property
+ def app_id(self):
+ """The globally unique, Firebase-assigned identifier of this Android or iOS app.
+
+ This ID is unique even across apps of different platforms.
+ """
+ return self._app_id
+
+ @property
+ def display_name(self):
+ """The user-assigned display name of this Android or iOS app.
+
+ Note that the display name can be None if it has never been set by the user."""
+ return self._display_name
+
+ @property
+ def project_id(self):
+ """The permanent, globally unique, user-assigned ID of the parent Firebase project."""
+ return self._project_id
+
+ def __eq__(self, other):
+ if not isinstance(other, type(self)):
+ return False
+ # pylint: disable=protected-access
+ return (self._name == other._name and self.app_id == other.app_id and
+ self.display_name == other.display_name and self.project_id == other.project_id)
+ # pylint: enable=protected-access
+
+
+class AndroidAppMetadata(_AppMetadata):
+ """Android-specific information about an Android Firebase app."""
+
+ def __init__(self, package_name, name, app_id, display_name, project_id):
+ """Clients should not instantiate this class directly."""
+ super(AndroidAppMetadata, self).__init__(name, app_id, display_name, project_id)
+ self._package_name = _check_is_nonempty_string(package_name, 'package_name')
+
+ @property
+ def package_name(self):
+ """The canonical package name of this Android app as it would appear in the Play Store."""
+ return self._package_name
+
+ def __eq__(self, other):
+ return (super(AndroidAppMetadata, self).__eq__(other) and
+ self.package_name == other.package_name)
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def __hash__(self):
+ return hash(
+ (self._name, self.app_id, self.display_name, self.project_id, self.package_name))
+
+
+class IOSAppMetadata(_AppMetadata):
+ """iOS-specific information about an iOS Firebase app."""
+
+ def __init__(self, bundle_id, name, app_id, display_name, project_id):
+ """Clients should not instantiate this class directly."""
+ super(IOSAppMetadata, self).__init__(name, app_id, display_name, project_id)
+ self._bundle_id = _check_is_nonempty_string(bundle_id, 'bundle_id')
+
+ @property
+ def bundle_id(self):
+ """The canonical bundle ID of this iOS app as it would appear in the iOS AppStore."""
+ return self._bundle_id
+
+ def __eq__(self, other):
+ return super(IOSAppMetadata, self).__eq__(other) and self.bundle_id == other.bundle_id
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def __hash__(self):
+ return hash((self._name, self.app_id, self.display_name, self.project_id, self.bundle_id))
+
+
+class SHACertificate:
+ """Represents a SHA-1 or SHA-256 certificate associated with an Android app."""
+
+ SHA_1 = 'SHA_1'
+ SHA_256 = 'SHA_256'
+
+ _SHA_1_RE = re.compile('^[0-9A-Fa-f]{40}$')
+ _SHA_256_RE = re.compile('^[0-9A-Fa-f]{64}$')
+
+ def __init__(self, sha_hash, name=None):
+ """Creates a new SHACertificate instance.
+
+ Args:
+ sha_hash: A string; the certificate hash for the Android app.
+ name: The fully qualified resource name of this certificate; note that this field should
+ be omitted if the instance is being constructed for the purpose of calling the
+ add_sha_certificate() method on an ``AndroidApp``.
+
+ Raises:
+ ValueError: If the sha_hash is not a valid SHA-1 or SHA-256 certificate hash.
+ """
+ _check_is_nonempty_string(sha_hash, 'sha_hash')
+ _check_is_nonempty_string_or_none(name, 'name')
+ self._name = name
+ self._sha_hash = sha_hash.lower()
+ if SHACertificate._SHA_1_RE.match(sha_hash):
+ self._cert_type = SHACertificate.SHA_1
+ elif SHACertificate._SHA_256_RE.match(sha_hash):
+ self._cert_type = SHACertificate.SHA_256
+ else:
+ raise ValueError(
+ 'The supplied certificate hash is neither a valid SHA-1 nor SHA-256 hash.')
+
+ @property
+ def name(self):
+ """Returns the fully qualified resource name of this certificate, if known.
+
+ Returns:
+ string: The fully qualified resource name of this certificate, if known; otherwise, the
+ empty string.
+ """
+ return self._name
+
+ @property
+ def sha_hash(self):
+ """Returns the certificate hash.
+
+ Returns:
+ string: The certificate hash.
+ """
+ return self._sha_hash
+
+ @property
+ def cert_type(self):
+ """Returns the type of the SHA certificate encoded in the hash.
+
+ Returns:
+ string: One of 'SHA_1' or 'SHA_256'.
+ """
+ return self._cert_type
+
+ def __eq__(self, other):
+ if not isinstance(other, SHACertificate):
+ return False
+ return (self.name == other.name and self.sha_hash == other.sha_hash and
+ self.cert_type == other.cert_type)
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def __hash__(self):
+ return hash((self.name, self.sha_hash, self.cert_type))
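
A sketch of registering a certificate with the class above; the app ID and the 40-hex-digit SHA-1 fingerprint are made up:

```python
from firebase_admin import project_management

app_ref = project_management.android_app('1:123456789012:android:0123abcd0123abcd')
cert = project_management.SHACertificate('aa' * 20)  # 40 hex chars -> SHA_1
app_ref.add_sha_certificate(cert)
print([c.sha_hash for c in app_ref.get_sha_certificates()])
```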
+
+
+class _ProjectManagementService:
+ """Provides methods for interacting with the Firebase Project Management Service."""
+
+ BASE_URL = 'https://firebase.googleapis.com'
+ MAXIMUM_LIST_APPS_PAGE_SIZE = 100
+ MAXIMUM_POLLING_ATTEMPTS = 8
+ POLL_BASE_WAIT_TIME_SECONDS = 0.5
+ POLL_EXPONENTIAL_BACKOFF_FACTOR = 1.5
+
+ ANDROID_APPS_RESOURCE_NAME = 'androidApps'
+ ANDROID_APP_IDENTIFIER_NAME = 'packageName'
+ IOS_APPS_RESOURCE_NAME = 'iosApps'
+ IOS_APP_IDENTIFIER_NAME = 'bundleId'
+
+ def __init__(self, app):
+ project_id = app.project_id
+ if not project_id:
+ raise ValueError(
+ 'Project ID is required to access the Firebase Project Management Service. Either '
+ 'set the projectId option, or use service account credentials. Alternatively, set '
+ 'the GOOGLE_CLOUD_PROJECT environment variable.')
+ self._project_id = project_id
+ version_header = 'Python/Admin/{0}'.format(firebase_admin.__version__)
+ timeout = app.options.get('httpTimeout', _http_client.DEFAULT_TIMEOUT_SECONDS)
+ self._client = _http_client.JsonHttpClient(
+ credential=app.credential.get_credential(),
+ base_url=_ProjectManagementService.BASE_URL,
+ headers={'X-Client-Version': version_header},
+ timeout=timeout)
+
+ def get_android_app_metadata(self, app_id):
+ return self._get_app_metadata(
+ platform_resource_name=_ProjectManagementService.ANDROID_APPS_RESOURCE_NAME,
+ identifier_name=_ProjectManagementService.ANDROID_APP_IDENTIFIER_NAME,
+ metadata_class=AndroidAppMetadata,
+ app_id=app_id)
+
+ def get_ios_app_metadata(self, app_id):
+ return self._get_app_metadata(
+ platform_resource_name=_ProjectManagementService.IOS_APPS_RESOURCE_NAME,
+ identifier_name=_ProjectManagementService.IOS_APP_IDENTIFIER_NAME,
+ metadata_class=IOSAppMetadata,
+ app_id=app_id)
+
+ def _get_app_metadata(self, platform_resource_name, identifier_name, metadata_class, app_id):
+ """Retrieves detailed information about an Android or iOS app."""
+ _check_is_nonempty_string(app_id, 'app_id')
+ path = '/v1beta1/projects/-/{0}/{1}'.format(platform_resource_name, app_id)
+ response = self._make_request('get', path)
+ return metadata_class(
+ response[identifier_name],
+ name=response['name'],
+ app_id=response['appId'],
+ display_name=response.get('displayName') or None,
+ project_id=response['projectId'])
+
+ def set_android_app_display_name(self, app_id, new_display_name):
+ self._set_display_name(
+ app_id=app_id,
+ new_display_name=new_display_name,
+ platform_resource_name=_ProjectManagementService.ANDROID_APPS_RESOURCE_NAME)
+
+ def set_ios_app_display_name(self, app_id, new_display_name):
+ self._set_display_name(
+ app_id=app_id,
+ new_display_name=new_display_name,
+ platform_resource_name=_ProjectManagementService.IOS_APPS_RESOURCE_NAME)
+
+ def _set_display_name(self, app_id, new_display_name, platform_resource_name):
+ """Sets the display name of an Android or iOS app."""
+ path = '/v1beta1/projects/-/{0}/{1}?updateMask=displayName'.format(
+ platform_resource_name, app_id)
+ request_body = {'displayName': new_display_name}
+ self._make_request('patch', path, json=request_body)
+
+ def list_android_apps(self):
+ return self._list_apps(
+ platform_resource_name=_ProjectManagementService.ANDROID_APPS_RESOURCE_NAME,
+ app_class=AndroidApp)
+
+ def list_ios_apps(self):
+ return self._list_apps(
+ platform_resource_name=_ProjectManagementService.IOS_APPS_RESOURCE_NAME,
+ app_class=IOSApp)
+
+ def _list_apps(self, platform_resource_name, app_class):
+ """Lists all the Android or iOS apps within the Firebase project."""
+ path = '/v1beta1/projects/{0}/{1}?pageSize={2}'.format(
+ self._project_id,
+ platform_resource_name,
+ _ProjectManagementService.MAXIMUM_LIST_APPS_PAGE_SIZE)
+ response = self._make_request('get', path)
+ apps_list = []
+ while True:
+ apps = response.get('apps')
+ if not apps:
+ break
+ apps_list.extend(app_class(app_id=app['appId'], service=self) for app in apps)
+ next_page_token = response.get('nextPageToken')
+ if not next_page_token:
+ break
+ # Retrieve the next page of apps.
+ path = '/v1beta1/projects/{0}/{1}?pageToken={2}&pageSize={3}'.format(
+ self._project_id,
+ platform_resource_name,
+ next_page_token,
+ _ProjectManagementService.MAXIMUM_LIST_APPS_PAGE_SIZE)
+ response = self._make_request('get', path)
+ return apps_list
+
+ def create_android_app(self, package_name, display_name=None):
+ return self._create_app(
+ platform_resource_name=_ProjectManagementService.ANDROID_APPS_RESOURCE_NAME,
+ identifier_name=_ProjectManagementService.ANDROID_APP_IDENTIFIER_NAME,
+ identifier=package_name,
+ display_name=display_name,
+ app_class=AndroidApp)
+
+ def create_ios_app(self, bundle_id, display_name=None):
+ return self._create_app(
+ platform_resource_name=_ProjectManagementService.IOS_APPS_RESOURCE_NAME,
+ identifier_name=_ProjectManagementService.IOS_APP_IDENTIFIER_NAME,
+ identifier=bundle_id,
+ display_name=display_name,
+ app_class=IOSApp)
+
+ def _create_app(
+ self,
+ platform_resource_name,
+ identifier_name,
+ identifier,
+ display_name,
+ app_class):
+ """Creates an Android or iOS app."""
+ _check_is_string_or_none(display_name, 'display_name')
+ path = '/v1beta1/projects/{0}/{1}'.format(self._project_id, platform_resource_name)
+ request_body = {identifier_name: identifier}
+ if display_name:
+ request_body['displayName'] = display_name
+ response = self._make_request('post', path, json=request_body)
+ operation_name = response['name']
+ poll_response = self._poll_app_creation(operation_name)
+ return app_class(app_id=poll_response['appId'], service=self)
+
+ def _poll_app_creation(self, operation_name):
+ """Polls the Long-Running Operation repeatedly until it is done with exponential backoff."""
+ for current_attempt in range(_ProjectManagementService.MAXIMUM_POLLING_ATTEMPTS):
+ delay_factor = pow(
+ _ProjectManagementService.POLL_EXPONENTIAL_BACKOFF_FACTOR, current_attempt)
+ wait_time_seconds = delay_factor * _ProjectManagementService.POLL_BASE_WAIT_TIME_SECONDS
+ time.sleep(wait_time_seconds)
+ path = '/v1/{0}'.format(operation_name)
+ poll_response, http_response = self._body_and_response('get', path)
+ done = poll_response.get('done')
+ if done:
+ response = poll_response.get('response')
+ if response:
+ return response
+
+ raise exceptions.UnknownError(
+ 'Polling finished, but the operation terminated in an error.',
+ http_response=http_response)
+ raise exceptions.DeadlineExceededError('Polling deadline exceeded.')
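+
+ # A rough sketch of the backoff schedule above, assuming illustrative values
+ # of POLL_BASE_WAIT_TIME_SECONDS = 0.5 and POLL_EXPONENTIAL_BACKOFF_FACTOR = 1.5
+ # (the real constants are class attributes defined earlier in this class):
+ #
+ #     wait_time_seconds = 0.5 * (1.5 ** attempt)   # 0.5s, 0.75s, 1.125s, ...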
+
+ def get_android_app_config(self, app_id):
+ return self._get_app_config(
+ platform_resource_name=_ProjectManagementService.ANDROID_APPS_RESOURCE_NAME,
+ app_id=app_id)
+
+ def get_ios_app_config(self, app_id):
+ return self._get_app_config(
+ platform_resource_name=_ProjectManagementService.IOS_APPS_RESOURCE_NAME, app_id=app_id)
+
+ def _get_app_config(self, platform_resource_name, app_id):
+ path = '/v1beta1/projects/-/{0}/{1}/config'.format(platform_resource_name, app_id)
+ response = self._make_request('get', path)
+ # In Python 2.7, the base64 module works with strings, while in Python 3, it works with
+ # bytes objects. This line works in both versions.
+ return base64.standard_b64decode(response['configFileContents']).decode(encoding='utf-8')
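+
+ # For illustration, a hypothetical payload of
+ # {'configFileContents': 'Zm9vPWJhcg=='} would decode as follows:
+ #
+ #     base64.standard_b64decode('Zm9vPWJhcg==').decode('utf-8')  # -> 'foo=bar'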
+
+ def get_sha_certificates(self, app_id):
+ path = '/v1beta1/projects/-/androidApps/{0}/sha'.format(app_id)
+ response = self._make_request('get', path)
+ cert_list = response.get('certificates') or []
+ return [SHACertificate(sha_hash=cert['shaHash'], name=cert['name']) for cert in cert_list]
+
+ def add_sha_certificate(self, app_id, certificate_to_add):
+ path = '/v1beta1/projects/-/androidApps/{0}/sha'.format(app_id)
+ sha_hash = _check_not_none(certificate_to_add, 'certificate_to_add').sha_hash
+ cert_type = certificate_to_add.cert_type
+ request_body = {'shaHash': sha_hash, 'certType': cert_type}
+ self._make_request('post', path, json=request_body)
+
+ def delete_sha_certificate(self, certificate_to_delete):
+ name = _check_not_none(certificate_to_delete, 'certificate_to_delete').name
+ path = '/v1beta1/{0}'.format(name)
+ self._make_request('delete', path)
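+
+ # A hedged usage sketch of the three SHA helpers above; the app ID and hash
+ # are placeholders, and SHACertificate is defined earlier in this module:
+ #
+ #     app_id = '1:1234567890:android:321abc456def7890'
+ #     service.add_sha_certificate(app_id, SHACertificate(sha_hash='aa' * 20))
+ #     certs = service.get_sha_certificates(app_id)   # list of SHACertificate
+ #     service.delete_sha_certificate(certs[0])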
+
+ def _make_request(self, method, url, json=None):
+ body, _ = self._body_and_response(method, url, json)
+ return body
+
+ def _body_and_response(self, method, url, json=None):
+ try:
+ return self._client.body_and_response(method=method, url=url, json=json)
+ except requests.exceptions.RequestException as error:
+ raise _utils.handle_platform_error_from_requests(error)
diff --git a/venv/Lib/site-packages/firebase_admin/storage.py b/venv/Lib/site-packages/firebase_admin/storage.py
new file mode 100644
index 000000000..16f48e273
--- /dev/null
+++ b/venv/Lib/site-packages/firebase_admin/storage.py
@@ -0,0 +1,82 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Firebase Cloud Storage module.
+
+This module contains utilities for accessing Google Cloud Storage buckets associated with
+Firebase apps. This requires the ``google-cloud-storage`` Python module.
+"""
+
+# pylint: disable=import-error,no-name-in-module
+try:
+ from google.cloud import storage
+except ImportError:
+ raise ImportError('Failed to import the Cloud Storage library for Python. Make sure '
+ 'to install the "google-cloud-storage" module.')
+
+from firebase_admin import _utils
+
+
+_STORAGE_ATTRIBUTE = '_storage'
+
+def bucket(name=None, app=None):
+ """Returns a handle to a Google Cloud Storage bucket.
+
+ If the name argument is not provided, uses the 'storageBucket' option specified when
+ initializing the App. If that is also not available, raises an error. This function
+ does not make any RPC calls.
+
+ Args:
+ name: Name of a cloud storage bucket (optional).
+ app: An App instance (optional).
+
+ Returns:
+ google.cloud.storage.Bucket: A handle to the specified bucket.
+
+ Raises:
+ ValueError: If a bucket name is not specified either via options or method arguments,
+ or if the specified bucket name is not a valid string.
+ """
+ client = _utils.get_app_service(app, _STORAGE_ATTRIBUTE, _StorageClient.from_app)
+ return client.bucket(name)
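+
+# A minimal usage sketch (the service-account path and bucket name are
+# placeholders):
+#
+#     import firebase_admin
+#     from firebase_admin import credentials, storage
+#
+#     cred = credentials.Certificate('service-account.json')
+#     firebase_admin.initialize_app(cred, {'storageBucket': 'my-app.appspot.com'})
+#     bucket = storage.bucket()  # default bucket from the app options
+#     bucket.blob('photos/example.png').upload_from_filename('example.png')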
+
+
+class _StorageClient:
+ """Holds a Google Cloud Storage client instance."""
+
+ def __init__(self, credentials, project, default_bucket):
+ self._client = storage.Client(credentials=credentials, project=project)
+ self._default_bucket = default_bucket
+
+ @classmethod
+ def from_app(cls, app):
+ credentials = app.credential.get_credential()
+ default_bucket = app.options.get('storageBucket')
+ # Specifying project ID is not required, but providing it when available
+ # significantly speeds up the initialization of the storage client.
+ return _StorageClient(credentials, app.project_id, default_bucket)
+
+ def bucket(self, name=None):
+ """Returns a handle to the specified Cloud Storage Bucket."""
+ bucket_name = name if name is not None else self._default_bucket
+ if bucket_name is None:
+ raise ValueError(
+ 'Storage bucket name not specified. Specify the bucket name via the '
+ '"storageBucket" option when initializing the App, or specify the bucket '
+ 'name explicitly when calling the storage.bucket() function.')
+ if not bucket_name or not isinstance(bucket_name, str):
+ raise ValueError(
+ 'Invalid storage bucket name: "{0}". Bucket name must be a non-empty '
+ 'string.'.format(bucket_name))
+ return self._client.bucket(bucket_name)
diff --git a/venv/Lib/site-packages/firebase_admin/tenant_mgt.py b/venv/Lib/site-packages/firebase_admin/tenant_mgt.py
new file mode 100644
index 000000000..396a819fb
--- /dev/null
+++ b/venv/Lib/site-packages/firebase_admin/tenant_mgt.py
@@ -0,0 +1,445 @@
+# Copyright 2020 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Firebase tenant management module.
+
+This module contains functions for creating and configuring authentication tenants within a
+Google Cloud Identity Platform (GCIP) instance.
+"""
+
+import re
+import threading
+
+import requests
+
+import firebase_admin
+from firebase_admin import auth
+from firebase_admin import _auth_utils
+from firebase_admin import _http_client
+from firebase_admin import _utils
+
+
+_TENANT_MGT_ATTRIBUTE = '_tenant_mgt'
+_MAX_LIST_TENANTS_RESULTS = 100
+_DISPLAY_NAME_PATTERN = re.compile('^[a-zA-Z][a-zA-Z0-9-]{3,19}$')
+
+
+__all__ = [
+ 'ListTenantsPage',
+ 'Tenant',
+ 'TenantIdMismatchError',
+ 'TenantNotFoundError',
+
+ 'auth_for_tenant',
+ 'create_tenant',
+ 'delete_tenant',
+ 'get_tenant',
+ 'list_tenants',
+ 'update_tenant',
+]
+
+
+TenantIdMismatchError = _auth_utils.TenantIdMismatchError
+TenantNotFoundError = _auth_utils.TenantNotFoundError
+
+
+def auth_for_tenant(tenant_id, app=None):
+ """Gets an Auth Client instance scoped to the given tenant ID.
+
+ Args:
+ tenant_id: A tenant ID string.
+ app: An App instance (optional).
+
+ Returns:
+ auth.Client: An ``auth.Client`` object.
+
+ Raises:
+ ValueError: If the tenant ID is None, empty or not a string.
+ """
+ tenant_mgt_service = _get_tenant_mgt_service(app)
+ return tenant_mgt_service.auth_for_tenant(tenant_id)
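+
+# For example (a sketch; the tenant ID and e-mail address are placeholders):
+#
+#     from firebase_admin import tenant_mgt
+#
+#     tenant_client = tenant_mgt.auth_for_tenant('my-tenant-id')
+#     user = tenant_client.create_user(email='user@example.com')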
+
+
+def get_tenant(tenant_id, app=None):
+ """Gets the tenant corresponding to the given ``tenant_id``.
+
+ Args:
+ tenant_id: A tenant ID string.
+ app: An App instance (optional).
+
+ Returns:
+ Tenant: A tenant object.
+
+ Raises:
+ ValueError: If the tenant ID is None, empty or not a string.
+ TenantNotFoundError: If no tenant exists by the given ID.
+ FirebaseError: If an error occurs while retrieving the tenant.
+ """
+ tenant_mgt_service = _get_tenant_mgt_service(app)
+ return tenant_mgt_service.get_tenant(tenant_id)
+
+
+def create_tenant(
+ display_name, allow_password_sign_up=None, enable_email_link_sign_in=None, app=None):
+ """Creates a new tenant from the given options.
+
+ Args:
+ display_name: Display name string for the new tenant. Must begin with a letter and contain
+ only letters, digits and hyphens. Length must be between 4 and 20.
+ allow_password_sign_up: A boolean indicating whether to enable or disable the email sign-in
+ provider (optional).
+ enable_email_link_sign_in: A boolean indicating whether to enable or disable email link
+ sign-in (optional). Disabling this makes the password required for email sign-in.
+ app: An App instance (optional).
+
+ Returns:
+ Tenant: A tenant object.
+
+ Raises:
+ ValueError: If any of the given arguments are invalid.
+ FirebaseError: If an error occurs while creating the tenant.
+ """
+ tenant_mgt_service = _get_tenant_mgt_service(app)
+ return tenant_mgt_service.create_tenant(
+ display_name=display_name, allow_password_sign_up=allow_password_sign_up,
+ enable_email_link_sign_in=enable_email_link_sign_in)
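+
+# For example (a sketch; 'my-tenant' is a placeholder that satisfies the
+# letter/digit/hyphen display-name rule described above):
+#
+#     tenant = tenant_mgt.create_tenant(
+#         display_name='my-tenant', allow_password_sign_up=True)
+#     print(tenant.tenant_id)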
+
+
+def update_tenant(
+ tenant_id, display_name=None, allow_password_sign_up=None, enable_email_link_sign_in=None,
+ app=None):
+ """Updates an existing tenant with the given options.
+
+ Args:
+ tenant_id: ID of the tenant to update.
+ display_name: Updated display name string for the tenant (optional).
+ allow_password_sign_up: A boolean indicating whether to enable or disable the email sign-in
+ provider.
+ enable_email_link_sign_in: A boolean indicating whether to enable or disable email link
+ sign-in. Disabling this makes the password required for email sign-in.
+ app: An App instance (optional).
+
+ Returns:
+ Tenant: The updated tenant object.
+
+ Raises:
+ ValueError: If any of the given arguments are invalid.
+ TenantNotFoundError: If no tenant exists by the given ID.
+ FirebaseError: If an error occurs while updating the tenant.
+ """
+ tenant_mgt_service = _get_tenant_mgt_service(app)
+ return tenant_mgt_service.update_tenant(
+ tenant_id, display_name=display_name, allow_password_sign_up=allow_password_sign_up,
+ enable_email_link_sign_in=enable_email_link_sign_in)
+
+
+def delete_tenant(tenant_id, app=None):
+ """Deletes the tenant corresponding to the given ``tenant_id``.
+
+ Args:
+ tenant_id: A tenant ID string.
+ app: An App instance (optional).
+
+ Raises:
+ ValueError: If the tenant ID is None, empty or not a string.
+ TenantNotFoundError: If no tenant exists by the given ID.
+ FirebaseError: If an error occurs while deleting the tenant.
+ """
+ tenant_mgt_service = _get_tenant_mgt_service(app)
+ tenant_mgt_service.delete_tenant(tenant_id)
+
+
+def list_tenants(page_token=None, max_results=_MAX_LIST_TENANTS_RESULTS, app=None):
+ """Retrieves a page of tenants from a Firebase project.
+
+ The ``page_token`` argument governs the starting point of the page. The ``max_results``
+ argument governs the maximum number of tenants that may be included in the returned page.
+ This function never returns None. If there are no tenants in the Firebase project, this
+ returns an empty page.
+
+ Args:
+ page_token: A non-empty page token string, which indicates the starting point of the page
+ (optional). Defaults to ``None``, which will retrieve the first page of tenants.
+ max_results: A positive integer indicating the maximum number of tenants to include in the
+ returned page (optional). Defaults to 100, which is also the maximum number allowed.
+ app: An App instance (optional).
+
+ Returns:
+ ListTenantsPage: A page of tenants.
+
+ Raises:
+ ValueError: If ``max_results`` or ``page_token`` are invalid.
+ FirebaseError: If an error occurs while retrieving the tenants.
+ """
+ tenant_mgt_service = _get_tenant_mgt_service(app)
+ def download(page_token, max_results):
+ return tenant_mgt_service.list_tenants(page_token, max_results)
+ return ListTenantsPage(download, page_token, max_results)
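+
+# Callers rarely need to handle page tokens directly; ``iterate_all()`` on the
+# returned page walks every page lazily, keeping one page in memory at a time:
+#
+#     for tenant in tenant_mgt.list_tenants().iterate_all():
+#         print(tenant.tenant_id, tenant.display_name)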
+
+
+def _get_tenant_mgt_service(app):
+ return _utils.get_app_service(app, _TENANT_MGT_ATTRIBUTE, _TenantManagementService)
+
+
+class Tenant:
+ """Represents a tenant in a multi-tenant application.
+
+ Multi-tenancy support requires Google Cloud Identity Platform (GCIP). To learn more about
+ GCIP including pricing and features, see https://cloud.google.com/identity-platform.
+
+ Before multi-tenancy can be used in a Google Cloud Identity Platform project, tenants must be
+ enabled in that project via the Cloud Console UI. A Tenant instance provides information
+ such as the display name, tenant identifier and email authentication configuration.
+ """
+
+ def __init__(self, data):
+ if not isinstance(data, dict):
+ raise ValueError('Invalid data argument in Tenant constructor: {0}'.format(data))
+ if 'name' not in data:
+ raise ValueError('Tenant response missing required keys.')
+
+ self._data = data
+
+ @property
+ def tenant_id(self):
+ name = self._data['name']
+ return name.split('/')[-1]
+
+ @property
+ def display_name(self):
+ return self._data.get('displayName')
+
+ @property
+ def allow_password_sign_up(self):
+ return self._data.get('allowPasswordSignup', False)
+
+ @property
+ def enable_email_link_sign_in(self):
+ return self._data.get('enableEmailLinkSignin', False)
+
+
+class _TenantManagementService:
+ """Firebase tenant management service."""
+
+ TENANT_MGT_URL = 'https://identitytoolkit.googleapis.com/v2beta1'
+
+ def __init__(self, app):
+ credential = app.credential.get_credential()
+ version_header = 'Python/Admin/{0}'.format(firebase_admin.__version__)
+ base_url = '{0}/projects/{1}'.format(self.TENANT_MGT_URL, app.project_id)
+ self.app = app
+ self.client = _http_client.JsonHttpClient(
+ credential=credential, base_url=base_url, headers={'X-Client-Version': version_header})
+ self.tenant_clients = {}
+ self.lock = threading.RLock()
+
+ def auth_for_tenant(self, tenant_id):
+ """Gets an Auth Client instance scoped to the given tenant ID."""
+ if not isinstance(tenant_id, str) or not tenant_id:
+ raise ValueError(
+ 'Invalid tenant ID: {0}. Tenant ID must be a non-empty string.'.format(tenant_id))
+
+ with self.lock:
+ if tenant_id in self.tenant_clients:
+ return self.tenant_clients[tenant_id]
+
+ client = auth.Client(self.app, tenant_id=tenant_id)
+ self.tenant_clients[tenant_id] = client
+ return client
+
+ def get_tenant(self, tenant_id):
+ """Gets the tenant corresponding to the given ``tenant_id``."""
+ if not isinstance(tenant_id, str) or not tenant_id:
+ raise ValueError(
+ 'Invalid tenant ID: {0}. Tenant ID must be a non-empty string.'.format(tenant_id))
+
+ try:
+ body = self.client.body('get', '/tenants/{0}'.format(tenant_id))
+ except requests.exceptions.RequestException as error:
+ raise _auth_utils.handle_auth_backend_error(error)
+ else:
+ return Tenant(body)
+
+ def create_tenant(
+ self, display_name, allow_password_sign_up=None, enable_email_link_sign_in=None):
+ """Creates a new tenant from the given parameters."""
+
+ payload = {'displayName': _validate_display_name(display_name)}
+ if allow_password_sign_up is not None:
+ payload['allowPasswordSignup'] = _auth_utils.validate_boolean(
+ allow_password_sign_up, 'allowPasswordSignup')
+ if enable_email_link_sign_in is not None:
+ payload['enableEmailLinkSignin'] = _auth_utils.validate_boolean(
+ enable_email_link_sign_in, 'enableEmailLinkSignin')
+
+ try:
+ body = self.client.body('post', '/tenants', json=payload)
+ except requests.exceptions.RequestException as error:
+ raise _auth_utils.handle_auth_backend_error(error)
+ else:
+ return Tenant(body)
+
+ def update_tenant(
+ self, tenant_id, display_name=None, allow_password_sign_up=None,
+ enable_email_link_sign_in=None):
+ """Updates the specified tenant with the given parameters."""
+ if not isinstance(tenant_id, str) or not tenant_id:
+ raise ValueError('Tenant ID must be a non-empty string.')
+
+ payload = {}
+ if display_name is not None:
+ payload['displayName'] = _validate_display_name(display_name)
+ if allow_password_sign_up is not None:
+ payload['allowPasswordSignup'] = _auth_utils.validate_boolean(
+ allow_password_sign_up, 'allowPasswordSignup')
+ if enable_email_link_sign_in is not None:
+ payload['enableEmailLinkSignin'] = _auth_utils.validate_boolean(
+ enable_email_link_sign_in, 'enableEmailLinkSignin')
+
+ if not payload:
+ raise ValueError('At least one parameter must be specified for update.')
+
+ url = '/tenants/{0}'.format(tenant_id)
+ update_mask = ','.join(_auth_utils.build_update_mask(payload))
+ params = 'updateMask={0}'.format(update_mask)
+ try:
+ body = self.client.body('patch', url, json=payload, params=params)
+ except requests.exceptions.RequestException as error:
+ raise _auth_utils.handle_auth_backend_error(error)
+ else:
+ return Tenant(body)
+
+ def delete_tenant(self, tenant_id):
+ """Deletes the tenant corresponding to the given ``tenant_id``."""
+ if not isinstance(tenant_id, str) or not tenant_id:
+ raise ValueError(
+ 'Invalid tenant ID: {0}. Tenant ID must be a non-empty string.'.format(tenant_id))
+
+ try:
+ self.client.request('delete', '/tenants/{0}'.format(tenant_id))
+ except requests.exceptions.RequestException as error:
+ raise _auth_utils.handle_auth_backend_error(error)
+
+ def list_tenants(self, page_token=None, max_results=_MAX_LIST_TENANTS_RESULTS):
+ """Retrieves a batch of tenants."""
+ if page_token is not None:
+ if not isinstance(page_token, str) or not page_token:
+ raise ValueError('Page token must be a non-empty string.')
+ if not isinstance(max_results, int):
+ raise ValueError('Max results must be an integer.')
+ if max_results < 1 or max_results > _MAX_LIST_TENANTS_RESULTS:
+ raise ValueError(
+ 'Max results must be a positive integer less than or equal to '
+ '{0}.'.format(_MAX_LIST_TENANTS_RESULTS))
+
+ payload = {'pageSize': max_results}
+ if page_token:
+ payload['pageToken'] = page_token
+ try:
+ return self.client.body('get', '/tenants', params=payload)
+ except requests.exceptions.RequestException as error:
+ raise _auth_utils.handle_auth_backend_error(error)
+
+
+class ListTenantsPage:
+ """Represents a page of tenants fetched from a Firebase project.
+
+ Provides methods for traversing tenants included in this page, as well as retrieving
+ subsequent pages of tenants. The iterator returned by ``iterate_all()`` can be used to iterate
+ through all tenants in the Firebase project starting from this page.
+ """
+
+ def __init__(self, download, page_token, max_results):
+ self._download = download
+ self._max_results = max_results
+ self._current = download(page_token, max_results)
+
+ @property
+ def tenants(self):
+ """A list of ``ExportedUserRecord`` instances available in this page."""
+ return [Tenant(data) for data in self._current.get('tenants', [])]
+
+ @property
+ def next_page_token(self):
+ """Page token string for the next page (empty string indicates no more pages)."""
+ return self._current.get('nextPageToken', '')
+
+ @property
+ def has_next_page(self):
+ """A boolean indicating whether more pages are available."""
+ return bool(self.next_page_token)
+
+ def get_next_page(self):
+ """Retrieves the next page of tenants, if available.
+
+ Returns:
+ ListTenantsPage: Next page of tenants, or None if this is the last page.
+ """
+ if self.has_next_page:
+ return ListTenantsPage(self._download, self.next_page_token, self._max_results)
+ return None
+
+ def iterate_all(self):
+ """Retrieves an iterator for tenants.
+
+ The returned iterator will iterate through all the tenants in the Firebase project
+ starting from this page. The iterator will never buffer more than one page of tenants
+ in memory at a time.
+
+ Returns:
+ iterator: An iterator of Tenant instances.
+ """
+ return _TenantIterator(self)
+
+
+class _TenantIterator:
+ """An iterator that allows iterating over tenants.
+
+ This implementation loads a page of tenants into memory, and iterates on them. When the whole
+ page has been traversed, it loads another page. This class never keeps more than one page
+ of entries in memory.
+ """
+
+ def __init__(self, current_page):
+ if not current_page:
+ raise ValueError('Current page must not be None.')
+ self._current_page = current_page
+ self._index = 0
+
+ def next(self):
+ if self._index == len(self._current_page.tenants):
+ if self._current_page.has_next_page:
+ self._current_page = self._current_page.get_next_page()
+ self._index = 0
+ if self._index < len(self._current_page.tenants):
+ result = self._current_page.tenants[self._index]
+ self._index += 1
+ return result
+ raise StopIteration
+
+ def __next__(self):
+ return self.next()
+
+ def __iter__(self):
+ return self
+
+
+def _validate_display_name(display_name):
+ if not isinstance(display_name, str):
+ raise ValueError('Invalid type for displayName')
+ if not _DISPLAY_NAME_PATTERN.search(display_name):
+ raise ValueError(
+ 'displayName must start with a letter and only consist of letters, digits and '
+ 'hyphens with 4-20 characters.')
+ return display_name
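+
+# Examples of the rule above: 'my-tenant' and 'Team42' are accepted, while
+# '1tenant' (starts with a digit), 'abc' (shorter than 4 characters) and any
+# name longer than 20 characters raise a ValueError.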
diff --git a/venv/Lib/site-packages/google/_async_resumable_media/__init__.py b/venv/Lib/site-packages/google/_async_resumable_media/__init__.py
new file mode 100644
index 000000000..8c3da244e
--- /dev/null
+++ b/venv/Lib/site-packages/google/_async_resumable_media/__init__.py
@@ -0,0 +1,61 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Utilities for Google Media Downloads and Resumable Uploads.
+
+This package has some general purpose modules, e.g.
+:mod:`~google.resumable_media.common`, but the majority of the
+public interface will be contained in subpackages.
+
+===========
+Subpackages
+===========
+
+Each subpackage is tailored to a specific transport library:
+
+* the :mod:`~google.resumable_media.requests` subpackage uses the ``requests``
+ transport library.
+
+.. _requests: http://docs.python-requests.org/
+
+==========
+Installing
+==========
+
+To install with `pip`_:
+
+.. code-block:: console
+
+ $ pip install --upgrade google-resumable-media
+
+.. _pip: https://pip.pypa.io/
+"""
+
+
+from google.resumable_media.common import DataCorruption
+from google.resumable_media.common import InvalidResponse
+from google.resumable_media.common import PERMANENT_REDIRECT
+from google.resumable_media.common import RetryStrategy
+from google.resumable_media.common import TOO_MANY_REQUESTS
+from google.resumable_media.common import UPLOAD_CHUNK_SIZE
+
+
+__all__ = [
+ u"DataCorruption",
+ u"InvalidResponse",
+ u"PERMANENT_REDIRECT",
+ u"RetryStrategy",
+ u"TOO_MANY_REQUESTS",
+ u"UPLOAD_CHUNK_SIZE",
+]
diff --git a/venv/Lib/site-packages/google/_async_resumable_media/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/google/_async_resumable_media/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..6338182b1
Binary files /dev/null and b/venv/Lib/site-packages/google/_async_resumable_media/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/_async_resumable_media/__pycache__/_download.cpython-36.pyc b/venv/Lib/site-packages/google/_async_resumable_media/__pycache__/_download.cpython-36.pyc
new file mode 100644
index 000000000..67e18add2
Binary files /dev/null and b/venv/Lib/site-packages/google/_async_resumable_media/__pycache__/_download.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/_async_resumable_media/__pycache__/_helpers.cpython-36.pyc b/venv/Lib/site-packages/google/_async_resumable_media/__pycache__/_helpers.cpython-36.pyc
new file mode 100644
index 000000000..76dec292a
Binary files /dev/null and b/venv/Lib/site-packages/google/_async_resumable_media/__pycache__/_helpers.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/_async_resumable_media/__pycache__/_upload.cpython-36.pyc b/venv/Lib/site-packages/google/_async_resumable_media/__pycache__/_upload.cpython-36.pyc
new file mode 100644
index 000000000..ba8290cb2
Binary files /dev/null and b/venv/Lib/site-packages/google/_async_resumable_media/__pycache__/_upload.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/_async_resumable_media/_download.py b/venv/Lib/site-packages/google/_async_resumable_media/_download.py
new file mode 100644
index 000000000..0c9e61ef3
--- /dev/null
+++ b/venv/Lib/site-packages/google/_async_resumable_media/_download.py
@@ -0,0 +1,553 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Virtual bases classes for downloading media from Google APIs."""
+
+
+import re
+
+from six.moves import http_client
+
+
+from google._async_resumable_media import _helpers
+from google.resumable_media import common
+
+
+_CONTENT_RANGE_RE = re.compile(
+ r"bytes (?P\d+)-(?P\d+)/(?P\d+)",
+ flags=re.IGNORECASE,
+)
+_ACCEPTABLE_STATUS_CODES = (http_client.OK, http_client.PARTIAL_CONTENT)
+_GET = u"GET"
+_ZERO_CONTENT_RANGE_HEADER = u"bytes */0"
+
+
+class DownloadBase(object):
+ """Base class for download helpers.
+
+ Defines core shared behavior across different download types.
+
+ Args:
+ media_url (str): The URL containing the media to be downloaded.
+ stream (IO[bytes]): A write-able stream (i.e. file-like object) that
+ the downloaded resource can be written to.
+ start (int): The first byte in a range to be downloaded.
+ end (int): The last byte in a range to be downloaded.
+ headers (Optional[Mapping[str, str]]): Extra headers that should
+ be sent with the request, e.g. headers for encrypted data.
+
+ Attributes:
+ media_url (str): The URL containing the media to be downloaded.
+ start (Optional[int]): The first byte in a range to be downloaded.
+ end (Optional[int]): The last byte in a range to be downloaded.
+ """
+
+ def __init__(self, media_url, stream=None, start=None, end=None, headers=None):
+ self.media_url = media_url
+ self._stream = stream
+ self.start = start
+ self.end = end
+ if headers is None:
+ headers = {}
+ self._headers = headers
+ self._finished = False
+ self._retry_strategy = common.RetryStrategy()
+
+ @property
+ def finished(self):
+ """bool: Flag indicating if the download has completed."""
+ return self._finished
+
+ @staticmethod
+ def _get_status_code(response):
+ """Access the status code from an HTTP response.
+
+ Args:
+ response (object): The HTTP response object.
+
+ Raises:
+ NotImplementedError: Always, since virtual.
+ """
+ raise NotImplementedError(u"This implementation is virtual.")
+
+ @staticmethod
+ def _get_headers(response):
+ """Access the headers from an HTTP response.
+
+ Args:
+ response (object): The HTTP response object.
+
+ Raises:
+ NotImplementedError: Always, since virtual.
+ """
+ raise NotImplementedError(u"This implementation is virtual.")
+
+ @staticmethod
+ def _get_body(response):
+ """Access the response body from an HTTP response.
+
+ Args:
+ response (object): The HTTP response object.
+
+ Raises:
+ NotImplementedError: Always, since virtual.
+ """
+ raise NotImplementedError(u"This implementation is virtual.")
+
+
+class Download(DownloadBase):
+ """Helper to manage downloading a resource from a Google API.
+
+ "Slices" of the resource can be retrieved by specifying a range
+ with ``start`` and / or ``end``. However, in typical usage, neither
+ ``start`` nor ``end`` is expected to be provided.
+
+ Args:
+ media_url (str): The URL containing the media to be downloaded.
+ stream (IO[bytes]): A write-able stream (i.e. file-like object) that
+ the downloaded resource can be written to.
+ start (int): The first byte in a range to be downloaded. If not
+ provided, but ``end`` is provided, will download from the
+ beginning to ``end`` of the media.
+ end (int): The last byte in a range to be downloaded. If not
+ provided, but ``start`` is provided, will download from the
+ ``start`` to the end of the media.
+ headers (Optional[Mapping[str, str]]): Extra headers that should
+ be sent with the request, e.g. headers for encrypted data.
+ checksum (Optional[str]): The type of checksum to compute to verify
+ the integrity of the object. The response headers must contain
+ a checksum of the requested type. If the headers lack an
+ appropriate checksum (for instance in the case of transcoded or
+ ranged downloads where the remote service does not know the
+ correct checksum) an INFO-level log will be emitted. Supported
+ values are "md5", "crc32c" and None.
+ """
+
+ def __init__(
+ self, media_url, stream=None, start=None, end=None, headers=None, checksum="md5"
+ ):
+ super(Download, self).__init__(
+ media_url, stream=stream, start=start, end=end, headers=headers
+ )
+ self.checksum = checksum
+
+ def _prepare_request(self):
+ """Prepare the contents of an HTTP request.
+
+ This is everything that must be done before a request that doesn't
+ require network I/O (or other I/O). This is based on the `sans-I/O`_
+ philosophy.
+
+ Returns:
+ Tuple[str, str, NoneType, Mapping[str, str]]: The quadruple
+
+ * HTTP verb for the request (always GET)
+ * the URL for the request
+ * the body of the request (always :data:`None`)
+ * headers for the request
+
+ Raises:
+ ValueError: If the current :class:`Download` has already
+ finished.
+
+ .. _sans-I/O: https://sans-io.readthedocs.io/
+ """
+ if self.finished:
+ raise ValueError(u"A download can only be used once.")
+
+ add_bytes_range(self.start, self.end, self._headers)
+ return _GET, self.media_url, None, self._headers
+
+ def _process_response(self, response):
+ """Process the response from an HTTP request.
+
+ This is everything that must be done after a request that doesn't
+ require network I/O (or other I/O). This is based on the `sans-I/O`_
+ philosophy.
+
+ Args:
+ response (object): The HTTP response object.
+
+ .. _sans-I/O: https://sans-io.readthedocs.io/
+ """
+ # Tombstone the current Download so it cannot be used again.
+ self._finished = True
+ _helpers.require_status_code(
+ response, _ACCEPTABLE_STATUS_CODES, self._get_status_code
+ )
+
+ def consume(self, transport, timeout=None):
+ """Consume the resource to be downloaded.
+
+ If a ``stream`` is attached to this download, then the downloaded
+ resource will be written to the stream.
+
+ Args:
+ transport (object): An object which can make authenticated
+ requests.
+ timeout (Optional[Union[float, aiohttp.ClientTimeout]]):
+ The number of seconds to wait for the server response.
+ Depending on the retry strategy, a request may be repeated
+ several times using the same timeout each time.
+ Can also be passed as an `aiohttp.ClientTimeout` object.
+
+ Raises:
+ NotImplementedError: Always, since virtual.
+ """
+ raise NotImplementedError(u"This implementation is virtual.")
+
+
+class ChunkedDownload(DownloadBase):
+ """Download a resource in chunks from a Google API.
+
+ Args:
+ media_url (str): The URL containing the media to be downloaded.
+ chunk_size (int): The number of bytes to be retrieved in each
+ request.
+ stream (IO[bytes]): A write-able stream (i.e. file-like object) that
+ will be used to concatenate chunks of the resource as they are
+ downloaded.
+ start (int): The first byte in a range to be downloaded. If not
+ provided, defaults to ``0``.
+ end (int): The last byte in a range to be downloaded. If not
+ provided, will download to the end of the media.
+ headers (Optional[Mapping[str, str]]): Extra headers that should
+ be sent with each request, e.g. headers for data encryption
+ key headers.
+
+ Attributes:
+ media_url (str): The URL containing the media to be downloaded.
+ start (Optional[int]): The first byte in a range to be downloaded.
+ end (Optional[int]): The last byte in a range to be downloaded.
+ chunk_size (int): The number of bytes to be retrieved in each request.
+
+ Raises:
+ ValueError: If ``start`` is negative.
+ """
+
+ def __init__(self, media_url, chunk_size, stream, start=0, end=None, headers=None):
+ if start < 0:
+ raise ValueError(
+ u"On a chunked download the starting " u"value cannot be negative."
+ )
+ super(ChunkedDownload, self).__init__(
+ media_url, stream=stream, start=start, end=end, headers=headers
+ )
+ self.chunk_size = chunk_size
+ self._bytes_downloaded = 0
+ self._total_bytes = None
+ self._invalid = False
+
+ @property
+ def bytes_downloaded(self):
+ """int: Number of bytes that have been downloaded."""
+ return self._bytes_downloaded
+
+ @property
+ def total_bytes(self):
+ """Optional[int]: The total number of bytes to be downloaded."""
+ return self._total_bytes
+
+ @property
+ def invalid(self):
+ """bool: Indicates if the download is in an invalid state.
+
+ This will occur if a call to :meth:`consume_next_chunk` fails.
+ """
+ return self._invalid
+
+ def _get_byte_range(self):
+ """Determines the byte range for the next request.
+
+ Returns:
+ Tuple[int, int]: The pair of begin and end byte for the next
+ chunked request.
+ """
+ curr_start = self.start + self.bytes_downloaded
+ curr_end = curr_start + self.chunk_size - 1
+ # Make sure ``curr_end`` does not exceed ``end``.
+ if self.end is not None:
+ curr_end = min(curr_end, self.end)
+ # Make sure ``curr_end`` does not exceed ``total_bytes - 1``.
+ if self.total_bytes is not None:
+ curr_end = min(curr_end, self.total_bytes - 1)
+ return curr_start, curr_end
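+
+ # Worked example: with start=0, chunk_size=1000 and total_bytes=1500, the
+ # first request covers bytes (0, 999); once those 1000 bytes are recorded
+ # in bytes_downloaded, the next request covers (1000, 1499), clamped to
+ # total_bytes - 1.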
+
+ def _prepare_request(self):
+ """Prepare the contents of an HTTP request.
+
+ This is everything that must be done before a request that doesn't
+ require network I/O (or other I/O). This is based on the `sans-I/O`_
+ philosophy.
+
+ .. note:
+
+ This method will be used multiple times, so ``headers`` will
+ be mutated in between requests. However, we don't make a copy
+ since the same keys are being updated.
+
+ Returns:
+ Tuple[str, str, NoneType, Mapping[str, str]]: The quadruple
+
+ * HTTP verb for the request (always GET)
+ * the URL for the request
+ * the body of the request (always :data:`None`)
+ * headers for the request
+
+ Raises:
+ ValueError: If the current download has finished.
+ ValueError: If the current download is invalid.
+
+ .. _sans-I/O: https://sans-io.readthedocs.io/
+ """
+ if self.finished:
+ raise ValueError(u"Download has finished.")
+ if self.invalid:
+ raise ValueError(u"Download is invalid and cannot be re-used.")
+
+ curr_start, curr_end = self._get_byte_range()
+ add_bytes_range(curr_start, curr_end, self._headers)
+ return _GET, self.media_url, None, self._headers
+
+ def _make_invalid(self):
+ """Simple setter for ``invalid``.
+
+ This is intended to be passed along as a callback to helpers that
+ raise an exception so they can mark this instance as invalid before
+ raising.
+ """
+ self._invalid = True
+
+ async def _process_response(self, response):
+ """Process the response from an HTTP request.
+
+ This is everything that must be done after a request that doesn't
+ require network I/O. This is based on the `sans-I/O`_ philosophy.
+
+ For the time being, this **does require** some form of I/O to write
+ a chunk to ``stream``. However, this will (almost) certainly not be
+ network I/O.
+
+ Updates the current state after consuming a chunk. First,
+ increments ``bytes_downloaded`` by the number of bytes in the
+ ``content-length`` header.
+
+ If ``total_bytes`` is already set, this assumes (but does not check)
+ that we already have the correct value and doesn't bother to check
+ that it agrees with the headers.
+
+ We expect the **total** length to be in the ``content-range`` header,
+ but this header is only present on requests which sent the ``range``
+ header. This response header should be of the form
+ ``bytes {start}-{end}/{total}`` and ``{end} - {start} + 1``
+ should be the same as the ``Content-Length``.
+
+ Args:
+ response (object): The HTTP response object (need headers).
+
+ Raises:
+ ~google.resumable_media.common.InvalidResponse: If the number
+ of bytes in the body doesn't match the content length header.
+
+ .. _sans-I/O: https://sans-io.readthedocs.io/
+ """
+ # Verify the response before updating the current instance.
+ if _check_for_zero_content_range(
+ response, self._get_status_code, self._get_headers
+ ):
+ self._finished = True
+ return
+
+ _helpers.require_status_code(
+ response,
+ _ACCEPTABLE_STATUS_CODES,
+ self._get_status_code,
+ callback=self._make_invalid,
+ )
+ headers = self._get_headers(response)
+ response_body = await self._get_body(response)
+
+ start_byte, end_byte, total_bytes = get_range_info(
+ response, self._get_headers, callback=self._make_invalid
+ )
+
+ transfer_encoding = headers.get(u"transfer-encoding")
+
+ if transfer_encoding is None:
+ content_length = _helpers.header_required(
+ response,
+ u"content-length",
+ self._get_headers,
+ callback=self._make_invalid,
+ )
+ num_bytes = int(content_length)
+
+ if len(response_body) != num_bytes:
+ self._make_invalid()
+ raise common.InvalidResponse(
+ response,
+ u"Response is different size than content-length",
+ u"Expected",
+ num_bytes,
+ u"Received",
+ len(response_body),
+ )
+ else:
+ # 'content-length' header not allowed with chunked encoding.
+ num_bytes = end_byte - start_byte + 1
+
+ # First update ``bytes_downloaded``.
+ self._bytes_downloaded += num_bytes
+ # If the end byte is past ``end`` or ``total_bytes - 1`` we are done.
+ if self.end is not None and end_byte >= self.end:
+ self._finished = True
+ elif end_byte >= total_bytes - 1:
+ self._finished = True
+ # NOTE: We only use ``total_bytes`` if not already known.
+ if self.total_bytes is None:
+ self._total_bytes = total_bytes
+ # Write the response body to the stream.
+ self._stream.write(response_body)
+
+ def consume_next_chunk(self, transport, timeout=None):
+ """Consume the next chunk of the resource to be downloaded.
+
+ Args:
+ transport (object): An object which can make authenticated
+ requests.
+ timeout (Optional[Union[float, aiohttp.ClientTimeout]]):
+ The number of seconds to wait for the server response.
+ Depending on the retry strategy, a request may be repeated
+ several times using the same timeout each time.
+ Can also be passed as an `aiohttp.ClientTimeout` object.
+
+ Raises:
+ NotImplementedError: Always, since virtual.
+ """
+ raise NotImplementedError(u"This implementation is virtual.")
+
+
+def add_bytes_range(start, end, headers):
+ """Add a bytes range to a header dictionary.
+
+ Some possible inputs and the corresponding bytes ranges::
+
+ >>> headers = {}
+ >>> add_bytes_range(None, None, headers)
+ >>> headers
+ {}
+ >>> add_bytes_range(500, 999, headers)
+ >>> headers['range']
+ 'bytes=500-999'
+ >>> add_bytes_range(None, 499, headers)
+ >>> headers['range']
+ 'bytes=0-499'
+ >>> add_bytes_range(-500, None, headers)
+ >>> headers['range']
+ 'bytes=-500'
+ >>> add_bytes_range(9500, None, headers)
+ >>> headers['range']
+ 'bytes=9500-'
+
+ Args:
+ start (Optional[int]): The first byte in a range. Can be zero,
+ positive, negative or :data:`None`.
+ end (Optional[int]): The last byte in a range. Assumed to be
+ positive.
+ headers (Mapping[str, str]): A headers mapping which can have the
+ bytes range added if at least one of ``start`` or ``end``
+ is not :data:`None`.
+ """
+ if start is None:
+ if end is None:
+ # No range to add.
+ return
+ else:
+ # NOTE: This assumes ``end`` is non-negative.
+ bytes_range = u"0-{:d}".format(end)
+ else:
+ if end is None:
+ if start < 0:
+ bytes_range = u"{:d}".format(start)
+ else:
+ bytes_range = u"{:d}-".format(start)
+ else:
+ # NOTE: This is invalid if ``start < 0``.
+ bytes_range = u"{:d}-{:d}".format(start, end)
+
+ headers[_helpers.RANGE_HEADER] = u"bytes=" + bytes_range
+
+
+def get_range_info(response, get_headers, callback=_helpers.do_nothing):
+ """Get the start, end and total bytes from a content range header.
+
+ Args:
+ response (object): An HTTP response object.
+ get_headers (Callable[Any, Mapping[str, str]]): Helper to get headers
+ from an HTTP response.
+ callback (Optional[Callable]): A callback that takes no arguments,
+ to be executed when an exception is being raised.
+
+ Returns:
+ Tuple[int, int, int]: The start byte, end byte and total bytes.
+
+ Raises:
+ ~google.resumable_media.common.InvalidResponse: If the
+ ``Content-Range`` header is not of the form
+ ``bytes {start}-{end}/{total}``.
+ """
+ content_range = _helpers.header_required(
+ response, _helpers.CONTENT_RANGE_HEADER, get_headers, callback=callback
+ )
+ match = _CONTENT_RANGE_RE.match(content_range)
+ if match is None:
+ callback()
+ raise common.InvalidResponse(
+ response,
+ u"Unexpected content-range header",
+ content_range,
+ u'Expected to be of the form "bytes {start}-{end}/{total}"',
+ )
+
+ return (
+ int(match.group(u"start_byte")),
+ int(match.group(u"end_byte")),
+ int(match.group(u"total_bytes")),
+ )
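+
+# For instance, a header of ``Content-Range: bytes 1000-9999/10000`` yields
+# the tuple ``(1000, 9999, 10000)``.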
+
+
+def _check_for_zero_content_range(response, get_status_code, get_headers):
+ """Validate if response status code is 416 and content range is zero.
+
+ This is the special case for handling zero bytes files.
+
+ Args:
+ response (object): An HTTP response object.
+ get_status_code (Callable[Any, int]): Helper to get a status code
+ from a response.
+ get_headers (Callable[Any, Mapping[str, str]]): Helper to get headers
+ from an HTTP response.
+
+ Returns:
+ bool: True if content range total bytes is zero, false otherwise.
+ """
+ if get_status_code(response) == http_client.REQUESTED_RANGE_NOT_SATISFIABLE:
+ content_range = _helpers.header_required(
+ response,
+ _helpers.CONTENT_RANGE_HEADER,
+ get_headers,
+ callback=_helpers.do_nothing,
+ )
+ if content_range == _ZERO_CONTENT_RANGE_HEADER:
+ return True
+ return False
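+
+# In practice this covers zero-byte objects: a ranged GET for an empty file
+# comes back as 416 with ``Content-Range: bytes */0``, which callers treat as
+# a successfully finished download.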
diff --git a/venv/Lib/site-packages/google/_async_resumable_media/_helpers.py b/venv/Lib/site-packages/google/_async_resumable_media/_helpers.py
new file mode 100644
index 000000000..65673b482
--- /dev/null
+++ b/venv/Lib/site-packages/google/_async_resumable_media/_helpers.py
@@ -0,0 +1,198 @@
+# Copyright 2020 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Shared utilities used by both downloads and uploads."""
+
+import asyncio
+import logging
+import random
+
+
+from six.moves import http_client
+
+
+from google.resumable_media import common
+
+
+RANGE_HEADER = u"range"
+CONTENT_RANGE_HEADER = u"content-range"
+RETRYABLE = (
+ common.TOO_MANY_REQUESTS,
+ http_client.INTERNAL_SERVER_ERROR,
+ http_client.BAD_GATEWAY,
+ http_client.SERVICE_UNAVAILABLE,
+ http_client.GATEWAY_TIMEOUT,
+)
+
+_SLOW_CRC32C_WARNING = (
+ "Currently using crcmod in pure python form. This is a slow "
+ "implementation. Python 3 has a faster implementation, `google-crc32c`, "
+ "which will be used if it is installed."
+)
+_HASH_HEADER = u"x-goog-hash"
+_MISSING_CHECKSUM = u"""\
+No {checksum_type} checksum was returned from the service while downloading {}
+(which happens for composite objects), so client-side content integrity
+checking is not being performed."""
+_LOGGER = logging.getLogger(__name__)
+
+
+def do_nothing():
+ """Simple default callback."""
+
+
+def header_required(response, name, get_headers, callback=do_nothing):
+ """Checks that a specific header is in a headers dictionary.
+
+ Args:
+ response (object): An HTTP response object, expected to have a
+ ``headers`` attribute that is a ``Mapping[str, str]``.
+ name (str): The name of a required header.
+ get_headers (Callable[Any, Mapping[str, str]]): Helper to get headers
+ from an HTTP response.
+ callback (Optional[Callable]): A callback that takes no arguments,
+ to be executed when an exception is being raised.
+
+ Returns:
+ str: The desired header.
+
+ Raises:
+ ~google.resumable_media.common.InvalidResponse: If the header
+ is missing.
+ """
+ headers = get_headers(response)
+ if name not in headers:
+ callback()
+ raise common.InvalidResponse(
+ response, u"Response headers must contain header", name
+ )
+
+ return headers[name]
+
+
+def require_status_code(response, status_codes, get_status_code, callback=do_nothing):
+ """Require a response has a status code among a list.
+
+ Args:
+ response (object): The HTTP response object.
+ status_codes (tuple): The acceptable status codes.
+ get_status_code (Callable[Any, int]): Helper to get a status code
+ from a response.
+ callback (Optional[Callable]): A callback that takes no arguments,
+ to be executed when an exception is being raised.
+
+ Returns:
+ int: The status code.
+
+ Raises:
+ ~google.resumable_media.common.InvalidResponse: If the status code
+ is not one of the values in ``status_codes``.
+ """
+ status_code = get_status_code(response)
+ if status_code not in status_codes:
+ callback()
+ raise common.InvalidResponse(
+ response,
+ u"Request failed with status code",
+ status_code,
+ u"Expected one of",
+ *status_codes
+ )
+ return status_code
+
+
+def calculate_retry_wait(base_wait, max_sleep):
+ """Calculate the amount of time to wait before a retry attempt.
+
+ Wait time grows exponentially with the number of attempts, until
+ it hits ``max_sleep``.
+
+ A random amount of jitter (between 0 and 1 seconds) is added to spread out
+ retry attempts from different clients.
+
+ Args:
+ base_wait (float): The "base" wait time (i.e. without any jitter)
+ that will be doubled until it reaches the maximum sleep.
+ max_sleep (float): Maximum value that a sleep time is allowed to be.
+
+ Returns:
+ Tuple[float, float]: The new base wait time as well as the wait time
+ to be applied (with a random amount of jitter between 0 and 1 seconds
+ added).
+ """
+ new_base_wait = 2.0 * base_wait
+ if new_base_wait > max_sleep:
+ new_base_wait = max_sleep
+
+ jitter_ms = random.randint(0, 1000)
+ return new_base_wait, new_base_wait + 0.001 * jitter_ms
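+
+# Illustration: starting from base_wait=0.5 with max_sleep=64.0, successive
+# calls return base waits of 1.0, 2.0, 4.0, ... capped at 64.0, each with up
+# to one second of random jitter added to the actual wait:
+#
+#     base, wait = calculate_retry_wait(0.5, 64.0)  # base == 1.0, 1.0 <= wait <= 2.0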
+
+
+async def wait_and_retry(func, get_status_code, retry_strategy):
+ """Attempts to retry a call to ``func`` until success.
+
+ Expects ``func`` to return an HTTP response and uses ``get_status_code``
+ to check if the response is retry-able.
+
+ Will retry until :meth:`~.RetryStrategy.retry_allowed` (on the current
+ ``retry_strategy``) returns :data:`False`. Uses
+ :func:`calculate_retry_wait` to double the wait time (with jitter) after
+ each attempt.
+
+ Args:
+ func (Callable): A callable that takes no arguments and produces
+ an HTTP response which will be checked as retry-able.
+ get_status_code (Callable[Any, int]): Helper to get a status code
+ from a response.
+ retry_strategy (~google.resumable_media.common.RetryStrategy): The
+ strategy to use if the request fails and must be retried.
+
+ Returns:
+ object: The return value of ``func``.
+ """
+
+ response = await func()
+
+ if get_status_code(response) not in RETRYABLE:
+ return response
+
+ total_sleep = 0.0
+ num_retries = 0
+ base_wait = 0.5 # When doubled will give 1.0
+ while retry_strategy.retry_allowed(total_sleep, num_retries):
+ base_wait, wait_time = calculate_retry_wait(base_wait, retry_strategy.max_sleep)
+ num_retries += 1
+ total_sleep += wait_time
+ # Sleep asynchronously; a blocking time.sleep() inside this coroutine
+ # would stall every other task on the event loop between retries.
+ await asyncio.sleep(wait_time)
+ response = await func()
+ if get_status_code(response) not in RETRYABLE:
+ return response
+
+ return response
+
+
+class _DoNothingHash(object):
+ """Do-nothing hash object.
+
+ Intended as a stand-in for ``hashlib.md5`` or a crc32c checksum
+ implementation in cases where it isn't necessary to compute the hash.
+ """
+
+ def update(self, unused_chunk):
+ """Do-nothing ``update`` method.
+
+ Intended to match the interface of ``hashlib.md5`` and other checksums.
+ Args:
+ unused_chunk (bytes): A chunk of data.
+ """
diff --git a/venv/Lib/site-packages/google/_async_resumable_media/_upload.py b/venv/Lib/site-packages/google/_async_resumable_media/_upload.py
new file mode 100644
index 000000000..f95d91f4d
--- /dev/null
+++ b/venv/Lib/site-packages/google/_async_resumable_media/_upload.py
@@ -0,0 +1,979 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Virtual bases classes for uploading media via Google APIs.
+
+Supported here are:
+
+* simple (media) uploads
+* multipart uploads that contain both metadata and a small file as payload
+* resumable uploads (with metadata as well)
+"""
+
+
+import json
+import os
+import random
+import sys
+
+import six
+from six.moves import http_client
+
+from google import _async_resumable_media
+from google._async_resumable_media import _helpers
+from google.resumable_media import _helpers as sync_helpers
+from google.resumable_media import _upload as sync_upload
+from google.resumable_media import common
+
+
+from google.resumable_media._upload import (
+ _CONTENT_TYPE_HEADER,
+ _CONTENT_RANGE_TEMPLATE,
+ _RANGE_UNKNOWN_TEMPLATE,
+ _EMPTY_RANGE_TEMPLATE,
+ _BOUNDARY_FORMAT,
+ _MULTIPART_SEP,
+ _CRLF,
+ _MULTIPART_BEGIN,
+ _RELATED_HEADER,
+ _BYTES_RANGE_RE,
+ _STREAM_ERROR_TEMPLATE,
+ _POST,
+ _PUT,
+ _UPLOAD_CHECKSUM_MISMATCH_MESSAGE,
+ _UPLOAD_METADATA_NO_APPROPRIATE_CHECKSUM_MESSAGE,
+)
+
+
+class UploadBase(object):
+ """Base class for upload helpers.
+
+ Defines core shared behavior across different upload types.
+
+ Args:
+ upload_url (str): The URL where the content will be uploaded.
+ headers (Optional[Mapping[str, str]]): Extra headers that should
+ be sent with the request, e.g. headers for encrypted data.
+
+ Attributes:
+ upload_url (str): The URL where the content will be uploaded.
+ """
+
+ def __init__(self, upload_url, headers=None):
+ self.upload_url = upload_url
+ if headers is None:
+ headers = {}
+ self._headers = headers
+ self._finished = False
+ self._retry_strategy = common.RetryStrategy()
+
+ @property
+ def finished(self):
+ """bool: Flag indicating if the upload has completed."""
+ return self._finished
+
+ def _process_response(self, response):
+ """Process the response from an HTTP request.
+
+ This is everything that must be done after a request that doesn't
+ require network I/O (or other I/O). This is based on the `sans-I/O`_
+ philosophy.
+
+ Args:
+ response (object): The HTTP response object.
+
+ Raises:
+ ~google.resumable_media.common.InvalidResponse: If the status
+ code is not 200.
+
+ .. _sans-I/O: https://sans-io.readthedocs.io/
+ """
+ # Tombstone the current upload so it cannot be used again (in either
+ # failure or success).
+ self._finished = True
+ _helpers.require_status_code(response, (http_client.OK,), self._get_status_code)
+
+ @staticmethod
+ def _get_status_code(response):
+ """Access the status code from an HTTP response.
+
+ Args:
+ response (object): The HTTP response object.
+
+ Raises:
+ NotImplementedError: Always, since virtual.
+ """
+ raise NotImplementedError(u"This implementation is virtual.")
+
+ @staticmethod
+ def _get_headers(response):
+ """Access the headers from an HTTP response.
+
+ Args:
+ response (object): The HTTP response object.
+
+ Raises:
+ NotImplementedError: Always, since virtual.
+ """
+ raise NotImplementedError(u"This implementation is virtual.")
+
+ @staticmethod
+ def _get_body(response):
+ """Access the response body from an HTTP response.
+
+ Args:
+ response (object): The HTTP response object.
+
+ Raises:
+ NotImplementedError: Always, since virtual.
+ """
+ raise NotImplementedError(u"This implementation is virtual.")
+
+
+class SimpleUpload(UploadBase):
+ """Upload a resource to a Google API.
+
+ A **simple** media upload sends no metadata and completes the upload
+ in a single request.
+
+ Args:
+ upload_url (str): The URL where the content will be uploaded.
+ headers (Optional[Mapping[str, str]]): Extra headers that should
+ be sent with the request, e.g. headers for encrypted data.
+
+ Attributes:
+ upload_url (str): The URL where the content will be uploaded.
+ """
+
+ def _prepare_request(self, data, content_type):
+ """Prepare the contents of an HTTP request.
+
+ This is everything that must be done before a request that doesn't
+ require network I/O (or other I/O). This is based on the `sans-I/O`_
+ philosophy.
+
+ .. note:
+
+ This method will be used only once, so ``headers`` will be
+ mutated by having a new key added to it.
+
+ Args:
+ data (bytes): The resource content to be uploaded.
+ content_type (str): The content type for the request.
+
+ Returns:
+ Tuple[str, str, bytes, Mapping[str, str]]: The quadruple
+
+ * HTTP verb for the request (always POST)
+ * the URL for the request
+ * the body of the request
+ * headers for the request
+
+ Raises:
+ ValueError: If the current upload has already finished.
+ TypeError: If ``data`` isn't bytes.
+
+ .. _sans-I/O: https://sans-io.readthedocs.io/
+ """
+ if self.finished:
+ raise ValueError(u"An upload can only be used once.")
+
+ if not isinstance(data, six.binary_type):
+ raise TypeError(u"`data` must be bytes, received", type(data))
+ self._headers[_CONTENT_TYPE_HEADER] = content_type
+ return _POST, self.upload_url, data, self._headers
+
+ def transmit(self, transport, data, content_type, timeout=None):
+ """Transmit the resource to be uploaded.
+
+ Args:
+ transport (object): An object which can make authenticated
+ requests.
+ data (bytes): The resource content to be uploaded.
+ content_type (str): The content type of the resource, e.g. a JPEG
+ image has content type ``image/jpeg``.
+ timeout (Optional[Union[float, aiohttp.ClientTimeout]]):
+ The number of seconds to wait for the server response.
+ Depending on the retry strategy, a request may be repeated
+ several times using the same timeout each time.
+ Can also be passed as an `aiohttp.ClientTimeout` object.
+
+ Raises:
+ NotImplementedError: Always, since virtual.
+ """
+ raise NotImplementedError(u"This implementation is virtual.")
+
+
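[Editor's note] Because the request preparation above is sans-I/O, it can be exercised without any transport at all. A minimal sketch, assuming this module is importable as google._async_resumable_media._upload (the path is an assumption):

    from google._async_resumable_media._upload import SimpleUpload  # assumed path

    upload = SimpleUpload(u"https://example.invalid/upload?uploadType=media")
    method, url, body, headers = upload._prepare_request(b"hello", u"text/plain")
    assert method == u"POST" and body == b"hello"
    assert headers[u"content-type"] == u"text/plain"  # added by _prepare_request

A transport-specific subclass then only has to send the quadruple and hand the response back to `_process_response`.
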
+class MultipartUpload(UploadBase):
+ """Upload a resource with metadata to a Google API.
+
+ A **multipart** upload sends both metadata and the resource in a single
+ (multipart) request.
+
+ Args:
+ upload_url (str): The URL where the content will be uploaded.
+ headers (Optional[Mapping[str, str]]): Extra headers that should
+ be sent with the request, e.g. headers for encrypted data.
+ checksum (Optional[str]): The type of checksum to compute to verify
+ the integrity of the object. The request metadata will be amended
+ to include the computed value. Using this option will override a
+ manually-set checksum value. Supported values are "md5", "crc32c"
+ and None. The default is None.
+
+ Attributes:
+ upload_url (str): The URL where the content will be uploaded.
+ """
+
+ def __init__(self, upload_url, headers=None, checksum=None):
+ super(MultipartUpload, self).__init__(upload_url, headers=headers)
+ self._checksum_type = checksum
+
+ def _prepare_request(self, data, metadata, content_type):
+ """Prepare the contents of an HTTP request.
+
+ This is everything that must be done before a request that doesn't
+ require network I/O (or other I/O). This is based on the `sans-I/O`_
+ philosophy.
+
+ .. note::
+
+ This method will be used only once, so ``headers`` will be
+ mutated by having a new key added to it.
+
+ Args:
+ data (bytes): The resource content to be uploaded.
+ metadata (Mapping[str, str]): The resource metadata, such as an
+ ACL list.
+ content_type (str): The content type of the resource, e.g. a JPEG
+ image has content type ``image/jpeg``.
+
+ Returns:
+ Tuple[str, str, bytes, Mapping[str, str]]: The quadruple
+
+ * HTTP verb for the request (always POST)
+ * the URL for the request
+ * the body of the request
+ * headers for the request
+
+ Raises:
+ ValueError: If the current upload has already finished.
+ TypeError: If ``data`` isn't bytes.
+
+ .. _sans-I/O: https://sans-io.readthedocs.io/
+ """
+ if self.finished:
+ raise ValueError(u"An upload can only be used once.")
+
+ if not isinstance(data, six.binary_type):
+ raise TypeError(u"`data` must be bytes, received", type(data))
+
+ checksum_object = sync_helpers._get_checksum_object(self._checksum_type)
+
+ if checksum_object:
+ checksum_object.update(data)
+ actual_checksum = sync_helpers.prepare_checksum_digest(
+ checksum_object.digest()
+ )
+ metadata_key = sync_helpers._get_metadata_key(self._checksum_type)
+ metadata[metadata_key] = actual_checksum
+
+ content, multipart_boundary = construct_multipart_request(
+ data, metadata, content_type
+ )
+ multipart_content_type = _RELATED_HEADER + multipart_boundary + b'"'
+
+ self._headers[_CONTENT_TYPE_HEADER] = multipart_content_type
+
+ return _POST, self.upload_url, content, self._headers
+
+ def transmit(self, transport, data, metadata, content_type, timeout=None):
+ """Transmit the resource to be uploaded.
+
+ Args:
+ transport (object): An object which can make authenticated
+ requests.
+ data (bytes): The resource content to be uploaded.
+ metadata (Mapping[str, str]): The resource metadata, such as an
+ ACL list.
+ content_type (str): The content type of the resource, e.g. a JPEG
+ image has content type ``image/jpeg``.
+ timeout (Optional[Union[float, aiohttp.ClientTimeout]]):
+ The number of seconds to wait for the server response.
+ Depending on the retry strategy, a request may be repeated
+ several times using the same timeout each time.
+ Can also be passed as an `aiohttp.ClientTimeout` object.
+
+ Raises:
+ NotImplementedError: Always, since virtual.
+ """
+ raise NotImplementedError(u"This implementation is virtual.")
+
+
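[Editor's note] To make the checksum amendment in `_prepare_request` concrete: for checksum="md5" the sync helpers are assumed to hash the payload and store a base64-encoded digest under an "md5Hash" metadata key (key name and encoding are assumptions inferred from the surrounding code, not verified here). A rough stand-alone equivalent:

    import base64
    import hashlib
    import json

    data = b"Some not too large content."
    metadata = {u"name": u"file.txt"}

    # Assumed behavior of _get_checksum_object / prepare_checksum_digest for md5.
    metadata[u"md5Hash"] = base64.b64encode(hashlib.md5(data).digest()).decode(u"utf-8")
    json_part = json.dumps(metadata).encode(u"utf-8")  # becomes the first multipart part
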
+class ResumableUpload(UploadBase, sync_upload.ResumableUpload):
+ """Initiate and fulfill a resumable upload to a Google API.
+
+ A **resumable** upload sends an initial request with the resource metadata
+ and then gets assigned an upload ID / upload URL to send bytes to.
+ Using the upload URL, the upload is then done in chunks (determined by
+ the user) until all bytes have been uploaded.
+
+ Args:
+ upload_url (str): The URL where the resumable upload will be initiated.
+ chunk_size (int): The size of each chunk used to upload the resource.
+ headers (Optional[Mapping[str, str]]): Extra headers that should
+ be sent with the :meth:`initiate` request, e.g. headers for
+ encrypted data. These **will not** be sent with
+ :meth:`transmit_next_chunk` or :meth:`recover` requests.
+ checksum (Optional[str]): The type of checksum to compute to verify
+ the integrity of the object. After the upload is complete, the
+ server-computed checksum of the resulting object will be read
+ and google.resumable_media.common.DataCorruption will be raised on
+ a mismatch. The corrupted file will not be deleted from the remote
+ host automatically. Supported values are "md5", "crc32c" and None.
+ The default is None.
+
+ Attributes:
+ upload_url (str): The URL where the content will be uploaded.
+
+ Raises:
+ ValueError: If ``chunk_size`` is not a multiple of
+ :data:`.UPLOAD_CHUNK_SIZE`.
+ """
+
+ def __init__(self, upload_url, chunk_size, checksum=None, headers=None):
+ super(ResumableUpload, self).__init__(upload_url, headers=headers)
+ if chunk_size % _async_resumable_media.UPLOAD_CHUNK_SIZE != 0:
+ raise ValueError(
+ u"{} KB must divide chunk size".format(
+ _async_resumable_media.UPLOAD_CHUNK_SIZE / 1024
+ )
+ )
+ self._chunk_size = chunk_size
+ self._stream = None
+ self._content_type = None
+ self._bytes_uploaded = 0
+ self._bytes_checksummed = 0
+ self._checksum_type = checksum
+ self._checksum_object = None
+ self._total_bytes = None
+ self._resumable_url = None
+ self._invalid = False
+
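[Editor's note] The chunk-size check in the constructor above means every chunk must be a whole multiple of the upload chunk unit. A quick sanity check, assuming the usual 256 KB value for UPLOAD_CHUNK_SIZE:

    UPLOAD_CHUNK_SIZE = 256 * 1024  # assumed value of _async_resumable_media.UPLOAD_CHUNK_SIZE

    assert (1024 * 1024) % UPLOAD_CHUNK_SIZE == 0   # 1 MB: accepted
    assert (100 * 1000) % UPLOAD_CHUNK_SIZE != 0    # 100,000 bytes: would raise ValueError
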
+ @property
+ def invalid(self):
+ """bool: Indicates if the upload is in an invalid state.
+
+ This will occur if a call to :meth:`transmit_next_chunk` fails.
+ To recover from such a failure, call :meth:`recover`.
+ """
+ return self._invalid
+
+ @property
+ def chunk_size(self):
+ """int: The size of each chunk used to upload the resource."""
+ return self._chunk_size
+
+ @property
+ def resumable_url(self):
+ """Optional[str]: The URL of the in-progress resumable upload."""
+ return self._resumable_url
+
+ @property
+ def bytes_uploaded(self):
+ """int: Number of bytes that have been uploaded."""
+ return self._bytes_uploaded
+
+ @property
+ def total_bytes(self):
+ """Optional[int]: The total number of bytes to be uploaded.
+
+ If this upload is initiated (via :meth:`initiate`) with
+ ``stream_final=True``, this value will be populated based on the size
+ of the ``stream`` being uploaded. (By default ``stream_final=True``.)
+
+ If this upload is initiated with ``stream_final=False``,
+ :attr:`total_bytes` will be :data:`None` since it cannot be
+ determined from the stream.
+ """
+ return self._total_bytes
+
+ def _prepare_initiate_request(
+ self, stream, metadata, content_type, total_bytes=None, stream_final=True
+ ):
+ """Prepare the contents of HTTP request to initiate upload.
+
+ This is everything that must be done before a request that doesn't
+ require network I/O (or other I/O). This is based on the `sans-I/O`_
+ philosophy.
+
+ Args:
+ stream (IO[bytes]): The stream (i.e. file-like object) that will
+ be uploaded. The stream **must** be at the beginning (i.e.
+ ``stream.tell() == 0``).
+ metadata (Mapping[str, str]): The resource metadata, such as an
+ ACL list.
+ content_type (str): The content type of the resource, e.g. a JPEG
+ image has content type ``image/jpeg``.
+ total_bytes (Optional[int]): The total number of bytes to be
+ uploaded. If specified, the upload size **will not** be
+ determined from the stream (even if ``stream_final=True``).
+ stream_final (Optional[bool]): Indicates if the ``stream`` is
+ "final" (i.e. no more bytes will be added to it). In this case
+ we determine the upload size from the size of the stream. If
+ ``total_bytes`` is passed, this argument will be ignored.
+
+ Returns:
+ Tuple[str, str, bytes, Mapping[str, str]]: The quadruple
+
+ * HTTP verb for the request (always POST)
+ * the URL for the request
+ * the body of the request
+ * headers for the request
+
+ Raises:
+ ValueError: If the current upload has already been initiated.
+ ValueError: If ``stream`` is not at the beginning.
+
+ .. _sans-I/O: https://sans-io.readthedocs.io/
+ """
+ if self.resumable_url is not None:
+ raise ValueError(u"This upload has already been initiated.")
+ if stream.tell() != 0:
+ raise ValueError(u"Stream must be at beginning.")
+
+ self._stream = stream
+ self._content_type = content_type
+ headers = {
+ _CONTENT_TYPE_HEADER: u"application/json; charset=UTF-8",
+ u"x-upload-content-type": content_type,
+ }
+ # Set the total bytes if possible.
+ if total_bytes is not None:
+ self._total_bytes = total_bytes
+ elif stream_final:
+ self._total_bytes = get_total_bytes(stream)
+ # Add the total bytes to the headers if set.
+ if self._total_bytes is not None:
+ content_length = u"{:d}".format(self._total_bytes)
+ headers[u"x-upload-content-length"] = content_length
+
+ headers.update(self._headers)
+ payload = json.dumps(metadata).encode(u"utf-8")
+ return _POST, self.upload_url, payload, headers
+
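[Editor's note] Because this step is also sans-I/O, the initiate request can be inspected without a transport. A sketch, again assuming the module path google._async_resumable_media._upload:

    import io
    import json

    from google._async_resumable_media._upload import ResumableUpload  # assumed path

    upload = ResumableUpload(
        u"https://example.invalid/upload?uploadType=resumable", 256 * 1024
    )
    stream = io.BytesIO(b"Some resumable bytes.")
    method, url, payload, headers = upload._prepare_initiate_request(
        stream, {u"name": u"file.txt"}, u"text/plain"
    )
    assert method == u"POST"
    assert payload == json.dumps({u"name": u"file.txt"}).encode(u"utf-8")
    assert headers[u"x-upload-content-type"] == u"text/plain"
    assert headers[u"x-upload-content-length"] == u"21"  # stream size, found via seek
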
+ def _process_initiate_response(self, response):
+ """Process the response from an HTTP request that initiated upload.
+
+ This is everything that must be done after a request that doesn't
+ require network I/O (or other I/O). This is based on the `sans-I/O`_
+ philosophy.
+
+ This method takes the URL from the ``Location`` header and stores it
+ for future use. Within that URL, we assume the ``upload_id`` query
+ parameter has been included, but we do not check.
+
+ Args:
+ response (object): The HTTP response object (need headers).
+
+ .. _sans-I/O: https://sans-io.readthedocs.io/
+ """
+ _helpers.require_status_code(
+ response,
+ (http_client.OK,),
+ self._get_status_code,
+ callback=self._make_invalid,
+ )
+ self._resumable_url = _helpers.header_required(
+ response, u"location", self._get_headers
+ )
+
+ def initiate(
+ self,
+ transport,
+ stream,
+ metadata,
+ content_type,
+ total_bytes=None,
+ stream_final=True,
+ timeout=None,
+ ):
+ """Initiate a resumable upload.
+
+ By default, this method assumes your ``stream`` is in a "final"
+ state ready to transmit. However, ``stream_final=False`` can be used
+ to indicate that the size of the resource is not known. This can happen
+ if bytes are being dynamically fed into ``stream``, e.g. if the stream
+ is attached to application logs.
+
+ If ``stream_final=False`` is used, :attr:`chunk_size` bytes will be
+ read from the stream every time :meth:`transmit_next_chunk` is called.
+ If one of those reads produces strictly fewer bytes than the chunk
+ size, the upload will be concluded.
+
+ Args:
+ transport (object): An object which can make authenticated
+ requests.
+ stream (IO[bytes]): The stream (i.e. file-like object) that will
+ be uploaded. The stream **must** be at the beginning (i.e.
+ ``stream.tell() == 0``).
+ metadata (Mapping[str, str]): The resource metadata, such as an
+ ACL list.
+ content_type (str): The content type of the resource, e.g. a JPEG
+ image has content type ``image/jpeg``.
+ total_bytes (Optional[int]): The total number of bytes to be
+ uploaded. If specified, the upload size **will not** be
+ determined from the stream (even if ``stream_final=True``).
+ stream_final (Optional[bool]): Indicates if the ``stream`` is
+ "final" (i.e. no more bytes will be added to it). In this case
+ we determine the upload size from the size of the stream. If
+ ``total_bytes`` is passed, this argument will be ignored.
+ timeout (Optional[Union[float, aiohttp.ClientTimeout]]):
+ The number of seconds to wait for the server response.
+ Depending on the retry strategy, a request may be repeated
+ several times using the same timeout each time.
+ Can also be passed as an `aiohttp.ClientTimeout` object.
+
+ Raises:
+ NotImplementedError: Always, since virtual.
+ """
+ raise NotImplementedError(u"This implementation is virtual.")
+
+ def _prepare_request(self):
+ """Prepare the contents of HTTP request to upload a chunk.
+
+ This is everything that must be done before a request that doesn't
+ require network I/O. This is based on the `sans-I/O`_ philosophy.
+
+ For the time being, this **does require** some form of I/O to read
+ a chunk from ``stream`` (via :func:`get_next_chunk`). However, this
+ will (almost) certainly not be network I/O.
+
+ Returns:
+ Tuple[str, str, bytes, Mapping[str, str]]: The quadruple
+
+ * HTTP verb for the request (always PUT)
+ * the URL for the request
+ * the body of the request
+ * headers for the request
+
+ The headers **do not** incorporate the ``_headers`` on the
+ current instance.
+
+ Raises:
+ ValueError: If the current upload has finished.
+ ValueError: If the current upload is in an invalid state.
+ ValueError: If the current upload has not been initiated.
+ ValueError: If the location in the stream (i.e. ``stream.tell()``)
+ does not agree with ``bytes_uploaded``.
+
+ .. _sans-I/O: https://sans-io.readthedocs.io/
+ """
+ if self.finished:
+ raise ValueError(u"Upload has finished.")
+ if self.invalid:
+ raise ValueError(
+ u"Upload is in an invalid state. To recover call `recover()`."
+ )
+ if self.resumable_url is None:
+ raise ValueError(
+ u"This upload has not been initiated. Please call "
+ u"initiate() before beginning to transmit chunks."
+ )
+
+ start_byte, payload, content_range = get_next_chunk(
+ self._stream, self._chunk_size, self._total_bytes
+ )
+ if start_byte != self.bytes_uploaded:
+ msg = _STREAM_ERROR_TEMPLATE.format(start_byte, self.bytes_uploaded)
+ raise ValueError(msg)
+
+ self._update_checksum(start_byte, payload)
+
+ headers = {
+ _CONTENT_TYPE_HEADER: self._content_type,
+ _helpers.CONTENT_RANGE_HEADER: content_range,
+ }
+ return _PUT, self.resumable_url, payload, headers
+
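[Editor's note] A sketch of the per-chunk quadruple, mirroring the "already initiated" setup the doctests in this package use (internal attributes are poked directly, which is test-only practice):

    import io

    from google._async_resumable_media._upload import ResumableUpload  # assumed path

    upload = ResumableUpload(u"http://test.invalid", 256 * 1024)
    upload._resumable_url = u"http://test.invalid?upload_id=mocked"
    upload._stream = io.BytesIO(b"01234567891")
    upload._content_type = u"text/plain"
    upload._total_bytes = 11
    upload._chunk_size = 4  # shrink after the fact so the data splits into three chunks

    method, url, payload, headers = upload._prepare_request()
    assert (method, payload) == (u"PUT", b"0123")
    assert headers[u"content-range"] == u"bytes 0-3/11"
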
+ def _make_invalid(self):
+ """Simple setter for ``invalid``.
+
+ This is intended to be passed along as a callback to helpers that
+ raise an exception so they can mark this instance as invalid before
+ raising.
+ """
+ self._invalid = True
+
+ async def _process_response(self, response, bytes_sent):
+ """Process the response from an HTTP request.
+
+ This is everything that must be done after a request that doesn't
+ require network I/O (or other I/O). This is based on the `sans-I/O`_
+ philosophy.
+
+ Args:
+ response (object): The HTTP response object.
+ bytes_sent (int): The number of bytes sent in the request that
+ ``response`` was returned for.
+
+ Raises:
+ ~google.resumable_media.common.InvalidResponse: If the status
+ code is 308 and the ``range`` header is not of the form
+ ``bytes=0-{end}``.
+ ~google.resumable_media.common.InvalidResponse: If the status
+ code is not 200 or 308.
+
+ .. _sans-I/O: https://sans-io.readthedocs.io/
+ """
+ status_code = _helpers.require_status_code(
+ response,
+ (http_client.OK, _async_resumable_media.PERMANENT_REDIRECT),
+ self._get_status_code,
+ callback=self._make_invalid,
+ )
+ if status_code == http_client.OK:
+ # NOTE: We use the "local" information of ``bytes_sent`` to update
+ # ``bytes_uploaded``, but do not verify this against other
+ # state. However, there may be some other information:
+ #
+ # * a ``size`` key in JSON response body
+ # * the ``total_bytes`` attribute (if set)
+ # * ``stream.tell()`` (relying on fact that ``initiate()``
+ # requires stream to be at the beginning)
+ self._bytes_uploaded = self._bytes_uploaded + bytes_sent
+ # Tombstone the current upload so it cannot be used again.
+ self._finished = True
+ # Validate the checksum. This can raise an exception on failure.
+ await self._validate_checksum(response)
+ else:
+ bytes_range = _helpers.header_required(
+ response,
+ _helpers.RANGE_HEADER,
+ self._get_headers,
+ callback=self._make_invalid,
+ )
+ match = _BYTES_RANGE_RE.match(bytes_range)
+ if match is None:
+ self._make_invalid()
+ raise common.InvalidResponse(
+ response,
+ u'Unexpected "range" header',
+ bytes_range,
+ u'Expected to be of the form "bytes=0-{end}"',
+ )
+ self._bytes_uploaded = int(match.group(u"end_byte")) + 1
+
+ async def _validate_checksum(self, response):
+ """Check the computed checksum, if any, against the response headers.
+ Args:
+ response (object): The HTTP response object.
+ Raises:
+ ~google.resumable_media.common.DataCorruption: If the checksum
+ computed locally and the checksum reported by the remote host do
+ not match.
+ """
+ if self._checksum_type is None:
+ return
+ metadata_key = sync_helpers._get_metadata_key(self._checksum_type)
+ metadata = await response.json()
+ remote_checksum = metadata.get(metadata_key)
+ if remote_checksum is None:
+ raise common.InvalidResponse(
+ response,
+ _UPLOAD_METADATA_NO_APPROPRIATE_CHECKSUM_MESSAGE.format(metadata_key),
+ self._get_headers(response),
+ )
+ local_checksum = sync_helpers.prepare_checksum_digest(
+ self._checksum_object.digest()
+ )
+ if local_checksum != remote_checksum:
+ raise common.DataCorruption(
+ response,
+ _UPLOAD_CHECKSUM_MISMATCH_MESSAGE.format(
+ self._checksum_type.upper(), local_checksum, remote_checksum
+ ),
+ )
+
+ def transmit_next_chunk(self, transport, timeout=None):
+ """Transmit the next chunk of the resource to be uploaded.
+
+ If the current upload was initiated with ``stream_final=False``,
+ this method will dynamically determine if the upload has completed.
+ The upload will be considered complete if the stream produces
+ fewer than :attr:`chunk_size` bytes when a chunk is read from it.
+
+ Args:
+ transport (object): An object which can make authenticated
+ requests.
+ timeout (Optional[Union[float, aiohttp.ClientTimeout]]):
+ The number of seconds to wait for the server response.
+ Depending on the retry strategy, a request may be repeated
+ several times using the same timeout each time.
+ Can also be passed as an `aiohttp.ClientTimeout` object.
+
+ Raises:
+ NotImplementedError: Always, since virtual.
+ """
+ raise NotImplementedError(u"This implementation is virtual.")
+
+ def _prepare_recover_request(self):
+ """Prepare the contents of HTTP request to recover from failure.
+
+ This is everything that must be done before a request that doesn't
+ require network I/O. This is based on the `sans-I/O`_ philosophy.
+
+ We assume that the :attr:`resumable_url` is set (i.e. the only way
+ the upload can end up :attr:`invalid` is if it has been initiated).
+
+ Returns:
+ Tuple[str, str, NoneType, Mapping[str, str]]: The quadruple
+
+ * HTTP verb for the request (always PUT)
+ * the URL for the request
+ * the body of the request (always :data:`None`)
+ * headers for the request
+
+ The headers **do not** incorporate the ``_headers`` on the
+ current instance.
+
+ Raises:
+ ValueError: If the current upload is not in an invalid state.
+
+ .. _sans-I/O: https://sans-io.readthedocs.io/
+ """
+ if not self.invalid:
+ raise ValueError(u"Upload is not in invalid state, no need to recover.")
+
+ headers = {_helpers.CONTENT_RANGE_HEADER: u"bytes */*"}
+ return _PUT, self.resumable_url, None, headers
+
+ def _process_recover_response(self, response):
+ """Process the response from an HTTP request to recover from failure.
+
+ This is everything that must be done after a request that doesn't
+ require network I/O (or other I/O). This is based on the `sans-I/O`_
+ philosophy.
+
+ Args:
+ response (object): The HTTP response object.
+
+ Raises:
+ ~google.resumable_media.common.InvalidResponse: If the status
+ code is not 308.
+ ~google.resumable_media.common.InvalidResponse: If the status
+ code is 308 and the ``range`` header is not of the form
+ ``bytes=0-{end}``.
+
+ .. _sans-I/O: https://sans-io.readthedocs.io/
+ """
+ _helpers.require_status_code(
+ response,
+ (_async_resumable_media.PERMANENT_REDIRECT,),
+ self._get_status_code,
+ )
+ headers = self._get_headers(response)
+ if _helpers.RANGE_HEADER in headers:
+ bytes_range = headers[_helpers.RANGE_HEADER]
+ match = _BYTES_RANGE_RE.match(bytes_range)
+ if match is None:
+ raise common.InvalidResponse(
+ response,
+ u'Unexpected "range" header',
+ bytes_range,
+ u'Expected to be of the form "bytes=0-{end}"',
+ )
+ self._bytes_uploaded = int(match.group(u"end_byte")) + 1
+ else:
+ # In this case, the upload has not "begun".
+ self._bytes_uploaded = 0
+
+ self._stream.seek(self._bytes_uploaded)
+ self._invalid = False
+
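[Editor's note] The request half of recovery can be exercised stand-alone; a hedged sketch (internal attributes set directly, as in the test setups):

    import io

    from google._async_resumable_media._upload import ResumableUpload  # assumed path

    upload = ResumableUpload(u"http://test.invalid", 256 * 1024)
    upload._resumable_url = u"http://test.invalid?upload_id=mocked"
    upload._stream = io.BytesIO(b"01234567891")
    upload._invalid = True  # pretend a chunk request failed

    method, url, payload, headers = upload._prepare_recover_request()
    assert (method, payload) == (u"PUT", None)
    assert headers == {u"content-range": u"bytes */*"}

The response half (`_process_recover_response` above) then expects a 308 whose optional ``range`` header tells us how far the server actually got.
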
+ def recover(self, transport):
+ """Recover from a failure.
+
+ This method should be used when a :class:`ResumableUpload` is in an
+ :attr:`~ResumableUpload.invalid` state due to a request failure.
+
+ This will verify the progress with the server and make sure the
+ current upload is in a valid state before :meth:`transmit_next_chunk`
+ can be used again.
+
+ Args:
+ transport (object): An object which can make authenticated
+ requests.
+
+ Raises:
+ NotImplementedError: Always, since virtual.
+ """
+ raise NotImplementedError(u"This implementation is virtual.")
+
+
+def get_boundary():
+ """Get a random boundary for a multipart request.
+
+ Returns:
+ bytes: The boundary used to separate parts of a multipart request.
+ """
+ random_int = random.randrange(sys.maxsize)
+ boundary = _BOUNDARY_FORMAT.format(random_int)
+ # NOTE: Neither % formatting nor .format() are available for byte strings
+ # in Python 3.4, so we must use unicode strings as templates.
+ return boundary.encode(u"utf-8")
+
+
+def construct_multipart_request(data, metadata, content_type):
+ """Construct a multipart request body.
+
+ Args:
+ data (bytes): The resource content (UTF-8 encoded as bytes)
+ to be uploaded.
+ metadata (Mapping[str, str]): The resource metadata, such as an
+ ACL list.
+ content_type (str): The content type of the resource, e.g. a JPEG
+ image has content type ``image/jpeg``.
+
+ Returns:
+ Tuple[bytes, bytes]: The multipart request body and the boundary used
+ between each part.
+ """
+ multipart_boundary = get_boundary()
+ json_bytes = json.dumps(metadata).encode(u"utf-8")
+ content_type = content_type.encode(u"utf-8")
+ # Combine the two parts into a multipart payload.
+ # NOTE: We'd prefer a bytes template but are restricted by Python 3.4.
+ boundary_sep = _MULTIPART_SEP + multipart_boundary
+ content = (
+ boundary_sep
+ + _MULTIPART_BEGIN
+ + json_bytes
+ + _CRLF
+ + boundary_sep
+ + _CRLF
+ + b"content-type: "
+ + content_type
+ + _CRLF
+ + _CRLF
+ + data # Empty line between headers and body.
+ + _CRLF
+ + boundary_sep
+ + _MULTIPART_SEP
+ )
+
+ return content, multipart_boundary
+
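[Editor's note] Assuming the upstream byte constants (`_MULTIPART_SEP = b"--"`, `_CRLF = b"\r\n"`, and a JSON-preamble `_MULTIPART_BEGIN`), the assembled body for data b"hello", metadata {"name": "file.txt"} and content type text/plain would look roughly like this (boundary shortened to ==0== for readability):

    --==0==
    content-type: application/json; charset=UTF-8

    {"name": "file.txt"}
    --==0==
    content-type: text/plain

    hello
    --==0==--
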
+
+def get_total_bytes(stream):
+ """Determine the total number of bytes in a stream.
+
+ Args:
+ stream (IO[bytes]): The stream (i.e. file-like object).
+
+ Returns:
+ int: The number of bytes.
+ """
+ current_position = stream.tell()
+ # NOTE: ``.seek()`` **should** return the same value that ``.tell()``
+ # returns, but in Python 2, ``file`` objects do not.
+ stream.seek(0, os.SEEK_END)
+ end_position = stream.tell()
+ # Go back to the initial position.
+ stream.seek(current_position)
+
+ return end_position
+
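[Editor's note] A quick check that the caller's stream position is preserved:

    import io

    stream = io.BytesIO(b"abcdef")
    stream.seek(2)
    assert get_total_bytes(stream) == 6  # measured by seeking to the end...
    assert stream.tell() == 2            # ...then restoring the original position
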
+
+def get_next_chunk(stream, chunk_size, total_bytes):
+ """Get a chunk from an I/O stream.
+
+ The ``stream`` may have fewer bytes remaining than ``chunk_size``
+ so it may not always be the case that
+ ``end_byte == start_byte + chunk_size - 1``.
+
+ Args:
+ stream (IO[bytes]): The stream (i.e. file-like object).
+ chunk_size (int): The size of the chunk to be read from the ``stream``.
+ total_bytes (Optional[int]): The (expected) total number of bytes
+ in the ``stream``.
+
+ Returns:
+ Tuple[int, bytes, str]: Triple of:
+
+ * the start byte index
+ * the content in between the start and end bytes (inclusive)
+ * content range header for the chunk (slice) that has been read
+
+ Raises:
+ ValueError: If ``total_bytes == 0`` but ``stream.read()`` yields
+ non-empty content.
+ ValueError: If there is no data left to consume. This corresponds
+ exactly to the case ``end_byte < start_byte``, which can only
+ occur if ``end_byte == start_byte - 1``.
+ """
+ start_byte = stream.tell()
+ if total_bytes is not None and start_byte + chunk_size >= total_bytes > 0:
+ payload = stream.read(total_bytes - start_byte)
+ else:
+ payload = stream.read(chunk_size)
+ end_byte = stream.tell() - 1
+
+ num_bytes_read = len(payload)
+ if total_bytes is None:
+ if num_bytes_read < chunk_size:
+ # We now **KNOW** the total number of bytes.
+ total_bytes = end_byte + 1
+ elif total_bytes == 0:
+ # NOTE: We also expect ``start_byte == 0`` here but don't check
+ # because ``_prepare_initiate_request()`` requires the
+ # stream to be at the beginning.
+ if num_bytes_read != 0:
+ raise ValueError(
+ u"Stream specified as empty, but produced non-empty content."
+ )
+ else:
+ if num_bytes_read == 0:
+ raise ValueError(
+ u"Stream is already exhausted. There is no content remaining."
+ )
+
+ content_range = get_content_range(start_byte, end_byte, total_bytes)
+ return start_byte, payload, content_range
+
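[Editor's note] For example, an 11-byte stream read with chunk_size=4 yields three chunks, the last one short:

    import io

    stream = io.BytesIO(b"01234567891")
    assert get_next_chunk(stream, 4, 11) == (0, b"0123", u"bytes 0-3/11")
    assert get_next_chunk(stream, 4, 11) == (4, b"4567", u"bytes 4-7/11")
    assert get_next_chunk(stream, 4, 11) == (8, b"891", u"bytes 8-10/11")
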
+
+def get_content_range(start_byte, end_byte, total_bytes):
+ """Convert start, end and total into content range header.
+
+ If ``total_bytes`` is not known, uses "bytes {start}-{end}/*".
+ If we are dealing with an empty range (i.e. ``end_byte < start_byte``)
+ then "bytes */{total}" is used.
+
+ This function **ASSUMES** that if the size is not known, the caller will
+ not also pass an empty range.
+
+ Args:
+ start_byte (int): The start (inclusive) of the byte range.
+ end_byte (int): The end (inclusive) of the byte range.
+ total_bytes (Optional[int]): The number of bytes in the byte
+ range (if known).
+
+ Returns:
+ str: The content range header.
+ """
+ if total_bytes is None:
+ return _RANGE_UNKNOWN_TEMPLATE.format(start_byte, end_byte)
+ elif end_byte < start_byte:
+ return _EMPTY_RANGE_TEMPLATE.format(total_bytes)
+ else:
+ return _CONTENT_RANGE_TEMPLATE.format(start_byte, end_byte, total_bytes)
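[Editor's note] The three branches in header form (the known-size template is assumed to be "bytes {start}-{end}/{total}"; the other two are stated in the docstring):

    assert get_content_range(0, 99, 1000) == u"bytes 0-99/1000"  # size known
    assert get_content_range(0, 99, None) == u"bytes 0-99/*"     # size unknown
    assert get_content_range(0, -1, 0) == u"bytes */0"           # empty range
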
diff --git a/venv/Lib/site-packages/google/_async_resumable_media/requests/__init__.py b/venv/Lib/site-packages/google/_async_resumable_media/requests/__init__.py
new file mode 100644
index 000000000..aaaa28369
--- /dev/null
+++ b/venv/Lib/site-packages/google/_async_resumable_media/requests/__init__.py
@@ -0,0 +1,678 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""``requests`` utilities for Google Media Downloads and Resumable Uploads.
+
+This sub-package assumes callers will use the `requests`_ library
+as transport and `google-auth`_ for sending authenticated HTTP traffic
+with ``requests``.
+
+.. _requests: http://docs.python-requests.org/
+.. _google-auth: https://google-auth.readthedocs.io/
+
+====================
+Authorized Transport
+====================
+
+To use ``google-auth`` and ``requests`` to create an authorized transport
+that has read-only access to Google Cloud Storage (GCS):
+
+.. testsetup:: get-credentials
+
+ import google.auth
+ import google.auth.credentials as creds_mod
+ import mock
+
+ def mock_default(scopes=None):
+ credentials = mock.Mock(spec=creds_mod.Credentials)
+ return credentials, u'mock-project'
+
+ # Patch the ``default`` function on the module.
+ original_default = google.auth.default
+ google.auth.default = mock_default
+
+.. doctest:: get-credentials
+
+ >>> import google.auth
+ >>> import google.auth.transport.requests as tr_requests
+ >>>
+ >>> ro_scope = u'https://www.googleapis.com/auth/devstorage.read_only'
+ >>> credentials, _ = google.auth.default(scopes=(ro_scope,))
+ >>> transport = tr_requests.AuthorizedSession(credentials)
+ >>> transport
+ <google.auth.transport.requests.AuthorizedSession object at 0x...>
+
+.. testcleanup:: get-credentials
+
+ # Put back the correct ``default`` function on the module.
+ google.auth.default = original_default
+
+================
+Simple Downloads
+================
+
+To download an object from Google Cloud Storage, construct the media URL
+for the GCS object and download it with an authorized transport that has
+access to the resource:
+
+.. testsetup:: basic-download
+
+ import mock
+ import requests
+ from six.moves import http_client
+
+ bucket = u'bucket-foo'
+ blob_name = u'file.txt'
+
+ fake_response = requests.Response()
+ fake_response.status_code = int(http_client.OK)
+ fake_response.headers[u'Content-Length'] = u'1364156'
+ fake_content = mock.MagicMock(spec=['__len__'])
+ fake_content.__len__.return_value = 1364156
+ fake_response._content = fake_content
+
+ get_method = mock.Mock(return_value=fake_response, spec=[])
+ transport = mock.Mock(request=get_method, spec=['request'])
+
+.. doctest:: basic-download
+
+ >>> from google.resumable_media.requests import Download
+ >>>
+ >>> url_template = (
+ ... u'https://www.googleapis.com/download/storage/v1/b/'
+ ... u'{bucket}/o/{blob_name}?alt=media')
+ >>> media_url = url_template.format(
+ ... bucket=bucket, blob_name=blob_name)
+ >>>
+ >>> download = Download(media_url)
+ >>> response = download.consume(transport)
+ >>> download.finished
+ True
+ >>> response
+ <Response [200]>
+ >>> response.headers[u'Content-Length']
+ '1364156'
+ >>> len(response.content)
+ 1364156
+
+To download only a portion of the bytes in the object,
+specify ``start`` and ``end`` byte positions (both optional):
+
+.. testsetup:: basic-download-with-slice
+
+ import mock
+ import requests
+ from six.moves import http_client
+
+ from google.resumable_media.requests import Download
+
+ media_url = u'http://test.invalid'
+ start = 4096
+ end = 8191
+ slice_size = end - start + 1
+
+ fake_response = requests.Response()
+ fake_response.status_code = int(http_client.PARTIAL_CONTENT)
+ fake_response.headers[u'Content-Length'] = u'{:d}'.format(slice_size)
+ content_range = u'bytes {:d}-{:d}/1364156'.format(start, end)
+ fake_response.headers[u'Content-Range'] = content_range
+ fake_content = mock.MagicMock(spec=['__len__'])
+ fake_content.__len__.return_value = slice_size
+ fake_response._content = fake_content
+
+ get_method = mock.Mock(return_value=fake_response, spec=[])
+ transport = mock.Mock(request=get_method, spec=['request'])
+
+.. doctest:: basic-download-with-slice
+
+ >>> download = Download(media_url, start=4096, end=8191)
+ >>> response = download.consume(transport)
+ >>> download.finished
+ True
+ >>> response
+ <Response [206]>
+ >>> response.headers[u'Content-Length']
+ '4096'
+ >>> response.headers[u'Content-Range']
+ 'bytes 4096-8191/1364156'
+ >>> len(response.content)
+ 4096
+
+=================
+Chunked Downloads
+=================
+
+For very large objects or objects of unknown size, it may make more sense
+to download the object in chunks rather than all at once. This can be done
+to avoid dropped connections with a poor internet connection or can allow
+multiple chunks to be downloaded in parallel to speed up the total
+download.
+
+A :class:`.ChunkedDownload` uses the same media URL and authorized
+transport that a basic :class:`.Download` would use, but also
+requires a chunk size and a write-able byte ``stream``. The chunk size is used
+to determine how much of the resource to consume with each request and the
+stream is to allow the resource to be written out (e.g. to disk) without
+having to fit in memory all at once.
+
+.. testsetup:: chunked-download
+
+ import io
+
+ import mock
+ import requests
+ from six.moves import http_client
+
+ media_url = u'http://test.invalid'
+
+ fifty_mb = 50 * 1024 * 1024
+ one_gb = 1024 * 1024 * 1024
+ fake_response = requests.Response()
+ fake_response.status_code = int(http_client.PARTIAL_CONTENT)
+ fake_response.headers[u'Content-Length'] = u'{:d}'.format(fifty_mb)
+ content_range = u'bytes 0-{:d}/{:d}'.format(fifty_mb - 1, one_gb)
+ fake_response.headers[u'Content-Range'] = content_range
+ fake_content_begin = b'The beginning of the chunk...'
+ fake_content = fake_content_begin + b'1' * (fifty_mb - 29)
+ fake_response._content = fake_content
+
+ get_method = mock.Mock(return_value=fake_response, spec=[])
+ transport = mock.Mock(request=get_method, spec=['request'])
+
+.. doctest:: chunked-download
+
+ >>> from google.resumable_media.requests import ChunkedDownload
+ >>>
+ >>> chunk_size = 50 * 1024 * 1024 # 50MB
+ >>> stream = io.BytesIO()
+ >>> download = ChunkedDownload(
+ ... media_url, chunk_size, stream)
+ >>> # Check the state of the download before starting.
+ >>> download.bytes_downloaded
+ 0
+ >>> download.total_bytes is None
+ True
+ >>> response = download.consume_next_chunk(transport)
+ >>> # Check the state of the download after consuming one chunk.
+ >>> download.finished
+ False
+ >>> download.bytes_downloaded # chunk_size
+ 52428800
+ >>> download.total_bytes # 1GB
+ 1073741824
+ >>> response
+ <Response [206]>
+ >>> response.headers[u'Content-Length']
+ '52428800'
+ >>> response.headers[u'Content-Range']
+ 'bytes 0-52428799/1073741824'
+ >>> len(response.content) == chunk_size
+ True
+ >>> stream.seek(0)
+ 0
+ >>> stream.read(29)
+ b'The beginning of the chunk...'
+
+The download will change its ``finished`` status to :data:`True`
+once the final chunk is consumed. In some cases, the final chunk may
+not be the same size as the other chunks:
+
+.. testsetup:: chunked-download-end
+
+ import mock
+ import requests
+ from six.moves import http_client
+
+ from google.resumable_media.requests import ChunkedDownload
+
+ media_url = u'http://test.invalid'
+
+ fifty_mb = 50 * 1024 * 1024
+ one_gb = 1024 * 1024 * 1024
+ stream = mock.Mock(spec=['write'])
+ download = ChunkedDownload(media_url, fifty_mb, stream)
+ download._bytes_downloaded = 20 * fifty_mb
+ download._total_bytes = one_gb
+
+ fake_response = requests.Response()
+ fake_response.status_code = int(http_client.PARTIAL_CONTENT)
+ slice_size = one_gb - 20 * fifty_mb
+ fake_response.headers[u'Content-Length'] = u'{:d}'.format(slice_size)
+ content_range = u'bytes {:d}-{:d}/{:d}'.format(
+ 20 * fifty_mb, one_gb - 1, one_gb)
+ fake_response.headers[u'Content-Range'] = content_range
+ fake_content = mock.MagicMock(spec=['__len__'])
+ fake_content.__len__.return_value = slice_size
+ fake_response._content = fake_content
+
+ get_method = mock.Mock(return_value=fake_response, spec=[])
+ transport = mock.Mock(request=get_method, spec=['request'])
+
+.. doctest:: chunked-download-end
+
+ >>> # The state of the download in progress.
+ >>> download.finished
+ False
+ >>> download.bytes_downloaded # 20 chunks at 50MB
+ 1048576000
+ >>> download.total_bytes # 1GB
+ 1073741824
+ >>> response = download.consume_next_chunk(transport)
+ >>> # The state of the download after consuming the final chunk.
+ >>> download.finished
+ True
+ >>> download.bytes_downloaded == download.total_bytes
+ True
+ >>> response
+ <Response [206]>
+ >>> response.headers[u'Content-Length']
+ '25165824'
+ >>> response.headers[u'Content-Range']
+ 'bytes 1048576000-1073741823/1073741824'
+ >>> len(response.content) < download.chunk_size
+ True
+
+In addition, a :class:`.ChunkedDownload` can also take optional
+``start`` and ``end`` byte positions.
+
+==============
+Simple Uploads
+==============
+
+Among the three supported upload classes, the simplest is
+:class:`.SimpleUpload`. A simple upload should be used when the resource
+being uploaded is small and when there is no metadata (other than the name)
+associated with the resource.
+
+.. testsetup:: simple-upload
+
+ import json
+
+ import mock
+ import requests
+ from six.moves import http_client
+
+ bucket = u'some-bucket'
+ blob_name = u'file.txt'
+
+ fake_response = requests.Response()
+ fake_response.status_code = int(http_client.OK)
+ payload = {
+ u'bucket': bucket,
+ u'contentType': u'text/plain',
+ u'md5Hash': u'M0XLEsX9/sMdiI+4pB4CAQ==',
+ u'name': blob_name,
+ u'size': u'27',
+ }
+ fake_response._content = json.dumps(payload).encode(u'utf-8')
+
+ post_method = mock.Mock(return_value=fake_response, spec=[])
+ transport = mock.Mock(request=post_method, spec=['request'])
+
+.. doctest:: simple-upload
+ :options: +NORMALIZE_WHITESPACE
+
+ >>> from google.resumable_media.requests import SimpleUpload
+ >>>
+ >>> url_template = (
+ ... u'https://www.googleapis.com/upload/storage/v1/b/{bucket}/o?'
+ ... u'uploadType=media&'
+ ... u'name={blob_name}')
+ >>> upload_url = url_template.format(
+ ... bucket=bucket, blob_name=blob_name)
+ >>>
+ >>> upload = SimpleUpload(upload_url)
+ >>> data = b'Some not too large content.'
+ >>> content_type = u'text/plain'
+ >>> response = upload.transmit(transport, data, content_type)
+ >>> upload.finished
+ True
+ >>> response
+ <Response [200]>
+ >>> json_response = response.json()
+ >>> json_response[u'bucket'] == bucket
+ True
+ >>> json_response[u'name'] == blob_name
+ True
+ >>> json_response[u'contentType'] == content_type
+ True
+ >>> json_response[u'md5Hash']
+ 'M0XLEsX9/sMdiI+4pB4CAQ=='
+ >>> int(json_response[u'size']) == len(data)
+ True
+
+In the rare case that an upload fails, an :exc:`.InvalidResponse`
+will be raised:
+
+.. testsetup:: simple-upload-fail
+
+ import time
+
+ import mock
+ import requests
+ from six.moves import http_client
+
+ from google import resumable_media
+ from google.resumable_media import _helpers
+ from google.resumable_media.requests import SimpleUpload as constructor
+
+ upload_url = u'http://test.invalid'
+ data = b'Some not too large content.'
+ content_type = u'text/plain'
+
+ fake_response = requests.Response()
+ fake_response.status_code = int(http_client.SERVICE_UNAVAILABLE)
+
+ post_method = mock.Mock(return_value=fake_response, spec=[])
+ transport = mock.Mock(request=post_method, spec=['request'])
+
+ time_sleep = time.sleep
+ def dont_sleep(seconds):
+ raise RuntimeError(u'No sleep', seconds)
+
+ def SimpleUpload(*args, **kwargs):
+ upload = constructor(*args, **kwargs)
+ # Mock the cumulative sleep to avoid retries (and `time.sleep()`).
+ upload._retry_strategy = resumable_media.RetryStrategy(
+ max_cumulative_retry=-1.0)
+ return upload
+
+ time.sleep = dont_sleep
+
+.. doctest:: simple-upload-fail
+ :options: +NORMALIZE_WHITESPACE
+
+ >>> upload = SimpleUpload(upload_url)
+ >>> error = None
+ >>> try:
+ ... upload.transmit(transport, data, content_type)
+ ... except resumable_media.InvalidResponse as caught_exc:
+ ... error = caught_exc
+ ...
+ >>> error
+ InvalidResponse('Request failed with status code', 503,
+ 'Expected one of', <HTTPStatus.OK: 200>)
+ >>> error.response
+ <Response [503]>
+ >>>
+ >>> upload.finished
+ True
+
+.. testcleanup:: simple-upload-fail
+
+ # Put back the correct ``sleep`` function on the ``time`` module.
+ time.sleep = time_sleep
+
+Even in the case of failure, we see that the upload is
+:attr:`~.SimpleUpload.finished`, i.e. it cannot be re-used.
+
+=================
+Multipart Uploads
+=================
+
+After the simple upload, the :class:`.MultipartUpload` can be used to
+achieve essentially the same task. However, a multipart upload allows some
+metadata about the resource to be sent along as well. (This is the "multi":
+we send a first part with the metadata and a second part with the actual
+bytes in the resource.)
+
+Usage is similar to the simple upload, but :meth:`~.MultipartUpload.transmit`
+accepts an extra required argument: ``metadata``.
+
+.. testsetup:: multipart-upload
+
+ import json
+
+ import mock
+ import requests
+ from six.moves import http_client
+
+ bucket = u'some-bucket'
+ blob_name = u'file.txt'
+ data = b'Some not too large content.'
+ content_type = u'text/plain'
+
+ fake_response = requests.Response()
+ fake_response.status_code = int(http_client.OK)
+ payload = {
+ u'bucket': bucket,
+ u'name': blob_name,
+ u'metadata': {u'color': u'grurple'},
+ }
+ fake_response._content = json.dumps(payload).encode(u'utf-8')
+
+ post_method = mock.Mock(return_value=fake_response, spec=[])
+ transport = mock.Mock(request=post_method, spec=['request'])
+
+.. doctest:: multipart-upload
+
+ >>> from google.resumable_media.requests import MultipartUpload
+ >>>
+ >>> url_template = (
+ ... u'https://www.googleapis.com/upload/storage/v1/b/{bucket}/o?'
+ ... u'uploadType=multipart')
+ >>> upload_url = url_template.format(bucket=bucket)
+ >>>
+ >>> upload = MultipartUpload(upload_url)
+ >>> metadata = {
+ ... u'name': blob_name,
+ ... u'metadata': {
+ ... u'color': u'grurple',
+ ... },
+ ... }
+ >>> response = upload.transmit(transport, data, metadata, content_type)
+ >>> upload.finished
+ True
+ >>> response
+ <Response [200]>
+ >>> json_response = response.json()
+ >>> json_response[u'bucket'] == bucket
+ True
+ >>> json_response[u'name'] == blob_name
+ True
+ >>> json_response[u'metadata'] == metadata[u'metadata']
+ True
+
+As with the simple upload, in the case of failure an :exc:`.InvalidResponse`
+is raised, enclosing the :attr:`~.InvalidResponse.response` that caused
+the failure and the ``upload`` object cannot be re-used after a failure.
+
+=================
+Resumable Uploads
+=================
+
+A :class:`.ResumableUpload` deviates from the other two upload classes:
+it transmits a resource over the course of multiple requests. This
+is intended to be used in cases where:
+
+* the size of the resource is not known (i.e. it is generated on the fly)
+* requests must be short-lived
+* the client has request **size** limitations
+* the resource is too large to fit into memory
+
+In general, a resource should be sent in a **single** request to avoid
+latency and reduce QPS. See `GCS best practices`_ for more things to
+consider when using a resumable upload.
+
+.. _GCS best practices: https://cloud.google.com/storage/docs/\
+ best-practices#uploading
+
+After creating a :class:`.ResumableUpload` instance, a
+**resumable upload session** must be initiated to let the server know that
+a series of chunked upload requests will be coming and to obtain an
+``upload_id`` for the session. In contrast to the other two upload classes,
+:meth:`~.ResumableUpload.initiate` takes a byte ``stream`` as input rather
+than raw bytes as ``data``. This can be a file object, a :class:`~io.BytesIO`
+object or any other stream implementing the same interface.
+
+.. testsetup:: resumable-initiate
+
+ import io
+
+ import mock
+ import requests
+ from six.moves import http_client
+
+ bucket = u'some-bucket'
+ blob_name = u'file.txt'
+ data = b'Some resumable bytes.'
+ content_type = u'text/plain'
+
+ fake_response = requests.Response()
+ fake_response.status_code = int(http_client.OK)
+ fake_response._content = b''
+ upload_id = u'ABCdef189XY_super_serious'
+ resumable_url_template = (
+ u'https://www.googleapis.com/upload/storage/v1/b/{bucket}'
+ u'/o?uploadType=resumable&upload_id={upload_id}')
+ resumable_url = resumable_url_template.format(
+ bucket=bucket, upload_id=upload_id)
+ fake_response.headers[u'location'] = resumable_url
+ fake_response.headers[u'x-guploader-uploadid'] = upload_id
+
+ post_method = mock.Mock(return_value=fake_response, spec=[])
+ transport = mock.Mock(request=post_method, spec=['request'])
+
+.. doctest:: resumable-initiate
+
+ >>> from google.resumable_media.requests import ResumableUpload
+ >>>
+ >>> url_template = (
+ ... u'https://www.googleapis.com/upload/storage/v1/b/{bucket}/o?'
+ ... u'uploadType=resumable')
+ >>> upload_url = url_template.format(bucket=bucket)
+ >>>
+ >>> chunk_size = 1024 * 1024 # 1MB
+ >>> upload = ResumableUpload(upload_url, chunk_size)
+ >>> stream = io.BytesIO(data)
+ >>> # The upload doesn't know how "big" it is until seeing a stream.
+ >>> upload.total_bytes is None
+ True
+ >>> metadata = {u'name': blob_name}
+ >>> response = upload.initiate(transport, stream, metadata, content_type)
+ >>> response
+ <Response [200]>
+ >>> upload.resumable_url == response.headers[u'Location']
+ True
+ >>> upload.total_bytes == len(data)
+ True
+ >>> upload_id = response.headers[u'X-GUploader-UploadID']
+ >>> upload_id
+ 'ABCdef189XY_super_serious'
+ >>> upload.resumable_url == upload_url + u'&upload_id=' + upload_id
+ True
+
+Once a :class:`.ResumableUpload` has been initiated, the resource is
+transmitted in chunks until completion:
+
+.. testsetup:: resumable-transmit
+
+ import io
+ import json
+
+ import mock
+ import requests
+ from six.moves import http_client
+
+ from google import resumable_media
+ import google.resumable_media.requests.upload as upload_mod
+
+ data = b'01234567891'
+ stream = io.BytesIO(data)
+ # Create an "already initiated" upload.
+ upload_url = u'http://test.invalid'
+ chunk_size = 256 * 1024 # 256KB
+ upload = upload_mod.ResumableUpload(upload_url, chunk_size)
+ upload._resumable_url = u'http://test.invalid?upload_id=mocked'
+ upload._stream = stream
+ upload._content_type = u'text/plain'
+ upload._total_bytes = len(data)
+
+ # After-the-fact update the chunk size so that len(data)
+ # is split into three.
+ upload._chunk_size = 4
+ # Make three fake responses.
+ fake_response0 = requests.Response()
+ fake_response0.status_code = resumable_media.PERMANENT_REDIRECT
+ fake_response0.headers[u'range'] = u'bytes=0-3'
+
+ fake_response1 = requests.Response()
+ fake_response1.status_code = resumable_media.PERMANENT_REDIRECT
+ fake_response1.headers[u'range'] = u'bytes=0-7'
+
+ fake_response2 = requests.Response()
+ fake_response2.status_code = int(http_client.OK)
+ bucket = u'some-bucket'
+ blob_name = u'file.txt'
+ payload = {
+ u'bucket': bucket,
+ u'name': blob_name,
+ u'size': u'{:d}'.format(len(data)),
+ }
+ fake_response2._content = json.dumps(payload).encode(u'utf-8')
+
+ # Use the fake responses to mock a transport.
+ responses = [fake_response0, fake_response1, fake_response2]
+ put_method = mock.Mock(side_effect=responses, spec=[])
+ transport = mock.Mock(request=put_method, spec=['request'])
+
+.. doctest:: resumable-transmit
+
+ >>> response0 = upload.transmit_next_chunk(transport)
+ >>> response0
+ <Response [308]>
+ >>> upload.finished
+ False
+ >>> upload.bytes_uploaded == upload.chunk_size
+ True
+ >>>
+ >>> response1 = upload.transmit_next_chunk(transport)
+ >>> response1
+ <Response [308]>
+ >>> upload.finished
+ False
+ >>> upload.bytes_uploaded == 2 * upload.chunk_size
+ True
+ >>>
+ >>> response2 = upload.transmit_next_chunk(transport)
+ >>> response2
+ <Response [200]>
+ >>> upload.finished
+ True
+ >>> upload.bytes_uploaded == upload.total_bytes
+ True
+ >>> json_response = response2.json()
+ >>> json_response[u'bucket'] == bucket
+ True
+ >>> json_response[u'name'] == blob_name
+ True
+"""
+from google._async_resumable_media.requests.download import ChunkedDownload
+from google._async_resumable_media.requests.download import Download
+from google._async_resumable_media.requests.upload import MultipartUpload
+from google._async_resumable_media.requests.download import RawChunkedDownload
+from google._async_resumable_media.requests.download import RawDownload
+from google._async_resumable_media.requests.upload import ResumableUpload
+from google._async_resumable_media.requests.upload import SimpleUpload
+
+
+__all__ = [
+ u"ChunkedDownload",
+ u"Download",
+ u"MultipartUpload",
+ u"RawChunkedDownload",
+ u"RawDownload",
+ u"ResumableUpload",
+ u"SimpleUpload",
+]
diff --git a/venv/Lib/site-packages/google/_async_resumable_media/requests/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/google/_async_resumable_media/requests/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..40235643b
Binary files /dev/null and b/venv/Lib/site-packages/google/_async_resumable_media/requests/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/_async_resumable_media/requests/__pycache__/_request_helpers.cpython-36.pyc b/venv/Lib/site-packages/google/_async_resumable_media/requests/__pycache__/_request_helpers.cpython-36.pyc
new file mode 100644
index 000000000..4eb89f12a
Binary files /dev/null and b/venv/Lib/site-packages/google/_async_resumable_media/requests/__pycache__/_request_helpers.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/_async_resumable_media/requests/__pycache__/download.cpython-36.pyc b/venv/Lib/site-packages/google/_async_resumable_media/requests/__pycache__/download.cpython-36.pyc
new file mode 100644
index 000000000..e709e7f63
Binary files /dev/null and b/venv/Lib/site-packages/google/_async_resumable_media/requests/__pycache__/download.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/_async_resumable_media/requests/__pycache__/upload.cpython-36.pyc b/venv/Lib/site-packages/google/_async_resumable_media/requests/__pycache__/upload.cpython-36.pyc
new file mode 100644
index 000000000..b367c85d4
Binary files /dev/null and b/venv/Lib/site-packages/google/_async_resumable_media/requests/__pycache__/upload.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/_async_resumable_media/requests/_request_helpers.py b/venv/Lib/site-packages/google/_async_resumable_media/requests/_request_helpers.py
new file mode 100644
index 000000000..f628ea467
--- /dev/null
+++ b/venv/Lib/site-packages/google/_async_resumable_media/requests/_request_helpers.py
@@ -0,0 +1,155 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Shared utilities used by both downloads and uploads.
+
+These utilities are explicitly catered to ``requests``-like transports.
+"""
+
+
+import functools
+
+from google._async_resumable_media import _helpers
+from google.resumable_media import common
+
+import google.auth.transport._aiohttp_requests as aiohttp_requests
+import aiohttp
+
+_DEFAULT_RETRY_STRATEGY = common.RetryStrategy()
+_SINGLE_GET_CHUNK_SIZE = 8192
+
+
+# The number of seconds to wait to establish a connection
+# (connect() call on socket). Avoid setting this to a multiple of 3 so it
+# does not align with TCP retransmission timing (typically 2.5-3s).
+_DEFAULT_CONNECT_TIMEOUT = 61
+# The number of seconds to wait between bytes sent from the server.
+_DEFAULT_READ_TIMEOUT = 60
+_DEFAULT_TIMEOUT = aiohttp.ClientTimeout(
+ connect=_DEFAULT_CONNECT_TIMEOUT, sock_read=_DEFAULT_READ_TIMEOUT
+)
+
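[Editor's note] Callers can override this default per request; a sketch (the keyword is consumed by http_request() further below):

    import aiohttp

    # Tighter limits than the module default defined above.
    custom_timeout = aiohttp.ClientTimeout(connect=30, sock_read=30)
    # ...later: await http_request(transport, u"GET", url, timeout=custom_timeout)
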
+
+class RequestsMixin(object):
+ """Mix-in class implementing ``requests``-specific behavior.
+
+ These are methods that are more general purpose, with implementations
+ specific to the types defined in ``requests``.
+ """
+
+ @staticmethod
+ def _get_status_code(response):
+ """Access the status code from an HTTP response.
+
+ Args:
+ response (~requests.Response): The HTTP response object.
+
+ Returns:
+ int: The status code.
+ """
+ return response.status
+
+ @staticmethod
+ def _get_headers(response):
+ """Access the headers from an HTTP response.
+
+ Args:
+ response (~requests.Response): The HTTP response object.
+
+ Returns:
+ ~requests.structures.CaseInsensitiveDict: The header mapping (keys
+ are case-insensitive).
+ """
+ # For async testing, ``_headers`` is set directly rather than ``headers``,
+ # so access the header mapping via the internal field.
+ return response._headers
+
+ @staticmethod
+ async def _get_body(response):
+ """Access the response body from an HTTP response.
+
+ Args:
+ response (~requests.Response): The HTTP response object.
+
+ Returns:
+ bytes: The body of the ``response``.
+ """
+ wrapped_response = aiohttp_requests._CombinedResponse(response)
+ content = await wrapped_response.data.read()
+ return content
+
+
+class RawRequestsMixin(RequestsMixin):
+ @staticmethod
+ async def _get_body(response):
+ """Access the response body from an HTTP response.
+
+ Args:
+ response (~requests.Response): The HTTP response object.
+
+ Returns:
+ bytes: The body of the ``response``.
+ """
+
+ wrapped_response = aiohttp_requests._CombinedResponse(response)
+ content = await wrapped_response.raw_content()
+ return content
+
+
+async def http_request(
+ transport,
+ method,
+ url,
+ data=None,
+ headers=None,
+ retry_strategy=_DEFAULT_RETRY_STRATEGY,
+ **transport_kwargs
+):
+ """Make an HTTP request.
+
+ Args:
+ transport (~requests.Session): A ``requests`` object which can make
+ authenticated requests via a ``request()`` method. This method
+ must accept an HTTP method, an upload URL, a ``data`` keyword
+ argument and a ``headers`` keyword argument.
+ method (str): The HTTP method for the request.
+ url (str): The URL for the request.
+ data (Optional[bytes]): The body of the request.
+ headers (Mapping[str, str]): The headers for the request (``transport``
+ may also add additional headers).
+ retry_strategy (~google.resumable_media.common.RetryStrategy): The
+ strategy to use if the request fails and must be retried.
+ transport_kwargs (Dict[str, str]): Extra keyword arguments to be
+ passed along to ``transport.request``.
+
+ Returns:
+ ~requests.Response: The return value of ``transport.request()``.
+ """
+
+ # NOTE(asyncio/aiohttp): Sync versions use a tuple for two timeouts,
+ # default connect timeout and read timeout. Since async requests only
+ # accepts a single value, this is using the connect timeout. This logic
+ # diverges from the sync implementation.
+ if "timeout" not in transport_kwargs:
+ timeout = _DEFAULT_TIMEOUT
+ transport_kwargs["timeout"] = timeout
+
+ func = functools.partial(
+ transport.request, method, url, data=data, headers=headers, **transport_kwargs
+ )
+
+ resp = await _helpers.wait_and_retry(
+ func, RequestsMixin._get_status_code, retry_strategy
+ )
+ return resp
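[Editor's note] A hedged usage sketch; the AuthorizedSession name under google.auth.transport._aiohttp_requests is an assumption based on the import at the top of this module:

    import google.auth
    from google.auth.transport import _aiohttp_requests  # assumed to provide AuthorizedSession

    async def fetch(url):
        credentials, _ = google.auth.default()
        transport = _aiohttp_requests.AuthorizedSession(credentials)  # assumed API
        response = await http_request(transport, u"GET", url)
        return await RequestsMixin._get_body(response)
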
diff --git a/venv/Lib/site-packages/google/_async_resumable_media/requests/download.py b/venv/Lib/site-packages/google/_async_resumable_media/requests/download.py
new file mode 100644
index 000000000..5ac97c598
--- /dev/null
+++ b/venv/Lib/site-packages/google/_async_resumable_media/requests/download.py
@@ -0,0 +1,461 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Support for downloading media from Google APIs."""
+
+import urllib3.response
+
+from google._async_resumable_media import _download
+from google._async_resumable_media import _helpers
+from google._async_resumable_media.requests import _request_helpers
+from google.resumable_media import common
+from google.resumable_media import _helpers as sync_helpers
+from google.resumable_media.requests import download
+
+_CHECKSUM_MISMATCH = download._CHECKSUM_MISMATCH
+
+
+class Download(_request_helpers.RequestsMixin, _download.Download):
+ """Helper to manage downloading a resource from a Google API.
+
+ "Slices" of the resource can be retrieved by specifying a range
+ with ``start`` and / or ``end``. However, in typical usage, neither
+ ``start`` nor ``end`` is expected to be provided.
+
+ Args:
+ media_url (str): The URL containing the media to be downloaded.
+ stream (IO[bytes]): A write-able stream (i.e. file-like object) that
+ the downloaded resource can be written to.
+ start (int): The first byte in a range to be downloaded. If not
+ provided, but ``end`` is provided, will download from the
+ beginning to ``end`` of the media.
+ end (int): The last byte in a range to be downloaded. If not
+ provided, but ``start`` is provided, will download from the
+ ``start`` to the end of the media.
+ headers (Optional[Mapping[str, str]]): Extra headers that should
+ be sent with the request, e.g. headers for encrypted data.
+ checksum (Optional[str]): The type of checksum to compute to verify
+ the integrity of the object. The response headers must contain
+ a checksum of the requested type. If the headers lack an
+ appropriate checksum (for instance in the case of transcoded or
+ ranged downloads where the remote service does not know the
+ correct checksum) an INFO-level log will be emitted. Supported
+ values are "md5", "crc32c" and None. The default is "md5".
+
+ Attributes:
+ media_url (str): The URL containing the media to be downloaded.
+ start (Optional[int]): The first byte in a range to be downloaded.
+ end (Optional[int]): The last byte in a range to be downloaded.
+ """
+
+ async def _write_to_stream(self, response):
+ """Write response body to a write-able stream.
+
+ .. note:
+
+ This method assumes that the ``_stream`` attribute is set on the
+ current download.
+
+ Args:
+ response (~requests.Response): The HTTP response object.
+
+ Raises:
+ ~google.resumable_media.common.DataCorruption: If the download's
+ checksum doesn't agree with server-computed checksum.
+ """
+
+ # `_get_expected_checksum()` may return None even if a checksum was
+ # requested, in which case it will emit an info log _MISSING_CHECKSUM.
+ # If an invalid checksum type is specified, this will raise ValueError.
+ expected_checksum, checksum_object = sync_helpers._get_expected_checksum(
+ response, self._get_headers, self.media_url, checksum_type=self.checksum
+ )
+
+ local_checksum_object = _add_decoder(response, checksum_object)
+
+ async for chunk in response.content.iter_chunked(
+ _request_helpers._SINGLE_GET_CHUNK_SIZE
+ ):
+ self._stream.write(chunk)
+ local_checksum_object.update(chunk)
+
+ if expected_checksum is None:
+ return
+
+ else:
+ actual_checksum = sync_helpers.prepare_checksum_digest(
+ checksum_object.digest()
+ )
+ if actual_checksum != expected_checksum:
+ msg = _CHECKSUM_MISMATCH.format(
+ self.media_url,
+ expected_checksum,
+ actual_checksum,
+ checksum_type=self.checksum.upper(),
+ )
+ raise common.DataCorruption(response, msg)
+
+ async def consume(self, transport, timeout=_request_helpers._DEFAULT_TIMEOUT):
+ """Consume the resource to be downloaded.
+
+ If a ``stream`` is attached to this download, then the downloaded
+ resource will be written to the stream.
+
+ Args:
+ transport (~requests.Session): A ``requests`` object which can
+ make authenticated requests.
+ timeout (Optional[Union[float, aiohttp.ClientTimeout]]):
+ The number of seconds to wait for the server response.
+ Depending on the retry strategy, a request may be repeated
+ several times using the same timeout each time.
+ Can also be passed as an `aiohttp.ClientTimeout` object.
+
+ Returns:
+ ~requests.Response: The HTTP response returned by ``transport``.
+
+ Raises:
+ ~google.resumable_media.common.DataCorruption: If the download's
+ checksum doesn't agree with server-computed checksum.
+ ValueError: If the current :class:`Download` has already
+ finished.
+ """
+ method, url, payload, headers = self._prepare_request()
+ # NOTE: We assume "payload is None" but pass it along anyway.
+ request_kwargs = {
+ u"data": payload,
+ u"headers": headers,
+ u"retry_strategy": self._retry_strategy,
+ u"timeout": timeout,
+ }
+
+ if self._stream is not None:
+ request_kwargs[u"stream"] = True
+
+ result = await _request_helpers.http_request(
+ transport, method, url, **request_kwargs
+ )
+
+ self._process_response(result)
+
+ if self._stream is not None:
+ await self._write_to_stream(result)
+
+ return result
+
+
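For orientation, a minimal usage sketch of the ``Download`` class above (not part of this diff; ``session`` is assumed to be an authenticated aiohttp-compatible transport and ``fetch`` is a hypothetical wrapper):

```python
# Sketch only: drive the async Download into an in-memory stream.
# `session` is an assumption: an authenticated aiohttp-style transport
# (e.g. the one google-auth provides for async use).
import io

from google._async_resumable_media.requests.download import Download


async def fetch(media_url, session):
    stream = io.BytesIO()
    download = Download(media_url, stream=stream, checksum="md5")
    response = await download.consume(session)  # chunks land in `stream`
    return stream.getvalue(), response
```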
+class RawDownload(_request_helpers.RawRequestsMixin, _download.Download):
+ """Helper to manage downloading a raw resource from a Google API.
+
+ "Slices" of the resource can be retrieved by specifying a range
+ with ``start`` and / or ``end``. However, in typical usage, neither
+ ``start`` nor ``end`` is expected to be provided.
+
+ Args:
+ media_url (str): The URL containing the media to be downloaded.
+ stream (IO[bytes]): A write-able stream (i.e. file-like object) that
+ the downloaded resource can be written to.
+ start (int): The first byte in a range to be downloaded. If not
+ provided, but ``end`` is provided, will download from the
+ beginning to ``end`` of the media.
+ end (int): The last byte in a range to be downloaded. If not
+ provided, but ``start`` is provided, will download from the
+ ``start`` to the end of the media.
+ headers (Optional[Mapping[str, str]]): Extra headers that should
+ be sent with the request, e.g. headers for encrypted data.
+ checksum (Optional[str]): The type of checksum to compute to verify
+ the integrity of the object. The response headers must contain
+ a checksum of the requested type. If the headers lack an
+ appropriate checksum (for instance in the case of transcoded or
+ ranged downloads where the remote service does not know the
+ correct checksum) an INFO-level log will be emitted. Supported
+ values are "md5", "crc32c" and None. The default is "md5".
+
+ Attributes:
+ media_url (str): The URL containing the media to be downloaded.
+ start (Optional[int]): The first byte in a range to be downloaded.
+ end (Optional[int]): The last byte in a range to be downloaded.
+ """
+
+ async def _write_to_stream(self, response):
+ """Write response body to a write-able stream.
+
+ .. note:
+
+ This method assumes that the ``_stream`` attribute is set on the
+ current download.
+
+ Args:
+ response (~requests.Response): The HTTP response object.
+
+ Raises:
+ ~google.resumable_media.common.DataCorruption: If the download's
+ checksum doesn't agree with server-computed checksum.
+ """
+
+ # `_get_expected_checksum()` may return None even if a checksum was
+ # requested, in which case it will emit an info log _MISSING_CHECKSUM.
+ # If an invalid checksum type is specified, this will raise ValueError.
+ expected_checksum, checksum_object = sync_helpers._get_expected_checksum(
+ response, self._get_headers, self.media_url, checksum_type=self.checksum
+ )
+
+ async for chunk in response.content.iter_chunked(
+ _request_helpers._SINGLE_GET_CHUNK_SIZE
+ ):
+ self._stream.write(chunk)
+ checksum_object.update(chunk)
+
+ if expected_checksum is None:
+ return
+ else:
+ actual_checksum = sync_helpers.prepare_checksum_digest(
+ checksum_object.digest()
+ )
+
+ if actual_checksum != expected_checksum:
+ msg = _CHECKSUM_MISMATCH.format(
+ self.media_url,
+ expected_checksum,
+ actual_checksum,
+ checksum_type=self.checksum.upper(),
+ )
+ raise common.DataCorruption(response, msg)
+
+ async def consume(self, transport, timeout=_request_helpers._DEFAULT_TIMEOUT):
+ """Consume the resource to be downloaded.
+
+ If a ``stream`` is attached to this download, then the downloaded
+ resource will be written to the stream.
+
+ Args:
+ transport (~requests.Session): A ``requests`` object which can
+ make authenticated requests.
+ timeout (Optional[Union[float, Tuple[float, float]]]):
+ The number of seconds to wait for the server response.
+ Depending on the retry strategy, a request may be repeated
+ several times using the same timeout each time.
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ Returns:
+ ~requests.Response: The HTTP response returned by ``transport``.
+
+ Raises:
+ ~google.resumable_media.common.DataCorruption: If the download's
+ checksum doesn't agree with server-computed checksum.
+ ValueError: If the current :class:`Download` has already
+ finished.
+ """
+ method, url, payload, headers = self._prepare_request()
+ # NOTE: We assume "payload is None" but pass it along anyway.
+ result = await _request_helpers.http_request(
+ transport,
+ method,
+ url,
+ data=payload,
+ headers=headers,
+ retry_strategy=self._retry_strategy,
+ )
+
+ self._process_response(result)
+
+ if self._stream is not None:
+ await self._write_to_stream(result)
+
+ return result
+
+
+class ChunkedDownload(_request_helpers.RequestsMixin, _download.ChunkedDownload):
+ """Download a resource in chunks from a Google API.
+
+ Args:
+ media_url (str): The URL containing the media to be downloaded.
+ chunk_size (int): The number of bytes to be retrieved in each
+ request.
+ stream (IO[bytes]): A write-able stream (i.e. file-like object) that
+ will be used to concatenate chunks of the resource as they are
+ downloaded.
+ start (int): The first byte in a range to be downloaded. If not
+ provided, defaults to ``0``.
+ end (int): The last byte in a range to be downloaded. If not
+ provided, will download to the end of the media.
+ headers (Optional[Mapping[str, str]]): Extra headers that should
+ be sent with each request, e.g. headers for data encryption
+ key headers.
+
+ Attributes:
+ media_url (str): The URL containing the media to be downloaded.
+ start (Optional[int]): The first byte in a range to be downloaded.
+ end (Optional[int]): The last byte in a range to be downloaded.
+ chunk_size (int): The number of bytes to be retrieved in each request.
+
+ Raises:
+ ValueError: If ``start`` is negative.
+ """
+
+ async def consume_next_chunk(
+ self, transport, timeout=_request_helpers._DEFAULT_TIMEOUT
+ ):
+ """Consume the next chunk of the resource to be downloaded.
+
+ Args:
+ transport (~requests.Session): A ``requests`` object which can
+ make authenticated requests.
+ timeout (Optional[Union[float, aiohttp.ClientTimeout]]):
+ The number of seconds to wait for the server response.
+ Depending on the retry strategy, a request may be repeated
+ several times using the same timeout each time.
+ Can also be passed as an `aiohttp.ClientTimeout` object.
+
+ Returns:
+ ~requests.Response: The HTTP response returned by ``transport``.
+
+ Raises:
+ ValueError: If the current download has finished.
+ """
+ method, url, payload, headers = self._prepare_request()
+ # NOTE: We assume "payload is None" but pass it along anyway.
+ result = await _request_helpers.http_request(
+ transport,
+ method,
+ url,
+ data=payload,
+ headers=headers,
+ retry_strategy=self._retry_strategy,
+ timeout=timeout,
+ )
+
+ await self._process_response(result)
+ return result
+
+
+class RawChunkedDownload(_request_helpers.RawRequestsMixin, _download.ChunkedDownload):
+ """Download a raw resource in chunks from a Google API.
+
+ Args:
+ media_url (str): The URL containing the media to be downloaded.
+ chunk_size (int): The number of bytes to be retrieved in each
+ request.
+ stream (IO[bytes]): A write-able stream (i.e. file-like object) that
+ will be used to concatenate chunks of the resource as they are
+ downloaded.
+ start (int): The first byte in a range to be downloaded. If not
+ provided, defaults to ``0``.
+ end (int): The last byte in a range to be downloaded. If not
+ provided, will download to the end of the media.
+ headers (Optional[Mapping[str, str]]): Extra headers that should
+ be sent with each request, e.g. headers for data encryption
+ key headers.
+
+ Attributes:
+ media_url (str): The URL containing the media to be downloaded.
+ start (Optional[int]): The first byte in a range to be downloaded.
+ end (Optional[int]): The last byte in a range to be downloaded.
+ chunk_size (int): The number of bytes to be retrieved in each request.
+
+ Raises:
+ ValueError: If ``start`` is negative.
+ """
+
+ async def consume_next_chunk(
+ self, transport, timeout=_request_helpers._DEFAULT_TIMEOUT
+ ):
+ """Consume the next chunk of the resource to be downloaded.
+
+ Args:
+ transport (~requests.Session): A ``requests`` object which can
+ make authenticated requests.
+ timeout (Optional[Union[float, aiohttp.ClientTimeout]]):
+ The number of seconds to wait for the server response.
+ Depending on the retry strategy, a request may be repeated
+ several times using the same timeout each time.
+ Can also be passed as an `aiohttp.ClientTimeout` object.
+
+ Returns:
+ ~requests.Response: The HTTP response returned by ``transport``.
+
+ Raises:
+ ValueError: If the current download has finished.
+ """
+ method, url, payload, headers = self._prepare_request()
+ # NOTE: We assume "payload is None" but pass it along anyway.
+ result = await _request_helpers.http_request(
+ transport,
+ method,
+ url,
+ data=payload,
+ headers=headers,
+ retry_strategy=self._retry_strategy,
+ timeout=timeout,
+ )
+ await self._process_response(result)
+ return result
+
+
+def _add_decoder(response_raw, checksum):
+ """Patch the ``_decoder`` on a ``urllib3`` response.
+
+ This is so that we can intercept the compressed bytes before they are
+ decoded.
+
+ Only patches if the content encoding is ``gzip``.
+
+ Args:
+ response_raw (urllib3.response.HTTPResponse): The raw response for
+ an HTTP request.
+ checksum (object):
+ A checksum which will be updated with compressed bytes.
+
+ Returns:
+ object: Either the original ``checksum`` if ``_decoder`` is not
+ patched, or a ``_DoNothingHash`` if the decoder is patched, since the
+ caller will no longer need to hash the decoded bytes.
+ """
+
+ encoding = response_raw.headers.get(u"content-encoding", u"").lower()
+ if encoding != u"gzip":
+ return checksum
+
+ response_raw._decoder = _GzipDecoder(checksum)
+ return _helpers._DoNothingHash()
+
+
+class _GzipDecoder(urllib3.response.GzipDecoder):
+ """Custom subclass of ``urllib3`` decoder for ``gzip``-ed bytes.
+
+ Allows a checksum function to see the compressed bytes before they are
+ decoded. This way the checksum of the compressed value can be computed.
+
+ Args:
+ checksum (object):
+ A checksum which will be updated with compressed bytes.
+ """
+
+ def __init__(self, checksum):
+ super(_GzipDecoder, self).__init__()
+ self._checksum = checksum
+
+ def decompress(self, data):
+ """Decompress the bytes.
+
+ Args:
+ data (bytes): The compressed bytes to be decompressed.
+
+ Returns:
+ bytes: The decompressed bytes from ``data``.
+ """
+ self._checksum.update(data)
+ return super(_GzipDecoder, self).decompress(data)
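The decoder patching above exists because, for gzip-transcoded objects, the server's advertised checksum covers the stored (compressed) bytes; hashing decompressed chunks could never match. A standalone sketch of the same idea, illustrative only and not library code:

```python
# Illustration, not library code: hash the compressed bytes from inside
# the decoder, before decompression hands decoded bytes to the caller.
import gzip
import hashlib
import zlib


class ChecksummingGzipDecoder:
    def __init__(self, checksum):
        self._checksum = checksum
        # 16 + MAX_WBITS tells zlib to expect a gzip header and trailer.
        self._decompressor = zlib.decompressobj(16 + zlib.MAX_WBITS)

    def decompress(self, data):
        self._checksum.update(data)  # sees the compressed bytes
        return self._decompressor.decompress(data)


decoder = ChecksummingGzipDecoder(hashlib.md5())
print(decoder.decompress(gzip.compress(b"hello")))  # b'hello'
```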
diff --git a/venv/Lib/site-packages/google/_async_resumable_media/requests/upload.py b/venv/Lib/site-packages/google/_async_resumable_media/requests/upload.py
new file mode 100644
index 000000000..8a1291a5b
--- /dev/null
+++ b/venv/Lib/site-packages/google/_async_resumable_media/requests/upload.py
@@ -0,0 +1,515 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Support for resumable uploads.
+
+Also supported here are simple (media) uploads and multipart
+uploads that contain both metadata and a small file as payload.
+"""
+
+
+from google._async_resumable_media import _upload
+from google._async_resumable_media.requests import _request_helpers
+
+
+class SimpleUpload(_request_helpers.RequestsMixin, _upload.SimpleUpload):
+ """Upload a resource to a Google API.
+
+ A **simple** media upload sends no metadata and completes the upload
+ in a single request.
+
+ Args:
+ upload_url (str): The URL where the content will be uploaded.
+ headers (Optional[Mapping[str, str]]): Extra headers that should
+ be sent with the request, e.g. headers for encrypted data.
+
+ Attributes:
+ upload_url (str): The URL where the content will be uploaded.
+ """
+
+ async def transmit(
+ self,
+ transport,
+ data,
+ content_type,
+ timeout=_request_helpers._DEFAULT_TIMEOUT,
+ ):
+ """Transmit the resource to be uploaded.
+
+ Args:
+ transport (~requests.Session): A ``requests`` object which can
+ make authenticated requests.
+ data (bytes): The resource content to be uploaded.
+ content_type (str): The content type of the resource, e.g. a JPEG
+ image has content type ``image/jpeg``.
+ timeout (Optional[Union[float, aiohttp.ClientTimeout]]):
+ The number of seconds to wait for the server response.
+ Depending on the retry strategy, a request may be repeated
+ several times using the same timeout each time.
+ Can also be passed as an `aiohttp.ClientTimeout` object.
+
+ Returns:
+ ~requests.Response: The HTTP response returned by ``transport``.
+ """
+ method, url, payload, headers = self._prepare_request(data, content_type)
+
+ response = await _request_helpers.http_request(
+ transport,
+ method,
+ url,
+ data=payload,
+ headers=headers,
+ retry_strategy=self._retry_strategy,
+ timeout=timeout,
+ )
+ self._process_response(response)
+ return response
+
+
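A hedged usage sketch for the ``SimpleUpload`` class above (names and URL shape are assumptions based on the GCS JSON API, not taken from this diff):

```python
# Sketch only: one-shot media upload. `session` is an assumption (an
# authenticated aiohttp-style transport); the URL follows the GCS JSON
# API's uploadType=media form.
from google._async_resumable_media.requests.upload import SimpleUpload


async def put_object(session, bucket, name, payload):
    url = (
        u"https://www.googleapis.com/upload/storage/v1/b/"
        u"{}/o?uploadType=media&name={}".format(bucket, name)
    )
    upload = SimpleUpload(url)
    return await upload.transmit(session, payload, u"application/octet-stream")
```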
+class MultipartUpload(_request_helpers.RequestsMixin, _upload.MultipartUpload):
+ """Upload a resource with metadata to a Google API.
+
+ A **multipart** upload sends both metadata and the resource in a single
+ (multipart) request.
+
+ Args:
+ upload_url (str): The URL where the content will be uploaded.
+ headers (Optional[Mapping[str, str]]): Extra headers that should
+ be sent with the request, e.g. headers for encrypted data.
+ checksum (Optional[str]): The type of checksum to compute to verify
+ the integrity of the object. The request metadata will be amended
+ to include the computed value. Using this option will override a
+ manually-set checksum value. Supported values are "md5",
+ "crc32c" and None. The default is None.
+
+ Attributes:
+ upload_url (str): The URL where the content will be uploaded.
+ """
+
+ async def transmit(
+ self,
+ transport,
+ data,
+ metadata,
+ content_type,
+ timeout=_request_helpers._DEFAULT_TIMEOUT,
+ ):
+ """Transmit the resource to be uploaded.
+
+ Args:
+ transport (~requests.Session): A ``requests`` object which can
+ make authenticated requests.
+ data (bytes): The resource content to be uploaded.
+ metadata (Mapping[str, str]): The resource metadata, such as an
+ ACL list.
+ content_type (str): The content type of the resource, e.g. a JPEG
+ image has content type ``image/jpeg``.
+ timeout (Optional[Union[float, aiohttp.ClientTimeout]]):
+ The number of seconds to wait for the server response.
+ Depending on the retry strategy, a request may be repeated
+ several times using the same timeout each time.
+ Can also be passed as an `aiohttp.ClientTimeout` object.
+
+ Returns:
+ ~requests.Response: The HTTP response returned by ``transport``.
+ """
+ method, url, payload, headers = self._prepare_request(
+ data, metadata, content_type
+ )
+
+ response = await _request_helpers.http_request(
+ transport,
+ method,
+ url,
+ data=payload,
+ headers=headers,
+ retry_strategy=self._retry_strategy,
+ timeout=timeout,
+ )
+ self._process_response(response)
+ return response
+
+
+class ResumableUpload(_request_helpers.RequestsMixin, _upload.ResumableUpload):
+ """Initiate and fulfill a resumable upload to a Google API.
+
+ A **resumable** upload sends an initial request with the resource metadata
+ and then gets assigned an upload ID / upload URL to send bytes to.
+ Using the upload URL, the upload is then done in chunks (determined by
+ the user) until all bytes have been uploaded.
+
+ When constructing a resumable upload, only the resumable upload URL and
+ the chunk size are required:
+
+ .. testsetup:: resumable-constructor
+
+ bucket = u'bucket-foo'
+
+ .. doctest:: resumable-constructor
+
+ >>> from google.resumable_media.requests import ResumableUpload
+ >>>
+ >>> url_template = (
+ ... u'https://www.googleapis.com/upload/storage/v1/b/{bucket}/o?'
+ ... u'uploadType=resumable')
+ >>> upload_url = url_template.format(bucket=bucket)
+ >>>
+ >>> chunk_size = 3 * 1024 * 1024 # 3MB
+ >>> upload = ResumableUpload(upload_url, chunk_size)
+
+ When initiating an upload (via :meth:`initiate`), the caller is expected
+ to pass the resource being uploaded as a file-like ``stream``. If the size
+ of the resource is explicitly known, it can be passed in directly:
+
+ .. testsetup:: resumable-explicit-size
+
+ import os
+ import tempfile
+
+ import mock
+ import requests
+ from six.moves import http_client
+
+ from google.resumable_media.requests import ResumableUpload
+
+ upload_url = u'http://test.invalid'
+ chunk_size = 3 * 1024 * 1024 # 3MB
+ upload = ResumableUpload(upload_url, chunk_size)
+
+ file_desc, filename = tempfile.mkstemp()
+ os.close(file_desc)
+
+ data = b'some bytes!'
+ with open(filename, u'wb') as file_obj:
+ file_obj.write(data)
+
+ fake_response = requests.Response()
+ fake_response.status_code = int(http_client.OK)
+ fake_response._content = b''
+ resumable_url = u'http://test.invalid?upload_id=7up'
+ fake_response.headers[u'location'] = resumable_url
+
+ post_method = mock.Mock(return_value=fake_response, spec=[])
+ transport = mock.Mock(request=post_method, spec=['request'])
+
+ .. doctest:: resumable-explicit-size
+
+ >>> import os
+ >>>
+ >>> upload.total_bytes is None
+ True
+ >>>
+ >>> stream = open(filename, u'rb')
+ >>> total_bytes = os.path.getsize(filename)
+ >>> metadata = {u'name': filename}
+ >>> response = upload.initiate(
+ ... transport, stream, metadata, u'text/plain',
+ ... total_bytes=total_bytes)
+ >>> response
+ <Response [200]>
+ >>>
+ >>> upload.total_bytes == total_bytes
+ True
+
+ .. testcleanup:: resumable-explicit-size
+
+ os.remove(filename)
+
+ If the stream is in a "final" state (i.e. it won't have any more bytes
+ written to it), the total number of bytes can be determined implicitly
+ from the ``stream`` itself:
+
+ .. testsetup:: resumable-implicit-size
+
+ import io
+
+ import mock
+ import requests
+ from six.moves import http_client
+
+ from google.resumable_media.requests import ResumableUpload
+
+ upload_url = u'http://test.invalid'
+ chunk_size = 3 * 1024 * 1024 # 3MB
+ upload = ResumableUpload(upload_url, chunk_size)
+
+ fake_response = requests.Response()
+ fake_response.status_code = int(http_client.OK)
+ fake_response._content = b''
+ resumable_url = u'http://test.invalid?upload_id=7up'
+ fake_response.headers[u'location'] = resumable_url
+
+ post_method = mock.Mock(return_value=fake_response, spec=[])
+ transport = mock.Mock(request=post_method, spec=['request'])
+
+ data = b'some MOAR bytes!'
+ metadata = {u'name': u'some-file.jpg'}
+ content_type = u'image/jpeg'
+
+ .. doctest:: resumable-implicit-size
+
+ >>> stream = io.BytesIO(data)
+ >>> response = upload.initiate(
+ ... transport, stream, metadata, content_type)
+ >>>
+ >>> upload.total_bytes == len(data)
+ True
+
+ If the size of the resource is **unknown** when the upload is initiated,
+ the ``stream_final`` argument can be used. This might occur if the
+ resource is being dynamically created on the client (e.g. application
+ logs). To use this argument:
+
+ .. testsetup:: resumable-unknown-size
+
+ import io
+
+ import mock
+ import requests
+ from six.moves import http_client
+
+ from google.resumable_media.requests import ResumableUpload
+
+ upload_url = u'http://test.invalid'
+ chunk_size = 3 * 1024 * 1024 # 3MB
+ upload = ResumableUpload(upload_url, chunk_size)
+
+ fake_response = requests.Response()
+ fake_response.status_code = int(http_client.OK)
+ fake_response._content = b''
+ resumable_url = u'http://test.invalid?upload_id=7up'
+ fake_response.headers[u'location'] = resumable_url
+
+ post_method = mock.Mock(return_value=fake_response, spec=[])
+ transport = mock.Mock(request=post_method, spec=['request'])
+
+ metadata = {u'name': u'some-file.jpg'}
+ content_type = u'application/octet-stream'
+
+ stream = io.BytesIO(b'data')
+
+ .. doctest:: resumable-unknown-size
+
+ >>> response = upload.initiate(
+ ... transport, stream, metadata, content_type,
+ ... stream_final=False)
+ >>>
+ >>> upload.total_bytes is None
+ True
+
+ Args:
+ upload_url (str): The URL where the resumable upload will be initiated.
+ chunk_size (int): The size of each chunk used to upload the resource.
+ headers (Optional[Mapping[str, str]]): Extra headers that should
+ be sent with the :meth:`initiate` request, e.g. headers for
+ encrypted data. These **will not** be sent with
+ :meth:`transmit_next_chunk` or :meth:`recover` requests.
+ checksum (Optional[str]): The type of checksum to compute to verify
+ the integrity of the object. After the upload is complete, the
+ server-computed checksum of the resulting object will be checked
+ and google.resumable_media.common.DataCorruption will be raised on
+ a mismatch. The corrupted file will not be deleted from the remote
+ host automatically. Supported values are "md5", "crc32c" and None.
+ The default is None.
+
+ Attributes:
+ upload_url (str): The URL where the content will be uploaded.
+
+ Raises:
+ ValueError: If ``chunk_size`` is not a multiple of
+ :data:`.UPLOAD_CHUNK_SIZE`.
+ """
+
+ async def initiate(
+ self,
+ transport,
+ stream,
+ metadata,
+ content_type,
+ total_bytes=None,
+ stream_final=True,
+ timeout=_request_helpers._DEFAULT_TIMEOUT,
+ ):
+ """Initiate a resumable upload.
+
+ By default, this method assumes your ``stream`` is in a "final"
+ state ready to transmit. However, ``stream_final=False`` can be used
+ to indicate that the size of the resource is not known. This can happen
+ if bytes are being dynamically fed into ``stream``, e.g. if the stream
+ is attached to application logs.
+
+ If ``stream_final=False`` is used, :attr:`chunk_size` bytes will be
+ read from the stream every time :meth:`transmit_next_chunk` is called.
+ If one of those reads produces strictly fewer bytes than the chunk
+ size, the upload will be concluded.
+
+ Args:
+ transport (~requests.Session): A ``requests`` object which can
+ make authenticated requests.
+ stream (IO[bytes]): The stream (i.e. file-like object) that will
+ be uploaded. The stream **must** be at the beginning (i.e.
+ ``stream.tell() == 0``).
+ metadata (Mapping[str, str]): The resource metadata, such as an
+ ACL list.
+ content_type (str): The content type of the resource, e.g. a JPEG
+ image has content type ``image/jpeg``.
+ total_bytes (Optional[int]): The total number of bytes to be
+ uploaded. If specified, the upload size **will not** be
+ determined from the stream (even if ``stream_final=True``).
+ stream_final (Optional[bool]): Indicates if the ``stream`` is
+ "final" (i.e. no more bytes will be added to it). In this case
+ we determine the upload size from the size of the stream. If
+ ``total_bytes`` is passed, this argument will be ignored.
+ timeout (Optional[Union[float, aiohttp.ClientTimeout]]):
+ The number of seconds to wait for the server response.
+ Depending on the retry strategy, a request may be repeated
+ several times using the same timeout each time.
+ Can also be passed as an `aiohttp.ClientTimeout` object.
+
+ Returns:
+ ~requests.Response: The HTTP response returned by ``transport``.
+ """
+ method, url, payload, headers = self._prepare_initiate_request(
+ stream,
+ metadata,
+ content_type,
+ total_bytes=total_bytes,
+ stream_final=stream_final,
+ )
+ response = await _request_helpers.http_request(
+ transport,
+ method,
+ url,
+ data=payload,
+ headers=headers,
+ retry_strategy=self._retry_strategy,
+ timeout=timeout,
+ )
+ self._process_initiate_response(response)
+ return response
+
+ async def transmit_next_chunk(
+ self, transport, timeout=_request_helpers._DEFAULT_TIMEOUT
+ ):
+ """Transmit the next chunk of the resource to be uploaded.
+
+ If the current upload was initiated with ``stream_final=False``,
+ this method will dynamically determine if the upload has completed.
+ The upload will be considered complete if the stream produces
+ fewer than :attr:`chunk_size` bytes when a chunk is read from it.
+
+ In the case of failure, an exception is thrown that preserves the
+ failed response:
+
+ .. testsetup:: bad-response
+
+ import io
+
+ import mock
+ import requests
+ from six.moves import http_client
+
+ from google import resumable_media
+ import google.resumable_media.requests.upload as upload_mod
+
+ transport = mock.Mock(spec=['request'])
+ fake_response = requests.Response()
+ fake_response.status_code = int(http_client.BAD_REQUEST)
+ transport.request.return_value = fake_response
+
+ upload_url = u'http://test.invalid'
+ upload = upload_mod.ResumableUpload(
+ upload_url, resumable_media.UPLOAD_CHUNK_SIZE)
+ # Fake that the upload has been initiate()-d
+ data = b'data is here'
+ upload._stream = io.BytesIO(data)
+ upload._total_bytes = len(data)
+ upload._resumable_url = u'http://test.invalid?upload_id=nope'
+
+ .. doctest:: bad-response
+ :options: +NORMALIZE_WHITESPACE
+
+ >>> error = None
+ >>> try:
+ ... upload.transmit_next_chunk(transport)
+ ... except resumable_media.InvalidResponse as caught_exc:
+ ... error = caught_exc
+ ...
+ >>> error
+ InvalidResponse('Request failed with status code', 400,
+ 'Expected one of', <HTTPStatus.OK: 200>, 308)
+ >>> error.response
+ <Response [400]>
+
+ Args:
+ transport (~requests.Session): A ``requests`` object which can
+ make authenticated requests.
+ timeout (Optional[Union[float, aiohttp.ClientTimeout]]):
+ The number of seconds to wait for the server response.
+ Depending on the retry strategy, a request may be repeated
+ several times using the same timeout each time.
+ Can also be passed as an `aiohttp.ClientTimeout` object.
+
+ Returns:
+ ~requests.Response: The HTTP response returned by ``transport``.
+
+ Raises:
+ ~google.resumable_media.common.InvalidResponse: If the status
+ code is not 200 or 308.
+ ~google.resumable_media.common.DataCorruption: If this is the final
+ chunk, a checksum validation was requested, and the checksum
+ does not match or is not available.
+ """
+ method, url, payload, headers = self._prepare_request()
+ response = await _request_helpers.http_request(
+ transport,
+ method,
+ url,
+ data=payload,
+ headers=headers,
+ retry_strategy=self._retry_strategy,
+ timeout=timeout,
+ )
+ await self._process_response(response, len(payload))
+ return response
+
+ async def recover(self, transport):
+ """Recover from a failure.
+
+ This method should be used when a :class:`ResumableUpload` is in an
+ :attr:`~ResumableUpload.invalid` state due to a request failure.
+
+ This will verify the progress with the server and make sure the
+ current upload is in a valid state before :meth:`transmit_next_chunk`
+ can be used again.
+
+ Args:
+ transport (~requests.Session): A ``requests`` object which can
+ make authenticated requests.
+
+ Returns:
+ ~requests.Response: The HTTP response returned by ``transport``.
+ """
+ method, url, payload, headers = self._prepare_recover_request()
+ # NOTE: We assume "payload is None" but pass it along anyway.
+ response = await _request_helpers.http_request(
+ transport,
+ method,
+ url,
+ data=payload,
+ headers=headers,
+ retry_strategy=self._retry_strategy,
+ )
+ self._process_recover_response(response)
+ return response
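Taken together, ``initiate`` and ``transmit_next_chunk`` compose into a short driver loop. A hedged sketch, with ``session``, ``url``, and the object name as placeholders:

```python
# Sketch only: initiate a resumable upload, then push chunks until done.
import io

from google import resumable_media
from google._async_resumable_media.requests.upload import ResumableUpload


async def upload_bytes(session, url, data, name):
    upload = ResumableUpload(url, resumable_media.UPLOAD_CHUNK_SIZE)
    stream = io.BytesIO(data)
    await upload.initiate(
        session, stream, {u"name": name}, u"application/octet-stream"
    )
    while not upload.finished:
        # On a retryable failure, recover() can resync progress first.
        await upload.transmit_next_chunk(session)
    return upload
```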
diff --git a/venv/Lib/site-packages/google/api_core/__init__.py b/venv/Lib/site-packages/google/api_core/__init__.py
new file mode 100644
index 000000000..605dd8be7
--- /dev/null
+++ b/venv/Lib/site-packages/google/api_core/__init__.py
@@ -0,0 +1,22 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Google API Core.
+
+This package contains common code and utilities used by Google client libraries.
+"""
+
+from google.api_core import version as api_core_version
+
+__version__ = api_core_version.__version__
diff --git a/venv/Lib/site-packages/google/api_core/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/google/api_core/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..6abd8c0e3
Binary files /dev/null and b/venv/Lib/site-packages/google/api_core/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/api_core/__pycache__/bidi.cpython-36.pyc b/venv/Lib/site-packages/google/api_core/__pycache__/bidi.cpython-36.pyc
new file mode 100644
index 000000000..360e9221a
Binary files /dev/null and b/venv/Lib/site-packages/google/api_core/__pycache__/bidi.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/api_core/__pycache__/client_info.cpython-36.pyc b/venv/Lib/site-packages/google/api_core/__pycache__/client_info.cpython-36.pyc
new file mode 100644
index 000000000..f9ef18c3f
Binary files /dev/null and b/venv/Lib/site-packages/google/api_core/__pycache__/client_info.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/api_core/__pycache__/client_options.cpython-36.pyc b/venv/Lib/site-packages/google/api_core/__pycache__/client_options.cpython-36.pyc
new file mode 100644
index 000000000..f9b8dd560
Binary files /dev/null and b/venv/Lib/site-packages/google/api_core/__pycache__/client_options.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/api_core/__pycache__/datetime_helpers.cpython-36.pyc b/venv/Lib/site-packages/google/api_core/__pycache__/datetime_helpers.cpython-36.pyc
new file mode 100644
index 000000000..8eeaca6cc
Binary files /dev/null and b/venv/Lib/site-packages/google/api_core/__pycache__/datetime_helpers.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/api_core/__pycache__/exceptions.cpython-36.pyc b/venv/Lib/site-packages/google/api_core/__pycache__/exceptions.cpython-36.pyc
new file mode 100644
index 000000000..1aa8e211d
Binary files /dev/null and b/venv/Lib/site-packages/google/api_core/__pycache__/exceptions.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/api_core/__pycache__/general_helpers.cpython-36.pyc b/venv/Lib/site-packages/google/api_core/__pycache__/general_helpers.cpython-36.pyc
new file mode 100644
index 000000000..c829e1784
Binary files /dev/null and b/venv/Lib/site-packages/google/api_core/__pycache__/general_helpers.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/api_core/__pycache__/grpc_helpers.cpython-36.pyc b/venv/Lib/site-packages/google/api_core/__pycache__/grpc_helpers.cpython-36.pyc
new file mode 100644
index 000000000..a6c5abd9d
Binary files /dev/null and b/venv/Lib/site-packages/google/api_core/__pycache__/grpc_helpers.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/api_core/__pycache__/grpc_helpers_async.cpython-36.pyc b/venv/Lib/site-packages/google/api_core/__pycache__/grpc_helpers_async.cpython-36.pyc
new file mode 100644
index 000000000..6dc0128bc
Binary files /dev/null and b/venv/Lib/site-packages/google/api_core/__pycache__/grpc_helpers_async.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/api_core/__pycache__/iam.cpython-36.pyc b/venv/Lib/site-packages/google/api_core/__pycache__/iam.cpython-36.pyc
new file mode 100644
index 000000000..9c47daa7f
Binary files /dev/null and b/venv/Lib/site-packages/google/api_core/__pycache__/iam.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/api_core/__pycache__/operation.cpython-36.pyc b/venv/Lib/site-packages/google/api_core/__pycache__/operation.cpython-36.pyc
new file mode 100644
index 000000000..22398b290
Binary files /dev/null and b/venv/Lib/site-packages/google/api_core/__pycache__/operation.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/api_core/__pycache__/operation_async.cpython-36.pyc b/venv/Lib/site-packages/google/api_core/__pycache__/operation_async.cpython-36.pyc
new file mode 100644
index 000000000..f1ef3b5ec
Binary files /dev/null and b/venv/Lib/site-packages/google/api_core/__pycache__/operation_async.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/api_core/__pycache__/page_iterator.cpython-36.pyc b/venv/Lib/site-packages/google/api_core/__pycache__/page_iterator.cpython-36.pyc
new file mode 100644
index 000000000..a5f8f7513
Binary files /dev/null and b/venv/Lib/site-packages/google/api_core/__pycache__/page_iterator.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/api_core/__pycache__/page_iterator_async.cpython-36.pyc b/venv/Lib/site-packages/google/api_core/__pycache__/page_iterator_async.cpython-36.pyc
new file mode 100644
index 000000000..c2737213e
Binary files /dev/null and b/venv/Lib/site-packages/google/api_core/__pycache__/page_iterator_async.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/api_core/__pycache__/path_template.cpython-36.pyc b/venv/Lib/site-packages/google/api_core/__pycache__/path_template.cpython-36.pyc
new file mode 100644
index 000000000..e45cfd186
Binary files /dev/null and b/venv/Lib/site-packages/google/api_core/__pycache__/path_template.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/api_core/__pycache__/protobuf_helpers.cpython-36.pyc b/venv/Lib/site-packages/google/api_core/__pycache__/protobuf_helpers.cpython-36.pyc
new file mode 100644
index 000000000..3941a6a34
Binary files /dev/null and b/venv/Lib/site-packages/google/api_core/__pycache__/protobuf_helpers.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/api_core/__pycache__/retry.cpython-36.pyc b/venv/Lib/site-packages/google/api_core/__pycache__/retry.cpython-36.pyc
new file mode 100644
index 000000000..c6366e616
Binary files /dev/null and b/venv/Lib/site-packages/google/api_core/__pycache__/retry.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/api_core/__pycache__/retry_async.cpython-36.pyc b/venv/Lib/site-packages/google/api_core/__pycache__/retry_async.cpython-36.pyc
new file mode 100644
index 000000000..121274344
Binary files /dev/null and b/venv/Lib/site-packages/google/api_core/__pycache__/retry_async.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/api_core/__pycache__/timeout.cpython-36.pyc b/venv/Lib/site-packages/google/api_core/__pycache__/timeout.cpython-36.pyc
new file mode 100644
index 000000000..7106d73e4
Binary files /dev/null and b/venv/Lib/site-packages/google/api_core/__pycache__/timeout.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/api_core/__pycache__/version.cpython-36.pyc b/venv/Lib/site-packages/google/api_core/__pycache__/version.cpython-36.pyc
new file mode 100644
index 000000000..b3278e4dd
Binary files /dev/null and b/venv/Lib/site-packages/google/api_core/__pycache__/version.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/api_core/bidi.py b/venv/Lib/site-packages/google/api_core/bidi.py
new file mode 100644
index 000000000..be52d97d4
--- /dev/null
+++ b/venv/Lib/site-packages/google/api_core/bidi.py
@@ -0,0 +1,735 @@
+# Copyright 2017, Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Bi-directional streaming RPC helpers."""
+
+import collections
+import datetime
+import logging
+import threading
+import time
+
+from six.moves import queue
+
+from google.api_core import exceptions
+
+_LOGGER = logging.getLogger(__name__)
+_BIDIRECTIONAL_CONSUMER_NAME = "Thread-ConsumeBidirectionalStream"
+
+
+class _RequestQueueGenerator(object):
+ """A helper for sending requests to a gRPC stream from a Queue.
+
+ This generator takes requests off a given queue and yields them to gRPC.
+
+ This helper is useful when you have an indeterminate, indefinite, or
+ otherwise open-ended set of requests to send through a request-streaming
+ (or bidirectional) RPC.
+
+ The reason this is necessary is because gRPC takes an iterator as the
+ request for request-streaming RPCs. gRPC consumes this iterator in another
+ thread to allow it to block while generating requests for the stream.
+ However, if the generator blocks indefinitely gRPC will not be able to
+ clean up the thread as it'll be blocked on `next(iterator)` and not be able
+ to check the channel status to stop iterating. This helper mitigates that
+ by waiting on the queue with a timeout and checking the RPC state before
+ yielding.
+
+ Finally, it allows for retrying without swapping queues because if it does
+ pull an item off the queue when the RPC is inactive, it'll immediately put
+ it back and then exit. This is necessary because yielding the item in this
+ case will cause gRPC to discard it. In practice, this means that the order
+ of messages is not guaranteed. If such a thing is necessary it would be
+ easy to use a priority queue.
+
+ Example::
+
+ requests = request_queue_generator(q)
+ call = stub.StreamingRequest(iter(requests))
+ requests.call = call
+
+ for response in call:
+ print(response)
+ q.put(...)
+
+ Note that it is possible to accomplish this behavior without "spinning"
+ (using a queue timeout). One possible way would be to use more threads to
+ multiplex the grpc end event with the queue, another possible way is to
+ use selectors and a custom event/queue object. Both of these approaches
+ require significant engineering effort for small benefit - the CPU
+ consumed by spinning is minuscule.
+
+ Args:
+ queue (queue.Queue): The request queue.
+ period (float): The number of seconds to wait for items from the queue
+ before checking if the RPC is cancelled. In practice, this
+ determines the maximum amount of time the request consumption
+ thread will live after the RPC is cancelled.
+ initial_request (Union[protobuf.Message,
+ Callable[None, protobuf.Message]]): The initial request to
+ yield. This is done independently of the request queue to allow for
+ easily restarting streams that require some initial configuration
+ request.
+ """
+
+ def __init__(self, queue, period=1, initial_request=None):
+ self._queue = queue
+ self._period = period
+ self._initial_request = initial_request
+ self.call = None
+
+ def _is_active(self):
+ # Note: there is a possibility that this starts *before* the call
+ # property is set. So we have to check if self.call is set before
+ # seeing if it's active.
+ if self.call is not None and not self.call.is_active():
+ return False
+ else:
+ return True
+
+ def __iter__(self):
+ if self._initial_request is not None:
+ if callable(self._initial_request):
+ yield self._initial_request()
+ else:
+ yield self._initial_request
+
+ while True:
+ try:
+ item = self._queue.get(timeout=self._period)
+ except queue.Empty:
+ if not self._is_active():
+ _LOGGER.debug(
+ "Empty queue and inactive call, exiting request " "generator."
+ )
+ return
+ else:
+ # call is still active, keep waiting for queue items.
+ continue
+
+ # The consumer explicitly sent "None", indicating that the request
+ # should end.
+ if item is None:
+ _LOGGER.debug("Cleanly exiting request generator.")
+ return
+
+ if not self._is_active():
+ # We have an item, but the call is closed. We should put the
+ # item back on the queue so that the next call can consume it.
+ self._queue.put(item)
+ _LOGGER.debug(
+ "Inactive call, replacing item on queue and exiting "
+ "request generator."
+ )
+ return
+
+ yield item
+
+
+class _Throttle(object):
+ """A context manager limiting the total entries in a sliding time window.
+
+ If more than ``access_limit`` attempts are made to enter the context manager
+ instance in the last ``time window`` interval, the exceeding requests block
+ until enough time elapses.
+
+ The context manager instances are thread-safe and can be shared between
+ multiple threads. If multiple requests are blocked and waiting to enter,
+ the exact order in which they are allowed to proceed is not determined.
+
+ Example::
+
+ max_three_per_second = _Throttle(
+ access_limit=3, time_window=datetime.timedelta(seconds=1)
+ )
+
+ for i in range(5):
+ with max_three_per_second as time_waited:
+ print("{}: Waited {} seconds to enter".format(i, time_waited))
+
+ Args:
+ access_limit (int): the maximum number of entries allowed in the time window
+ time_window (datetime.timedelta): the width of the sliding time window
+ """
+
+ def __init__(self, access_limit, time_window):
+ if access_limit < 1:
+ raise ValueError("access_limit argument must be positive")
+
+ if time_window <= datetime.timedelta(0):
+ raise ValueError("time_window argument must be a positive timedelta")
+
+ self._time_window = time_window
+ self._access_limit = access_limit
+ self._past_entries = collections.deque(
+ maxlen=access_limit
+ ) # least recent first
+ self._entry_lock = threading.Lock()
+
+ def __enter__(self):
+ with self._entry_lock:
+ cutoff_time = datetime.datetime.now() - self._time_window
+
+ # drop the entries that are too old, as they are no longer relevant
+ while self._past_entries and self._past_entries[0] < cutoff_time:
+ self._past_entries.popleft()
+
+ if len(self._past_entries) < self._access_limit:
+ self._past_entries.append(datetime.datetime.now())
+ return 0.0 # no waiting was needed
+
+ to_wait = (self._past_entries[0] - cutoff_time).total_seconds()
+ time.sleep(to_wait)
+
+ self._past_entries.append(datetime.datetime.now())
+ return to_wait
+
+ def __exit__(self, *_):
+ pass
+
+ def __repr__(self):
+ return "{}(access_limit={}, time_window={})".format(
+ self.__class__.__name__, self._access_limit, repr(self._time_window)
+ )
+
+
+class BidiRpc(object):
+ """A helper for consuming a bi-directional streaming RPC.
+
+ This maps gRPC's built-in interface which uses a request iterator and a
+ response iterator into a socket-like :func:`send` and :func:`recv`. This
+ is a more useful pattern for long-running or asymmetric streams (streams
+ where there is not a direct correlation between the requests and
+ responses).
+
+ Example::
+
+ initial_request = example_pb2.StreamingRpcRequest(
+ setting='example')
+ rpc = BidiRpc(
+ stub.StreamingRpc,
+ initial_request=initial_request,
+ metadata=[('name', 'value')]
+ )
+
+ rpc.open()
+
+ while rpc.is_active():
+ print(rpc.recv())
+ rpc.send(example_pb2.StreamingRpcRequest(
+ data='example'))
+
+ This does *not* retry the stream on errors. See :class:`ResumableBidiRpc`.
+
+ Args:
+ start_rpc (grpc.StreamStreamMultiCallable): The gRPC method used to
+ start the RPC.
+ initial_request (Union[protobuf.Message,
+ Callable[None, protobuf.Message]]): The initial request to
+ yield. This is useful if an initial request is needed to start the
+ stream.
+ metadata (Sequence[Tuple(str, str)]): RPC metadata to include in
+ the request.
+ """
+
+ def __init__(self, start_rpc, initial_request=None, metadata=None):
+ self._start_rpc = start_rpc
+ self._initial_request = initial_request
+ self._rpc_metadata = metadata
+ self._request_queue = queue.Queue()
+ self._request_generator = None
+ self._is_active = False
+ self._callbacks = []
+ self.call = None
+
+ def add_done_callback(self, callback):
+ """Adds a callback that will be called when the RPC terminates.
+
+ This occurs when the RPC errors or is successfully terminated.
+
+ Args:
+ callback (Callable[[grpc.Future], None]): The callback to execute.
+ It will be provided with the same gRPC future as the underlying
+ stream which will also be a :class:`grpc.Call`.
+ """
+ self._callbacks.append(callback)
+
+ def _on_call_done(self, future):
+ for callback in self._callbacks:
+ callback(future)
+
+ def open(self):
+ """Opens the stream."""
+ if self.is_active:
+ raise ValueError("Can not open an already open stream.")
+
+ request_generator = _RequestQueueGenerator(
+ self._request_queue, initial_request=self._initial_request
+ )
+ call = self._start_rpc(iter(request_generator), metadata=self._rpc_metadata)
+
+ request_generator.call = call
+
+ # TODO: api_core should expose the future interface for wrapped
+ # callables as well.
+ if hasattr(call, "_wrapped"): # pragma: NO COVER
+ call._wrapped.add_done_callback(self._on_call_done)
+ else:
+ call.add_done_callback(self._on_call_done)
+
+ self._request_generator = request_generator
+ self.call = call
+
+ def close(self):
+ """Closes the stream."""
+ if self.call is None:
+ return
+
+ self._request_queue.put(None)
+ self.call.cancel()
+ self._request_generator = None
+ # Don't set self.call to None. Keep it around so that send/recv can
+ # raise the error.
+
+ def send(self, request):
+ """Queue a message to be sent on the stream.
+
+ Send is non-blocking.
+
+ If the underlying RPC has been closed, this will raise.
+
+ Args:
+ request (protobuf.Message): The request to send.
+ """
+ if self.call is None:
+ raise ValueError("Can not send() on an RPC that has never been open()ed.")
+
+ # Don't use self.is_active(), as ResumableBidiRpc will overload it
+ # to mean something semantically different.
+ if self.call.is_active():
+ self._request_queue.put(request)
+ else:
+ # calling next should cause the call to raise.
+ next(self.call)
+
+ def recv(self):
+ """Wait for a message to be returned from the stream.
+
+ Recv is blocking.
+
+ If the underlying RPC has been closed, this will raise.
+
+ Returns:
+ protobuf.Message: The received message.
+ """
+ if self.call is None:
+ raise ValueError("Can not recv() on an RPC that has never been open()ed.")
+
+ return next(self.call)
+
+ @property
+ def is_active(self):
+ """bool: True if this stream is currently open and active."""
+ return self.call is not None and self.call.is_active()
+
+ @property
+ def pending_requests(self):
+ """int: Returns an estimate of the number of queued requests."""
+ return self._request_queue.qsize()
+
+
+def _never_terminate(future_or_error):
+ """By default, no errors cause BiDi termination."""
+ return False
+
+
+class ResumableBidiRpc(BidiRpc):
+ """A :class:`BidiRpc` that can automatically resume the stream on errors.
+
+ It uses the ``should_recover`` arg to determine if it should re-establish
+ the stream on error.
+
+ Example::
+
+ def should_recover(exc):
+ return (
+ isinstance(exc, grpc.RpcError) and
+ exc.code() == grpc.StatusCode.UNAVAILABLE)
+
+ initial_request = example_pb2.StreamingRpcRequest(
+ setting='example')
+
+ metadata = [('header_name', 'value')]
+
+ rpc = ResumableBidiRpc(
+ stub.StreamingRpc,
+ should_recover=should_recover,
+ initial_request=initial_request,
+ metadata=metadata
+ )
+
+ rpc.open()
+
+ while rpc.is_active():
+ print(rpc.recv())
+ rpc.send(example_pb2.StreamingRpcRequest(
+ data='example'))
+
+ Args:
+ start_rpc (grpc.StreamStreamMultiCallable): The gRPC method used to
+ start the RPC.
+ initial_request (Union[protobuf.Message,
+ Callable[None, protobuf.Message]]): The initial request to
+ yield. This is useful if an initial request is needed to start the
+ stream.
+ should_recover (Callable[[Exception], bool]): A function that returns
+ True if the stream should be recovered. This will be called
+ whenever an error is encountered on the stream.
+ should_terminate (Callable[[Exception], bool]): A function that returns
+ True if the stream should be terminated. This will be called
+ whenever an error is encountered on the stream.
+ metadata (Sequence[Tuple(str, str)]): RPC metadata to include in
+ the request.
+ throttle_reopen (bool): If ``True``, throttling will be applied to
+ stream reopen calls. Defaults to ``False``.
+ """
+
+ def __init__(
+ self,
+ start_rpc,
+ should_recover,
+ should_terminate=_never_terminate,
+ initial_request=None,
+ metadata=None,
+ throttle_reopen=False,
+ ):
+ super(ResumableBidiRpc, self).__init__(start_rpc, initial_request, metadata)
+ self._should_recover = should_recover
+ self._should_terminate = should_terminate
+ self._operational_lock = threading.RLock()
+ self._finalized = False
+ self._finalize_lock = threading.Lock()
+
+ if throttle_reopen:
+ self._reopen_throttle = _Throttle(
+ access_limit=5, time_window=datetime.timedelta(seconds=10)
+ )
+ else:
+ self._reopen_throttle = None
+
+ def _finalize(self, result):
+ with self._finalize_lock:
+ if self._finalized:
+ return
+
+ for callback in self._callbacks:
+ callback(result)
+
+ self._finalized = True
+
+ def _on_call_done(self, future):
+ # Unlike the base class, we only execute the callbacks on a terminal
+ # error, not for errors that we can recover from. Note that grpc's
+ # "future" here is also a grpc.RpcError.
+ with self._operational_lock:
+ if self._should_terminate(future):
+ self._finalize(future)
+ elif not self._should_recover(future):
+ self._finalize(future)
+ else:
+ _LOGGER.debug("Re-opening stream from gRPC callback.")
+ self._reopen()
+
+ def _reopen(self):
+ with self._operational_lock:
+ # Another thread already managed to re-open this stream.
+ if self.call is not None and self.call.is_active():
+ _LOGGER.debug("Stream was already re-established.")
+ return
+
+ self.call = None
+ # Request generator should exit cleanly since the RPC it's bound to
+ # has exited.
+ self._request_generator = None
+
+ # Note: we do not currently do any sort of backoff here. The
+ # assumption is that re-establishing the stream under normal
+ # circumstances will happen in intervals greater than 60s.
+ # However, it is possible in a degenerate case that the server
+ # closes the stream rapidly which would lead to thrashing here,
+ # but hopefully in those cases the server would return a non-
+ # retryable error.
+
+ try:
+ if self._reopen_throttle:
+ with self._reopen_throttle:
+ self.open()
+ else:
+ self.open()
+ # If re-opening or re-calling the method fails for any reason,
+ # consider it a terminal error and finalize the stream.
+ except Exception as exc:
+ _LOGGER.debug("Failed to re-open stream due to %s", exc)
+ self._finalize(exc)
+ raise
+
+ _LOGGER.info("Re-established stream")
+
+ def _recoverable(self, method, *args, **kwargs):
+ """Wraps a method to recover the stream and retry on error.
+
+ If a retryable error occurs while making the call, then the stream will
+ be re-opened and the method will be retried. This happens indefinitely
+ so long as the error is a retryable one. If an error occurs while
+ re-opening the stream, then this method will raise immediately and
+ trigger finalization of this object.
+
+ Args:
+ method (Callable[..., Any]): The method to call.
+ args: The args to pass to the method.
+ kwargs: The kwargs to pass to the method.
+ """
+ while True:
+ try:
+ return method(*args, **kwargs)
+
+ except Exception as exc:
+ with self._operational_lock:
+ _LOGGER.debug("Call to retryable %r caused %s.", method, exc)
+
+ if self._should_terminate(exc):
+ self.close()
+ _LOGGER.debug("Terminating %r due to %s.", method, exc)
+ self._finalize(exc)
+ break
+
+ if not self._should_recover(exc):
+ self.close()
+ _LOGGER.debug("Not retrying %r due to %s.", method, exc)
+ self._finalize(exc)
+ raise exc
+
+ _LOGGER.debug("Re-opening stream from retryable %r.", method)
+ self._reopen()
+
+ def _send(self, request):
+ # Grab a reference to the RPC call. Because another thread (notably
+ # the gRPC error thread) can modify self.call (by invoking reopen),
+ # we should ensure our reference can not change underneath us.
+ # If self.call is modified (such as replaced with a new RPC call) then
+ # this will use the "old" RPC, which should result in the same
+ # exception passed into gRPC's error handler being raised here, which
+ # will be handled by the usual error handling in retryable.
+ with self._operational_lock:
+ call = self.call
+
+ if call is None:
+ raise ValueError("Can not send() on an RPC that has never been open()ed.")
+
+ # Don't use self.is_active(), as ResumableBidiRpc will overload it
+ # to mean something semantically different.
+ if call.is_active():
+ self._request_queue.put(request)
+ else:
+ # calling next should cause the call to raise.
+ next(call)
+
+ def send(self, request):
+ return self._recoverable(self._send, request)
+
+ def _recv(self):
+ with self._operational_lock:
+ call = self.call
+
+ if call is None:
+ raise ValueError("Can not recv() on an RPC that has never been open()ed.")
+
+ return next(call)
+
+ def recv(self):
+ return self._recoverable(self._recv)
+
+ def close(self):
+ self._finalize(None)
+ super(ResumableBidiRpc, self).close()
+
+ @property
+ def is_active(self):
+ """bool: True if this stream is currently open and active."""
+ # Use the operational lock. It's entirely possible for something
+ # to check the active state *while* the RPC is being retried.
+ # Also, use finalized to track the actual terminal state here.
+ # This is because if the stream is re-established by the gRPC thread
+ # it's technically possible to check this between when gRPC marks the
+ # RPC as inactive and when gRPC executes our callback that re-opens
+ # the stream.
+ with self._operational_lock:
+ return self.call is not None and not self._finalized
+
+
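One wiring note ahead of the consumer helper below: ``BackgroundConsumer`` exits silently when the RPC terminates, so terminal errors should be observed via the RPC's done callback. A hedged sketch with placeholder names (``stub`` stands in for a real gRPC stub):

```python
# Sketch only: pair ResumableBidiRpc with BackgroundConsumer (defined
# below) and surface terminal errors via a done callback. `stub` is a
# placeholder gRPC stub for a streaming method.
import grpc

from google.api_core.bidi import BackgroundConsumer, ResumableBidiRpc


def should_recover(exc):
    return (
        isinstance(exc, grpc.RpcError)
        and exc.code() == grpc.StatusCode.UNAVAILABLE
    )


def on_done(future_or_error):
    # Invoked once with the terminal grpc.Future / error.
    print("stream terminated:", future_or_error)


rpc = ResumableBidiRpc(stub.StreamingRpc, should_recover=should_recover)
rpc.add_done_callback(on_done)
consumer = BackgroundConsumer(rpc, on_response=print)
consumer.start()
```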
+class BackgroundConsumer(object):
+ """A bi-directional stream consumer that runs in a separate thread.
+
+ This maps the consumption of a stream into a callback-based model. It also
+ provides :func:`pause` and :func:`resume` to allow for flow-control.
+
+ Example::
+
+ def should_recover(exc):
+ return (
+ isinstance(exc, grpc.RpcError) and
+ exc.code() == grpc.StatusCode.UNAVAILABLE)
+
+ initial_request = example_pb2.StreamingRpcRequest(
+ setting='example')
+
+ rpc = ResumableBidiRpc(
+ stub.StreamingRpc,
+ initial_request=initial_request,
+ should_recover=should_recover)
+
+ def on_response(response):
+ print(response)
+
+ consumer = BackgroundConsumer(rpc, on_response)
+ consumer.start()
+
+ Note that error handling *must* be done by using the provided
+ ``bidi_rpc``'s ``add_done_callback``. This helper will automatically exit
+ whenever the RPC itself exits and will not provide any error details.
+
+ Args:
+ bidi_rpc (BidiRpc): The RPC to consume. Should not have been
+ ``open()``ed yet.
+ on_response (Callable[[protobuf.Message], None]): The callback to
+ be called for every response on the stream.
+ """
+
+ def __init__(self, bidi_rpc, on_response):
+ self._bidi_rpc = bidi_rpc
+ self._on_response = on_response
+ self._paused = False
+ self._wake = threading.Condition()
+ self._thread = None
+ self._operational_lock = threading.Lock()
+
+ def _on_call_done(self, future):
+ # Resume the thread if it's paused, this prevents blocking forever
+ # when the RPC has terminated.
+ self.resume()
+
+ def _thread_main(self, ready):
+ try:
+ ready.set()
+ self._bidi_rpc.add_done_callback(self._on_call_done)
+ self._bidi_rpc.open()
+
+ while self._bidi_rpc.is_active:
+ # Do not allow the paused status to change at all during this
+ # section. There is a condition where we could be resumed
+ # between checking if we are paused and calling wake.wait(),
+ # which means that we will miss the notification to wake up
+ # (oops!) and wait for a notification that will never come.
+ # Keeping the lock throughout avoids that.
+ # In the future, we could use `Condition.wait_for` if we drop
+ # Python 2.7.
+ with self._wake:
+ while self._paused:
+ _LOGGER.debug("paused, waiting for waking.")
+ self._wake.wait()
+ _LOGGER.debug("woken.")
+
+ _LOGGER.debug("waiting for recv.")
+ response = self._bidi_rpc.recv()
+ _LOGGER.debug("recved response.")
+ self._on_response(response)
+
+ except exceptions.GoogleAPICallError as exc:
+ _LOGGER.debug(
+ "%s caught error %s and will exit. Generally this is due to "
+ "the RPC itself being cancelled and the error will be "
+ "surfaced to the calling code.",
+ _BIDIRECTIONAL_CONSUMER_NAME,
+ exc,
+ exc_info=True,
+ )
+
+ except Exception as exc:
+ _LOGGER.exception(
+ "%s caught unexpected exception %s and will exit.",
+ _BIDIRECTIONAL_CONSUMER_NAME,
+ exc,
+ )
+
+ _LOGGER.info("%s exiting", _BIDIRECTIONAL_CONSUMER_NAME)
+
+ def start(self):
+ """Start the background thread and begin consuming the thread."""
+ with self._operational_lock:
+ ready = threading.Event()
+ thread = threading.Thread(
+ name=_BIDIRECTIONAL_CONSUMER_NAME,
+ target=self._thread_main,
+ args=(ready,),
+ )
+ thread.daemon = True
+ thread.start()
+ # Other parts of the code rely on `thread.is_alive` which
+ # isn't sufficient to know if a thread is active, just that it may
+ # soon be active. This can cause races. Further protect
+ # against races by using a ready event and wait on it to be set.
+ ready.wait()
+ self._thread = thread
+ _LOGGER.debug("Started helper thread %s", thread.name)
+
+ def stop(self):
+ """Stop consuming the stream and shutdown the background thread."""
+ with self._operational_lock:
+ self._bidi_rpc.close()
+
+ if self._thread is not None:
+ # Resume the thread to wake it up in case it is sleeping.
+ self.resume()
+ # The daemonized thread may itself block, so don't wait
+ # for it longer than a second.
+ self._thread.join(1.0)
+ if self._thread.is_alive(): # pragma: NO COVER
+ _LOGGER.warning("Background thread did not exit.")
+
+ self._thread = None
+
+ @property
+ def is_active(self):
+ """bool: True if the background thread is active."""
+ return self._thread is not None and self._thread.is_alive()
+
+ def pause(self):
+ """Pauses the response stream.
+
+ This does *not* pause the request stream.
+ """
+ with self._wake:
+ self._paused = True
+
+ def resume(self):
+ """Resumes the response stream."""
+ with self._wake:
+ self._paused = False
+ self._wake.notify_all()
+
+ @property
+ def is_paused(self):
+ """bool: True if the response stream is paused."""
+ return self._paused
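+
+
+# Flow-control sketch (illustrative usage, not part of the upstream module):
+# pause() holds response dispatch until resume() is called, and stop() closes
+# the RPC and joins the helper thread. ``consumer`` is the instance built in
+# the class docstring example above.
+#
+#   consumer.pause()    # on_response is not invoked while paused
+#   consumer.resume()   # wakes the consumer thread
+#   consumer.stop()     # close the RPC and shut the thread down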
diff --git a/venv/Lib/site-packages/google/api_core/client_info.py b/venv/Lib/site-packages/google/api_core/client_info.py
new file mode 100644
index 000000000..6c04d5de5
--- /dev/null
+++ b/venv/Lib/site-packages/google/api_core/client_info.py
@@ -0,0 +1,98 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for providing client information.
+
+Client information is used to send information about the calling client,
+such as the library and Python version, to API services.
+"""
+
+import platform
+
+import pkg_resources
+
+from google.api_core import version as api_core_version
+
+_PY_VERSION = platform.python_version()
+_API_CORE_VERSION = api_core_version.__version__
+
+try:
+ _GRPC_VERSION = pkg_resources.get_distribution("grpcio").version
+except pkg_resources.DistributionNotFound: # pragma: NO COVER
+ _GRPC_VERSION = None
+
+
+class ClientInfo(object):
+ """Client information used to generate a user-agent for API calls.
+
+ This user-agent information is sent along with API calls to allow the
+ receiving service to do analytics on which versions of Python and Google
+ libraries are being used.
+
+ Args:
+ python_version (str): The Python interpreter version, for example,
+ ``'2.7.13'``.
+ grpc_version (Optional[str]): The gRPC library version.
+ api_core_version (str): The google-api-core library version.
+ gapic_version (Optional[str]): The version of the gapic-generated client
+ library, if the library was generated by gapic.
+ client_library_version (Optional[str]): The version of the client
+ library, generally used if the client library was not generated
+ by gapic or if additional functionality was built on top of
+ a gapic client library.
+ user_agent (Optional[str]): Prefix to the user agent header. This is
+ used to supply information such as application name or partner tool.
+ Recommended format: ``application-or-tool-ID/major.minor.version``.
+ """
+
+ def __init__(
+ self,
+ python_version=_PY_VERSION,
+ grpc_version=_GRPC_VERSION,
+ api_core_version=_API_CORE_VERSION,
+ gapic_version=None,
+ client_library_version=None,
+ user_agent=None,
+ ):
+ self.python_version = python_version
+ self.grpc_version = grpc_version
+ self.api_core_version = api_core_version
+ self.gapic_version = gapic_version
+ self.client_library_version = client_library_version
+ self.user_agent = user_agent
+
+ def to_user_agent(self):
+ """Returns the user-agent string for this client info."""
+
+ # Note: the order here is important as the internal metrics system
+ # expects these items to be in specific locations.
+ ua = ""
+
+ if self.user_agent is not None:
+ ua += "{user_agent} "
+
+ ua += "gl-python/{python_version} "
+
+ if self.grpc_version is not None:
+ ua += "grpc/{grpc_version} "
+
+ ua += "gax/{api_core_version} "
+
+ if self.gapic_version is not None:
+ ua += "gapic/{gapic_version} "
+
+ if self.client_library_version is not None:
+ ua += "gccl/{client_library_version} "
+
+ return ua.format(**self.__dict__).strip()
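+
+
+# Usage sketch (illustrative; the version numbers shown are assumptions, not
+# real output):
+#
+#   info = ClientInfo(user_agent="my-app/1.0", gapic_version="0.1.0")
+#   info.to_user_agent()
+#   # -> 'my-app/1.0 gl-python/3.6.8 grpc/1.30.0 gax/1.22.0 gapic/0.1.0'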
diff --git a/venv/Lib/site-packages/google/api_core/client_options.py b/venv/Lib/site-packages/google/api_core/client_options.py
new file mode 100644
index 000000000..57000e95e
--- /dev/null
+++ b/venv/Lib/site-packages/google/api_core/client_options.py
@@ -0,0 +1,116 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Client options class.
+
+Client options provide a consistent interface for user options to be defined
+across clients.
+
+You can pass a client options object to a client.
+
+.. code-block:: python
+
+ from google.api_core.client_options import ClientOptions
+ from google.cloud.vision_v1 import ImageAnnotatorClient
+
+ def get_client_cert():
+ # code to load client certificate and private key.
+ return client_cert_bytes, client_private_key_bytes
+
+ options = ClientOptions(api_endpoint="foo.googleapis.com",
+ client_cert_source=get_client_cert)
+
+ client = ImageAnnotatorClient(client_options=options)
+
+You can also pass a mapping object.
+
+.. code-block:: python
+
+ from google.cloud.vision_v1 import ImageAnnotatorClient
+
+ client = ImageAnnotatorClient(
+ client_options={
+ "api_endpoint": "foo.googleapis.com",
+ "client_cert_source" : get_client_cert
+ })
+
+
+"""
+
+
+class ClientOptions(object):
+ """Client Options used to set options on clients.
+
+ Args:
+ api_endpoint (Optional[str]): The desired API endpoint, e.g.,
+ compute.googleapis.com
+ client_cert_source (Optional[Callable[[], (bytes, bytes)]]): A callback
+ which returns client certificate bytes and private key bytes both in
+ PEM format. ``client_cert_source`` and ``client_encrypted_cert_source``
+ are mutually exclusive.
+ client_encrypted_cert_source (Optional[Callable[[], (str, str, bytes)]]):
+ A callback which returns client certificate file path, encrypted
+ private key file path, and the passphrase bytes. ``client_cert_source``
+ and ``client_encrypted_cert_source`` are mutually exclusive.
+ quota_project_id (Optional[str]): A project name that a client's
+ quota belongs to.
+ credentials_file (Optional[str]): A path to a file storing credentials.
+ scopes (Optional[Sequence[str]]): OAuth access token override scopes.
+
+ Raises:
+ ValueError: If both ``client_cert_source`` and ``client_encrypted_cert_source``
+ are provided.
+ """
+
+ def __init__(
+ self,
+ api_endpoint=None,
+ client_cert_source=None,
+ client_encrypted_cert_source=None,
+ quota_project_id=None,
+ credentials_file=None,
+ scopes=None,
+ ):
+ if client_cert_source and client_encrypted_cert_source:
+ raise ValueError(
+ "client_cert_source and client_encrypted_cert_source are mutually exclusive"
+ )
+ self.api_endpoint = api_endpoint
+ self.client_cert_source = client_cert_source
+ self.client_encrypted_cert_source = client_encrypted_cert_source
+ self.quota_project_id = quota_project_id
+ self.credentials_file = credentials_file
+ self.scopes = scopes
+
+ def __repr__(self):
+ return "ClientOptions: " + repr(self.__dict__)
+
+
+def from_dict(options):
+ """Construct a client options object from a mapping object.
+
+ Args:
+ options (six.moves.collections_abc.Mapping): A mapping object with client options.
+ See the docstring for ClientOptions for details on valid arguments.
+ """
+
+ client_options = ClientOptions()
+
+ for key, value in options.items():
+ if hasattr(client_options, key):
+ setattr(client_options, key, value)
+ else:
+ raise ValueError("ClientOptions does not accept an option '" + key + "'")
+
+ return client_options
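+
+
+# Usage sketch (illustrative):
+#
+#   options = from_dict({"api_endpoint": "foo.googleapis.com"})
+#   options.api_endpoint            # -> 'foo.googleapis.com'
+#   from_dict({"bogus_option": 1})  # raises ValueError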
diff --git a/venv/Lib/site-packages/google/api_core/datetime_helpers.py b/venv/Lib/site-packages/google/api_core/datetime_helpers.py
new file mode 100644
index 000000000..e52fb1dd3
--- /dev/null
+++ b/venv/Lib/site-packages/google/api_core/datetime_helpers.py
@@ -0,0 +1,296 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for :mod:`datetime`."""
+
+import calendar
+import datetime
+import re
+
+import pytz
+
+from google.protobuf import timestamp_pb2
+
+
+_UTC_EPOCH = datetime.datetime.utcfromtimestamp(0).replace(tzinfo=pytz.utc)
+_RFC3339_MICROS = "%Y-%m-%dT%H:%M:%S.%fZ"
+_RFC3339_NO_FRACTION = "%Y-%m-%dT%H:%M:%S"
+# datetime.strptime cannot handle nanosecond precision: parse w/ regex
+_RFC3339_NANOS = re.compile(
+ r"""
+ (?P<no_fraction>
+ \d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2} # YYYY-MM-DDTHH:MM:SS
+ )
+ ( # Optional decimal part
+ \. # decimal point
+ (?P<nanos>\d{1,9}) # nanoseconds, maybe truncated
+ )?
+ Z # Zulu
+""",
+ re.VERBOSE,
+)
+
+
+def utcnow():
+ """A :meth:`datetime.datetime.utcnow()` alias to allow mocking in tests."""
+ return datetime.datetime.utcnow()
+
+
+def to_milliseconds(value):
+ """Convert a zone-aware datetime to milliseconds since the unix epoch.
+
+ Args:
+ value (datetime.datetime): The datetime to convert.
+
+ Returns:
+ int: Milliseconds since the unix epoch.
+ """
+ micros = to_microseconds(value)
+ return micros // 1000
+
+
+def from_microseconds(value):
+ """Convert timestamp in microseconds since the unix epoch to datetime.
+
+ Args:
+ value (float): The timestamp to convert, in microseconds.
+
+ Returns:
+ datetime.datetime: The datetime object equivalent to the timestamp in
+ UTC.
+ """
+ return _UTC_EPOCH + datetime.timedelta(microseconds=value)
+
+
+def to_microseconds(value):
+ """Convert a datetime to microseconds since the unix epoch.
+
+ Args:
+ value (datetime.datetime): The datetime to convert.
+
+ Returns:
+ int: Microseconds since the unix epoch.
+ """
+ if not value.tzinfo:
+ value = value.replace(tzinfo=pytz.utc)
+ # Regardless of what timezone is on the value, convert it to UTC.
+ value = value.astimezone(pytz.utc)
+ # Convert the datetime to a microsecond timestamp.
+ return int(calendar.timegm(value.timetuple()) * 1e6) + value.microsecond
+
+
+def from_iso8601_date(value):
+ """Convert a ISO8601 date string to a date.
+
+ Args:
+ value (str): The ISO8601 date string.
+
+ Returns:
+ datetime.date: A date equivalent to the date string.
+ """
+ return datetime.datetime.strptime(value, "%Y-%m-%d").date()
+
+
+def from_iso8601_time(value):
+ """Convert a zoneless ISO8601 time string to a time.
+
+ Args:
+ value (str): The ISO8601 time string.
+
+ Returns:
+ datetime.time: A time equivalent to the time string.
+ """
+ return datetime.datetime.strptime(value, "%H:%M:%S").time()
+
+
+def from_rfc3339(value):
+ """Convert an RFC3339-format timestamp to a native datetime.
+
+ Supported formats include those without fractional seconds, or with
+ any fraction up to nanosecond precision.
+
+ .. note::
+ Python datetimes do not support nanosecond precision; this function
+ therefore truncates such values to microseconds.
+
+ Args:
+ value (str): The RFC3339 string to convert.
+
+ Returns:
+ datetime.datetime: The datetime object equivalent to the timestamp
+ in UTC.
+
+ Raises:
+ ValueError: If the timestamp does not match the RFC3339
+ regular expression.
+ """
+ with_nanos = _RFC3339_NANOS.match(value)
+
+ if with_nanos is None:
+ raise ValueError(
+ "Timestamp: {!r}, does not match pattern: {!r}".format(
+ value, _RFC3339_NANOS.pattern
+ )
+ )
+
+ bare_seconds = datetime.datetime.strptime(
+ with_nanos.group("no_fraction"), _RFC3339_NO_FRACTION
+ )
+ fraction = with_nanos.group("nanos")
+
+ if fraction is None:
+ micros = 0
+ else:
+ scale = 9 - len(fraction)
+ nanos = int(fraction) * (10 ** scale)
+ micros = nanos // 1000
+
+ return bare_seconds.replace(microsecond=micros, tzinfo=pytz.utc)
+
+
+from_rfc3339_nanos = from_rfc3339 # from_rfc3339_nanos method was deprecated.
+
+
+def to_rfc3339(value, ignore_zone=True):
+ """Convert a datetime to an RFC3339 timestamp string.
+
+ Args:
+ value (datetime.datetime):
+ The datetime object to be converted to a string.
+ ignore_zone (bool): If True, then the timezone (if any) of the
+ datetime object is ignored and the datetime is treated as UTC.
+
+ Returns:
+ str: The RFC3339 formatted string representing the datetime.
+ """
+ if not ignore_zone and value.tzinfo is not None:
+ # Convert to UTC and remove the time zone info.
+ value = value.replace(tzinfo=None) - value.utcoffset()
+
+ return value.strftime(_RFC3339_MICROS)
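+
+
+# Round-trip sketch (illustrative): parsing truncates nanosecond input to
+# microseconds, and formatting always emits microsecond precision.
+#
+#   from_rfc3339("2020-01-01T00:00:00.123456789Z").microsecond  # -> 123456
+#   to_rfc3339(datetime.datetime(2020, 1, 1))
+#   # -> '2020-01-01T00:00:00.000000Z'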
+
+
+class DatetimeWithNanoseconds(datetime.datetime):
+ """Track nanosecond in addition to normal datetime attrs.
+
+ Nanosecond can be passed only as a keyword argument.
+ """
+
+ __slots__ = ("_nanosecond",)
+
+ # pylint: disable=arguments-differ
+ def __new__(cls, *args, **kw):
+ nanos = kw.pop("nanosecond", 0)
+ if nanos > 0:
+ if "microsecond" in kw:
+ raise TypeError("Specify only one of 'microsecond' or 'nanosecond'")
+ kw["microsecond"] = nanos // 1000
+ inst = datetime.datetime.__new__(cls, *args, **kw)
+ inst._nanosecond = nanos or 0
+ return inst
+
+ # pylint: disable=arguments-differ
+
+ @property
+ def nanosecond(self):
+ """Read-only: nanosecond precision."""
+ return self._nanosecond
+
+ def rfc3339(self):
+ """Return an RFC3339-compliant timestamp.
+
+ Returns:
+ (str): Timestamp string according to RFC3339 spec.
+ """
+ if self._nanosecond == 0:
+ return to_rfc3339(self)
+ nanos = str(self._nanosecond).rjust(9, "0").rstrip("0")
+ return "{}.{}Z".format(self.strftime(_RFC3339_NO_FRACTION), nanos)
+
+ @classmethod
+ def from_rfc3339(cls, stamp):
+ """Parse RFC3339-compliant timestamp, preserving nanoseconds.
+
+ Args:
+ stamp (str): RFC3339 stamp, with up to nanosecond precision
+
+ Returns:
+ :class:`DatetimeWithNanoseconds`:
+ an instance matching the timestamp string
+
+ Raises:
+ ValueError: if `stamp` does not match the expected format
+ """
+ with_nanos = _RFC3339_NANOS.match(stamp)
+ if with_nanos is None:
+ raise ValueError(
+ "Timestamp: {}, does not match pattern: {}".format(
+ stamp, _RFC3339_NANOS.pattern
+ )
+ )
+ bare = datetime.datetime.strptime(
+ with_nanos.group("no_fraction"), _RFC3339_NO_FRACTION
+ )
+ fraction = with_nanos.group("nanos")
+ if fraction is None:
+ nanos = 0
+ else:
+ scale = 9 - len(fraction)
+ nanos = int(fraction) * (10 ** scale)
+ return cls(
+ bare.year,
+ bare.month,
+ bare.day,
+ bare.hour,
+ bare.minute,
+ bare.second,
+ nanosecond=nanos,
+ tzinfo=pytz.UTC,
+ )
+
+ def timestamp_pb(self):
+ """Return a timestamp message.
+
+ Returns:
+ (:class:`~google.protobuf.timestamp_pb2.Timestamp`): Timestamp message
+ """
+ inst = self if self.tzinfo is not None else self.replace(tzinfo=pytz.UTC)
+ delta = inst - _UTC_EPOCH
+ seconds = int(delta.total_seconds())
+ nanos = self._nanosecond or self.microsecond * 1000
+ return timestamp_pb2.Timestamp(seconds=seconds, nanos=nanos)
+
+ @classmethod
+ def from_timestamp_pb(cls, stamp):
+ """Parse RFC3339-compliant timestamp, preserving nanoseconds.
+
+ Args:
+ stamp (:class:`~google.protobuf.timestamp_pb2.Timestamp`): timestamp message
+
+ Returns:
+ :class:`DatetimeWithNanoseconds`:
+ an instance matching the timestamp message
+ """
+ microseconds = int(stamp.seconds * 1e6)
+ bare = from_microseconds(microseconds)
+ return cls(
+ bare.year,
+ bare.month,
+ bare.day,
+ bare.hour,
+ bare.minute,
+ bare.second,
+ nanosecond=stamp.nanos,
+ tzinfo=pytz.UTC,
+ )
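+
+
+# Sketch (illustrative): nanosecond precision survives an RFC3339 round trip,
+# which a plain datetime cannot represent.
+#
+#   dt = DatetimeWithNanoseconds.from_rfc3339("2020-01-01T00:00:00.000000001Z")
+#   dt.nanosecond  # -> 1
+#   dt.rfc3339()   # -> '2020-01-01T00:00:00.000000001Z'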
diff --git a/venv/Lib/site-packages/google/api_core/exceptions.py b/venv/Lib/site-packages/google/api_core/exceptions.py
new file mode 100644
index 000000000..b9c46ca00
--- /dev/null
+++ b/venv/Lib/site-packages/google/api_core/exceptions.py
@@ -0,0 +1,474 @@
+# Copyright 2014 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Exceptions raised by Google API core & clients.
+
+This module provides base classes for all errors raised by libraries based
+on :mod:`google.api_core`, including both HTTP and gRPC clients.
+"""
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+
+import six
+from six.moves import http_client
+
+try:
+ import grpc
+except ImportError: # pragma: NO COVER
+ grpc = None
+
+# Lookup tables for mapping exceptions from HTTP and gRPC transports.
+# Populated by _APICallErrorMeta
+_HTTP_CODE_TO_EXCEPTION = {}
+_GRPC_CODE_TO_EXCEPTION = {}
+
+
+class GoogleAPIError(Exception):
+ """Base class for all exceptions raised by Google API Clients."""
+
+ pass
+
+
+class DuplicateCredentialArgs(GoogleAPIError):
+ """Raised when multiple credentials are passed."""
+
+ pass
+
+
+@six.python_2_unicode_compatible
+class RetryError(GoogleAPIError):
+ """Raised when a function has exhausted all of its available retries.
+
+ Args:
+ message (str): The exception message.
+ cause (Exception): The last exception raised when retrying the
+ function.
+ """
+
+ def __init__(self, message, cause):
+ super(RetryError, self).__init__(message)
+ self.message = message
+ self._cause = cause
+
+ @property
+ def cause(self):
+ """The last exception raised when retrying the function."""
+ return self._cause
+
+ def __str__(self):
+ return "{}, last exception: {}".format(self.message, self.cause)
+
+
+class _GoogleAPICallErrorMeta(type):
+ """Metaclass for registering GoogleAPICallError subclasses."""
+
+ def __new__(mcs, name, bases, class_dict):
+ cls = type.__new__(mcs, name, bases, class_dict)
+ if cls.code is not None:
+ _HTTP_CODE_TO_EXCEPTION.setdefault(cls.code, cls)
+ if cls.grpc_status_code is not None:
+ _GRPC_CODE_TO_EXCEPTION.setdefault(cls.grpc_status_code, cls)
+ return cls
+
+
+@six.python_2_unicode_compatible
+@six.add_metaclass(_GoogleAPICallErrorMeta)
+class GoogleAPICallError(GoogleAPIError):
+ """Base class for exceptions raised by calling API methods.
+
+ Args:
+ message (str): The exception message.
+ errors (Sequence[Any]): An optional list of error details.
+ response (Union[requests.Request, grpc.Call]): The response or
+ gRPC call metadata.
+ """
+
+ code = None
+ """Optional[int]: The HTTP status code associated with this error.
+
+ This may be ``None`` if the exception does not have a direct mapping
+ to an HTTP error.
+
+ See http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html
+ """
+
+ grpc_status_code = None
+ """Optional[grpc.StatusCode]: The gRPC status code associated with this
+ error.
+
+ This may be ``None`` if the exception does not match up to a gRPC error.
+ """
+
+ def __init__(self, message, errors=(), response=None):
+ super(GoogleAPICallError, self).__init__(message)
+ self.message = message
+ """str: The exception message."""
+ self._errors = errors
+ self._response = response
+
+ def __str__(self):
+ return "{} {}".format(self.code, self.message)
+
+ @property
+ def errors(self):
+ """Detailed error information.
+
+ Returns:
+ Sequence[Any]: A list of additional error details.
+ """
+ return list(self._errors)
+
+ @property
+ def response(self):
+ """Optional[Union[requests.Request, grpc.Call]]: The response or
+ gRPC call metadata."""
+ return self._response
+
+
+class Redirection(GoogleAPICallError):
+ """Base class for for all redirection (HTTP 3xx) responses."""
+
+
+class MovedPermanently(Redirection):
+ """Exception mapping a ``301 Moved Permanently`` response."""
+
+ code = http_client.MOVED_PERMANENTLY
+
+
+class NotModified(Redirection):
+ """Exception mapping a ``304 Not Modified`` response."""
+
+ code = http_client.NOT_MODIFIED
+
+
+class TemporaryRedirect(Redirection):
+ """Exception mapping a ``307 Temporary Redirect`` response."""
+
+ code = http_client.TEMPORARY_REDIRECT
+
+
+class ResumeIncomplete(Redirection):
+ """Exception mapping a ``308 Resume Incomplete`` response.
+
+ .. note:: :attr:`http_client.PERMANENT_REDIRECT` is ``308``, but Google
+ APIs differ in their use of this status code.
+ """
+
+ code = 308
+
+
+class ClientError(GoogleAPICallError):
+ """Base class for all client error (HTTP 4xx) responses."""
+
+
+class BadRequest(ClientError):
+ """Exception mapping a ``400 Bad Request`` response."""
+
+ code = http_client.BAD_REQUEST
+
+
+class InvalidArgument(BadRequest):
+ """Exception mapping a :attr:`grpc.StatusCode.INVALID_ARGUMENT` error."""
+
+ grpc_status_code = grpc.StatusCode.INVALID_ARGUMENT if grpc is not None else None
+
+
+class FailedPrecondition(BadRequest):
+ """Exception mapping a :attr:`grpc.StatusCode.FAILED_PRECONDITION`
+ error."""
+
+ grpc_status_code = grpc.StatusCode.FAILED_PRECONDITION if grpc is not None else None
+
+
+class OutOfRange(BadRequest):
+ """Exception mapping a :attr:`grpc.StatusCode.OUT_OF_RANGE` error."""
+
+ grpc_status_code = grpc.StatusCode.OUT_OF_RANGE if grpc is not None else None
+
+
+class Unauthorized(ClientError):
+ """Exception mapping a ``401 Unauthorized`` response."""
+
+ code = http_client.UNAUTHORIZED
+
+
+class Unauthenticated(Unauthorized):
+ """Exception mapping a :attr:`grpc.StatusCode.UNAUTHENTICATED` error."""
+
+ grpc_status_code = grpc.StatusCode.UNAUTHENTICATED if grpc is not None else None
+
+
+class Forbidden(ClientError):
+ """Exception mapping a ``403 Forbidden`` response."""
+
+ code = http_client.FORBIDDEN
+
+
+class PermissionDenied(Forbidden):
+ """Exception mapping a :attr:`grpc.StatusCode.PERMISSION_DENIED` error."""
+
+ grpc_status_code = grpc.StatusCode.PERMISSION_DENIED if grpc is not None else None
+
+
+class NotFound(ClientError):
+ """Exception mapping a ``404 Not Found`` response or a
+ :attr:`grpc.StatusCode.NOT_FOUND` error."""
+
+ code = http_client.NOT_FOUND
+ grpc_status_code = grpc.StatusCode.NOT_FOUND if grpc is not None else None
+
+
+class MethodNotAllowed(ClientError):
+ """Exception mapping a ``405 Method Not Allowed`` response."""
+
+ code = http_client.METHOD_NOT_ALLOWED
+
+
+class Conflict(ClientError):
+ """Exception mapping a ``409 Conflict`` response."""
+
+ code = http_client.CONFLICT
+
+
+class AlreadyExists(Conflict):
+ """Exception mapping a :attr:`grpc.StatusCode.ALREADY_EXISTS` error."""
+
+ grpc_status_code = grpc.StatusCode.ALREADY_EXISTS if grpc is not None else None
+
+
+class Aborted(Conflict):
+ """Exception mapping a :attr:`grpc.StatusCode.ABORTED` error."""
+
+ grpc_status_code = grpc.StatusCode.ABORTED if grpc is not None else None
+
+
+class LengthRequired(ClientError):
+ """Exception mapping a ``411 Length Required`` response."""
+
+ code = http_client.LENGTH_REQUIRED
+
+
+class PreconditionFailed(ClientError):
+ """Exception mapping a ``412 Precondition Failed`` response."""
+
+ code = http_client.PRECONDITION_FAILED
+
+
+class RequestRangeNotSatisfiable(ClientError):
+ """Exception mapping a ``416 Request Range Not Satisfiable`` response."""
+
+ code = http_client.REQUESTED_RANGE_NOT_SATISFIABLE
+
+
+class TooManyRequests(ClientError):
+ """Exception mapping a ``429 Too Many Requests`` response."""
+
+ # http_client does not define a constant for this in Python 2.
+ code = 429
+
+
+class ResourceExhausted(TooManyRequests):
+ """Exception mapping a :attr:`grpc.StatusCode.RESOURCE_EXHAUSTED` error."""
+
+ grpc_status_code = grpc.StatusCode.RESOURCE_EXHAUSTED if grpc is not None else None
+
+
+class Cancelled(ClientError):
+ """Exception mapping a :attr:`grpc.StatusCode.CANCELLED` error."""
+
+ # This maps to HTTP status code 499. See
+ # https://github.com/googleapis/googleapis/blob/master/google/rpc\
+ # /code.proto
+ code = 499
+ grpc_status_code = grpc.StatusCode.CANCELLED if grpc is not None else None
+
+
+class ServerError(GoogleAPICallError):
+ """Base for 5xx responses."""
+
+
+class InternalServerError(ServerError):
+ """Exception mapping a ``500 Internal Server Error`` response. or a
+ :attr:`grpc.StatusCode.INTERNAL` error."""
+
+ code = http_client.INTERNAL_SERVER_ERROR
+ grpc_status_code = grpc.StatusCode.INTERNAL if grpc is not None else None
+
+
+class Unknown(ServerError):
+ """Exception mapping a :attr:`grpc.StatusCode.UNKNOWN` error."""
+
+ grpc_status_code = grpc.StatusCode.UNKNOWN if grpc is not None else None
+
+
+class DataLoss(ServerError):
+ """Exception mapping a :attr:`grpc.StatusCode.DATA_LOSS` error."""
+
+ grpc_status_code = grpc.StatusCode.DATA_LOSS if grpc is not None else None
+
+
+class MethodNotImplemented(ServerError):
+ """Exception mapping a ``501 Not Implemented`` response or a
+ :attr:`grpc.StatusCode.UNIMPLEMENTED` error."""
+
+ code = http_client.NOT_IMPLEMENTED
+ grpc_status_code = grpc.StatusCode.UNIMPLEMENTED if grpc is not None else None
+
+
+class BadGateway(ServerError):
+ """Exception mapping a ``502 Bad Gateway`` response."""
+
+ code = http_client.BAD_GATEWAY
+
+
+class ServiceUnavailable(ServerError):
+ """Exception mapping a ``503 Service Unavailable`` response or a
+ :attr:`grpc.StatusCode.UNAVAILABLE` error."""
+
+ code = http_client.SERVICE_UNAVAILABLE
+ grpc_status_code = grpc.StatusCode.UNAVAILABLE if grpc is not None else None
+
+
+class GatewayTimeout(ServerError):
+ """Exception mapping a ``504 Gateway Timeout`` response."""
+
+ code = http_client.GATEWAY_TIMEOUT
+
+
+class DeadlineExceeded(GatewayTimeout):
+ """Exception mapping a :attr:`grpc.StatusCode.DEADLINE_EXCEEDED` error."""
+
+ grpc_status_code = grpc.StatusCode.DEADLINE_EXCEEDED if grpc is not None else None
+
+
+def exception_class_for_http_status(status_code):
+ """Return the exception class for a specific HTTP status code.
+
+ Args:
+ status_code (int): The HTTP status code.
+
+ Returns:
+ :func:`type`: the appropriate subclass of :class:`GoogleAPICallError`.
+ """
+ return _HTTP_CODE_TO_EXCEPTION.get(status_code, GoogleAPICallError)
+
+
+def from_http_status(status_code, message, **kwargs):
+ """Create a :class:`GoogleAPICallError` from an HTTP status code.
+
+ Args:
+ status_code (int): The HTTP status code.
+ message (str): The exception message.
+ kwargs: Additional arguments passed to the :class:`GoogleAPICallError`
+ constructor.
+
+ Returns:
+ GoogleAPICallError: An instance of the appropriate subclass of
+ :class:`GoogleAPICallError`.
+ """
+ error_class = exception_class_for_http_status(status_code)
+ error = error_class(message, **kwargs)
+
+ if error.code is None:
+ error.code = status_code
+
+ return error
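+
+
+# Sketch (illustrative): the metaclass registry maps status 404 to NotFound.
+#
+#   err = from_http_status(404, "resource missing")
+#   isinstance(err, NotFound)  # -> True
+#   err.code                   # -> 404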
+
+
+def from_http_response(response):
+ """Create a :class:`GoogleAPICallError` from a :class:`requests.Response`.
+
+ Args:
+ response (requests.Response): The HTTP response.
+
+ Returns:
+ GoogleAPICallError: An instance of the appropriate subclass of
+ :class:`GoogleAPICallError`, with the message and errors populated
+ from the response.
+ """
+ try:
+ payload = response.json()
+ except ValueError:
+ payload = {"error": {"message": response.text or "unknown error"}}
+
+ error_message = payload.get("error", {}).get("message", "unknown error")
+ errors = payload.get("error", {}).get("errors", ())
+
+ message = "{method} {url}: {error}".format(
+ method=response.request.method, url=response.request.url, error=error_message
+ )
+
+ exception = from_http_status(
+ response.status_code, message, errors=errors, response=response
+ )
+ return exception
+
+
+def exception_class_for_grpc_status(status_code):
+ """Return the exception class for a specific :class:`grpc.StatusCode`.
+
+ Args:
+ status_code (grpc.StatusCode): The gRPC status code.
+
+ Returns:
+ :func:`type`: the appropriate subclass of :class:`GoogleAPICallError`.
+ """
+ return _GRPC_CODE_TO_EXCEPTION.get(status_code, GoogleAPICallError)
+
+
+def from_grpc_status(status_code, message, **kwargs):
+ """Create a :class:`GoogleAPICallError` from a :class:`grpc.StatusCode`.
+
+ Args:
+ status_code (grpc.StatusCode): The gRPC status code.
+ message (str): The exception message.
+ kwargs: Additional arguments passed to the :class:`GoogleAPICallError`
+ constructor.
+
+ Returns:
+ GoogleAPICallError: An instance of the appropriate subclass of
+ :class:`GoogleAPICallError`.
+ """
+ error_class = exception_class_for_grpc_status(status_code)
+ error = error_class(message, **kwargs)
+
+ if error.grpc_status_code is None:
+ error.grpc_status_code = status_code
+
+ return error
+
+
+def _is_informative_grpc_error(rpc_exc):
+ return hasattr(rpc_exc, "code") and hasattr(rpc_exc, "details")
+
+
+def from_grpc_error(rpc_exc):
+ """Create a :class:`GoogleAPICallError` from a :class:`grpc.RpcError`.
+
+ Args:
+ rpc_exc (grpc.RpcError): The gRPC error.
+
+ Returns:
+ GoogleAPICallError: An instance of the appropriate subclass of
+ :class:`GoogleAPICallError`.
+ """
+ # NOTE(lidiz) All gRPC errors share the parent class grpc.RpcError.
+ # However, checking for grpc.RpcError breaks backward compatibility.
+ if isinstance(rpc_exc, grpc.Call) or _is_informative_grpc_error(rpc_exc):
+ return from_grpc_status(
+ rpc_exc.code(), rpc_exc.details(), errors=(rpc_exc,), response=rpc_exc
+ )
+ else:
+ return GoogleAPICallError(str(rpc_exc), errors=(rpc_exc,), response=rpc_exc)
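+
+
+# Sketch (illustrative): gRPC status codes resolve through the same registry.
+#
+#   exception_class_for_grpc_status(grpc.StatusCode.UNAVAILABLE)
+#   # -> ServiceUnavailable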
diff --git a/venv/Lib/site-packages/google/api_core/future/__init__.py b/venv/Lib/site-packages/google/api_core/future/__init__.py
new file mode 100644
index 000000000..3768b2c53
--- /dev/null
+++ b/venv/Lib/site-packages/google/api_core/future/__init__.py
@@ -0,0 +1,19 @@
+# Copyright 2017, Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Futures for dealing with asynchronous operations."""
+
+from google.api_core.future.base import Future
+
+__all__ = ["Future"]
diff --git a/venv/Lib/site-packages/google/api_core/future/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/google/api_core/future/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..e379d2829
Binary files /dev/null and b/venv/Lib/site-packages/google/api_core/future/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/api_core/future/__pycache__/_helpers.cpython-36.pyc b/venv/Lib/site-packages/google/api_core/future/__pycache__/_helpers.cpython-36.pyc
new file mode 100644
index 000000000..1a5a38c38
Binary files /dev/null and b/venv/Lib/site-packages/google/api_core/future/__pycache__/_helpers.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/api_core/future/__pycache__/async_future.cpython-36.pyc b/venv/Lib/site-packages/google/api_core/future/__pycache__/async_future.cpython-36.pyc
new file mode 100644
index 000000000..6681615b1
Binary files /dev/null and b/venv/Lib/site-packages/google/api_core/future/__pycache__/async_future.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/api_core/future/__pycache__/base.cpython-36.pyc b/venv/Lib/site-packages/google/api_core/future/__pycache__/base.cpython-36.pyc
new file mode 100644
index 000000000..46b94fbbd
Binary files /dev/null and b/venv/Lib/site-packages/google/api_core/future/__pycache__/base.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/api_core/future/__pycache__/polling.cpython-36.pyc b/venv/Lib/site-packages/google/api_core/future/__pycache__/polling.cpython-36.pyc
new file mode 100644
index 000000000..ce0af8287
Binary files /dev/null and b/venv/Lib/site-packages/google/api_core/future/__pycache__/polling.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/api_core/future/_helpers.py b/venv/Lib/site-packages/google/api_core/future/_helpers.py
new file mode 100644
index 000000000..9e88ca9d5
--- /dev/null
+++ b/venv/Lib/site-packages/google/api_core/future/_helpers.py
@@ -0,0 +1,39 @@
+# Copyright 2017, Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Private helpers for futures."""
+
+import logging
+import threading
+
+
+_LOGGER = logging.getLogger(__name__)
+
+
+def start_daemon_thread(*args, **kwargs):
+ """Starts a thread and marks it as a daemon thread."""
+ thread = threading.Thread(*args, **kwargs)
+ thread.daemon = True
+ thread.start()
+ return thread
+
+
+def safe_invoke_callback(callback, *args, **kwargs):
+ """Invoke a callback, swallowing and logging any exceptions."""
+ # pylint: disable=bare-except
+ # We intentionally want to swallow all exceptions.
+ try:
+ return callback(*args, **kwargs)
+ except Exception:
+ _LOGGER.exception("Error while executing Future callback.")
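+
+
+# Sketch (illustrative): a raising callback is logged, never propagated.
+#
+#   safe_invoke_callback(lambda: 1 / 0)  # logs ZeroDivisionError, returns None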
diff --git a/venv/Lib/site-packages/google/api_core/future/async_future.py b/venv/Lib/site-packages/google/api_core/future/async_future.py
new file mode 100644
index 000000000..e1d158d0e
--- /dev/null
+++ b/venv/Lib/site-packages/google/api_core/future/async_future.py
@@ -0,0 +1,157 @@
+# Copyright 2020, Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""AsyncIO implementation of the abstract base Future class."""
+
+import asyncio
+
+from google.api_core import exceptions
+from google.api_core import retry
+from google.api_core import retry_async
+from google.api_core.future import base
+
+
+class _OperationNotComplete(Exception):
+ """Private exception used for polling via retry."""
+ pass
+
+
+RETRY_PREDICATE = retry.if_exception_type(
+ _OperationNotComplete,
+ exceptions.TooManyRequests,
+ exceptions.InternalServerError,
+ exceptions.BadGateway,
+)
+DEFAULT_RETRY = retry_async.AsyncRetry(predicate=RETRY_PREDICATE)
+
+
+class AsyncFuture(base.Future):
+ """A Future that polls peer service to self-update.
+
+ The :meth:`done` method should be implemented by subclasses. The polling
+ behavior will repeatedly call ``done`` until it returns True.
+
+ .. note:: Privacy here is intended to prevent the final class from
+ overexposing, not to prevent subclasses from accessing methods.
+
+ Args:
+ retry (google.api_core.retry.Retry): The retry configuration used
+ when polling. This can be used to control how often :meth:`done`
+ is polled. Regardless of the retry's ``deadline``, it will be
+ overridden by the ``timeout`` argument to :meth:`result`.
+ """
+
+ def __init__(self, retry=DEFAULT_RETRY):
+ super().__init__()
+ self._retry = retry
+ self._future = asyncio.get_event_loop().create_future()
+ self._background_task = None
+
+ async def done(self, retry=DEFAULT_RETRY):
+ """Checks to see if the operation is complete.
+
+ Args:
+ retry (google.api_core.retry.Retry): (Optional) How to retry the RPC.
+
+ Returns:
+ bool: True if the operation is complete, False otherwise.
+ """
+ # pylint: disable=redundant-returns-doc, missing-raises-doc
+ raise NotImplementedError()
+
+ async def _done_or_raise(self):
+ """Check if the future is done and raise if it's not."""
+ result = await self.done()
+ if not result:
+ raise _OperationNotComplete()
+
+ async def running(self):
+ """True if the operation is currently running."""
+ result = await self.done()
+ return not result
+
+ async def _blocking_poll(self, timeout=None):
+ """Poll and await for the Future to be resolved.
+
+ Args:
+ timeout (int):
+ How long (in seconds) to wait for the operation to complete.
+ If None, wait indefinitely.
+ """
+ if self._future.done():
+ return
+
+ retry_ = self._retry.with_deadline(timeout)
+
+ try:
+ await retry_(self._done_or_raise)()
+ except exceptions.RetryError:
+ raise asyncio.TimeoutError(
+ "Operation did not complete within the designated " "timeout."
+ )
+
+ async def result(self, timeout=None):
+ """Get the result of the operation.
+
+ Args:
+ timeout (int):
+ How long (in seconds) to wait for the operation to complete.
+ If None, wait indefinitely.
+
+ Returns:
+ google.protobuf.Message: The Operation's result.
+
+ Raises:
+ google.api_core.GoogleAPICallError: If the operation errors or if
+ the timeout is reached before the operation completes.
+ """
+ await self._blocking_poll(timeout=timeout)
+ return self._future.result()
+
+ async def exception(self, timeout=None):
+ """Get the exception from the operation.
+
+ Args:
+ timeout (int): How long to wait for the operation to complete.
+ If None, wait indefinitely.
+
+ Returns:
+ Optional[google.api_core.GoogleAPICallError]: The operation's
+ error.
+ """
+ await self._blocking_poll(timeout=timeout)
+ return self._future.exception()
+
+ def add_done_callback(self, fn):
+ """Add a callback to be executed when the operation is complete.
+
+ If the operation is completed, the callback will be scheduled onto the
+ event loop. Otherwise, the callback will be stored and invoked when the
+ future is done.
+
+ Args:
+ fn (Callable[Future]): The callback to execute when the operation
+ is complete.
+ """
+ if self._background_task is None:
+ self._background_task = asyncio.get_event_loop().create_task(self._blocking_poll())
+ self._future.add_done_callback(fn)
+
+ def set_result(self, result):
+ """Set the Future's result."""
+ self._future.set_result(result)
+
+ def set_exception(self, exception):
+ """Set the Future's exception."""
+ self._future.set_exception(exception)
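+
+
+# Minimal subclass sketch (illustrative): ``done`` drives the polling loop;
+# the remaining abstract methods from base.Future are stubbed for brevity.
+#
+#   class _OneShot(AsyncFuture):
+#       async def done(self, retry=DEFAULT_RETRY):
+#           self.set_result("ok")
+#           return True
+#
+#       def cancel(self):
+#           return False
+#
+#       def cancelled(self):
+#           return False
+#
+#   asyncio.get_event_loop().run_until_complete(_OneShot().result())  # -> 'ok'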
diff --git a/venv/Lib/site-packages/google/api_core/future/base.py b/venv/Lib/site-packages/google/api_core/future/base.py
new file mode 100644
index 000000000..e7888ca3b
--- /dev/null
+++ b/venv/Lib/site-packages/google/api_core/future/base.py
@@ -0,0 +1,67 @@
+# Copyright 2017, Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Abstract and helper bases for Future implementations."""
+
+import abc
+
+import six
+
+
+@six.add_metaclass(abc.ABCMeta)
+class Future(object):
+ # pylint: disable=missing-docstring
+ # We inherit the interfaces here from concurrent.futures.
+
+ """Future interface.
+
+ This interface is based on :class:`concurrent.futures.Future`.
+ """
+
+ @abc.abstractmethod
+ def cancel(self):
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def cancelled(self):
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def running(self):
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def done(self):
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def result(self, timeout=None):
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def exception(self, timeout=None):
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def add_done_callback(self, fn):
+ # pylint: disable=invalid-name
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def set_result(self, result):
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def set_exception(self, exception):
+ raise NotImplementedError()
diff --git a/venv/Lib/site-packages/google/api_core/future/polling.py b/venv/Lib/site-packages/google/api_core/future/polling.py
new file mode 100644
index 000000000..6b4c687d0
--- /dev/null
+++ b/venv/Lib/site-packages/google/api_core/future/polling.py
@@ -0,0 +1,186 @@
+# Copyright 2017, Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Abstract and helper bases for Future implementations."""
+
+import abc
+import concurrent.futures
+
+from google.api_core import exceptions
+from google.api_core import retry
+from google.api_core.future import _helpers
+from google.api_core.future import base
+
+
+class _OperationNotComplete(Exception):
+ """Private exception used for polling via retry."""
+
+ pass
+
+
+RETRY_PREDICATE = retry.if_exception_type(
+ _OperationNotComplete,
+ exceptions.TooManyRequests,
+ exceptions.InternalServerError,
+ exceptions.BadGateway,
+)
+DEFAULT_RETRY = retry.Retry(predicate=RETRY_PREDICATE)
+
+
+class PollingFuture(base.Future):
+ """A Future that needs to poll some service to check its status.
+
+ The :meth:`done` method should be implemented by subclasses. The polling
+ behavior will repeatedly call ``done`` until it returns True.
+
+ .. note:: Privacy here is intended to prevent the final class from
+ overexposing, not to prevent subclasses from accessing methods.
+
+ Args:
+ retry (google.api_core.retry.Retry): The retry configuration used
+ when polling. This can be used to control how often :meth:`done`
+ is polled. Regardless of the retry's ``deadline``, it will be
+ overridden by the ``timeout`` argument to :meth:`result`.
+ """
+
+ def __init__(self, retry=DEFAULT_RETRY):
+ super(PollingFuture, self).__init__()
+ self._retry = retry
+ self._result = None
+ self._exception = None
+ self._result_set = False
+ """bool: Set to True when the result has been set via set_result or
+ set_exception."""
+ self._polling_thread = None
+ self._done_callbacks = []
+
+ @abc.abstractmethod
+ def done(self, retry=DEFAULT_RETRY):
+ """Checks to see if the operation is complete.
+
+ Args:
+ retry (google.api_core.retry.Retry): (Optional) How to retry the RPC.
+
+ Returns:
+ bool: True if the operation is complete, False otherwise.
+ """
+ # pylint: disable=redundant-returns-doc, missing-raises-doc
+ raise NotImplementedError()
+
+ def _done_or_raise(self):
+ """Check if the future is done and raise if it's not."""
+ if not self.done():
+ raise _OperationNotComplete()
+
+ def running(self):
+ """True if the operation is currently running."""
+ return not self.done()
+
+ def _blocking_poll(self, timeout=None):
+ """Poll and wait for the Future to be resolved.
+
+ Args:
+ timeout (int):
+ How long (in seconds) to wait for the operation to complete.
+ If None, wait indefinitely.
+ """
+ if self._result_set:
+ return
+
+ retry_ = self._retry.with_deadline(timeout)
+
+ try:
+ retry_(self._done_or_raise)()
+ except exceptions.RetryError:
+ raise concurrent.futures.TimeoutError(
+ "Operation did not complete within the designated " "timeout."
+ )
+
+ def result(self, timeout=None):
+ """Get the result of the operation, blocking if necessary.
+
+ Args:
+ timeout (int):
+ How long (in seconds) to wait for the operation to complete.
+ If None, wait indefinitely.
+
+ Returns:
+ google.protobuf.Message: The Operation's result.
+
+ Raises:
+ google.api_core.GoogleAPICallError: If the operation errors or if
+ the timeout is reached before the operation completes.
+ """
+ self._blocking_poll(timeout=timeout)
+
+ if self._exception is not None:
+ # pylint: disable=raising-bad-type
+ # Pylint doesn't recognize that this is valid in this case.
+ raise self._exception
+
+ return self._result
+
+ def exception(self, timeout=None):
+ """Get the exception from the operation, blocking if necessary.
+
+ Args:
+ timeout (int): How long to wait for the operation to complete.
+ If None, wait indefinitely.
+
+ Returns:
+ Optional[google.api_core.GoogleAPICallError]: The operation's
+ error.
+ """
+ self._blocking_poll(timeout=timeout)
+ return self._exception
+
+ def add_done_callback(self, fn):
+ """Add a callback to be executed when the operation is complete.
+
+ If the operation is not already complete, this will start a helper
+ thread to poll for the status of the operation in the background.
+
+ Args:
+ fn (Callable[Future]): The callback to execute when the operation
+ is complete.
+ """
+ if self._result_set:
+ _helpers.safe_invoke_callback(fn, self)
+ return
+
+ self._done_callbacks.append(fn)
+
+ if self._polling_thread is None:
+ # The polling thread will exit on its own as soon as the operation
+ # is done.
+ self._polling_thread = _helpers.start_daemon_thread(
+ target=self._blocking_poll
+ )
+
+ def _invoke_callbacks(self, *args, **kwargs):
+ """Invoke all done callbacks."""
+ for callback in self._done_callbacks:
+ _helpers.safe_invoke_callback(callback, *args, **kwargs)
+
+ def set_result(self, result):
+ """Set the Future's result."""
+ self._result = result
+ self._result_set = True
+ self._invoke_callbacks(self)
+
+ def set_exception(self, exception):
+ """Set the Future's exception."""
+ self._exception = exception
+ self._result_set = True
+ self._invoke_callbacks(self)
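+
+
+# Minimal subclass sketch (illustrative): ``done`` is the only hook the
+# polling machinery needs; cancel/cancelled are stubbed to satisfy base.Future.
+#
+#   class _CountdownFuture(PollingFuture):
+#       def __init__(self, ticks=3):
+#           super(_CountdownFuture, self).__init__()
+#           self._ticks = ticks
+#
+#       def done(self, retry=DEFAULT_RETRY):
+#           self._ticks -= 1
+#           if self._ticks <= 0:
+#               self.set_result("finished")
+#           return self._result_set
+#
+#       def cancel(self):
+#           return False
+#
+#       def cancelled(self):
+#           return False
+#
+#   _CountdownFuture().result()  # polls done() until True -> 'finished'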
diff --git a/venv/Lib/site-packages/google/api_core/gapic_v1/__init__.py b/venv/Lib/site-packages/google/api_core/gapic_v1/__init__.py
new file mode 100644
index 000000000..ed95da13d
--- /dev/null
+++ b/venv/Lib/site-packages/google/api_core/gapic_v1/__init__.py
@@ -0,0 +1,28 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+
+from google.api_core.gapic_v1 import client_info
+from google.api_core.gapic_v1 import config
+from google.api_core.gapic_v1 import method
+from google.api_core.gapic_v1 import routing_header
+
+__all__ = ["client_info", "config", "method", "routing_header"]
+
+if sys.version_info >= (3, 6):
+ from google.api_core.gapic_v1 import config_async # noqa: F401
+ from google.api_core.gapic_v1 import method_async # noqa: F401
+ __all__.append("config_async")
+ __all__.append("method_async")
diff --git a/venv/Lib/site-packages/google/api_core/gapic_v1/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/google/api_core/gapic_v1/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..dfae8addf
Binary files /dev/null and b/venv/Lib/site-packages/google/api_core/gapic_v1/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/api_core/gapic_v1/__pycache__/client_info.cpython-36.pyc b/venv/Lib/site-packages/google/api_core/gapic_v1/__pycache__/client_info.cpython-36.pyc
new file mode 100644
index 000000000..9a7bcd970
Binary files /dev/null and b/venv/Lib/site-packages/google/api_core/gapic_v1/__pycache__/client_info.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/api_core/gapic_v1/__pycache__/config.cpython-36.pyc b/venv/Lib/site-packages/google/api_core/gapic_v1/__pycache__/config.cpython-36.pyc
new file mode 100644
index 000000000..151317085
Binary files /dev/null and b/venv/Lib/site-packages/google/api_core/gapic_v1/__pycache__/config.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/api_core/gapic_v1/__pycache__/config_async.cpython-36.pyc b/venv/Lib/site-packages/google/api_core/gapic_v1/__pycache__/config_async.cpython-36.pyc
new file mode 100644
index 000000000..bc7088754
Binary files /dev/null and b/venv/Lib/site-packages/google/api_core/gapic_v1/__pycache__/config_async.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/api_core/gapic_v1/__pycache__/method.cpython-36.pyc b/venv/Lib/site-packages/google/api_core/gapic_v1/__pycache__/method.cpython-36.pyc
new file mode 100644
index 000000000..c6b873558
Binary files /dev/null and b/venv/Lib/site-packages/google/api_core/gapic_v1/__pycache__/method.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/api_core/gapic_v1/__pycache__/method_async.cpython-36.pyc b/venv/Lib/site-packages/google/api_core/gapic_v1/__pycache__/method_async.cpython-36.pyc
new file mode 100644
index 000000000..832611c30
Binary files /dev/null and b/venv/Lib/site-packages/google/api_core/gapic_v1/__pycache__/method_async.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/api_core/gapic_v1/__pycache__/routing_header.cpython-36.pyc b/venv/Lib/site-packages/google/api_core/gapic_v1/__pycache__/routing_header.cpython-36.pyc
new file mode 100644
index 000000000..3ed3ca943
Binary files /dev/null and b/venv/Lib/site-packages/google/api_core/gapic_v1/__pycache__/routing_header.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/api_core/gapic_v1/client_info.py b/venv/Lib/site-packages/google/api_core/gapic_v1/client_info.py
new file mode 100644
index 000000000..bdc2ce440
--- /dev/null
+++ b/venv/Lib/site-packages/google/api_core/gapic_v1/client_info.py
@@ -0,0 +1,55 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for providing client information.
+
+Client information is used to send information about the calling client,
+such as the library and Python version, to API services.
+"""
+
+from google.api_core import client_info
+
+
+METRICS_METADATA_KEY = "x-goog-api-client"
+
+
+class ClientInfo(client_info.ClientInfo):
+ """Client information used to generate a user-agent for API calls.
+
+ This user-agent information is sent along with API calls to allow the
+ receiving service to do analytics on which versions of Python and Google
+ libraries are being used.
+
+ Args:
+ python_version (str): The Python interpreter version, for example,
+ ``'2.7.13'``.
+ grpc_version (Optional[str]): The gRPC library version.
+ api_core_version (str): The google-api-core library version.
+ gapic_version (Optional[str]): The version of the gapic-generated client
+ library, if the library was generated by gapic.
+ client_library_version (Optional[str]): The version of the client
+ library, generally used if the client library was not generated
+ by gapic or if additional functionality was built on top of
+ a gapic client library.
+ user_agent (Optional[str]): Prefix to the user agent header. This is
+ used to supply information such as application name or partner tool.
+ Recommended format: ``application-or-tool-ID/major.minor.version``.
+ """
+
+ def to_grpc_metadata(self):
+ """Returns the gRPC metadata for this client info."""
+ return (METRICS_METADATA_KEY, self.to_user_agent())
+
+
+DEFAULT_CLIENT_INFO = ClientInfo()
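+
+
+# Sketch (illustrative output): the metadata tuple pairs the metrics header
+# key with the user-agent string.
+#
+#   DEFAULT_CLIENT_INFO.to_grpc_metadata()
+#   # -> ('x-goog-api-client', 'gl-python/3.6.8 grpc/1.30.0 gax/1.22.0')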
diff --git a/venv/Lib/site-packages/google/api_core/gapic_v1/config.py b/venv/Lib/site-packages/google/api_core/gapic_v1/config.py
new file mode 100644
index 000000000..2a56cf1b5
--- /dev/null
+++ b/venv/Lib/site-packages/google/api_core/gapic_v1/config.py
@@ -0,0 +1,169 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for loading gapic configuration data.
+
+The Google API generator creates supplementary configuration for each RPC
+method to tell the client library how to deal with retries and timeouts.
+"""
+
+import collections
+
+import grpc
+import six
+
+from google.api_core import exceptions
+from google.api_core import retry
+from google.api_core import timeout
+
+
+_MILLIS_PER_SECOND = 1000.0
+
+
+def _exception_class_for_grpc_status_name(name):
+ """Returns the Google API exception class for a gRPC error code name.
+
+ Args:
+ name (str): The name of the gRPC status code, for example,
+ ``UNAVAILABLE``.
+
+ Returns:
+ :func:`type`: The appropriate subclass of
+ :class:`google.api_core.exceptions.GoogleAPICallError`.
+ """
+ return exceptions.exception_class_for_grpc_status(getattr(grpc.StatusCode, name))
+
+
+def _retry_from_retry_config(retry_params, retry_codes, retry_impl=retry.Retry):
+ """Creates a Retry object given a gapic retry configuration.
+
+ Args:
+ retry_params (dict): The retry parameter values, for example::
+
+ {
+ "initial_retry_delay_millis": 1000,
+ "retry_delay_multiplier": 2.5,
+ "max_retry_delay_millis": 120000,
+ "initial_rpc_timeout_millis": 120000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 120000,
+ "total_timeout_millis": 600000
+ }
+
+ retry_codes (sequence[str]): The list of retryable gRPC error code
+ names.
+
+ Returns:
+ google.api_core.retry.Retry: The default retry object for the method.
+ """
+ exception_classes = [
+ _exception_class_for_grpc_status_name(code) for code in retry_codes
+ ]
+ return retry_impl(
+ retry.if_exception_type(*exception_classes),
+ initial=(retry_params["initial_retry_delay_millis"] / _MILLIS_PER_SECOND),
+ maximum=(retry_params["max_retry_delay_millis"] / _MILLIS_PER_SECOND),
+ multiplier=retry_params["retry_delay_multiplier"],
+ deadline=retry_params["total_timeout_millis"] / _MILLIS_PER_SECOND,
+ )
+
+
+def _timeout_from_retry_config(retry_params):
+ """Creates a ExponentialTimeout object given a gapic retry configuration.
+
+ Args:
+ retry_params (dict): The retry parameter values, for example::
+
+ {
+ "initial_retry_delay_millis": 1000,
+ "retry_delay_multiplier": 2.5,
+ "max_retry_delay_millis": 120000,
+ "initial_rpc_timeout_millis": 120000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 120000,
+ "total_timeout_millis": 600000
+ }
+
+ Returns:
+        google.api_core.retry.ExponentialTimeout: The default timeout object
+            for the method.
+ """
+ return timeout.ExponentialTimeout(
+ initial=(retry_params["initial_rpc_timeout_millis"] / _MILLIS_PER_SECOND),
+ maximum=(retry_params["max_rpc_timeout_millis"] / _MILLIS_PER_SECOND),
+ multiplier=retry_params["rpc_timeout_multiplier"],
+ deadline=(retry_params["total_timeout_millis"] / _MILLIS_PER_SECOND),
+ )
+
+
+MethodConfig = collections.namedtuple("MethodConfig", ["retry", "timeout"])
+
+
+def parse_method_configs(interface_config, retry_impl=retry.Retry):
+ """Creates default retry and timeout objects for each method in a gapic
+ interface config.
+
+ Args:
+ interface_config (Mapping): The interface config section of the full
+            gapic library config. For example, if the full configuration has
+ an interface named ``google.example.v1.ExampleService`` you would
+ pass in just that interface's configuration, for example
+ ``gapic_config['interfaces']['google.example.v1.ExampleService']``.
+ retry_impl (Callable): The constructor that creates a retry decorator
+ that will be applied to the method based on method configs.
+
+ Returns:
+ Mapping[str, MethodConfig]: A mapping of RPC method names to their
+ configuration.
+ """
+ # Grab all the retry codes
+ retry_codes_map = {
+ name: retry_codes
+ for name, retry_codes in six.iteritems(interface_config.get("retry_codes", {}))
+ }
+
+ # Grab all of the retry params
+ retry_params_map = {
+ name: retry_params
+ for name, retry_params in six.iteritems(
+ interface_config.get("retry_params", {})
+ )
+ }
+
+ # Iterate through all the API methods and create a flat MethodConfig
+ # instance for each one.
+ method_configs = {}
+
+ for method_name, method_params in six.iteritems(
+ interface_config.get("methods", {})
+ ):
+ retry_params_name = method_params.get("retry_params_name")
+
+ if retry_params_name is not None:
+ retry_params = retry_params_map[retry_params_name]
+ retry_ = _retry_from_retry_config(
+ retry_params, retry_codes_map[method_params["retry_codes_name"]], retry_impl
+ )
+ timeout_ = _timeout_from_retry_config(retry_params)
+
+ # No retry config, so this is a non-retryable method.
+ else:
+ retry_ = None
+ timeout_ = timeout.ConstantTimeout(
+ method_params["timeout_millis"] / _MILLIS_PER_SECOND
+ )
+
+ method_configs[method_name] = MethodConfig(retry=retry_, timeout=timeout_)
+
+ return method_configs
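+
+
+# A minimal sketch of the expected input shape (hypothetical config values):
+#
+#     interface_config = {
+#         "retry_codes": {"idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"]},
+#         "retry_params": {"default": {
+#             "initial_retry_delay_millis": 100,
+#             "retry_delay_multiplier": 1.3,
+#             "max_retry_delay_millis": 60000,
+#             "initial_rpc_timeout_millis": 20000,
+#             "rpc_timeout_multiplier": 1.0,
+#             "max_rpc_timeout_millis": 20000,
+#             "total_timeout_millis": 600000,
+#         }},
+#         "methods": {
+#             "GetTopic": {
+#                 "retry_codes_name": "idempotent",
+#                 "retry_params_name": "default",
+#                 "timeout_millis": 60000,
+#             },
+#         },
+#     }
+#
+#     configs = parse_method_configs(interface_config)
+#     # configs["GetTopic"].retry is a Retry over DeadlineExceeded and
+#     # ServiceUnavailable; configs["GetTopic"].timeout is an
+#     # ExponentialTimeout built from the millisecond values above.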
diff --git a/venv/Lib/site-packages/google/api_core/gapic_v1/config_async.py b/venv/Lib/site-packages/google/api_core/gapic_v1/config_async.py
new file mode 100644
index 000000000..00e5e2401
--- /dev/null
+++ b/venv/Lib/site-packages/google/api_core/gapic_v1/config_async.py
@@ -0,0 +1,42 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""AsyncIO helpers for loading gapic configuration data.
+
+The Google API generator creates supplementary configuration for each RPC
+method to tell the client library how to deal with retries and timeouts.
+"""
+
+from google.api_core import retry_async
+from google.api_core.gapic_v1 import config
+from google.api_core.gapic_v1.config import MethodConfig # noqa: F401
+
+
+def parse_method_configs(interface_config):
+ """Creates default retry and timeout objects for each method in a gapic
+ interface config with AsyncIO semantics.
+
+ Args:
+ interface_config (Mapping): The interface config section of the full
+            gapic library config. For example, if the full configuration has
+ an interface named ``google.example.v1.ExampleService`` you would
+ pass in just that interface's configuration, for example
+ ``gapic_config['interfaces']['google.example.v1.ExampleService']``.
+
+ Returns:
+ Mapping[str, MethodConfig]: A mapping of RPC method names to their
+ configuration.
+ """
+ return config.parse_method_configs(
+ interface_config,
+ retry_impl=retry_async.AsyncRetry)
diff --git a/venv/Lib/site-packages/google/api_core/gapic_v1/method.py b/venv/Lib/site-packages/google/api_core/gapic_v1/method.py
new file mode 100644
index 000000000..8bf82569d
--- /dev/null
+++ b/venv/Lib/site-packages/google/api_core/gapic_v1/method.py
@@ -0,0 +1,244 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for wrapping low-level gRPC methods with common functionality.
+
+This is used by gapic clients to provide common error mapping, retry, timeout,
+pagination, and long-running operations to gRPC methods.
+"""
+
+from google.api_core import general_helpers
+from google.api_core import grpc_helpers
+from google.api_core import timeout
+from google.api_core.gapic_v1 import client_info
+
+USE_DEFAULT_METADATA = object()
+DEFAULT = object()
+"""Sentinel value indicating that a retry or timeout argument was unspecified,
+so the default should be used."""
+
+
+def _is_not_none_or_false(value):
+ return value is not None and value is not False
+
+
+def _apply_decorators(func, decorators):
+ """Apply a list of decorators to a given function.
+
+ ``decorators`` may contain items that are ``None`` or ``False`` which will
+ be ignored.
+ """
+ decorators = filter(_is_not_none_or_false, reversed(decorators))
+
+ for decorator in decorators:
+ func = decorator(func)
+
+ return func
+
+
+def _determine_timeout(default_timeout, specified_timeout, retry):
+ """Determines how timeout should be applied to a wrapped method.
+
+ Args:
+ default_timeout (Optional[Timeout]): The default timeout specified
+ at method creation time.
+ specified_timeout (Optional[Timeout]): The timeout specified at
+ invocation time. If :attr:`DEFAULT`, this will be set to
+ the ``default_timeout``.
+ retry (Optional[Retry]): The retry specified at invocation time.
+
+ Returns:
+ Optional[Timeout]: The timeout to apply to the method or ``None``.
+ """
+ # If timeout is specified as a number instead of a Timeout instance,
+ # convert it to a ConstantTimeout.
+ if isinstance(specified_timeout, (int, float)):
+ specified_timeout = timeout.ConstantTimeout(specified_timeout)
+ if isinstance(default_timeout, (int, float)):
+ default_timeout = timeout.ConstantTimeout(default_timeout)
+
+ if specified_timeout is DEFAULT:
+ specified_timeout = default_timeout
+
+ if specified_timeout is default_timeout:
+ # If timeout is the default and the default timeout is exponential and
+ # a non-default retry is specified, make sure the timeout's deadline
+ # matches the retry's. This handles the case where the user leaves
+ # the timeout default but specifies a lower deadline via the retry.
+ if (
+ retry
+ and retry is not DEFAULT
+ and isinstance(default_timeout, timeout.ExponentialTimeout)
+ ):
+ return default_timeout.with_deadline(retry._deadline)
+ else:
+ return default_timeout
+
+ return specified_timeout
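+
+
+# Sketch of the deadline-matching rule above (hypothetical values, assuming
+# ``from google.api_core import retry``): leaving the timeout at DEFAULT while
+# passing a retry with a shorter deadline shrinks the timeout's deadline too.
+#
+#     t = _determine_timeout(
+#         timeout.ExponentialTimeout(deadline=600),  # method default
+#         DEFAULT,                                   # caller did not override
+#         retry.Retry(deadline=30),                  # caller's retry
+#     )
+#     # t is the default timeout rebuilt with deadline=30.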
+
+
+class _GapicCallable(object):
+ """Callable that applies retry, timeout, and metadata logic.
+
+ Args:
+ target (Callable): The low-level RPC method.
+ retry (google.api_core.retry.Retry): The default retry for the
+ callable. If ``None``, this callable will not retry by default
+ timeout (google.api_core.timeout.Timeout): The default timeout
+ for the callable. If ``None``, this callable will not specify
+ a timeout argument to the low-level RPC method by default.
+ metadata (Sequence[Tuple[str, str]]): Additional metadata that is
+ provided to the RPC method on every invocation. This is merged with
+ any metadata specified during invocation. If ``None``, no
+ additional metadata will be passed to the RPC method.
+ """
+
+ def __init__(self, target, retry, timeout, metadata=None):
+ self._target = target
+ self._retry = retry
+ self._timeout = timeout
+ self._metadata = metadata
+
+ def __call__(self, *args, **kwargs):
+ """Invoke the low-level RPC with retry, timeout, and metadata."""
+ # Note: Due to Python 2 lacking keyword-only arguments we use kwargs to
+ # extract the retry and timeout params.
+ timeout_ = _determine_timeout(
+ self._timeout,
+ kwargs.pop("timeout", self._timeout),
+            # Use only the invocation-specified retry here, as we only
+ # want to adjust the timeout deadline if the *user* specified
+ # a different retry.
+ kwargs.get("retry", None),
+ )
+
+ retry = kwargs.pop("retry", self._retry)
+
+ if retry is DEFAULT:
+ retry = self._retry
+
+ # Apply all applicable decorators.
+ wrapped_func = _apply_decorators(self._target, [retry, timeout_])
+
+ # Add the user agent metadata to the call.
+ if self._metadata is not None:
+ metadata = kwargs.get("metadata", [])
+ # Due to the nature of invocation, None should be treated the same
+ # as not specified.
+ if metadata is None:
+ metadata = []
+ metadata = list(metadata)
+ metadata.extend(self._metadata)
+ kwargs["metadata"] = metadata
+
+ return wrapped_func(*args, **kwargs)
+
+
+def wrap_method(
+ func,
+ default_retry=None,
+ default_timeout=None,
+ client_info=client_info.DEFAULT_CLIENT_INFO,
+):
+ """Wrap an RPC method with common behavior.
+
+    This applies common error wrapping, retry, and timeout behavior to a function.
+ The wrapped function will take optional ``retry`` and ``timeout``
+ arguments.
+
+ For example::
+
+ import google.api_core.gapic_v1.method
+ from google.api_core import retry
+ from google.api_core import timeout
+
+ # The original RPC method.
+ def get_topic(name, timeout=None):
+ request = publisher_v2.GetTopicRequest(name=name)
+ return publisher_stub.GetTopic(request, timeout=timeout)
+
+ default_retry = retry.Retry(deadline=60)
+ default_timeout = timeout.Timeout(deadline=60)
+ wrapped_get_topic = google.api_core.gapic_v1.method.wrap_method(
+ get_topic, default_retry)
+
+ # Execute get_topic with default retry and timeout:
+ response = wrapped_get_topic()
+
+        # Execute get_topic without doing any retrying but with the default
+ # timeout:
+ response = wrapped_get_topic(retry=None)
+
+ # Execute get_topic but only retry on 5xx errors:
+ my_retry = retry.Retry(retry.if_exception_type(
+ exceptions.InternalServerError))
+ response = wrapped_get_topic(retry=my_retry)
+
+ The way this works is by late-wrapping the given function with the retry
+ and timeout decorators. Essentially, when ``wrapped_get_topic()`` is
+ called:
+
+ * ``get_topic()`` is first wrapped with the ``timeout`` into
+ ``get_topic_with_timeout``.
+ * ``get_topic_with_timeout`` is wrapped with the ``retry`` into
+ ``get_topic_with_timeout_and_retry()``.
+ * The final ``get_topic_with_timeout_and_retry`` is called passing through
+ the ``args`` and ``kwargs``.
+
+ The callstack is therefore::
+
+ method.__call__() ->
+ Retry.__call__() ->
+ Timeout.__call__() ->
+ wrap_errors() ->
+ get_topic()
+
+ Note that if ``timeout`` or ``retry`` is ``None``, then they are not
+ applied to the function. For example,
+ ``wrapped_get_topic(timeout=None, retry=None)`` is more or less
+ equivalent to just calling ``get_topic`` but with error re-mapping.
+
+ Args:
+ func (Callable[Any]): The function to wrap. It should accept an
+ optional ``timeout`` argument. If ``metadata`` is not ``None``, it
+ should accept a ``metadata`` argument.
+ default_retry (Optional[google.api_core.Retry]): The default retry
+ strategy. If ``None``, the method will not retry by default.
+ default_timeout (Optional[google.api_core.Timeout]): The default
+ timeout strategy. Can also be specified as an int or float. If
+ ``None``, the method will not have timeout specified by default.
+ client_info
+ (Optional[google.api_core.gapic_v1.client_info.ClientInfo]):
+ Client information used to create a user-agent string that's
+ passed as gRPC metadata to the method. If unspecified, then
+ a sane default will be used. If ``None``, then no user agent
+ metadata will be provided to the RPC method.
+
+ Returns:
+ Callable: A new callable that takes optional ``retry`` and ``timeout``
+ arguments and applies the common error mapping, retry, timeout,
+ and metadata behavior to the low-level RPC method.
+ """
+ func = grpc_helpers.wrap_errors(func)
+
+ if client_info is not None:
+ user_agent_metadata = [client_info.to_grpc_metadata()]
+ else:
+ user_agent_metadata = None
+
+ return general_helpers.wraps(func)(
+ _GapicCallable(
+ func, default_retry, default_timeout, metadata=user_agent_metadata
+ )
+ )
diff --git a/venv/Lib/site-packages/google/api_core/gapic_v1/method_async.py b/venv/Lib/site-packages/google/api_core/gapic_v1/method_async.py
new file mode 100644
index 000000000..5210b2b7a
--- /dev/null
+++ b/venv/Lib/site-packages/google/api_core/gapic_v1/method_async.py
@@ -0,0 +1,45 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""AsyncIO helpers for wrapping gRPC methods with common functionality.
+
+This is used by gapic clients to provide common error mapping, retry, timeout,
+pagination, and long-running operations to gRPC methods.
+"""
+
+from google.api_core import general_helpers, grpc_helpers_async
+from google.api_core.gapic_v1 import client_info
+from google.api_core.gapic_v1.method import (_GapicCallable, # noqa: F401
+ DEFAULT,
+ USE_DEFAULT_METADATA)
+
+
+def wrap_method(
+ func,
+ default_retry=None,
+ default_timeout=None,
+ client_info=client_info.DEFAULT_CLIENT_INFO,
+):
+ """Wrap an async RPC method with common behavior.
+
+ Returns:
+ Callable: A new callable that takes optional ``retry`` and ``timeout``
+ arguments and applies the common error mapping, retry, timeout,
+ and metadata behavior to the low-level RPC method.
+ """
+ func = grpc_helpers_async.wrap_errors(func)
+
+ metadata = [client_info.to_grpc_metadata()] if client_info is not None else None
+
+ return general_helpers.wraps(func)(_GapicCallable(
+ func, default_retry, default_timeout, metadata=metadata))
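+
+
+# A minimal async sketch (``stub``, ``request`` and ``my_retry`` are
+# hypothetical):
+#
+#     wrapped = wrap_method(
+#         stub.GetTopic, default_retry=my_retry, default_timeout=60.0)
+#     response = await wrapped(request, retry=None)  # no retry, default timeout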
diff --git a/venv/Lib/site-packages/google/api_core/gapic_v1/routing_header.py b/venv/Lib/site-packages/google/api_core/gapic_v1/routing_header.py
new file mode 100644
index 000000000..3fb12a6f8
--- /dev/null
+++ b/venv/Lib/site-packages/google/api_core/gapic_v1/routing_header.py
@@ -0,0 +1,62 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for constructing routing headers.
+
+These headers are used by Google infrastructure to determine how to route
+requests, especially for services that are regional.
+
+Generally, these headers are specified as gRPC metadata.
+"""
+
+import sys
+
+from six.moves.urllib.parse import urlencode
+
+ROUTING_METADATA_KEY = "x-goog-request-params"
+
+
+def to_routing_header(params):
+ """Returns a routing header string for the given request parameters.
+
+ Args:
+ params (Mapping[str, Any]): A dictionary containing the request
+ parameters used for routing.
+
+ Returns:
+ str: The routing header string.
+ """
+ if sys.version_info[0] < 3:
+ # Python 2 does not have the "safe" parameter for urlencode.
+ return urlencode(params).replace("%2F", "/")
+ return urlencode(
+ params,
+ # Per Google API policy (go/api-url-encoding), / is not encoded.
+ safe="/",
+ )
+
+
+def to_grpc_metadata(params):
+ """Returns the gRPC metadata containing the routing headers for the given
+ request parameters.
+
+ Args:
+ params (Mapping[str, Any]): A dictionary containing the request
+ parameters used for routing.
+
+ Returns:
+ Tuple(str, str): The gRPC metadata containing the routing header key
+ and value.
+ """
+ return (ROUTING_METADATA_KEY, to_routing_header(params))
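+
+
+# Example (illustrative values):
+#
+#     to_routing_header({"name": "projects/p/instances/i"})
+#     # -> "name=projects/p/instances/i"   (the "/" stays unescaped)
+#
+#     to_grpc_metadata({"name": "projects/p/instances/i"})
+#     # -> ("x-goog-request-params", "name=projects/p/instances/i")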
diff --git a/venv/Lib/site-packages/google/api_core/general_helpers.py b/venv/Lib/site-packages/google/api_core/general_helpers.py
new file mode 100644
index 000000000..d2d0c4402
--- /dev/null
+++ b/venv/Lib/site-packages/google/api_core/general_helpers.py
@@ -0,0 +1,33 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for general Python functionality."""
+
+import functools
+
+import six
+
+
+# functools.partial objects lack several attributes present on real function
+# objects. In Python 2 wraps fails on this so use a restricted set instead.
+_PARTIAL_VALID_ASSIGNMENTS = ("__doc__",)
+
+
+def wraps(wrapped):
+ """A functools.wraps helper that handles partial objects on Python 2."""
+ # https://github.com/google/pytype/issues/322
+ if isinstance(wrapped, functools.partial): # pytype: disable=wrong-arg-types
+ return six.wraps(wrapped, assigned=_PARTIAL_VALID_ASSIGNMENTS)
+ else:
+ return six.wraps(wrapped)
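+
+
+# Sketch of the case this helper exists for (``session.get`` is hypothetical):
+# on Python 2, functools.wraps raises on partial objects, so only __doc__ is
+# copied for them.
+#
+#     get = functools.partial(session.get, timeout=10)
+#
+#     @wraps(get)
+#     def logged_get(*args, **kwargs):
+#         return get(*args, **kwargs)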
diff --git a/venv/Lib/site-packages/google/api_core/grpc_helpers.py b/venv/Lib/site-packages/google/api_core/grpc_helpers.py
new file mode 100644
index 000000000..0ccbe1264
--- /dev/null
+++ b/venv/Lib/site-packages/google/api_core/grpc_helpers.py
@@ -0,0 +1,466 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for :mod:`grpc`."""
+
+import collections
+
+import grpc
+import six
+
+from google.api_core import exceptions
+from google.api_core import general_helpers
+import google.auth
+import google.auth.credentials
+import google.auth.transport.grpc
+import google.auth.transport.requests
+
+try:
+ import grpc_gcp
+
+ HAS_GRPC_GCP = True
+except ImportError:
+ HAS_GRPC_GCP = False
+
+# The list of gRPC Callable interfaces that return iterators.
+_STREAM_WRAP_CLASSES = (grpc.UnaryStreamMultiCallable, grpc.StreamStreamMultiCallable)
+
+
+def _patch_callable_name(callable_):
+ """Fix-up gRPC callable attributes.
+
+    gRPC callables lack the ``__name__`` attribute, which causes
+    :func:`functools.wraps` to error. This adds the attribute if needed.
+ """
+ if not hasattr(callable_, "__name__"):
+ callable_.__name__ = callable_.__class__.__name__
+
+
+def _wrap_unary_errors(callable_):
+ """Map errors for Unary-Unary and Stream-Unary gRPC callables."""
+ _patch_callable_name(callable_)
+
+ @six.wraps(callable_)
+ def error_remapped_callable(*args, **kwargs):
+ try:
+ return callable_(*args, **kwargs)
+ except grpc.RpcError as exc:
+ six.raise_from(exceptions.from_grpc_error(exc), exc)
+
+ return error_remapped_callable
+
+
+class _StreamingResponseIterator(grpc.Call):
+ def __init__(self, wrapped, prefetch_first_result=True):
+ self._wrapped = wrapped
+
+        # This iterator is used in a retry context and returned outside after init.
+        # gRPC will not raise an exception until the stream is consumed, so we
+        # retrieve the first result up front in order to fail fast and trigger
+        # a retry.
+ try:
+ if prefetch_first_result:
+ self._stored_first_result = six.next(self._wrapped)
+ except TypeError:
+ # It is possible the wrapped method isn't an iterable (a grpc.Call
+ # for instance). If this happens don't store the first result.
+ pass
+ except StopIteration:
+ # ignore stop iteration at this time. This should be handled outside of retry.
+ pass
+
+ def __iter__(self):
+ """This iterator is also an iterable that returns itself."""
+ return self
+
+ def next(self):
+ """Get the next response from the stream.
+
+ Returns:
+ protobuf.Message: A single response from the stream.
+ """
+ try:
+ if hasattr(self, "_stored_first_result"):
+ result = self._stored_first_result
+ del self._stored_first_result
+ return result
+ return six.next(self._wrapped)
+ except grpc.RpcError as exc:
+ # If the stream has already returned data, we cannot recover here.
+ six.raise_from(exceptions.from_grpc_error(exc), exc)
+
+ # Alias needed for Python 2/3 support.
+ __next__ = next
+
+ # grpc.Call & grpc.RpcContext interface
+
+ def add_callback(self, callback):
+ return self._wrapped.add_callback(callback)
+
+ def cancel(self):
+ return self._wrapped.cancel()
+
+ def code(self):
+ return self._wrapped.code()
+
+ def details(self):
+ return self._wrapped.details()
+
+ def initial_metadata(self):
+ return self._wrapped.initial_metadata()
+
+ def is_active(self):
+ return self._wrapped.is_active()
+
+ def time_remaining(self):
+ return self._wrapped.time_remaining()
+
+ def trailing_metadata(self):
+ return self._wrapped.trailing_metadata()
+
+
+def _wrap_stream_errors(callable_):
+ """Wrap errors for Unary-Stream and Stream-Stream gRPC callables.
+
+ The callables that return iterators require a bit more logic to re-map
+ errors when iterating. This wraps both the initial invocation and the
+ iterator of the return value to re-map errors.
+ """
+ _patch_callable_name(callable_)
+
+ @general_helpers.wraps(callable_)
+ def error_remapped_callable(*args, **kwargs):
+ try:
+ result = callable_(*args, **kwargs)
+ # Auto-fetching the first result causes PubSub client's streaming pull
+        # to hang when re-opening the stream, thus we need to examine the hacky
+ # hidden flag to see if pre-fetching is disabled.
+ # https://github.com/googleapis/python-pubsub/issues/93#issuecomment-630762257
+ prefetch_first = getattr(callable_, "_prefetch_first_result_", True)
+ return _StreamingResponseIterator(result, prefetch_first_result=prefetch_first)
+ except grpc.RpcError as exc:
+ six.raise_from(exceptions.from_grpc_error(exc), exc)
+
+ return error_remapped_callable
+
+
+def wrap_errors(callable_):
+ """Wrap a gRPC callable and map :class:`grpc.RpcErrors` to friendly error
+ classes.
+
+ Errors raised by the gRPC callable are mapped to the appropriate
+ :class:`google.api_core.exceptions.GoogleAPICallError` subclasses.
+ The original `grpc.RpcError` (which is usually also a `grpc.Call`) is
+ available from the ``response`` property on the mapped exception. This
+ is useful for extracting metadata from the original error.
+
+ Args:
+ callable_ (Callable): A gRPC callable.
+
+ Returns:
+ Callable: The wrapped gRPC callable.
+ """
+ if isinstance(callable_, _STREAM_WRAP_CLASSES):
+ return _wrap_stream_errors(callable_)
+ else:
+ return _wrap_unary_errors(callable_)
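+
+
+# Sketch of the resulting behavior (``stub`` and ``request`` are hypothetical):
+# a gRPC NOT_FOUND error surfaces as google.api_core.exceptions.NotFound, and
+# the original grpc.Call remains available on the mapped exception.
+#
+#     get_topic = wrap_errors(stub.GetTopic)
+#     try:
+#         get_topic(request)
+#     except exceptions.NotFound as exc:
+#         trailing = exc.response.trailing_metadata()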
+
+
+def _create_composite_credentials(
+ credentials=None,
+ credentials_file=None,
+ scopes=None,
+ ssl_credentials=None,
+ quota_project_id=None):
+ """Create the composite credentials for secure channels.
+
+ Args:
+ credentials (google.auth.credentials.Credentials): The credentials. If
+ not specified, then this function will attempt to ascertain the
+ credentials from the environment using :func:`google.auth.default`.
+ credentials_file (str): A file with credentials that can be loaded with
+ :func:`google.auth.load_credentials_from_file`. This argument is
+ mutually exclusive with credentials.
+        scopes (Sequence[str]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ ssl_credentials (grpc.ChannelCredentials): Optional SSL channel
+ credentials. This can be used to specify different certificates.
+ quota_project_id (str): An optional project to use for billing and quota.
+
+ Returns:
+ grpc.ChannelCredentials: The composed channel credentials object.
+
+ Raises:
+ google.api_core.DuplicateCredentialArgs: If both a credentials object and credentials_file are passed.
+ """
+ if credentials and credentials_file:
+ raise exceptions.DuplicateCredentialArgs(
+ "'credentials' and 'credentials_file' are mutually exclusive."
+ )
+
+ if credentials_file:
+ credentials, _ = google.auth.load_credentials_from_file(credentials_file, scopes=scopes)
+ elif credentials:
+ credentials = google.auth.credentials.with_scopes_if_required(credentials, scopes)
+ else:
+ credentials, _ = google.auth.default(scopes=scopes)
+
+ if quota_project_id and isinstance(credentials, google.auth.credentials.CredentialsWithQuotaProject):
+ credentials = credentials.with_quota_project(quota_project_id)
+
+ request = google.auth.transport.requests.Request()
+
+ # Create the metadata plugin for inserting the authorization header.
+ metadata_plugin = google.auth.transport.grpc.AuthMetadataPlugin(
+ credentials, request
+ )
+
+ # Create a set of grpc.CallCredentials using the metadata plugin.
+ google_auth_credentials = grpc.metadata_call_credentials(metadata_plugin)
+
+ if ssl_credentials is None:
+ ssl_credentials = grpc.ssl_channel_credentials()
+
+ # Combine the ssl credentials and the authorization credentials.
+ return grpc.composite_channel_credentials(
+ ssl_credentials, google_auth_credentials
+ )
+
+
+def create_channel(
+ target,
+ credentials=None,
+ scopes=None,
+ ssl_credentials=None,
+ credentials_file=None,
+ quota_project_id=None,
+ **kwargs):
+ """Create a secure channel with credentials.
+
+ Args:
+ target (str): The target service address in the format 'hostname:port'.
+ credentials (google.auth.credentials.Credentials): The credentials. If
+ not specified, then this function will attempt to ascertain the
+ credentials from the environment using :func:`google.auth.default`.
+        scopes (Sequence[str]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ ssl_credentials (grpc.ChannelCredentials): Optional SSL channel
+ credentials. This can be used to specify different certificates.
+ credentials_file (str): A file with credentials that can be loaded with
+ :func:`google.auth.load_credentials_from_file`. This argument is
+ mutually exclusive with credentials.
+ quota_project_id (str): An optional project to use for billing and quota.
+ kwargs: Additional key-word args passed to
+ :func:`grpc_gcp.secure_channel` or :func:`grpc.secure_channel`.
+
+ Returns:
+ grpc.Channel: The created channel.
+
+ Raises:
+ google.api_core.DuplicateCredentialArgs: If both a credentials object and credentials_file are passed.
+ """
+
+ composite_credentials = _create_composite_credentials(
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ ssl_credentials=ssl_credentials,
+ quota_project_id=quota_project_id,
+ )
+
+ if HAS_GRPC_GCP:
+ # If grpc_gcp module is available use grpc_gcp.secure_channel,
+ # otherwise, use grpc.secure_channel to create grpc channel.
+ return grpc_gcp.secure_channel(target, composite_credentials, **kwargs)
+ else:
+ return grpc.secure_channel(target, composite_credentials, **kwargs)
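+
+
+# A minimal sketch (hypothetical target and scope):
+#
+#     channel = create_channel(
+#         "pubsub.googleapis.com:443",
+#         scopes=["https://www.googleapis.com/auth/pubsub"],
+#     )
+#     # Credentials come from google.auth.default(); pass ``credentials=`` or
+#     # ``credentials_file=`` (mutually exclusive) to override that.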
+
+
+_MethodCall = collections.namedtuple(
+ "_MethodCall", ("request", "timeout", "metadata", "credentials")
+)
+
+_ChannelRequest = collections.namedtuple("_ChannelRequest", ("method", "request"))
+
+
+class _CallableStub(object):
+ """Stub for the grpc.*MultiCallable interfaces."""
+
+ def __init__(self, method, channel):
+ self._method = method
+ self._channel = channel
+ self.response = None
+ """Union[protobuf.Message, Callable[protobuf.Message], exception]:
+ The response to give when invoking this callable. If this is a
+ callable, it will be invoked with the request protobuf. If it's an
+ exception, the exception will be raised when this is invoked.
+ """
+ self.responses = None
+ """Iterator[
+ Union[protobuf.Message, Callable[protobuf.Message], exception]]:
+ An iterator of responses. If specified, self.response will be populated
+ on each invocation by calling ``next(self.responses)``."""
+ self.requests = []
+ """List[protobuf.Message]: All requests sent to this callable."""
+ self.calls = []
+ """List[Tuple]: All invocations of this callable. Each tuple is the
+ request, timeout, metadata, and credentials."""
+
+ def __call__(self, request, timeout=None, metadata=None, credentials=None):
+ self._channel.requests.append(_ChannelRequest(self._method, request))
+ self.calls.append(_MethodCall(request, timeout, metadata, credentials))
+ self.requests.append(request)
+
+ response = self.response
+ if self.responses is not None:
+ if response is None:
+ response = next(self.responses)
+ else:
+ raise ValueError(
+ "{method}.response and {method}.responses are mutually "
+ "exclusive.".format(method=self._method)
+ )
+
+ if callable(response):
+ return response(request)
+
+ if isinstance(response, Exception):
+ raise response
+
+ if response is not None:
+ return response
+
+ raise ValueError('Method stub for "{}" has no response.'.format(self._method))
+
+
+def _simplify_method_name(method):
+ """Simplifies a gRPC method name.
+
+ When gRPC invokes the channel to create a callable, it gives a full
+ method name like "/google.pubsub.v1.Publisher/CreateTopic". This
+ returns just the name of the method, in this case "CreateTopic".
+
+ Args:
+ method (str): The name of the method.
+
+ Returns:
+ str: The simplified name of the method.
+ """
+ return method.rsplit("/", 1).pop()
+
+
+class ChannelStub(grpc.Channel):
+ """A testing stub for the grpc.Channel interface.
+
+ This can be used to test any client that eventually uses a gRPC channel
+ to communicate. By passing in a channel stub, you can configure which
+ responses are returned and track which requests are made.
+
+ For example:
+
+ .. code-block:: python
+
+ channel_stub = grpc_helpers.ChannelStub()
+ client = FooClient(channel=channel_stub)
+
+ channel_stub.GetFoo.response = foo_pb2.Foo(name='bar')
+
+ foo = client.get_foo(labels=['baz'])
+
+ assert foo.name == 'bar'
+        assert channel_stub.GetFoo.requests[0].labels == ['baz']
+
+ Each method on the stub can be accessed and configured on the channel.
+ Here's some examples of various configurations:
+
+ .. code-block:: python
+
+ # Return a basic response:
+
+ channel_stub.GetFoo.response = foo_pb2.Foo(name='bar')
+ assert client.get_foo().name == 'bar'
+
+ # Raise an exception:
+ channel_stub.GetFoo.response = NotFound('...')
+
+ with pytest.raises(NotFound):
+ client.get_foo()
+
+ # Use a sequence of responses:
+ channel_stub.GetFoo.responses = iter([
+ foo_pb2.Foo(name='bar'),
+ foo_pb2.Foo(name='baz'),
+ ])
+
+ assert client.get_foo().name == 'bar'
+ assert client.get_foo().name == 'baz'
+
+ # Use a callable
+
+ def on_get_foo(request):
+ return foo_pb2.Foo(name='bar' + request.id)
+
+ channel_stub.GetFoo.response = on_get_foo
+
+ assert client.get_foo(id='123').name == 'bar123'
+ """
+
+ def __init__(self, responses=[]):
+ self.requests = []
+ """Sequence[Tuple[str, protobuf.Message]]: A list of all requests made
+ on this channel in order. The tuple is of method name, request
+ message."""
+ self._method_stubs = {}
+
+ def _stub_for_method(self, method):
+ method = _simplify_method_name(method)
+ self._method_stubs[method] = _CallableStub(method, self)
+ return self._method_stubs[method]
+
+ def __getattr__(self, key):
+ try:
+ return self._method_stubs[key]
+ except KeyError:
+ raise AttributeError
+
+ def unary_unary(self, method, request_serializer=None, response_deserializer=None):
+ """grpc.Channel.unary_unary implementation."""
+ return self._stub_for_method(method)
+
+ def unary_stream(self, method, request_serializer=None, response_deserializer=None):
+ """grpc.Channel.unary_stream implementation."""
+ return self._stub_for_method(method)
+
+ def stream_unary(self, method, request_serializer=None, response_deserializer=None):
+ """grpc.Channel.stream_unary implementation."""
+ return self._stub_for_method(method)
+
+ def stream_stream(
+ self, method, request_serializer=None, response_deserializer=None
+ ):
+ """grpc.Channel.stream_stream implementation."""
+ return self._stub_for_method(method)
+
+ def subscribe(self, callback, try_to_connect=False):
+ """grpc.Channel.subscribe implementation."""
+ pass
+
+ def unsubscribe(self, callback):
+ """grpc.Channel.unsubscribe implementation."""
+ pass
+
+ def close(self):
+ """grpc.Channel.close implementation."""
+ pass
diff --git a/venv/Lib/site-packages/google/api_core/grpc_helpers_async.py b/venv/Lib/site-packages/google/api_core/grpc_helpers_async.py
new file mode 100644
index 000000000..9a994e9f8
--- /dev/null
+++ b/venv/Lib/site-packages/google/api_core/grpc_helpers_async.py
@@ -0,0 +1,289 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""AsyncIO helpers for :mod:`grpc` supporting 3.6+.
+
+Please combine more detailed docstring in grpc_helpers.py to use following
+functions. This module is implementing the same surface with AsyncIO semantics.
+"""
+
+import asyncio
+import functools
+
+import grpc
+from grpc.experimental import aio
+
+from google.api_core import exceptions, grpc_helpers
+
+
+# TODO(lidiz) Support gRPC GCP wrapper
+HAS_GRPC_GCP = False
+
+# NOTE(lidiz) Alternatively, we can hack "__getattribute__" to perform
+# automatic patching for us. But that means the overhead of creating an
+# extra Python function spreads to every single send and receive.
+
+
+class _WrappedCall(aio.Call):
+
+ def __init__(self):
+ self._call = None
+
+ def with_call(self, call):
+ """Supplies the call object separately to keep __init__ clean."""
+ self._call = call
+ return self
+
+ async def initial_metadata(self):
+ return await self._call.initial_metadata()
+
+ async def trailing_metadata(self):
+ return await self._call.trailing_metadata()
+
+ async def code(self):
+ return await self._call.code()
+
+ async def details(self):
+ return await self._call.details()
+
+ def cancelled(self):
+ return self._call.cancelled()
+
+ def done(self):
+ return self._call.done()
+
+ def time_remaining(self):
+ return self._call.time_remaining()
+
+ def cancel(self):
+ return self._call.cancel()
+
+ def add_done_callback(self, callback):
+ self._call.add_done_callback(callback)
+
+ async def wait_for_connection(self):
+ try:
+ await self._call.wait_for_connection()
+ except grpc.RpcError as rpc_error:
+ raise exceptions.from_grpc_error(rpc_error) from rpc_error
+
+
+class _WrappedUnaryResponseMixin(_WrappedCall):
+
+ def __await__(self):
+ try:
+ response = yield from self._call.__await__()
+ return response
+ except grpc.RpcError as rpc_error:
+ raise exceptions.from_grpc_error(rpc_error) from rpc_error
+
+
+class _WrappedStreamResponseMixin(_WrappedCall):
+
+ def __init__(self):
+ self._wrapped_async_generator = None
+
+ async def read(self):
+ try:
+ return await self._call.read()
+ except grpc.RpcError as rpc_error:
+ raise exceptions.from_grpc_error(rpc_error) from rpc_error
+
+ async def _wrapped_aiter(self):
+ try:
+ # NOTE(lidiz) coverage doesn't understand the exception raised from
+ # __anext__ method. It is covered by test case:
+ # test_wrap_stream_errors_aiter_non_rpc_error
+ async for response in self._call: # pragma: no branch
+ yield response
+ except grpc.RpcError as rpc_error:
+ raise exceptions.from_grpc_error(rpc_error) from rpc_error
+
+ def __aiter__(self):
+ if not self._wrapped_async_generator:
+ self._wrapped_async_generator = self._wrapped_aiter()
+ return self._wrapped_async_generator
+
+
+class _WrappedStreamRequestMixin(_WrappedCall):
+
+ async def write(self, request):
+ try:
+ await self._call.write(request)
+ except grpc.RpcError as rpc_error:
+ raise exceptions.from_grpc_error(rpc_error) from rpc_error
+
+ async def done_writing(self):
+ try:
+ await self._call.done_writing()
+ except grpc.RpcError as rpc_error:
+ raise exceptions.from_grpc_error(rpc_error) from rpc_error
+
+
+# NOTE(lidiz) Implementing each individual class separately, so we don't
+# expose any API that should not be seen. E.g., __aiter__ in unary-unary
+# RPC, or __await__ in stream-stream RPC.
+class _WrappedUnaryUnaryCall(_WrappedUnaryResponseMixin, aio.UnaryUnaryCall):
+ """Wrapped UnaryUnaryCall to map exceptions."""
+
+
+class _WrappedUnaryStreamCall(_WrappedStreamResponseMixin, aio.UnaryStreamCall):
+ """Wrapped UnaryStreamCall to map exceptions."""
+
+
+class _WrappedStreamUnaryCall(_WrappedUnaryResponseMixin, _WrappedStreamRequestMixin, aio.StreamUnaryCall):
+ """Wrapped StreamUnaryCall to map exceptions."""
+
+
+class _WrappedStreamStreamCall(_WrappedStreamRequestMixin, _WrappedStreamResponseMixin, aio.StreamStreamCall):
+ """Wrapped StreamStreamCall to map exceptions."""
+
+
+def _wrap_unary_errors(callable_):
+ """Map errors for Unary-Unary async callables."""
+ grpc_helpers._patch_callable_name(callable_)
+
+ @functools.wraps(callable_)
+ def error_remapped_callable(*args, **kwargs):
+ call = callable_(*args, **kwargs)
+ return _WrappedUnaryUnaryCall().with_call(call)
+
+ return error_remapped_callable
+
+
+def _wrap_stream_errors(callable_):
+ """Map errors for streaming RPC async callables."""
+ grpc_helpers._patch_callable_name(callable_)
+
+ @functools.wraps(callable_)
+ async def error_remapped_callable(*args, **kwargs):
+ call = callable_(*args, **kwargs)
+
+ if isinstance(call, aio.UnaryStreamCall):
+ call = _WrappedUnaryStreamCall().with_call(call)
+ elif isinstance(call, aio.StreamUnaryCall):
+ call = _WrappedStreamUnaryCall().with_call(call)
+ elif isinstance(call, aio.StreamStreamCall):
+ call = _WrappedStreamStreamCall().with_call(call)
+ else:
+ raise TypeError('Unexpected type of call %s' % type(call))
+
+ await call.wait_for_connection()
+ return call
+
+ return error_remapped_callable
+
+
+def wrap_errors(callable_):
+ """Wrap a gRPC async callable and map :class:`grpc.RpcErrors` to
+ friendly error classes.
+
+ Errors raised by the gRPC callable are mapped to the appropriate
+ :class:`google.api_core.exceptions.GoogleAPICallError` subclasses. The
+ original `grpc.RpcError` (which is usually also a `grpc.Call`) is
+ available from the ``response`` property on the mapped exception. This
+ is useful for extracting metadata from the original error.
+
+ Args:
+ callable_ (Callable): A gRPC callable.
+
+    Returns:
+        Callable: The wrapped gRPC callable.
+ """
+ if isinstance(callable_, aio.UnaryUnaryMultiCallable):
+ return _wrap_unary_errors(callable_)
+ else:
+ return _wrap_stream_errors(callable_)
+
+
+def create_channel(
+ target,
+ credentials=None,
+ scopes=None,
+ ssl_credentials=None,
+ credentials_file=None,
+ quota_project_id=None,
+ **kwargs):
+ """Create an AsyncIO secure channel with credentials.
+
+ Args:
+ target (str): The target service address in the format 'hostname:port'.
+ credentials (google.auth.credentials.Credentials): The credentials. If
+ not specified, then this function will attempt to ascertain the
+ credentials from the environment using :func:`google.auth.default`.
+        scopes (Sequence[str]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ ssl_credentials (grpc.ChannelCredentials): Optional SSL channel
+ credentials. This can be used to specify different certificates.
+ credentials_file (str): A file with credentials that can be loaded with
+ :func:`google.auth.load_credentials_from_file`. This argument is
+ mutually exclusive with credentials.
+ quota_project_id (str): An optional project to use for billing and quota.
+ kwargs: Additional key-word args passed to :func:`aio.secure_channel`.
+
+ Returns:
+ aio.Channel: The created channel.
+
+ Raises:
+ google.api_core.DuplicateCredentialArgs: If both a credentials object and credentials_file are passed.
+ """
+
+ composite_credentials = grpc_helpers._create_composite_credentials(
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ ssl_credentials=ssl_credentials,
+ quota_project_id=quota_project_id,
+ )
+
+ return aio.secure_channel(target, composite_credentials, **kwargs)
+
+
+class FakeUnaryUnaryCall(_WrappedUnaryUnaryCall):
+ """Fake implementation for unary-unary RPCs.
+
+    It is a dummy object for the response message. Supply the intended
+    response at initialization, and awaiting the call returns exactly that
+    response message.
+ """
+
+ def __init__(self, response=object()):
+ self.response = response
+ self._future = asyncio.get_event_loop().create_future()
+ self._future.set_result(self.response)
+
+ def __await__(self):
+ response = yield from self._future.__await__()
+ return response
+
+
+class FakeStreamUnaryCall(_WrappedStreamUnaryCall):
+ """Fake implementation for stream-unary RPCs.
+
+    It is a dummy object for the response message. Supply the intended
+    response at initialization, and awaiting the call returns exactly that
+    response message.
+ """
+
+ def __init__(self, response=object()):
+ self.response = response
+ self._future = asyncio.get_event_loop().create_future()
+ self._future.set_result(self.response)
+
+ def __await__(self):
+ response = yield from self._future.__await__()
+ return response
+
+ async def wait_for_connection(self):
+ pass
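+
+
+# Sketch of test usage (``my_response_pb`` is a hypothetical message): inside
+# a running event loop, a coroutine under test can be handed a canned result.
+#
+#     fake_call = FakeUnaryUnaryCall(response=my_response_pb)
+#     result = await fake_call
+#     assert result is my_response_pb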
diff --git a/venv/Lib/site-packages/google/api_core/iam.py b/venv/Lib/site-packages/google/api_core/iam.py
new file mode 100644
index 000000000..f13093603
--- /dev/null
+++ b/venv/Lib/site-packages/google/api_core/iam.py
@@ -0,0 +1,460 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Non-API-specific IAM policy definitions
+
+For allowed roles / permissions, see:
+https://cloud.google.com/iam/docs/understanding-roles
+
+Example usage:
+
+.. code-block:: python
+
+    # ``get_iam_policy`` returns a :class:`~google.api_core.iam.Policy`.
+ policy = resource.get_iam_policy(requested_policy_version=3)
+
+ phred = "user:phred@example.com"
+ admin_group = "group:admins@groups.example.com"
+ account = "serviceAccount:account-1234@accounts.example.com"
+
+ policy.version = 3
+ policy.bindings = [
+ {
+ "role": "roles/owner",
+ "members": {phred, admin_group, account}
+ },
+ {
+ "role": "roles/editor",
+ "members": {"allAuthenticatedUsers"}
+ },
+ {
+ "role": "roles/viewer",
+ "members": {"allUsers"}
+ "condition": {
+ "title": "request_time",
+ "description": "Requests made before 2021-01-01T00:00:00Z",
+ "expression": "request.time < timestamp(\"2021-01-01T00:00:00Z\")"
+ }
+ }
+ ]
+
+ resource.set_iam_policy(policy)
+"""
+
+import collections
+import operator
+import warnings
+
+try:
+ from collections import abc as collections_abc
+except ImportError: # Python 2.7
+ import collections as collections_abc
+
+# Generic IAM roles
+
+OWNER_ROLE = "roles/owner"
+"""Generic role implying all rights to an object."""
+
+EDITOR_ROLE = "roles/editor"
+"""Generic role implying rights to modify an object."""
+
+VIEWER_ROLE = "roles/viewer"
+"""Generic role implying rights to access an object."""
+
+_ASSIGNMENT_DEPRECATED_MSG = """\
+Assigning to '{}' is deprecated. Use the `policy.bindings` property to modify bindings instead."""
+
+_FACTORY_DEPRECATED_MSG = """\
+Factory method {0} is deprecated. Replace with '{0}'."""
+
+_DICT_ACCESS_MSG = """\
+Dict access is not supported on policies with version > 1 or with conditional bindings."""
+
+
+class InvalidOperationException(Exception):
+ """Raised when trying to use Policy class as a dict."""
+
+ pass
+
+
+class Policy(collections_abc.MutableMapping):
+ """IAM Policy
+
+ Args:
+        etag (Optional[str]): ETag used to identify a unique version of the policy.
+ version (Optional[int]): The syntax schema version of the policy.
+
+ Note:
+ Using conditions in bindings requires the policy's version to be set
+ to `3` or greater, depending on the versions that are currently supported.
+
+        Accessing the policy using dict operations raises InvalidOperationException
+        when the policy's version is greater than 1 or the policy contains
+        conditional bindings.
+
+ Use the policy.bindings getter/setter to retrieve and modify the policy's bindings.
+
+ See:
+ IAM Policy https://cloud.google.com/iam/reference/rest/v1/Policy
+ Policy versions https://cloud.google.com/iam/docs/policies#versions
+ Conditions overview https://cloud.google.com/iam/docs/conditions-overview.
+ """
+
+ _OWNER_ROLES = (OWNER_ROLE,)
+ """Roles mapped onto our ``owners`` attribute."""
+
+ _EDITOR_ROLES = (EDITOR_ROLE,)
+ """Roles mapped onto our ``editors`` attribute."""
+
+ _VIEWER_ROLES = (VIEWER_ROLE,)
+ """Roles mapped onto our ``viewers`` attribute."""
+
+ def __init__(self, etag=None, version=None):
+ self.etag = etag
+ self.version = version
+ self._bindings = []
+
+ def __iter__(self):
+ self.__check_version__()
+ return (binding["role"] for binding in self._bindings)
+
+ def __len__(self):
+ self.__check_version__()
+ return len(self._bindings)
+
+ def __getitem__(self, key):
+ self.__check_version__()
+ for b in self._bindings:
+ if b["role"] == key:
+ return b["members"]
+ return set()
+
+ def __setitem__(self, key, value):
+ self.__check_version__()
+ value = set(value)
+ for binding in self._bindings:
+ if binding["role"] == key:
+ binding["members"] = value
+ return
+ self._bindings.append({"role": key, "members": value})
+
+ def __delitem__(self, key):
+ self.__check_version__()
+ for b in self._bindings:
+ if b["role"] == key:
+ self._bindings.remove(b)
+ return
+ raise KeyError(key)
+
+ def __check_version__(self):
+ """Raise InvalidOperationException if version is greater than 1 or policy contains conditions."""
+ raise_version = self.version is not None and self.version > 1
+
+ if raise_version or self._contains_conditions():
+ raise InvalidOperationException(_DICT_ACCESS_MSG)
+
+ def _contains_conditions(self):
+ for b in self._bindings:
+ if b.get("condition") is not None:
+ return True
+ return False
+
+ @property
+ def bindings(self):
+ """The policy's list of bindings.
+
+ A binding is specified by a dictionary with keys:
+
+ * role (str): Role that is assigned to `members`.
+
+ * members (:obj:`set` of str): Specifies the identities associated to this binding.
+
+ * condition (:obj:`dict` of str:str): Specifies a condition under which this binding will apply.
+
+ * title (str): Title for the condition.
+
+          * description (Optional[str]): Description of the condition.
+
+          * expression (str): A CEL (Common Expression Language) expression.
+
+ Type:
+ :obj:`list` of :obj:`dict`
+
+ See:
+ Policy versions https://cloud.google.com/iam/docs/policies#versions
+ Conditions overview https://cloud.google.com/iam/docs/conditions-overview.
+
+ Example:
+
+ .. code-block:: python
+
+ USER = "user:phred@example.com"
+ ADMIN_GROUP = "group:admins@groups.example.com"
+ SERVICE_ACCOUNT = "serviceAccount:account-1234@accounts.example.com"
+ CONDITION = {
+ "title": "request_time",
+ "description": "Requests made before 2021-01-01T00:00:00Z", # Optional
+ "expression": "request.time < timestamp(\"2021-01-01T00:00:00Z\")"
+ }
+
+ # Set policy's version to 3 before setting bindings containing conditions.
+ policy.version = 3
+
+ policy.bindings = [
+ {
+ "role": "roles/viewer",
+ "members": {USER, ADMIN_GROUP, SERVICE_ACCOUNT},
+ "condition": CONDITION
+ },
+ ...
+ ]
+ """
+ return self._bindings
+
+ @bindings.setter
+ def bindings(self, bindings):
+ self._bindings = bindings
+
+ @property
+ def owners(self):
+ """Legacy access to owner role.
+
+ Raise InvalidOperationException if version is greater than 1 or policy contains conditions.
+
+ DEPRECATED: use `policy.bindings` to access bindings instead.
+ """
+ result = set()
+ for role in self._OWNER_ROLES:
+ for member in self.get(role, ()):
+ result.add(member)
+ return frozenset(result)
+
+ @owners.setter
+ def owners(self, value):
+ """Update owners.
+
+ Raise InvalidOperationException if version is greater than 1 or policy contains conditions.
+
+ DEPRECATED: use `policy.bindings` to access bindings instead.
+ """
+ warnings.warn(
+ _ASSIGNMENT_DEPRECATED_MSG.format("owners", OWNER_ROLE), DeprecationWarning
+ )
+ self[OWNER_ROLE] = value
+
+ @property
+ def editors(self):
+ """Legacy access to editor role.
+
+ Raise InvalidOperationException if version is greater than 1 or policy contains conditions.
+
+ DEPRECATED: use `policy.bindings` to access bindings instead.
+ """
+ result = set()
+ for role in self._EDITOR_ROLES:
+ for member in self.get(role, ()):
+ result.add(member)
+ return frozenset(result)
+
+ @editors.setter
+ def editors(self, value):
+ """Update editors.
+
+ Raise InvalidOperationException if version is greater than 1 or policy contains conditions.
+
+ DEPRECATED: use `policy.bindings` to modify bindings instead.
+ """
+ warnings.warn(
+ _ASSIGNMENT_DEPRECATED_MSG.format("editors", EDITOR_ROLE),
+ DeprecationWarning,
+ )
+ self[EDITOR_ROLE] = value
+
+ @property
+ def viewers(self):
+ """Legacy access to viewer role.
+
+ Raise InvalidOperationException if version is greater than 1 or policy contains conditions.
+
+ DEPRECATED: use `policy.bindings` to modify bindings instead.
+ """
+ result = set()
+ for role in self._VIEWER_ROLES:
+ for member in self.get(role, ()):
+ result.add(member)
+ return frozenset(result)
+
+ @viewers.setter
+ def viewers(self, value):
+ """Update viewers.
+
+ Raise InvalidOperationException if version is greater than 1 or policy contains conditions.
+
+ DEPRECATED: use `policy.bindings` to modify bindings instead.
+ """
+ warnings.warn(
+ _ASSIGNMENT_DEPRECATED_MSG.format("viewers", VIEWER_ROLE),
+ DeprecationWarning,
+ )
+ self[VIEWER_ROLE] = value
+
+ @staticmethod
+ def user(email):
+ """Factory method for a user member.
+
+ Args:
+ email (str): E-mail for this particular user.
+
+ Returns:
+ str: A member string corresponding to the given user.
+
+        DEPRECATED: add the member `user:{email}` to a binding instead.
+ """
+ warnings.warn(
+ _FACTORY_DEPRECATED_MSG.format("user:{email}"), DeprecationWarning,
+ )
+ return "user:%s" % (email,)
+
+ @staticmethod
+ def service_account(email):
+ """Factory method for a service account member.
+
+ Args:
+ email (str): E-mail for this particular service account.
+
+ Returns:
+ str: A member string corresponding to the given service account.
+
+        DEPRECATED: add the member `serviceAccount:{email}` to a binding instead.
+ """
+ warnings.warn(
+ _FACTORY_DEPRECATED_MSG.format("serviceAccount:{email}"),
+ DeprecationWarning,
+ )
+ return "serviceAccount:%s" % (email,)
+
+ @staticmethod
+ def group(email):
+ """Factory method for a group member.
+
+ Args:
+ email (str): An id or e-mail for this particular group.
+
+ Returns:
+ str: A member string corresponding to the given group.
+
+        DEPRECATED: add the member `group:{email}` to a binding instead.
+ """
+ warnings.warn(
+ _FACTORY_DEPRECATED_MSG.format("group:{email}"), DeprecationWarning,
+ )
+ return "group:%s" % (email,)
+
+ @staticmethod
+ def domain(domain):
+ """Factory method for a domain member.
+
+ Args:
+ domain (str): The domain for this member.
+
+ Returns:
+ str: A member string corresponding to the given domain.
+
+        DEPRECATED: add the member `domain:{domain}` to a binding instead.
+ """
+ warnings.warn(
+ _FACTORY_DEPRECATED_MSG.format("domain:{email}"), DeprecationWarning,
+ )
+ return "domain:%s" % (domain,)
+
+ @staticmethod
+ def all_users():
+ """Factory method for a member representing all users.
+
+ Returns:
+ str: A member string representing all users.
+
+ DEPRECATED: set the member `allUsers` in the binding instead.
+ """
+ warnings.warn(
+ _FACTORY_DEPRECATED_MSG.format("allUsers"), DeprecationWarning,
+ )
+ return "allUsers"
+
+ @staticmethod
+ def authenticated_users():
+ """Factory method for a member representing all authenticated users.
+
+ Returns:
+ str: A member string representing all authenticated users.
+
+ DEPRECATED: set the member `allAuthenticatedUsers` in the binding instead.
+ """
+ warnings.warn(
+ _FACTORY_DEPRECATED_MSG.format("allAuthenticatedUsers"), DeprecationWarning,
+ )
+ return "allAuthenticatedUsers"
+
+ @classmethod
+ def from_api_repr(cls, resource):
+ """Factory: create a policy from a JSON resource.
+
+ Args:
+ resource (dict): policy resource returned by ``getIamPolicy`` API.
+
+ Returns:
+ :class:`Policy`: the parsed policy
+ """
+ version = resource.get("version")
+ etag = resource.get("etag")
+ policy = cls(etag, version)
+ policy.bindings = resource.get("bindings", [])
+
+ for binding in policy.bindings:
+ binding["members"] = set(binding.get("members", ()))
+
+ return policy
+
+ def to_api_repr(self):
+ """Render a JSON policy resource.
+
+ Returns:
+ dict: a resource to be passed to the ``setIamPolicy`` API.
+ """
+ resource = {}
+
+ if self.etag is not None:
+ resource["etag"] = self.etag
+
+ if self.version is not None:
+ resource["version"] = self.version
+
+ if self._bindings:
+ bindings = []
+ for binding in self._bindings:
+ members = binding.get("members")
+ if members:
+ new_binding = {
+ "role": binding["role"],
+ "members": sorted(members)
+ }
+ condition = binding.get("condition")
+ if condition:
+ new_binding["condition"] = condition
+ bindings.append(new_binding)
+
+ if bindings:
+ # Sort bindings by role
+ key = operator.itemgetter("role")
+ resource["bindings"] = sorted(bindings, key=key)
+
+ return resource
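A quick round-trip sketch for the two methods above (the resource values are
made up; ``Policy`` is the class defined in this file):

.. code-block:: python

    from google.api_core.iam import Policy

    resource = {
        "etag": "BwWd_abc123=",
        "version": 1,
        "bindings": [
            {"role": "roles/viewer", "members": ["user:bob@example.com"]},
            {"role": "roles/owner", "members": ["user:alice@example.com"]},
        ],
    }

    policy = Policy.from_api_repr(resource)   # member lists become sets
    policy.bindings[0]["members"].add("user:carol@example.com")
    # to_api_repr() emits sorted members and role-sorted bindings, ready
    # to pass to ``setIamPolicy``.
    print(policy.to_api_repr())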
diff --git a/venv/Lib/site-packages/google/api_core/operation.py b/venv/Lib/site-packages/google/api_core/operation.py
new file mode 100644
index 000000000..e6407b8c5
--- /dev/null
+++ b/venv/Lib/site-packages/google/api_core/operation.py
@@ -0,0 +1,327 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Futures for long-running operations returned from Google Cloud APIs.
+
+These futures can be used to synchronously wait for the result of a
+long-running operation using :meth:`Operation.result`:
+
+
+.. code-block:: python
+
+ operation = my_api_client.long_running_method()
+ result = operation.result()
+
+Or asynchronously using callbacks and :meth:`Operation.add_done_callback`:
+
+.. code-block:: python
+
+ operation = my_api_client.long_running_method()
+
+ def my_callback(future):
+ result = future.result()
+
+ operation.add_done_callback(my_callback)
+
+"""
+
+import functools
+import threading
+
+from google.api_core import exceptions
+from google.api_core import protobuf_helpers
+from google.api_core.future import polling
+from google.longrunning import operations_pb2
+from google.protobuf import json_format
+from google.rpc import code_pb2
+
+
+class Operation(polling.PollingFuture):
+ """A Future for interacting with a Google API Long-Running Operation.
+
+ Args:
+ operation (google.longrunning.operations_pb2.Operation): The
+ initial operation.
+ refresh (Callable[[], ~.api_core.operation.Operation]): A callable that
+ returns the latest state of the operation.
+ cancel (Callable[[], None]): A callable that tries to cancel
+ the operation.
+ result_type (:func:`type`): The protobuf type for the operation's
+ result.
+ metadata_type (:func:`type`): The protobuf type for the operation's
+ metadata.
+ retry (google.api_core.retry.Retry): The retry configuration used
+ when polling. This can be used to control how often :meth:`done`
+ is polled. Regardless of the retry's ``deadline``, it will be
+ overridden by the ``timeout`` argument to :meth:`result`.
+ """
+
+ def __init__(
+ self,
+ operation,
+ refresh,
+ cancel,
+ result_type,
+ metadata_type=None,
+ retry=polling.DEFAULT_RETRY,
+ ):
+ super(Operation, self).__init__(retry=retry)
+ self._operation = operation
+ self._refresh = refresh
+ self._cancel = cancel
+ self._result_type = result_type
+ self._metadata_type = metadata_type
+ self._completion_lock = threading.Lock()
+ # Invoke this in case the operation came back already complete.
+ self._set_result_from_operation()
+
+ @property
+ def operation(self):
+ """google.longrunning.Operation: The current long-running operation."""
+ return self._operation
+
+ @property
+ def metadata(self):
+ """google.protobuf.Message: the current operation metadata."""
+ if not self._operation.HasField("metadata"):
+ return None
+
+ return protobuf_helpers.from_any_pb(
+ self._metadata_type, self._operation.metadata
+ )
+
+ @classmethod
+ def deserialize(cls, payload):
+ """Deserialize a ``google.longrunning.Operation`` protocol buffer.
+
+ Args:
+ payload (bytes): A serialized operation protocol buffer.
+
+ Returns:
+ ~.operations_pb2.Operation: An Operation protobuf object.
+ """
+ return operations_pb2.Operation.FromString(payload)
+
+ def _set_result_from_operation(self):
+ """Set the result or exception from the operation if it is complete."""
+ # This must be done in a lock to prevent the polling thread
+ # and main thread from both executing the completion logic
+ # at the same time.
+ with self._completion_lock:
+ # If the operation isn't complete or if the result has already been
+ # set, do not call set_result/set_exception again.
+ # Note: self._result_set is set to True in set_result and
+ # set_exception, in case those methods are invoked directly.
+ if not self._operation.done or self._result_set:
+ return
+
+ if self._operation.HasField("response"):
+ response = protobuf_helpers.from_any_pb(
+ self._result_type, self._operation.response
+ )
+ self.set_result(response)
+ elif self._operation.HasField("error"):
+ exception = exceptions.GoogleAPICallError(
+ self._operation.error.message,
+ errors=(self._operation.error,),
+ response=self._operation,
+ )
+ self.set_exception(exception)
+ else:
+ exception = exceptions.GoogleAPICallError(
+ "Unexpected state: Long-running operation had neither "
+ "response nor error set."
+ )
+ self.set_exception(exception)
+
+ def _refresh_and_update(self, retry=polling.DEFAULT_RETRY):
+ """Refresh the operation and update the result if needed.
+
+ Args:
+ retry (google.api_core.retry.Retry): (Optional) How to retry the RPC.
+ """
+ # If the currently cached operation is done, no need to make another
+ # RPC as it will not change once done.
+ if not self._operation.done:
+ self._operation = self._refresh(retry=retry)
+ self._set_result_from_operation()
+
+ def done(self, retry=polling.DEFAULT_RETRY):
+ """Checks to see if the operation is complete.
+
+ Args:
+ retry (google.api_core.retry.Retry): (Optional) How to retry the RPC.
+
+ Returns:
+ bool: True if the operation is complete, False otherwise.
+ """
+ self._refresh_and_update(retry)
+ return self._operation.done
+
+ def cancel(self):
+ """Attempt to cancel the operation.
+
+ Returns:
+ bool: True if the cancel RPC was made, False if the operation is
+ already complete.
+ """
+ if self.done():
+ return False
+
+ self._cancel()
+ return True
+
+ def cancelled(self):
+ """True if the operation was cancelled."""
+ self._refresh_and_update()
+ return (
+ self._operation.HasField("error")
+ and self._operation.error.code == code_pb2.CANCELLED
+ )
+
+
+def _refresh_http(api_request, operation_name):
+ """Refresh an operation using a JSON/HTTP client.
+
+ Args:
+ api_request (Callable): A callable used to make an API request. This
+ should generally be
+ :meth:`google.cloud._http.Connection.api_request`.
+ operation_name (str): The name of the operation.
+
+ Returns:
+ google.longrunning.operations_pb2.Operation: The operation.
+ """
+ path = "operations/{}".format(operation_name)
+ api_response = api_request(method="GET", path=path)
+ return json_format.ParseDict(api_response, operations_pb2.Operation())
+
+
+def _cancel_http(api_request, operation_name):
+ """Cancel an operation using a JSON/HTTP client.
+
+ Args:
+ api_request (Callable): A callable used to make an API request. This
+ should generally be
+ :meth:`google.cloud._http.Connection.api_request`.
+ operation_name (str): The name of the operation.
+ """
+ path = "operations/{}:cancel".format(operation_name)
+ api_request(method="POST", path=path)
+
+
+def from_http_json(operation, api_request, result_type, **kwargs):
+ """Create an operation future using a HTTP/JSON client.
+
+ This interacts with the long-running operations `service`_ (specific
+ to a given API) via `HTTP/JSON`_.
+
+ .. _HTTP/JSON: https://cloud.google.com/speech/reference/rest/\
+ v1beta1/operations#Operation
+
+ Args:
+ operation (dict): Operation as a dictionary.
+ api_request (Callable): A callable used to make an API request. This
+ should generally be
+ :meth:`google.cloud._http.Connection.api_request`.
+ result_type (:func:`type`): The protobuf result type.
+ kwargs: Keyword args passed into the :class:`Operation` constructor.
+
+ Returns:
+ ~.api_core.operation.Operation: The operation future to track the given
+ operation.
+ """
+ operation_proto = json_format.ParseDict(operation, operations_pb2.Operation())
+ refresh = functools.partial(_refresh_http, api_request, operation_proto.name)
+ cancel = functools.partial(_cancel_http, api_request, operation_proto.name)
+ return Operation(operation_proto, refresh, cancel, result_type, **kwargs)
+
+
+def _refresh_grpc(operations_stub, operation_name):
+ """Refresh an operation using a gRPC client.
+
+ Args:
+ operations_stub (google.longrunning.operations_pb2.OperationsStub):
+ The gRPC operations stub.
+ operation_name (str): The name of the operation.
+
+ Returns:
+ google.longrunning.operations_pb2.Operation: The operation.
+ """
+ request_pb = operations_pb2.GetOperationRequest(name=operation_name)
+ return operations_stub.GetOperation(request_pb)
+
+
+def _cancel_grpc(operations_stub, operation_name):
+ """Cancel an operation using a gRPC client.
+
+ Args:
+ operations_stub (google.longrunning.operations_pb2.OperationsStub):
+ The gRPC operations stub.
+ operation_name (str): The name of the operation.
+ """
+ request_pb = operations_pb2.CancelOperationRequest(name=operation_name)
+ operations_stub.CancelOperation(request_pb)
+
+
+def from_grpc(operation, operations_stub, result_type, **kwargs):
+ """Create an operation future using a gRPC client.
+
+ This interacts with the long-running operations `service`_ (specific
+ to a given API) via gRPC.
+
+ .. _service: https://github.com/googleapis/googleapis/blob/\
+ 050400df0fdb16f63b63e9dee53819044bffc857/\
+ google/longrunning/operations.proto#L38
+
+ Args:
+ operation (google.longrunning.operations_pb2.Operation): The operation.
+ operations_stub (google.longrunning.operations_pb2.OperationsStub):
+ The operations stub.
+ result_type (:func:`type`): The protobuf result type.
+ kwargs: Keyword args passed into the :class:`Operation` constructor.
+
+ Returns:
+ ~.api_core.operation.Operation: The operation future to track the given
+ operation.
+ """
+ refresh = functools.partial(_refresh_grpc, operations_stub, operation.name)
+ cancel = functools.partial(_cancel_grpc, operations_stub, operation.name)
+ return Operation(operation, refresh, cancel, result_type, **kwargs)
+
+
+def from_gapic(operation, operations_client, result_type, **kwargs):
+ """Create an operation future from a gapic client.
+
+ This interacts with the long-running operations `service`_ (specific
+ to a given API) via a gapic client.
+
+ .. _service: https://github.com/googleapis/googleapis/blob/\
+ 050400df0fdb16f63b63e9dee53819044bffc857/\
+ google/longrunning/operations.proto#L38
+
+ Args:
+ operation (google.longrunning.operations_pb2.Operation): The operation.
+ operations_client (google.api_core.operations_v1.OperationsClient):
+ The operations client.
+ result_type (:func:`type`): The protobuf result type.
+ kwargs: Keyword args passed into the :class:`Operation` constructor.
+
+ Returns:
+ ~.api_core.operation.Operation: The operation future to track the given
+ operation.
+ """
+ refresh = functools.partial(operations_client.get_operation, operation.name)
+ cancel = functools.partial(operations_client.cancel_operation, operation.name)
+ return Operation(operation, refresh, cancel, result_type, **kwargs)
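The three factory helpers above differ only in how ``refresh`` and ``cancel``
are bound. A minimal consumption sketch (``lro`` and ``ops_client`` are
assumed to exist; ``Struct`` merely stands in for the API's real result type):

.. code-block:: python

    from google.api_core import operation
    from google.protobuf import struct_pb2

    future = operation.from_gapic(
        lro,                 # operations_pb2.Operation returned by the API
        ops_client,          # operations_v1.OperationsClient
        result_type=struct_pb2.Struct,
    )
    # Polls until the server marks the operation done; raises the mapped
    # GoogleAPICallError if the operation finished with an error.
    result = future.result(timeout=300)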
diff --git a/venv/Lib/site-packages/google/api_core/operation_async.py b/venv/Lib/site-packages/google/api_core/operation_async.py
new file mode 100644
index 000000000..89500af19
--- /dev/null
+++ b/venv/Lib/site-packages/google/api_core/operation_async.py
@@ -0,0 +1,215 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""AsyncIO futures for long-running operations returned from Google Cloud APIs.
+
+These futures can be used to await the result of a long-running operation
+using :meth:`AsyncOperation.result`:
+
+
+.. code-block:: python
+
+ operation = my_api_client.long_running_method()
+ result = await operation.result()
+
+Or asynchronously using callbacks and :meth:`Operation.add_done_callback`:
+
+.. code-block:: python
+
+ operation = my_api_client.long_running_method()
+
+ async def my_callback(future):
+ result = await future.result()
+
+ operation.add_done_callback(my_callback)
+
+"""
+
+import functools
+import threading
+
+from google.api_core import exceptions
+from google.api_core import protobuf_helpers
+from google.api_core.future import async_future
+from google.longrunning import operations_pb2
+from google.rpc import code_pb2
+
+
+class AsyncOperation(async_future.AsyncFuture):
+ """A Future for interacting with a Google API Long-Running Operation.
+
+ Args:
+ operation (google.longrunning.operations_pb2.Operation): The
+ initial operation.
+ refresh (Callable[[], ~.api_core.operation.Operation]): A callable that
+ returns the latest state of the operation.
+ cancel (Callable[[], None]): A callable that tries to cancel
+ the operation.
+ result_type (:func:`type`): The protobuf type for the operation's
+ result.
+ metadata_type (:func:`type`): The protobuf type for the operation's
+ metadata.
+ retry (google.api_core.retry.Retry): The retry configuration used
+ when polling. This can be used to control how often :meth:`done`
+ is polled. Regardless of the retry's ``deadline``, it will be
+ overridden by the ``timeout`` argument to :meth:`result`.
+ """
+
+ def __init__(
+ self,
+ operation,
+ refresh,
+ cancel,
+ result_type,
+ metadata_type=None,
+ retry=async_future.DEFAULT_RETRY,
+ ):
+ super().__init__(retry=retry)
+ self._operation = operation
+ self._refresh = refresh
+ self._cancel = cancel
+ self._result_type = result_type
+ self._metadata_type = metadata_type
+ self._completion_lock = threading.Lock()
+ # Invoke this in case the operation came back already complete.
+ self._set_result_from_operation()
+
+ @property
+ def operation(self):
+ """google.longrunning.Operation: The current long-running operation."""
+ return self._operation
+
+ @property
+ def metadata(self):
+ """google.protobuf.Message: the current operation metadata."""
+ if not self._operation.HasField("metadata"):
+ return None
+
+ return protobuf_helpers.from_any_pb(
+ self._metadata_type, self._operation.metadata
+ )
+
+ @classmethod
+ def deserialize(cls, payload):
+ """Deserialize a ``google.longrunning.Operation`` protocol buffer.
+
+ Args:
+ payload (bytes): A serialized operation protocol buffer.
+
+ Returns:
+ ~.operations_pb2.Operation: An Operation protobuf object.
+ """
+ return operations_pb2.Operation.FromString(payload)
+
+ def _set_result_from_operation(self):
+ """Set the result or exception from the operation if it is complete."""
+ # This must be done in a lock to prevent the async_future thread
+ # and main thread from both executing the completion logic
+ # at the same time.
+ with self._completion_lock:
+ # If the operation isn't complete or if the result has already been
+ # set, do not call set_result/set_exception again.
+ if not self._operation.done or self._future.done():
+ return
+
+ if self._operation.HasField("response"):
+ response = protobuf_helpers.from_any_pb(
+ self._result_type, self._operation.response
+ )
+ self.set_result(response)
+ elif self._operation.HasField("error"):
+ exception = exceptions.GoogleAPICallError(
+ self._operation.error.message,
+ errors=(self._operation.error,),
+ response=self._operation,
+ )
+ self.set_exception(exception)
+ else:
+ exception = exceptions.GoogleAPICallError(
+ "Unexpected state: Long-running operation had neither "
+ "response nor error set."
+ )
+ self.set_exception(exception)
+
+ async def _refresh_and_update(self, retry=async_future.DEFAULT_RETRY):
+ """Refresh the operation and update the result if needed.
+
+ Args:
+ retry (google.api_core.retry.Retry): (Optional) How to retry the RPC.
+ """
+ # If the currently cached operation is done, no need to make another
+ # RPC as it will not change once done.
+ if not self._operation.done:
+ self._operation = await self._refresh(retry=retry)
+ self._set_result_from_operation()
+
+ async def done(self, retry=async_future.DEFAULT_RETRY):
+ """Checks to see if the operation is complete.
+
+ Args:
+ retry (google.api_core.retry.Retry): (Optional) How to retry the RPC.
+
+ Returns:
+ bool: True if the operation is complete, False otherwise.
+ """
+ await self._refresh_and_update(retry)
+ return self._operation.done
+
+ async def cancel(self):
+ """Attempt to cancel the operation.
+
+ Returns:
+ bool: True if the cancel RPC was made, False if the operation is
+ already complete.
+ """
+ result = await self.done()
+ if result:
+ return False
+ else:
+ await self._cancel()
+ return True
+
+ async def cancelled(self):
+ """True if the operation was cancelled."""
+ await self._refresh_and_update()
+ return (
+ self._operation.HasField("error")
+ and self._operation.error.code == code_pb2.CANCELLED
+ )
+
+
+def from_gapic(operation, operations_client, result_type, **kwargs):
+ """Create an operation future from a gapic client.
+
+ This interacts with the long-running operations `service`_ (specific
+ to a given API) via a gapic client.
+
+ .. _service: https://github.com/googleapis/googleapis/blob/\
+ 050400df0fdb16f63b63e9dee53819044bffc857/\
+ google/longrunning/operations.proto#L38
+
+ Args:
+ operation (google.longrunning.operations_pb2.Operation): The operation.
+ operations_client (google.api_core.operations_v1.OperationsClient):
+ The operations client.
+ result_type (:func:`type`): The protobuf result type.
+ kwargs: Keyword args passed into the :class:`Operation` constructor.
+
+ Returns:
+ ~.api_core.operation.Operation: The operation future to track the given
+ operation.
+ """
+ refresh = functools.partial(operations_client.get_operation, operation.name)
+ cancel = functools.partial(operations_client.cancel_operation, operation.name)
+ return AsyncOperation(operation, refresh, cancel, result_type, **kwargs)
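The async variant mirrors the sync module but must be driven from a
coroutine. A small sketch, assuming ``future`` was built by the
``from_gapic`` above:

.. code-block:: python

    import asyncio

    async def wait_for_result(future):
        # ``result`` polls with the configured retry until the operation
        # completes, then returns the unpacked response message (or raises
        # the mapped GoogleAPICallError).
        return await future.result()

    # asyncio.run(wait_for_result(future))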
diff --git a/venv/Lib/site-packages/google/api_core/operations_v1/__init__.py b/venv/Lib/site-packages/google/api_core/operations_v1/__init__.py
new file mode 100644
index 000000000..bc9befcb7
--- /dev/null
+++ b/venv/Lib/site-packages/google/api_core/operations_v1/__init__.py
@@ -0,0 +1,24 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Package for interacting with the google.longrunning.operations meta-API."""
+
+import sys
+
+from google.api_core.operations_v1.operations_client import OperationsClient
+
+__all__ = ["OperationsClient"]
+if sys.version_info >= (3, 6, 0):
+ from google.api_core.operations_v1.operations_async_client import OperationsAsyncClient # noqa: F401
+ __all__.append("OperationsAsyncClient")
diff --git a/venv/Lib/site-packages/google/api_core/operations_v1/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/google/api_core/operations_v1/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..c3d575365
Binary files /dev/null and b/venv/Lib/site-packages/google/api_core/operations_v1/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/api_core/operations_v1/__pycache__/operations_async_client.cpython-36.pyc b/venv/Lib/site-packages/google/api_core/operations_v1/__pycache__/operations_async_client.cpython-36.pyc
new file mode 100644
index 000000000..c9b893f86
Binary files /dev/null and b/venv/Lib/site-packages/google/api_core/operations_v1/__pycache__/operations_async_client.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/api_core/operations_v1/__pycache__/operations_client.cpython-36.pyc b/venv/Lib/site-packages/google/api_core/operations_v1/__pycache__/operations_client.cpython-36.pyc
new file mode 100644
index 000000000..3ebeaa499
Binary files /dev/null and b/venv/Lib/site-packages/google/api_core/operations_v1/__pycache__/operations_client.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/api_core/operations_v1/__pycache__/operations_client_config.cpython-36.pyc b/venv/Lib/site-packages/google/api_core/operations_v1/__pycache__/operations_client_config.cpython-36.pyc
new file mode 100644
index 000000000..184cc0281
Binary files /dev/null and b/venv/Lib/site-packages/google/api_core/operations_v1/__pycache__/operations_client_config.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/api_core/operations_v1/operations_async_client.py b/venv/Lib/site-packages/google/api_core/operations_v1/operations_async_client.py
new file mode 100644
index 000000000..039bec1b2
--- /dev/null
+++ b/venv/Lib/site-packages/google/api_core/operations_v1/operations_async_client.py
@@ -0,0 +1,274 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""An async client for the google.longrunning.operations meta-API.
+
+.. _Google API Style Guide:
+    https://cloud.google.com/apis/design/design_patterns#long_running_operations
+.. _google/longrunning/operations.proto:
+    https://github.com/googleapis/googleapis/blob/master/google/longrunning/operations.proto
+"""
+
+import functools
+
+from google.api_core import gapic_v1, page_iterator_async
+from google.api_core.operations_v1 import operations_client_config
+from google.longrunning import operations_pb2
+
+
+class OperationsAsyncClient:
+ """Async client for interacting with long-running operations.
+
+ Args:
+ channel (aio.Channel): The gRPC AsyncIO channel associated with the
+ service that implements the ``google.longrunning.operations``
+ interface.
+ client_config (dict):
+ A dictionary of call options for each method. If not specified
+ the default configuration is used.
+ """
+
+ def __init__(self, channel, client_config=operations_client_config.config):
+ # Create the gRPC client stub with gRPC AsyncIO channel.
+ self.operations_stub = operations_pb2.OperationsStub(channel)
+
+ # Create all wrapped methods using the interface configuration.
+ # The interface config contains all of the default settings for retry
+ # and timeout for each RPC method.
+ interfaces = client_config["interfaces"]
+ interface_config = interfaces["google.longrunning.Operations"]
+ method_configs = gapic_v1.config_async.parse_method_configs(interface_config)
+
+ self._get_operation = gapic_v1.method_async.wrap_method(
+ self.operations_stub.GetOperation,
+ default_retry=method_configs["GetOperation"].retry,
+ default_timeout=method_configs["GetOperation"].timeout,
+ )
+
+ self._list_operations = gapic_v1.method_async.wrap_method(
+ self.operations_stub.ListOperations,
+ default_retry=method_configs["ListOperations"].retry,
+ default_timeout=method_configs["ListOperations"].timeout,
+ )
+
+ self._cancel_operation = gapic_v1.method_async.wrap_method(
+ self.operations_stub.CancelOperation,
+ default_retry=method_configs["CancelOperation"].retry,
+ default_timeout=method_configs["CancelOperation"].timeout,
+ )
+
+ self._delete_operation = gapic_v1.method_async.wrap_method(
+ self.operations_stub.DeleteOperation,
+ default_retry=method_configs["DeleteOperation"].retry,
+ default_timeout=method_configs["DeleteOperation"].timeout,
+ )
+
+ async def get_operation(
+ self, name, retry=gapic_v1.method_async.DEFAULT, timeout=gapic_v1.method_async.DEFAULT
+ ):
+ """Gets the latest state of a long-running operation.
+
+ Clients can use this method to poll the operation result at intervals
+ as recommended by the API service.
+
+ Example:
+ >>> from google.api_core import operations_v1
+ >>> api = operations_v1.OperationsAsyncClient()
+ >>> name = ''
+ >>> response = await api.get_operation(name)
+
+ Args:
+ name (str): The name of the operation resource.
+ retry (google.api_core.retry.Retry): The retry strategy to use
+ when invoking the RPC. If unspecified, the default retry from
+ the client configuration will be used. If ``None``, then this
+ method will not retry the RPC at all.
+ timeout (float): The amount of time in seconds to wait for the RPC
+ to complete. Note that if ``retry`` is used, this timeout
+ applies to each individual attempt and the overall time it
+ takes for this method to complete may be longer. If
+ unspecified, the default timeout in the client
+ configuration is used. If ``None``, then the RPC method will
+ not time out.
+
+ Returns:
+ google.longrunning.operations_pb2.Operation: The state of the
+ operation.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If an error occurred
+ while invoking the RPC, the appropriate ``GoogleAPICallError``
+ subclass will be raised.
+ """
+ request = operations_pb2.GetOperationRequest(name=name)
+ return await self._get_operation(request, retry=retry, timeout=timeout)
+
+ async def list_operations(
+ self,
+ name,
+ filter_,
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout=gapic_v1.method_async.DEFAULT,
+ ):
+ """
+ Lists operations that match the specified filter in the request.
+
+ Example:
+ >>> from google.api_core import operations_v1
+ >>> api = operations_v1.OperationsAsyncClient()
+ >>> name = ''
+ >>>
+ >>> # Iterate over all results
+ >>> async for operation in await api.list_operations(name):
+ >>> # process operation
+ >>> pass
+ >>>
+ >>> # Or iterate over results one page at a time
+ >>> iter = await api.list_operations(name)
+ >>> async for page in iter.pages:
+ >>> for operation in page:
+ >>> # process operation
+ >>> pass
+
+ Args:
+ name (str): The name of the operation collection.
+ filter_ (str): The standard list filter.
+ retry (google.api_core.retry.Retry): The retry strategy to use
+ when invoking the RPC. If unspecified, the default retry from
+ the client configuration will be used. If ``None``, then this
+ method will not retry the RPC at all.
+ timeout (float): The amount of time in seconds to wait for the RPC
+ to complete. Note that if ``retry`` is used, this timeout
+ applies to each individual attempt and the overall time it
+ takes for this method to complete may be longer. If
+ unspecified, the default timeout in the client
+ configuration is used. If ``None``, then the RPC method will
+ not time out.
+
+ Returns:
+ google.api_core.page_iterator.Iterator: An iterator that yields
+ :class:`google.longrunning.operations_pb2.Operation` instances.
+
+ Raises:
+ google.api_core.exceptions.MethodNotImplemented: If the server
+ does not support this method. Services are not required to
+ implement this method.
+ google.api_core.exceptions.GoogleAPICallError: If an error occurred
+ while invoking the RPC, the appropriate ``GoogleAPICallError``
+ subclass will be raised.
+ """
+ # Create the request object.
+ request = operations_pb2.ListOperationsRequest(name=name, filter=filter_)
+
+ # Create the method used to fetch pages
+ method = functools.partial(self._list_operations, retry=retry, timeout=timeout)
+
+ iterator = page_iterator_async.AsyncGRPCIterator(
+ client=None,
+ method=method,
+ request=request,
+ items_field="operations",
+ request_token_field="page_token",
+ response_token_field="next_page_token",
+ )
+
+ return iterator
+
+ async def cancel_operation(
+ self, name, retry=gapic_v1.method_async.DEFAULT, timeout=gapic_v1.method_async.DEFAULT
+ ):
+ """Starts asynchronous cancellation on a long-running operation.
+
+ The server makes a best effort to cancel the operation, but success is
+ not guaranteed. Clients can use :meth:`get_operation` or service-
+ specific methods to check whether the cancellation succeeded or whether
+ the operation completed despite cancellation. On successful
+ cancellation, the operation is not deleted; instead, it becomes an
+ operation with an ``Operation.error`` value with a
+ ``google.rpc.Status.code`` of ``1``, corresponding to
+ ``Code.CANCELLED``.
+
+ Example:
+ >>> from google.api_core import operations_v1
+ >>> api = operations_v1.OperationsAsyncClient()
+ >>> name = ''
+ >>> await api.cancel_operation(name)
+
+ Args:
+ name (str): The name of the operation resource to be cancelled.
+ retry (google.api_core.retry.Retry): The retry strategy to use
+ when invoking the RPC. If unspecified, the default retry from
+ the client configuration will be used. If ``None``, then this
+ method will not retry the RPC at all.
+ timeout (float): The amount of time in seconds to wait for the RPC
+ to complete. Note that if ``retry`` is used, this timeout
+ applies to each individual attempt and the overall time it
+ takes for this method to complete may be longer. If
+ unspecified, the default timeout in the client
+ configuration is used. If ``None``, then the RPC method will
+ not time out.
+
+ Raises:
+ google.api_core.exceptions.MethodNotImplemented: If the server
+ does not support this method. Services are not required to
+ implement this method.
+ google.api_core.exceptions.GoogleAPICallError: If an error occurred
+ while invoking the RPC, the appropriate ``GoogleAPICallError``
+ subclass will be raised.
+ """
+ # Create the request object.
+ request = operations_pb2.CancelOperationRequest(name=name)
+ await self._cancel_operation(request, retry=retry, timeout=timeout)
+
+ async def delete_operation(
+ self, name, retry=gapic_v1.method_async.DEFAULT, timeout=gapic_v1.method_async.DEFAULT
+ ):
+ """Deletes a long-running operation.
+
+ This method indicates that the client is no longer interested in the
+ operation result. It does not cancel the operation.
+
+ Example:
+ >>> from google.api_core import operations_v1
+ >>> api = operations_v1.OperationsAsyncClient()
+ >>> name = ''
+ >>> await api.delete_operation(name)
+
+ Args:
+ name (str): The name of the operation resource to be deleted.
+ retry (google.api_core.retry.Retry): The retry strategy to use
+ when invoking the RPC. If unspecified, the default retry from
+ the client configuration will be used. If ``None``, then this
+ method will not retry the RPC at all.
+ timeout (float): The amount of time in seconds to wait for the RPC
+ to complete. Note that if ``retry`` is used, this timeout
+ applies to each individual attempt and the overall time it
+ takes for this method to complete may be longer. If
+ unspecified, the the default timeout in the client
+ configuration is used. If ``None``, then the RPC method will
+ not time out.
+
+ Raises:
+ google.api_core.exceptions.MethodNotImplemented: If the server
+ does not support this method. Services are not required to
+ implement this method.
+ google.api_core.exceptions.GoogleAPICallError: If an error occurred
+ while invoking the RPC, the appropriate ``GoogleAPICallError``
+ subclass will be raised.
+ """
+ # Create the request object.
+ request = operations_pb2.DeleteOperationRequest(name=name)
+ await self._delete_operation(request, retry=retry, timeout=timeout)
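A construction sketch for the client above, assuming a reasonably recent
``grpcio`` with AsyncIO support (the endpoint and operation name are made
up):

.. code-block:: python

    import asyncio
    import grpc
    from google.api_core import operations_v1

    async def main():
        async with grpc.aio.insecure_channel("localhost:8500") as channel:
            client = operations_v1.OperationsAsyncClient(channel)
            op = await client.get_operation("operations/my-op")
            print(op.name, op.done)

    asyncio.run(main())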
diff --git a/venv/Lib/site-packages/google/api_core/operations_v1/operations_client.py b/venv/Lib/site-packages/google/api_core/operations_v1/operations_client.py
new file mode 100644
index 000000000..cd2923bb9
--- /dev/null
+++ b/venv/Lib/site-packages/google/api_core/operations_v1/operations_client.py
@@ -0,0 +1,288 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A client for the google.longrunning.operations meta-API.
+
+This is a client that deals with long-running operations that follow the
+pattern outlined by the `Google API Style Guide`_.
+
+When an API method normally takes a long time to complete, it can be designed to
+return ``Operation`` to the client, and the client can use this interface to
+receive the real response asynchronously by polling the operation resource to
+receive the response.
+
+It is not a separate service, but rather an interface implemented by a larger
+service. The protocol-level definition is available at
+`google/longrunning/operations.proto`_. Typically, this will be constructed
+automatically by another client class to deal with operations.
+
+.. _Google API Style Guide:
+    https://cloud.google.com/apis/design/design_patterns#long_running_operations
+.. _google/longrunning/operations.proto:
+    https://github.com/googleapis/googleapis/blob/master/google/longrunning/operations.proto
+"""
+
+import functools
+
+from google.api_core import gapic_v1
+from google.api_core import page_iterator
+from google.api_core.operations_v1 import operations_client_config
+from google.longrunning import operations_pb2
+
+
+class OperationsClient(object):
+ """Client for interacting with long-running operations within a service.
+
+ Args:
+ channel (grpc.Channel): The gRPC channel associated with the service
+ that implements the ``google.longrunning.operations`` interface.
+ client_config (dict):
+ A dictionary of call options for each method. If not specified
+ the default configuration is used.
+ """
+
+ def __init__(self, channel, client_config=operations_client_config.config):
+ # Create the gRPC client stub.
+ self.operations_stub = operations_pb2.OperationsStub(channel)
+
+ # Create all wrapped methods using the interface configuration.
+ # The interface config contains all of the default settings for retry
+ # and timeout for each RPC method.
+ interfaces = client_config["interfaces"]
+ interface_config = interfaces["google.longrunning.Operations"]
+ method_configs = gapic_v1.config.parse_method_configs(interface_config)
+
+ self._get_operation = gapic_v1.method.wrap_method(
+ self.operations_stub.GetOperation,
+ default_retry=method_configs["GetOperation"].retry,
+ default_timeout=method_configs["GetOperation"].timeout,
+ )
+
+ self._list_operations = gapic_v1.method.wrap_method(
+ self.operations_stub.ListOperations,
+ default_retry=method_configs["ListOperations"].retry,
+ default_timeout=method_configs["ListOperations"].timeout,
+ )
+
+ self._cancel_operation = gapic_v1.method.wrap_method(
+ self.operations_stub.CancelOperation,
+ default_retry=method_configs["CancelOperation"].retry,
+ default_timeout=method_configs["CancelOperation"].timeout,
+ )
+
+ self._delete_operation = gapic_v1.method.wrap_method(
+ self.operations_stub.DeleteOperation,
+ default_retry=method_configs["DeleteOperation"].retry,
+ default_timeout=method_configs["DeleteOperation"].timeout,
+ )
+
+ # Service calls
+ def get_operation(
+ self, name, retry=gapic_v1.method.DEFAULT, timeout=gapic_v1.method.DEFAULT
+ ):
+ """Gets the latest state of a long-running operation.
+
+ Clients can use this method to poll the operation result at intervals
+ as recommended by the API service.
+
+ Example:
+ >>> from google.api_core import operations_v1
+ >>> api = operations_v1.OperationsClient()
+ >>> name = ''
+ >>> response = api.get_operation(name)
+
+ Args:
+ name (str): The name of the operation resource.
+ retry (google.api_core.retry.Retry): The retry strategy to use
+ when invoking the RPC. If unspecified, the default retry from
+ the client configuration will be used. If ``None``, then this
+ method will not retry the RPC at all.
+ timeout (float): The amount of time in seconds to wait for the RPC
+ to complete. Note that if ``retry`` is used, this timeout
+ applies to each individual attempt and the overall time it
+ takes for this method to complete may be longer. If
+ unspecified, the default timeout in the client
+ configuration is used. If ``None``, then the RPC method will
+ not time out.
+
+ Returns:
+ google.longrunning.operations_pb2.Operation: The state of the
+ operation.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If an error occurred
+ while invoking the RPC, the appropriate ``GoogleAPICallError``
+ subclass will be raised.
+ """
+ request = operations_pb2.GetOperationRequest(name=name)
+ return self._get_operation(request, retry=retry, timeout=timeout)
+
+ def list_operations(
+ self,
+ name,
+ filter_,
+ retry=gapic_v1.method.DEFAULT,
+ timeout=gapic_v1.method.DEFAULT,
+ ):
+ """
+ Lists operations that match the specified filter in the request.
+
+ Example:
+ >>> from google.api_core import operations_v1
+ >>> api = operations_v1.OperationsClient()
+ >>> name = ''
+ >>>
+ >>> # Iterate over all results
+ >>> for operation in api.list_operations(name):
+ >>> # process operation
+ >>> pass
+ >>>
+ >>> # Or iterate over results one page at a time
+ >>> iter = api.list_operations(name)
+ >>> for page in iter.pages:
+ >>> for operation in page:
+ >>> # process operation
+ >>> pass
+
+ Args:
+ name (str): The name of the operation collection.
+ filter_ (str): The standard list filter.
+ retry (google.api_core.retry.Retry): The retry strategy to use
+ when invoking the RPC. If unspecified, the default retry from
+ the client configuration will be used. If ``None``, then this
+ method will not retry the RPC at all.
+ timeout (float): The amount of time in seconds to wait for the RPC
+ to complete. Note that if ``retry`` is used, this timeout
+ applies to each individual attempt and the overall time it
+ takes for this method to complete may be longer. If
+ unspecified, the default timeout in the client
+ configuration is used. If ``None``, then the RPC method will
+ not time out.
+
+ Returns:
+ google.api_core.page_iterator.Iterator: An iterator that yields
+ :class:`google.longrunning.operations_pb2.Operation` instances.
+
+ Raises:
+ google.api_core.exceptions.MethodNotImplemented: If the server
+ does not support this method. Services are not required to
+ implement this method.
+ google.api_core.exceptions.GoogleAPICallError: If an error occurred
+ while invoking the RPC, the appropriate ``GoogleAPICallError``
+ subclass will be raised.
+ """
+ # Create the request object.
+ request = operations_pb2.ListOperationsRequest(name=name, filter=filter_)
+
+ # Create the method used to fetch pages
+ method = functools.partial(self._list_operations, retry=retry, timeout=timeout)
+
+ iterator = page_iterator.GRPCIterator(
+ client=None,
+ method=method,
+ request=request,
+ items_field="operations",
+ request_token_field="page_token",
+ response_token_field="next_page_token",
+ )
+
+ return iterator
+
+ def cancel_operation(
+ self, name, retry=gapic_v1.method.DEFAULT, timeout=gapic_v1.method.DEFAULT
+ ):
+ """Starts asynchronous cancellation on a long-running operation.
+
+ The server makes a best effort to cancel the operation, but success is
+ not guaranteed. Clients can use :meth:`get_operation` or service-
+ specific methods to check whether the cancellation succeeded or whether
+ the operation completed despite cancellation. On successful
+ cancellation, the operation is not deleted; instead, it becomes an
+ operation with an ``Operation.error`` value with a
+ ``google.rpc.Status.code`` of ``1``, corresponding to
+ ``Code.CANCELLED``.
+
+ Example:
+ >>> from google.api_core import operations_v1
+ >>> api = operations_v1.OperationsClient()
+ >>> name = ''
+ >>> api.cancel_operation(name)
+
+ Args:
+ name (str): The name of the operation resource to be cancelled.
+ retry (google.api_core.retry.Retry): The retry strategy to use
+ when invoking the RPC. If unspecified, the default retry from
+ the client configuration will be used. If ``None``, then this
+ method will not retry the RPC at all.
+ timeout (float): The amount of time in seconds to wait for the RPC
+ to complete. Note that if ``retry`` is used, this timeout
+ applies to each individual attempt and the overall time it
+ takes for this method to complete may be longer. If
+ unspecified, the default timeout in the client
+ configuration is used. If ``None``, then the RPC method will
+ not time out.
+
+ Raises:
+ google.api_core.exceptions.MethodNotImplemented: If the server
+ does not support this method. Services are not required to
+ implement this method.
+ google.api_core.exceptions.GoogleAPICallError: If an error occurred
+ while invoking the RPC, the appropriate ``GoogleAPICallError``
+ subclass will be raised.
+ """
+ # Create the request object.
+ request = operations_pb2.CancelOperationRequest(name=name)
+ self._cancel_operation(request, retry=retry, timeout=timeout)
+
+ def delete_operation(
+ self, name, retry=gapic_v1.method.DEFAULT, timeout=gapic_v1.method.DEFAULT
+ ):
+ """Deletes a long-running operation.
+
+ This method indicates that the client is no longer interested in the
+ operation result. It does not cancel the operation.
+
+ Example:
+ >>> from google.api_core import operations_v1
+ >>> api = operations_v1.OperationsClient()
+ >>> name = ''
+ >>> api.delete_operation(name)
+
+ Args:
+ name (str): The name of the operation resource to be deleted.
+ retry (google.api_core.retry.Retry): The retry strategy to use
+ when invoking the RPC. If unspecified, the default retry from
+ the client configuration will be used. If ``None``, then this
+ method will not retry the RPC at all.
+ timeout (float): The amount of time in seconds to wait for the RPC
+ to complete. Note that if ``retry`` is used, this timeout
+ applies to each individual attempt and the overall time it
+ takes for this method to complete may be longer. If
+ unspecified, the default timeout in the client
+ configuration is used. If ``None``, then the RPC method will
+ not time out.
+
+ Raises:
+ google.api_core.exceptions.MethodNotImplemented: If the server
+ does not support this method. Services are not required to
+ implement this method.
+ google.api_core.exceptions.GoogleAPICallError: If an error occurred
+ while invoking the RPC, the appropriate ``GoogleAPICallError``
+ subclass will be raised.
+ """
+ # Create the request object.
+ request = operations_pb2.DeleteOperationRequest(name=name)
+ self._delete_operation(request, retry=retry, timeout=timeout)
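The same flow with the synchronous client (the endpoint and operation name
are illustrative):

.. code-block:: python

    import grpc
    from google.api_core import operations_v1

    channel = grpc.insecure_channel("localhost:8500")
    client = operations_v1.OperationsClient(channel)

    op = client.get_operation("operations/my-op")
    if not op.done:
        # Cancellation is best-effort; poll get_operation again to
        # observe the outcome.
        client.cancel_operation(op.name)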
diff --git a/venv/Lib/site-packages/google/api_core/operations_v1/operations_client_config.py b/venv/Lib/site-packages/google/api_core/operations_v1/operations_client_config.py
new file mode 100644
index 000000000..6cf95753f
--- /dev/null
+++ b/venv/Lib/site-packages/google/api_core/operations_v1/operations_client_config.py
@@ -0,0 +1,59 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""gapic configuration for the googe.longrunning.operations client."""
+
+config = {
+ "interfaces": {
+ "google.longrunning.Operations": {
+ "retry_codes": {
+ "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"],
+ "non_idempotent": [],
+ },
+ "retry_params": {
+ "default": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 20000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 600000,
+ "total_timeout_millis": 600000,
+ }
+ },
+ "methods": {
+ "GetOperation": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "default",
+ },
+ "ListOperations": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "default",
+ },
+ "CancelOperation": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "default",
+ },
+ "DeleteOperation": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "default",
+ },
+ },
+ }
+ }
+}
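The ``retry_params`` above describe an exponential backoff; a rough sketch of
the delay schedule they imply (jitter omitted for clarity):

.. code-block:: python

    initial = 0.1        # initial_retry_delay_millis / 1000
    multiplier = 1.3     # retry_delay_multiplier
    maximum = 60.0       # max_retry_delay_millis / 1000

    delay = initial
    for attempt in range(1, 6):
        print("attempt {}: sleep up to {:.3f}s".format(attempt, delay))
        delay = min(delay * multiplier, maximum)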
diff --git a/venv/Lib/site-packages/google/api_core/page_iterator.py b/venv/Lib/site-packages/google/api_core/page_iterator.py
new file mode 100644
index 000000000..11a92d38f
--- /dev/null
+++ b/venv/Lib/site-packages/google/api_core/page_iterator.py
@@ -0,0 +1,557 @@
+# Copyright 2015 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Iterators for paging through paged API methods.
+
+These iterators simplify the process of paging through API responses
+where the request takes a page token and the response is a list of results with
+a token for the next page. See `list pagination`_ in the Google API Style Guide
+for more details.
+
+.. _list pagination:
+ https://cloud.google.com/apis/design/design_patterns#list_pagination
+
+API clients that have methods that follow the list pagination pattern can
+return an :class:`.Iterator`. You can use this iterator to get **all** of
+the results across all pages::
+
+ >>> results_iterator = client.list_resources()
+ >>> list(results_iterator) # Convert to a list (consumes all values).
+
+Or you can walk your way through items and call off the search early if
+you find what you're looking for (resulting in possibly fewer requests)::
+
+ >>> for resource in results_iterator:
+ ... print(resource.name)
+ ... if not resource.is_valid:
+ ... break
+
+At any point, you may check the number of items consumed by referencing the
+``num_results`` property of the iterator::
+
+ >>> for my_item in results_iterator:
+ ... if results_iterator.num_results >= 10:
+ ... break
+
+When iterating, not every new item will send a request to the server.
+To iterate based on each page of items (where a page corresponds to
+a request)::
+
+ >>> for page in results_iterator.pages:
+ ... print('=' * 20)
+ ... print(' Page number: {:d}'.format(results_iterator.page_number))
+ ... print(' Items in page: {:d}'.format(page.num_items))
+ ... print(' First item: {!r}'.format(next(page)))
+ ... print('Items remaining: {:d}'.format(page.remaining))
+ ... print('Next page token: {}'.format(results_iterator.next_page_token))
+ ====================
+ Page number: 1
+ Items in page: 1
+ First item: <MyItemClass at 0x...>
+ Items remaining: 0
+ Next page token: eav1OzQB0OM8rLdGXOEsyQWSG
+ ====================
+ Page number: 2
+ Items in page: 19
+ First item: <MyItemClass at 0x...>
+ Items remaining: 18
+ Next page token: None
+
+Then, for each page you can get all the resources on that page by iterating
+through it or using :func:`list`::
+
+ >>> list(page)
+ [
+     <MyItemClass at 0x...>,
+     <MyItemClass at 0x...>,
+     <MyItemClass at 0x...>,
+ ]
+"""
+
+import abc
+
+import six
+
+
+class Page(object):
+ """Single page of results in an iterator.
+
+ Args:
+ parent (google.api_core.page_iterator.Iterator): The iterator that owns
+ the current page.
+ items (Sequence[Any]): An iterable (that also defines __len__) of items
+ from a raw API response.
+ item_to_value (Callable[google.api_core.page_iterator.Iterator, Any]):
+ Callable to convert an item from the type in the raw API response
+ into the native object. Will be called with the iterator and a
+ single item.
+ raw_page (Optional[google.protobuf.message.Message]):
+ The raw page response.
+ """
+
+ def __init__(self, parent, items, item_to_value, raw_page=None):
+ self._parent = parent
+ self._num_items = len(items)
+ self._remaining = self._num_items
+ self._item_iter = iter(items)
+ self._item_to_value = item_to_value
+ self._raw_page = raw_page
+
+ @property
+ def raw_page(self):
+ """google.protobuf.message.Message"""
+ return self._raw_page
+
+ @property
+ def num_items(self):
+ """int: Total items in the page."""
+ return self._num_items
+
+ @property
+ def remaining(self):
+ """int: Remaining items in the page."""
+ return self._remaining
+
+ def __iter__(self):
+ """The :class:`Page` is an iterator of items."""
+ return self
+
+ def next(self):
+ """Get the next value in the page."""
+ item = six.next(self._item_iter)
+ result = self._item_to_value(self._parent, item)
+ # Since we've successfully got the next value from the
+ # iterator, we update the number of remaining.
+ self._remaining -= 1
+ return result
+
+ # Alias needed for Python 2/3 support.
+ __next__ = next
+
+
+def _item_to_value_identity(iterator, item):
+ """An item to value transformer that returns the item un-changed."""
+ # pylint: disable=unused-argument
+ # We are conforming to the interface defined by Iterator.
+ return item
+
+
+@six.add_metaclass(abc.ABCMeta)
+class Iterator(object):
+ """A generic class for iterating through API list responses.
+
+ Args:
+ client(google.cloud.client.Client): The API client.
+ item_to_value (Callable[google.api_core.page_iterator.Iterator, Any]):
+ Callable to convert an item from the type in the raw API response
+ into the native object. Will be called with the iterator and a
+ single item.
+ page_token (str): A token identifying a page in a result set to start
+ fetching results from.
+ max_results (int): The maximum number of results to fetch.
+ """
+
+ def __init__(
+ self,
+ client,
+ item_to_value=_item_to_value_identity,
+ page_token=None,
+ max_results=None,
+ ):
+ self._started = False
+ self.client = client
+ """Optional[Any]: The client that created this iterator."""
+ self.item_to_value = item_to_value
+ """Callable[Iterator, Any]: Callable to convert an item from the type
+ in the raw API response into the native object. Will be called with
+ the iterator and a
+ single item.
+ """
+ self.max_results = max_results
+ """int: The maximum number of results to fetch."""
+
+ # The attributes below will change over the life of the iterator.
+ self.page_number = 0
+ """int: The current page of results."""
+ self.next_page_token = page_token
+ """str: The token for the next page of results. If this is set before
+ the iterator starts, it effectively offsets the iterator to a
+ specific starting point."""
+ self.num_results = 0
+ """int: The total number of results fetched so far."""
+
+ @property
+ def pages(self):
+ """Iterator of pages in the response.
+
+ Returns:
+ types.GeneratorType[google.api_core.page_iterator.Page]: A
+ generator of page instances.
+
+ Raises:
+ ValueError: If the iterator has already been started.
+ """
+ if self._started:
+ raise ValueError("Iterator has already started", self)
+ self._started = True
+ return self._page_iter(increment=True)
+
+ def _items_iter(self):
+ """Iterator for each item returned."""
+ for page in self._page_iter(increment=False):
+ for item in page:
+ self.num_results += 1
+ yield item
+
+ def __iter__(self):
+ """Iterator for each item returned.
+
+ Returns:
+ types.GeneratorType[Any]: A generator of items from the API.
+
+ Raises:
+ ValueError: If the iterator has already been started.
+ """
+ if self._started:
+ raise ValueError("Iterator has already started", self)
+ self._started = True
+ return self._items_iter()
+
+ def _page_iter(self, increment):
+ """Generator of pages of API responses.
+
+ Args:
+ increment (bool): Flag indicating if the total number of results
+ should be incremented on each page. This is useful since a page
+ iterator will want to increment by results per page while an
+ items iterator will want to increment per item.
+
+ Yields:
+ Page: each page of items from the API.
+ """
+ page = self._next_page()
+ while page is not None:
+ self.page_number += 1
+ if increment:
+ self.num_results += page.num_items
+ yield page
+ page = self._next_page()
+
+ @abc.abstractmethod
+ def _next_page(self):
+ """Get the next page in the iterator.
+
+ This does nothing and is intended to be overridden by subclasses
+ to return the next :class:`Page`.
+
+ Raises:
+ NotImplementedError: Always, this method is abstract.
+ """
+ raise NotImplementedError
+
+
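+# Minimal in-memory subclass (an illustrative sketch, not used by the
+# library): shows the ``_next_page`` contract that the concrete iterators
+# below implement against real backends.
+class _ListIterator(Iterator):
+    """Example only: serves pre-built pages from a list of item lists."""
+
+    def __init__(self, pages):
+        super(_ListIterator, self).__init__(client=None)
+        self._source = iter(pages)
+
+    def _next_page(self):
+        # Returning None signals that there are no pages left.
+        items = next(self._source, None)
+        if items is None:
+            return None
+        return Page(self, items, _item_to_value_identity)
+
+# list(_ListIterator([[1, 2], [3]])) == [1, 2, 3]
+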
+def _do_nothing_page_start(iterator, page, response):
+ """Helper to provide custom behavior after a :class:`Page` is started.
+
+ This is a do-nothing stand-in as the default value.
+
+ Args:
+ iterator (Iterator): An iterator that holds some request info.
+ page (Page): The page that was just created.
+ response (Any): The API response for a page.
+ """
+ # pylint: disable=unused-argument
+ pass
+
+
+class HTTPIterator(Iterator):
+ """A generic class for iterating through HTTP/JSON API list responses.
+
+ To make an iterator work, you'll need to provide a way to convert a JSON
+ item returned from the API into the object of your choice (via
+ ``item_to_value``). You also may need to specify a custom ``items_key`` so
+ that a given response (containing a page of results) can be parsed into an
+ iterable page of the actual objects you want.
+
+ Args:
+ client (google.cloud.client.Client): The API client.
+ api_request (Callable): The function to use to make API requests.
+ Generally, this will be
+ :meth:`google.cloud._http.JSONConnection.api_request`.
+ path (str): The method path to query for the list of items.
+ item_to_value (Callable[google.api_core.page_iterator.Iterator, Any]):
+ Callable to convert an item from the type in the JSON response into
+ a native object. Will be called with the iterator and a single
+ item.
+ items_key (str): The key in the API response where the list of items
+ can be found.
+ page_token (str): A token identifying a page in a result set to start
+ fetching results from.
+ max_results (int): The maximum number of results to fetch.
+ extra_params (dict): Extra query string parameters for the
+ API call.
+ page_start (Callable[
+ google.api_core.page_iterator.Iterator,
+ google.api_core.page_iterator.Page, dict]): Callable to provide
+ any special behavior after a new page has been created. Assumed
+ signature takes the :class:`.Iterator` that started the page,
+ the :class:`.Page` that was started and the dictionary containing
+ the page response.
+ next_token (str): The name of the field used in the response for page
+ tokens.
+
+ .. autoattribute:: pages
+ """
+
+ _DEFAULT_ITEMS_KEY = "items"
+ _PAGE_TOKEN = "pageToken"
+ _MAX_RESULTS = "maxResults"
+ _NEXT_TOKEN = "nextPageToken"
+ _RESERVED_PARAMS = frozenset([_PAGE_TOKEN])
+ _HTTP_METHOD = "GET"
+
+ def __init__(
+ self,
+ client,
+ api_request,
+ path,
+ item_to_value,
+ items_key=_DEFAULT_ITEMS_KEY,
+ page_token=None,
+ max_results=None,
+ extra_params=None,
+ page_start=_do_nothing_page_start,
+ next_token=_NEXT_TOKEN,
+ ):
+ super(HTTPIterator, self).__init__(
+ client, item_to_value, page_token=page_token, max_results=max_results
+ )
+ self.api_request = api_request
+ self.path = path
+ self._items_key = items_key
+ self.extra_params = extra_params
+ self._page_start = page_start
+ self._next_token = next_token
+ # Verify inputs / provide defaults.
+ if self.extra_params is None:
+ self.extra_params = {}
+ self._verify_params()
+
+ def _verify_params(self):
+ """Verifies the parameters don't use any reserved parameter.
+
+ Raises:
+ ValueError: If a reserved parameter is used.
+ """
+ reserved_in_use = self._RESERVED_PARAMS.intersection(self.extra_params)
+ if reserved_in_use:
+ raise ValueError("Using a reserved parameter", reserved_in_use)
+
+ def _next_page(self):
+ """Get the next page in the iterator.
+
+ Returns:
+ Optional[Page]: The next page in the iterator or :data:`None` if
+ there are no pages left.
+ """
+ if self._has_next_page():
+ response = self._get_next_page_response()
+ items = response.get(self._items_key, ())
+ page = Page(self, items, self.item_to_value, raw_page=response)
+ self._page_start(self, page, response)
+ self.next_page_token = response.get(self._next_token)
+ return page
+ else:
+ return None
+
+ def _has_next_page(self):
+ """Determines whether or not there are more pages with results.
+
+ Returns:
+ bool: Whether the iterator has more pages.
+ """
+ if self.page_number == 0:
+ return True
+
+ if self.max_results is not None:
+ if self.num_results >= self.max_results:
+ return False
+
+ return self.next_page_token is not None
+
+ def _get_query_params(self):
+ """Getter for query parameters for the next request.
+
+ Returns:
+ dict: A dictionary of query parameters.
+ """
+ result = {}
+ if self.next_page_token is not None:
+ result[self._PAGE_TOKEN] = self.next_page_token
+ if self.max_results is not None:
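+            # Ask the server only for the results still needed to reach
+            # max_results; num_results counts what has been fetched so far.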
+ result[self._MAX_RESULTS] = self.max_results - self.num_results
+ result.update(self.extra_params)
+ return result
+
+ def _get_next_page_response(self):
+ """Requests the next page from the path provided.
+
+ Returns:
+ dict: The parsed JSON response of the next page's contents.
+
+ Raises:
+ ValueError: If the HTTP method is not ``GET`` or ``POST``.
+ """
+ params = self._get_query_params()
+ if self._HTTP_METHOD == "GET":
+ return self.api_request(
+ method=self._HTTP_METHOD, path=self.path, query_params=params
+ )
+ elif self._HTTP_METHOD == "POST":
+ return self.api_request(
+ method=self._HTTP_METHOD, path=self.path, data=params
+ )
+ else:
+ raise ValueError("Unexpected HTTP method", self._HTTP_METHOD)
+
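+# A hedged usage sketch, not part of the library: ``my_client`` and the
+# ``/books`` path are hypothetical stand-ins. HTTPIterator needs only a
+# callable that performs the request and a converter for each JSON item.
+#
+#     def title_from_json(iterator, item):
+#         return item["title"]
+#
+#     iterator = HTTPIterator(
+#         client=my_client,
+#         api_request=my_client._connection.api_request,
+#         path="/books",
+#         item_to_value=title_from_json,
+#         items_key="books",
+#         max_results=50,
+#     )
+#     titles = list(iterator)  # pages are fetched lazily, one request each
+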
+
+class _GAXIterator(Iterator):
+ """A generic class for iterating through Cloud gRPC APIs list responses.
+
+    Args:
+ client (google.cloud.client.Client): The API client.
+ page_iter (google.gax.PageIterator): A GAX page iterator to be wrapped
+ to conform to the :class:`Iterator` interface.
+ item_to_value (Callable[Iterator, Any]): Callable to convert an item
+            from the protobuf response into a native object. Will
+ be called with the iterator and a single item.
+ max_results (int): The maximum number of results to fetch.
+
+ .. autoattribute:: pages
+ """
+
+ def __init__(self, client, page_iter, item_to_value, max_results=None):
+ super(_GAXIterator, self).__init__(
+ client,
+ item_to_value,
+ page_token=page_iter.page_token,
+ max_results=max_results,
+ )
+ self._gax_page_iter = page_iter
+
+ def _next_page(self):
+ """Get the next page in the iterator.
+
+ Wraps the response from the :class:`~google.gax.PageIterator` in a
+ :class:`Page` instance and captures some state at each page.
+
+ Returns:
+ Optional[Page]: The next page in the iterator or :data:`None` if
+ there are no pages left.
+ """
+ try:
+ items = six.next(self._gax_page_iter)
+ page = Page(self, items, self.item_to_value)
+ self.next_page_token = self._gax_page_iter.page_token or None
+ return page
+ except StopIteration:
+ return None
+
+
+class GRPCIterator(Iterator):
+ """A generic class for iterating through gRPC list responses.
+
+ .. note:: The class does not take a ``page_token`` argument because it can
+ just be specified in the ``request``.
+
+ Args:
+        client (google.cloud.client.Client): The API client. This is unused by
+ this class, but kept to satisfy the :class:`Iterator` interface.
+ method (Callable[protobuf.Message]): A bound gRPC method that should
+ take a single message for the request.
+ request (protobuf.Message): The request message.
+ items_field (str): The field in the response message that has the
+ items for the page.
+ item_to_value (Callable[GRPCIterator, Any]): Callable to convert an
+            item from the type in the protobuf response into a native object.
+            Will
+ be called with the iterator and a single item.
+ request_token_field (str): The field in the request message used to
+ specify the page token.
+ response_token_field (str): The field in the response message that has
+ the token for the next page.
+ max_results (int): The maximum number of results to fetch.
+
+ .. autoattribute:: pages
+ """
+
+ _DEFAULT_REQUEST_TOKEN_FIELD = "page_token"
+ _DEFAULT_RESPONSE_TOKEN_FIELD = "next_page_token"
+
+ def __init__(
+ self,
+ client,
+ method,
+ request,
+ items_field,
+ item_to_value=_item_to_value_identity,
+ request_token_field=_DEFAULT_REQUEST_TOKEN_FIELD,
+ response_token_field=_DEFAULT_RESPONSE_TOKEN_FIELD,
+ max_results=None,
+ ):
+ super(GRPCIterator, self).__init__(
+ client, item_to_value, max_results=max_results
+ )
+ self._method = method
+ self._request = request
+ self._items_field = items_field
+ self._request_token_field = request_token_field
+ self._response_token_field = response_token_field
+
+ def _next_page(self):
+ """Get the next page in the iterator.
+
+ Returns:
+            Optional[Page]: The next page in the iterator or :data:`None` if
+ there are no pages left.
+ """
+ if not self._has_next_page():
+ return None
+
+ if self.next_page_token is not None:
+ setattr(self._request, self._request_token_field, self.next_page_token)
+
+ response = self._method(self._request)
+
+ self.next_page_token = getattr(response, self._response_token_field)
+ items = getattr(response, self._items_field)
+ page = Page(self, items, self.item_to_value, raw_page=response)
+
+ return page
+
+ def _has_next_page(self):
+ """Determines whether or not there are more pages with results.
+
+ Returns:
+ bool: Whether the iterator has more pages.
+ """
+ if self.page_number == 0:
+ return True
+
+ if self.max_results is not None:
+ if self.num_results >= self.max_results:
+ return False
+
+ # Note: intentionally a falsy check instead of a None check. The RPC
+ # can return an empty string indicating no more pages.
+        return bool(self.next_page_token)
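+
+
+# A hedged usage sketch: ``BooksStub``, ``channel`` and ``ListBooksRequest``
+# are assumed stand-ins for generated gRPC classes, not real names. The
+# request message carries the page token, so the iterator rewrites its
+# ``page_token`` field between requests.
+#
+#     stub = BooksStub(channel)
+#     iterator = GRPCIterator(
+#         client=None,               # unused, kept for interface parity
+#         method=stub.ListBooks,
+#         request=ListBooksRequest(parent="shelves/1", page_size=100),
+#         items_field="books",
+#     )
+#     for book in iterator:
+#         print(book.name)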
diff --git a/venv/Lib/site-packages/google/api_core/page_iterator_async.py b/venv/Lib/site-packages/google/api_core/page_iterator_async.py
new file mode 100644
index 000000000..a0aa41a7d
--- /dev/null
+++ b/venv/Lib/site-packages/google/api_core/page_iterator_async.py
@@ -0,0 +1,278 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""AsyncIO iterators for paging through paged API methods.
+
+These iterators simplify the process of paging through API responses
+where the request takes a page token and the response is a list of results with
+a token for the next page. See `list pagination`_ in the Google API Style Guide
+for more details.
+
+.. _list pagination:
+ https://cloud.google.com/apis/design/design_patterns#list_pagination
+
+API clients that have methods that follow the list pagination pattern can
+return an :class:`.AsyncIterator`:
+
+ >>> results_iterator = await client.list_resources()
+
+Or you can walk your way through items and call off the search early if
+you find what you're looking for (resulting in possibly fewer requests)::
+
+ >>> async for resource in results_iterator:
+ ... print(resource.name)
+ ... if not resource.is_valid:
+ ... break
+
+At any point, you may check the number of items consumed by referencing the
+``num_results`` property of the iterator::
+
+ >>> async for my_item in results_iterator:
+ ... if results_iterator.num_results >= 10:
+ ... break
+
+When iterating, not every new item will send a request to the server.
+To iterate based on each page of items (where a page corresponds to
+a request)::
+
+ >>> async for page in results_iterator.pages:
+ ... print('=' * 20)
+    ...     print('   Page number: {:d}'.format(results_iterator.page_number))
+ ... print(' Items in page: {:d}'.format(page.num_items))
+ ... print(' First item: {!r}'.format(next(page)))
+ ... print('Items remaining: {:d}'.format(page.remaining))
+    ...     print('Next page token: {}'.format(results_iterator.next_page_token))
+ ====================
+ Page number: 1
+ Items in page: 1
+      First item: <MyItemClass at 0x7f1d3cccf690>
+ Items remaining: 0
+ Next page token: eav1OzQB0OM8rLdGXOEsyQWSG
+ ====================
+ Page number: 2
+ Items in page: 19
+      First item: <MyItemClass at 0x7f1d3cccf450>
+ Items remaining: 18
+ Next page token: None
+"""
+
+import abc
+
+from google.api_core.page_iterator import Page
+
+
+def _item_to_value_identity(iterator, item):
+ """An item to value transformer that returns the item un-changed."""
+ # pylint: disable=unused-argument
+ # We are conforming to the interface defined by Iterator.
+ return item
+
+
+class AsyncIterator(abc.ABC):
+ """A generic class for iterating through API list responses.
+
+ Args:
+ client(google.cloud.client.Client): The API client.
+ item_to_value (Callable[google.api_core.page_iterator_async.AsyncIterator, Any]):
+ Callable to convert an item from the type in the raw API response
+ into the native object. Will be called with the iterator and a
+ single item.
+ page_token (str): A token identifying a page in a result set to start
+ fetching results from.
+ max_results (int): The maximum number of results to fetch.
+ """
+
+ def __init__(
+ self,
+ client,
+ item_to_value=_item_to_value_identity,
+ page_token=None,
+ max_results=None,
+ ):
+ self._started = False
+ self.client = client
+ """Optional[Any]: The client that created this iterator."""
+ self.item_to_value = item_to_value
+ """Callable[Iterator, Any]: Callable to convert an item from the type
+ in the raw API response into the native object. Will be called with
+ the iterator and a
+ single item.
+ """
+ self.max_results = max_results
+ """int: The maximum number of results to fetch."""
+
+ # The attributes below will change over the life of the iterator.
+ self.page_number = 0
+ """int: The current page of results."""
+ self.next_page_token = page_token
+ """str: The token for the next page of results. If this is set before
+ the iterator starts, it effectively offsets the iterator to a
+ specific starting point."""
+ self.num_results = 0
+ """int: The total number of results fetched so far."""
+
+ @property
+ def pages(self):
+ """Iterator of pages in the response.
+
+        Returns:
+ types.GeneratorType[google.api_core.page_iterator.Page]: A
+ generator of page instances.
+
+        Raises:
+ ValueError: If the iterator has already been started.
+ """
+ if self._started:
+ raise ValueError("Iterator has already started", self)
+ self._started = True
+ return self._page_aiter(increment=True)
+
+ async def _items_aiter(self):
+ """Iterator for each item returned."""
+ async for page in self._page_aiter(increment=False):
+ for item in page:
+ self.num_results += 1
+ yield item
+
+ def __aiter__(self):
+ """Iterator for each item returned.
+
+ Returns:
+ types.GeneratorType[Any]: A generator of items from the API.
+
+ Raises:
+ ValueError: If the iterator has already been started.
+ """
+ if self._started:
+ raise ValueError("Iterator has already started", self)
+ self._started = True
+ return self._items_aiter()
+
+ async def _page_aiter(self, increment):
+ """Generator of pages of API responses.
+
+ Args:
+ increment (bool): Flag indicating if the total number of results
+ should be incremented on each page. This is useful since a page
+ iterator will want to increment by results per page while an
+ items iterator will want to increment per item.
+
+ Yields:
+ Page: each page of items from the API.
+ """
+ page = await self._next_page()
+ while page is not None:
+ self.page_number += 1
+ if increment:
+ self.num_results += page.num_items
+ yield page
+ page = await self._next_page()
+
+ @abc.abstractmethod
+ async def _next_page(self):
+ """Get the next page in the iterator.
+
+        This does nothing and is intended to be overridden by subclasses
+ to return the next :class:`Page`.
+
+ Raises:
+ NotImplementedError: Always, this method is abstract.
+ """
+ raise NotImplementedError
+
+
+class AsyncGRPCIterator(AsyncIterator):
+ """A generic class for iterating through gRPC list responses.
+
+ .. note:: The class does not take a ``page_token`` argument because it can
+ just be specified in the ``request``.
+
+ Args:
+        client (google.cloud.client.Client): The API client. This is unused by
+ this class, but kept to satisfy the :class:`Iterator` interface.
+ method (Callable[protobuf.Message]): A bound gRPC method that should
+ take a single message for the request.
+ request (protobuf.Message): The request message.
+ items_field (str): The field in the response message that has the
+ items for the page.
+ item_to_value (Callable[GRPCIterator, Any]): Callable to convert an
+            item from the type in the protobuf response into a native object.
+            Will
+ be called with the iterator and a single item.
+ request_token_field (str): The field in the request message used to
+ specify the page token.
+ response_token_field (str): The field in the response message that has
+ the token for the next page.
+ max_results (int): The maximum number of results to fetch.
+
+ .. autoattribute:: pages
+ """
+
+ _DEFAULT_REQUEST_TOKEN_FIELD = "page_token"
+ _DEFAULT_RESPONSE_TOKEN_FIELD = "next_page_token"
+
+ def __init__(
+ self,
+ client,
+ method,
+ request,
+ items_field,
+ item_to_value=_item_to_value_identity,
+ request_token_field=_DEFAULT_REQUEST_TOKEN_FIELD,
+ response_token_field=_DEFAULT_RESPONSE_TOKEN_FIELD,
+ max_results=None,
+ ):
+ super().__init__(client, item_to_value, max_results=max_results)
+ self._method = method
+ self._request = request
+ self._items_field = items_field
+ self._request_token_field = request_token_field
+ self._response_token_field = response_token_field
+
+ async def _next_page(self):
+ """Get the next page in the iterator.
+
+ Returns:
+            Optional[Page]: The next page in the iterator or :data:`None` if
+ there are no pages left.
+ """
+ if not self._has_next_page():
+ return None
+
+ if self.next_page_token is not None:
+ setattr(self._request, self._request_token_field, self.next_page_token)
+
+ response = await self._method(self._request)
+
+ self.next_page_token = getattr(response, self._response_token_field)
+ items = getattr(response, self._items_field)
+ page = Page(self, items, self.item_to_value, raw_page=response)
+
+ return page
+
+ def _has_next_page(self):
+ """Determines whether or not there are more pages with results.
+
+ Returns:
+ bool: Whether the iterator has more pages.
+ """
+ if self.page_number == 0:
+ return True
+
+        if self.max_results is not None:
+            if self.num_results >= self.max_results:
+                return False
+
+        # Note: intentionally a falsy check instead of a None check. The RPC
+        # can return an empty string indicating no more pages.
+        return bool(self.next_page_token)
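+
+
+# A hedged sketch: ``stub`` and ``ListBooksRequest`` are hypothetical
+# generated-gRPC stand-ins, and the bound method must be awaitable.
+# Iteration has to run inside a coroutine.
+#
+#     import asyncio
+#
+#     async def list_all_books():
+#         iterator = AsyncGRPCIterator(
+#             client=None,
+#             method=stub.ListBooks,
+#             request=ListBooksRequest(parent="shelves/1"),
+#             items_field="books",
+#         )
+#         return [book.name async for book in iterator]
+#
+#     names = asyncio.run(list_all_books())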
diff --git a/venv/Lib/site-packages/google/api_core/path_template.py b/venv/Lib/site-packages/google/api_core/path_template.py
new file mode 100644
index 000000000..bb549356d
--- /dev/null
+++ b/venv/Lib/site-packages/google/api_core/path_template.py
@@ -0,0 +1,197 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Expand and validate URL path templates.
+
+This module provides the :func:`expand` and :func:`validate` functions for
+interacting with Google-style URL `path templates`_ which are commonly used
+in Google APIs for `resource names`_.
+
+.. _path templates: https://github.com/googleapis/googleapis/blob
+ /57e2d376ac7ef48681554204a3ba78a414f2c533/google/api/http.proto#L212
+.. _resource names: https://cloud.google.com/apis/design/resource_names
+"""
+
+from __future__ import unicode_literals
+
+import functools
+import re
+
+import six
+
+# Regular expression for extracting variable parts from a path template.
+# The variables can be expressed as:
+#
+# - "*": a single-segment positional variable, for example: "books/*"
+# - "**": a multi-segment positional variable, for example: "shelf/**/book/*"
+# - "{name}": a single-segment wildcard named variable, for example
+# "books/{name}"
+# - "{name=*}: same as above.
+# - "{name=**}": a multi-segment wildcard named variable, for example
+# "shelf/{name=**}"
+# - "{name=/path/*/**}": a multi-segment named variable with a sub-template.
+_VARIABLE_RE = re.compile(
+ r"""
+ ( # Capture the entire variable expression
+    (?P<positional>\*\*?) # Match & capture * and ** positional variables.
+ |
+ # Match & capture named variables {name}
+ {
+    (?P<name>[^/]+?)
+ # Optionally match and capture the named variable's template.
+    (?:=(?P<template>.+?))?
+ }
+ )
+ """,
+ re.VERBOSE,
+)
+
+# Segment expressions used for validating paths against a template.
+_SINGLE_SEGMENT_PATTERN = r"([^/]+)"
+_MULTI_SEGMENT_PATTERN = r"(.+)"
+
+
+def _expand_variable_match(positional_vars, named_vars, match):
+ """Expand a matched variable with its value.
+
+ Args:
+        positional_vars (list): A list of positional variables. This list will
+ be modified.
+ named_vars (dict): A dictionary of named variables.
+ match (re.Match): A regular expression match.
+
+ Returns:
+ str: The expanded variable to replace the match.
+
+ Raises:
+ ValueError: If a positional or named variable is required by the
+ template but not specified or if an unexpected template expression
+ is encountered.
+ """
+ positional = match.group("positional")
+ name = match.group("name")
+ if name is not None:
+ try:
+ return six.text_type(named_vars[name])
+ except KeyError:
+ raise ValueError(
+ "Named variable '{}' not specified and needed by template "
+ "`{}` at position {}".format(name, match.string, match.start())
+ )
+ elif positional is not None:
+ try:
+ return six.text_type(positional_vars.pop(0))
+ except IndexError:
+ raise ValueError(
+ "Positional variable not specified and needed by template "
+ "`{}` at position {}".format(match.string, match.start())
+ )
+ else:
+ raise ValueError("Unknown template expression {}".format(match.group(0)))
+
+
+def expand(tmpl, *args, **kwargs):
+ """Expand a path template with the given variables.
+
+    .. code-block:: python
+
+ >>> expand('users/*/messages/*', 'me', '123')
+ users/me/messages/123
+ >>> expand('/v1/{name=shelves/*/books/*}', name='shelves/1/books/3')
+ /v1/shelves/1/books/3
+
+ Args:
+ tmpl (str): The path template.
+ args: The positional variables for the path.
+ kwargs: The named variables for the path.
+
+ Returns:
+        str: The expanded path.
+
+ Raises:
+ ValueError: If a positional or named variable is required by the
+ template but not specified or if an unexpected template expression
+ is encountered.
+ """
+ replacer = functools.partial(_expand_variable_match, list(args), kwargs)
+ return _VARIABLE_RE.sub(replacer, tmpl)
+
+
+def _replace_variable_with_pattern(match):
+ """Replace a variable match with a pattern that can be used to validate it.
+
+ Args:
+ match (re.Match): A regular expression match
+
+ Returns:
+ str: A regular expression pattern that can be used to validate the
+ variable in an expanded path.
+
+ Raises:
+ ValueError: If an unexpected template expression is encountered.
+ """
+ positional = match.group("positional")
+ name = match.group("name")
+ template = match.group("template")
+ if name is not None:
+ if not template:
+ return _SINGLE_SEGMENT_PATTERN.format(name)
+ elif template == "**":
+ return _MULTI_SEGMENT_PATTERN.format(name)
+ else:
+ return _generate_pattern_for_template(template)
+ elif positional == "*":
+ return _SINGLE_SEGMENT_PATTERN
+ elif positional == "**":
+ return _MULTI_SEGMENT_PATTERN
+ else:
+ raise ValueError("Unknown template expression {}".format(match.group(0)))
+
+
+def _generate_pattern_for_template(tmpl):
+ """Generate a pattern that can validate a path template.
+
+ Args:
+ tmpl (str): The path template
+
+ Returns:
+ str: A regular expression pattern that can be used to validate an
+ expanded path template.
+ """
+ return _VARIABLE_RE.sub(_replace_variable_with_pattern, tmpl)
+
+
+def validate(tmpl, path):
+ """Validate a path against the path template.
+
+ .. code-block:: python
+
+ >>> validate('users/*/messages/*', 'users/me/messages/123')
+ True
+ >>> validate('users/*/messages/*', 'users/me/drafts/123')
+ False
+        >>> validate('/v1/{name=shelves/*/books/*}', '/v1/shelves/1/books/3')
+ True
+        >>> validate('/v1/{name=shelves/*/books/*}', '/v1/shelves/1/tapes/3')
+ False
+
+ Args:
+ tmpl (str): The path template.
+ path (str): The expanded path.
+
+ Returns:
+ bool: True if the path matches.
+ """
+ pattern = _generate_pattern_for_template(tmpl) + "$"
+    return re.match(pattern, path) is not None
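+
+
+# A hedged round-trip sketch using only the helpers defined above: expand()
+# fills a template from named variables and validate() checks an expanded
+# path against the same template.
+#
+#     tmpl = '/v1/{name=shelves/*/books/*}'
+#     path = expand(tmpl, name='shelves/1/books/3')  # '/v1/shelves/1/books/3'
+#     assert validate(tmpl, path)
+#     assert not validate(tmpl, '/v1/shelves/1/tapes/3')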
diff --git a/venv/Lib/site-packages/google/api_core/protobuf_helpers.py b/venv/Lib/site-packages/google/api_core/protobuf_helpers.py
new file mode 100644
index 000000000..365ef25c6
--- /dev/null
+++ b/venv/Lib/site-packages/google/api_core/protobuf_helpers.py
@@ -0,0 +1,370 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for :mod:`protobuf`."""
+
+import collections
+import copy
+import inspect
+
+from google.protobuf import field_mask_pb2
+from google.protobuf import message
+from google.protobuf import wrappers_pb2
+
+try:
+ from collections import abc as collections_abc
+except ImportError: # Python 2.7
+ import collections as collections_abc
+
+
+_SENTINEL = object()
+_WRAPPER_TYPES = (
+ wrappers_pb2.BoolValue,
+ wrappers_pb2.BytesValue,
+ wrappers_pb2.DoubleValue,
+ wrappers_pb2.FloatValue,
+ wrappers_pb2.Int32Value,
+ wrappers_pb2.Int64Value,
+ wrappers_pb2.StringValue,
+ wrappers_pb2.UInt32Value,
+ wrappers_pb2.UInt64Value,
+)
+
+
+def from_any_pb(pb_type, any_pb):
+ """Converts an ``Any`` protobuf to the specified message type.
+
+ Args:
+ pb_type (type): the type of the message that any_pb stores an instance
+ of.
+ any_pb (google.protobuf.any_pb2.Any): the object to be converted.
+
+ Returns:
+ pb_type: An instance of the pb_type message.
+
+ Raises:
+ TypeError: if the message could not be converted.
+ """
+ msg = pb_type()
+
+ # Unwrap proto-plus wrapped messages.
+ if callable(getattr(pb_type, "pb", None)):
+ msg_pb = pb_type.pb(msg)
+ else:
+ msg_pb = msg
+
+ # Unpack the Any object and populate the protobuf message instance.
+ if not any_pb.Unpack(msg_pb):
+ raise TypeError(
+ "Could not convert {} to {}".format(
+ any_pb.__class__.__name__, pb_type.__name__
+ )
+ )
+
+ # Done; return the message.
+ return msg
+
+
+def check_oneof(**kwargs):
+ """Raise ValueError if more than one keyword argument is not ``None``.
+
+ Args:
+ kwargs (dict): The keyword arguments sent to the function.
+
+ Raises:
+ ValueError: If more than one entry in ``kwargs`` is not ``None``.
+ """
+ # Sanity check: If no keyword arguments were sent, this is fine.
+ if not kwargs:
+ return
+
+ not_nones = [val for val in kwargs.values() if val is not None]
+ if len(not_nones) > 1:
+ raise ValueError(
+ "Only one of {fields} should be set.".format(
+ fields=", ".join(sorted(kwargs.keys()))
+ )
+ )
+
+
+def get_messages(module):
+ """Discovers all protobuf Message classes in a given import module.
+
+ Args:
+ module (module): A Python module; :func:`dir` will be run against this
+ module to find Message subclasses.
+
+ Returns:
+ dict[str, google.protobuf.message.Message]: A dictionary with the
+ Message class names as keys, and the Message subclasses themselves
+ as values.
+ """
+ answer = collections.OrderedDict()
+ for name in dir(module):
+ candidate = getattr(module, name)
+ if inspect.isclass(candidate) and issubclass(candidate, message.Message):
+ answer[name] = candidate
+ return answer
+
+
+def _resolve_subkeys(key, separator="."):
+ """Resolve a potentially nested key.
+
+ If the key contains the ``separator`` (e.g. ``.``) then the key will be
+ split on the first instance of the subkey::
+
+ >>> _resolve_subkeys('a.b.c')
+ ('a', 'b.c')
+ >>> _resolve_subkeys('d|e|f', separator='|')
+ ('d', 'e|f')
+
+ If not, the subkey will be :data:`None`::
+
+ >>> _resolve_subkeys('foo')
+ ('foo', None)
+
+ Args:
+ key (str): A string that may or may not contain the separator.
+ separator (str): The namespace separator. Defaults to `.`.
+
+ Returns:
+ Tuple[str, str]: The key and subkey(s).
+ """
+ parts = key.split(separator, 1)
+
+ if len(parts) > 1:
+ return parts
+ else:
+ return parts[0], None
+
+
+def get(msg_or_dict, key, default=_SENTINEL):
+ """Retrieve a key's value from a protobuf Message or dictionary.
+
+ Args:
+        msg_or_dict (Union[~google.protobuf.message.Message, Mapping]): the
+ object.
+ key (str): The key to retrieve from the object.
+ default (Any): If the key is not present on the object, and a default
+ is set, returns that default instead. A type-appropriate falsy
+ default is generally recommended, as protobuf messages almost
+ always have default values for unset values and it is not always
+ possible to tell the difference between a falsy value and an
+ unset one. If no default is set then :class:`KeyError` will be
+ raised if the key is not present in the object.
+
+ Returns:
+ Any: The return value from the underlying Message or dict.
+
+ Raises:
+ KeyError: If the key is not found. Note that, for unset values,
+ messages and dictionaries may not have consistent behavior.
+ TypeError: If ``msg_or_dict`` is not a Message or Mapping.
+ """
+ # We may need to get a nested key. Resolve this.
+ key, subkey = _resolve_subkeys(key)
+
+ # Attempt to get the value from the two types of objects we know about.
+ # If we get something else, complain.
+ if isinstance(msg_or_dict, message.Message):
+ answer = getattr(msg_or_dict, key, default)
+ elif isinstance(msg_or_dict, collections_abc.Mapping):
+ answer = msg_or_dict.get(key, default)
+ else:
+ raise TypeError(
+ "get() expected a dict or protobuf message, got {!r}.".format(
+ type(msg_or_dict)
+ )
+ )
+
+ # If the object we got back is our sentinel, raise KeyError; this is
+ # a "not found" case.
+ if answer is _SENTINEL:
+ raise KeyError(key)
+
+ # If a subkey exists, call this method recursively against the answer.
+ if subkey is not None and answer is not default:
+ return get(answer, subkey, default=default)
+
+ return answer
+
+
+def _set_field_on_message(msg, key, value):
+ """Set helper for protobuf Messages."""
+ # Attempt to set the value on the types of objects we know how to deal
+ # with.
+ if isinstance(value, (collections_abc.MutableSequence, tuple)):
+ # Clear the existing repeated protobuf message of any elements
+ # currently inside it.
+ while getattr(msg, key):
+ getattr(msg, key).pop()
+
+ # Write our new elements to the repeated field.
+ for item in value:
+ if isinstance(item, collections_abc.Mapping):
+ getattr(msg, key).add(**item)
+ else:
+ # protobuf's RepeatedCompositeContainer doesn't support
+ # append.
+ getattr(msg, key).extend([item])
+ elif isinstance(value, collections_abc.Mapping):
+ # Assign the dictionary values to the protobuf message.
+ for item_key, item_value in value.items():
+ set(getattr(msg, key), item_key, item_value)
+ elif isinstance(value, message.Message):
+ getattr(msg, key).CopyFrom(value)
+ else:
+ setattr(msg, key, value)
+
+
+def set(msg_or_dict, key, value):
+ """Set a key's value on a protobuf Message or dictionary.
+
+ Args:
+ msg_or_dict (Union[~google.protobuf.message.Message, Mapping]): the
+ object.
+ key (str): The key to set.
+ value (Any): The value to set.
+
+ Raises:
+ TypeError: If ``msg_or_dict`` is not a Message or dictionary.
+ """
+ # Sanity check: Is our target object valid?
+ if not isinstance(msg_or_dict, (collections_abc.MutableMapping, message.Message)):
+ raise TypeError(
+ "set() expected a dict or protobuf message, got {!r}.".format(
+ type(msg_or_dict)
+ )
+ )
+
+ # We may be setting a nested key. Resolve this.
+ basekey, subkey = _resolve_subkeys(key)
+
+ # If a subkey exists, then get that object and call this method
+ # recursively against it using the subkey.
+ if subkey is not None:
+ if isinstance(msg_or_dict, collections_abc.MutableMapping):
+ msg_or_dict.setdefault(basekey, {})
+ set(get(msg_or_dict, basekey), subkey, value)
+ return
+
+ if isinstance(msg_or_dict, collections_abc.MutableMapping):
+ msg_or_dict[key] = value
+ else:
+ _set_field_on_message(msg_or_dict, key, value)
+
+
+def setdefault(msg_or_dict, key, value):
+ """Set the key on a protobuf Message or dictionary to a given value if the
+ current value is falsy.
+
+ Because protobuf Messages do not distinguish between unset values and
+ falsy ones particularly well (by design), this method treats any falsy
+ value (e.g. 0, empty list) as a target to be overwritten, on both Messages
+ and dictionaries.
+
+ Args:
+ msg_or_dict (Union[~google.protobuf.message.Message, Mapping]): the
+ object.
+ key (str): The key on the object in question.
+ value (Any): The value to set.
+
+ Raises:
+ TypeError: If ``msg_or_dict`` is not a Message or dictionary.
+ """
+ if not get(msg_or_dict, key, default=None):
+ set(msg_or_dict, key, value)
+
+
+def field_mask(original, modified):
+ """Create a field mask by comparing two messages.
+
+ Args:
+ original (~google.protobuf.message.Message): the original message.
+            If set to None, this field will be interpreted as an empty
+ message.
+ modified (~google.protobuf.message.Message): the modified message.
+            If set to None, this field will be interpreted as an empty
+ message.
+
+ Returns:
+ google.protobuf.field_mask_pb2.FieldMask: field mask that contains
+ the list of field names that have different values between the two
+ messages. If the messages are equivalent, then the field mask is empty.
+
+ Raises:
+ ValueError: If the ``original`` or ``modified`` are not the same type.
+ """
+ if original is None and modified is None:
+ return field_mask_pb2.FieldMask()
+
+ if original is None and modified is not None:
+ original = copy.deepcopy(modified)
+ original.Clear()
+
+ if modified is None and original is not None:
+ modified = copy.deepcopy(original)
+ modified.Clear()
+
+ if type(original) != type(modified):
+ raise ValueError(
+ "expected that both original and modified should be of the "
+ 'same type, received "{!r}" and "{!r}".'.format(
+ type(original), type(modified)
+ )
+ )
+
+ return field_mask_pb2.FieldMask(paths=_field_mask_helper(original, modified))
+
+
+def _field_mask_helper(original, modified, current=""):
+ answer = []
+
+ for name in original.DESCRIPTOR.fields_by_name:
+ field_path = _get_path(current, name)
+
+ original_val = getattr(original, name)
+ modified_val = getattr(modified, name)
+
+ if _is_message(original_val) or _is_message(modified_val):
+ if original_val != modified_val:
+ # Wrapper types do not need to include the .value part of the
+ # path.
+ if _is_wrapper(original_val) or _is_wrapper(modified_val):
+ answer.append(field_path)
+ elif not modified_val.ListFields():
+ answer.append(field_path)
+ else:
+ answer.extend(
+ _field_mask_helper(original_val, modified_val, field_path)
+ )
+ else:
+ if original_val != modified_val:
+ answer.append(field_path)
+
+ return answer
+
+
+def _get_path(current, name):
+ if not current:
+ return name
+ return "%s.%s" % (current, name)
+
+
+def _is_message(value):
+ return isinstance(value, message.Message)
+
+
+def _is_wrapper(value):
+ return type(value) in _WRAPPER_TYPES
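+
+
+# A hedged, self-contained sketch of field_mask() using a well-known type
+# shipped with protobuf (struct_pb2), so no generated code is assumed:
+#
+#     from google.protobuf import struct_pb2
+#
+#     original = struct_pb2.Value(string_value="a")
+#     modified = struct_pb2.Value(string_value="b")
+#     field_mask(original, modified).paths  # -> ['string_value']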
diff --git a/venv/Lib/site-packages/google/api_core/retry.py b/venv/Lib/site-packages/google/api_core/retry.py
new file mode 100644
index 000000000..ea890628f
--- /dev/null
+++ b/venv/Lib/site-packages/google/api_core/retry.py
@@ -0,0 +1,364 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for retrying functions with exponential back-off.
+
+The :class:`Retry` decorator can be used to retry functions that raise
+exceptions using exponential backoff. Because an exponential sleep algorithm is
+used, the retry is limited by a `deadline`. The deadline is the maximum amount
+of time a method can block. This is used instead of total number of retries
+because it is difficult to ascertain the amount of time a function can block
+when using total number of retries and exponential backoff.
+
+By default, this decorator will retry transient
+API errors (see :func:`if_transient_error`). For example:
+
+.. code-block:: python
+
+ @retry.Retry()
+ def call_flaky_rpc():
+ return client.flaky_rpc()
+
+ # Will retry flaky_rpc() if it raises transient API errors.
+ result = call_flaky_rpc()
+
+You can pass a custom predicate to retry on different exceptions, such as
+waiting for an eventually consistent item to be available:
+
+.. code-block:: python
+
+ @retry.Retry(predicate=if_exception_type(exceptions.NotFound))
+ def check_if_exists():
+ return client.does_thing_exist()
+
+ is_available = check_if_exists()
+
+Some client library methods apply retry automatically. These methods can accept
+a ``retry`` parameter that allows you to configure the behavior:
+
+.. code-block:: python
+
+ my_retry = retry.Retry(deadline=60)
+ result = client.some_method(retry=my_retry)
+
+"""
+
+from __future__ import unicode_literals
+
+import datetime
+import functools
+import logging
+import random
+import time
+
+import six
+
+from google.api_core import datetime_helpers
+from google.api_core import exceptions
+from google.api_core import general_helpers
+
+_LOGGER = logging.getLogger(__name__)
+_DEFAULT_INITIAL_DELAY = 1.0 # seconds
+_DEFAULT_MAXIMUM_DELAY = 60.0 # seconds
+_DEFAULT_DELAY_MULTIPLIER = 2.0
+_DEFAULT_DEADLINE = 60.0 * 2.0 # seconds
+
+
+def if_exception_type(*exception_types):
+ """Creates a predicate to check if the exception is of a given type.
+
+ Args:
+ exception_types (Sequence[:func:`type`]): The exception types to check
+ for.
+
+ Returns:
+ Callable[Exception]: A predicate that returns True if the provided
+ exception is of the given type(s).
+ """
+
+ def if_exception_type_predicate(exception):
+ """Bound predicate for checking an exception type."""
+ return isinstance(exception, exception_types)
+
+ return if_exception_type_predicate
+
+
+# pylint: disable=invalid-name
+# Pylint sees this as a constant, but it is also an alias that should be
+# considered a function.
+if_transient_error = if_exception_type(
+ exceptions.InternalServerError,
+ exceptions.TooManyRequests,
+ exceptions.ServiceUnavailable,
+)
+"""A predicate that checks if an exception is a transient API error.
+
+The following server errors are considered transient:
+
+- :class:`google.api_core.exceptions.InternalServerError` - HTTP 500, gRPC
+ ``INTERNAL(13)`` and its subclasses.
+- :class:`google.api_core.exceptions.TooManyRequests` - HTTP 429
+- :class:`google.api_core.exceptions.ServiceUnavailable` - HTTP 503
+- :class:`google.api_core.exceptions.ResourceExhausted` - gRPC
+ ``RESOURCE_EXHAUSTED(8)``
+"""
+# pylint: enable=invalid-name
+
+
+def exponential_sleep_generator(initial, maximum, multiplier=_DEFAULT_DELAY_MULTIPLIER):
+ """Generates sleep intervals based on the exponential back-off algorithm.
+
+ This implements the `Truncated Exponential Back-off`_ algorithm.
+
+ .. _Truncated Exponential Back-off:
+ https://cloud.google.com/storage/docs/exponential-backoff
+
+ Args:
+ initial (float): The minimum amount of time to delay. This must
+ be greater than 0.
+ maximum (float): The maximum amount of time to delay.
+ multiplier (float): The multiplier applied to the delay.
+
+ Yields:
+ float: successive sleep intervals.
+ """
+ delay = initial
+ while True:
+ # Introduce jitter by yielding a delay that is uniformly distributed
+ # to average out to the delay time.
+ yield min(random.uniform(0.0, delay * 2.0), maximum)
+ delay = delay * multiplier
+
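+# For intuition, a hedged sketch: the generator is infinite, so callers take
+# only as many delays as they need. Each value is drawn uniformly from
+# [0, delay * 2] and capped at ``maximum``.
+#
+#     import itertools
+#     for s in itertools.islice(exponential_sleep_generator(1.0, 60.0), 5):
+#         print(s)  # e.g. 1.3, 2.7, 5.1, 9.8, 14.2 (randomized)
+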
+
+def retry_target(target, predicate, sleep_generator, deadline, on_error=None):
+ """Call a function and retry if it fails.
+
+ This is the lowest-level retry helper. Generally, you'll use the
+ higher-level retry helper :class:`Retry`.
+
+ Args:
+ target(Callable): The function to call and retry. This must be a
+ nullary function - apply arguments with `functools.partial`.
+ predicate (Callable[Exception]): A callable used to determine if an
+ exception raised by the target should be considered retryable.
+ It should return True to retry or False otherwise.
+ sleep_generator (Iterable[float]): An infinite iterator that determines
+ how long to sleep between retries.
+ deadline (float): How long to keep retrying the target. The last sleep
+ period is shortened as necessary, so that the last retry runs at
+ ``deadline`` (and not considerably beyond it).
+ on_error (Callable[Exception]): A function to call while processing a
+ retryable exception. Any error raised by this function will *not*
+ be caught.
+
+ Returns:
+ Any: the return value of the target function.
+
+ Raises:
+ google.api_core.RetryError: If the deadline is exceeded while retrying.
+ ValueError: If the sleep generator stops yielding values.
+        Exception: If the target raises an exception that isn't retryable.
+ """
+ if deadline is not None:
+ deadline_datetime = datetime_helpers.utcnow() + datetime.timedelta(
+ seconds=deadline
+ )
+ else:
+ deadline_datetime = None
+
+ last_exc = None
+
+ for sleep in sleep_generator:
+ try:
+ return target()
+
+ # pylint: disable=broad-except
+ # This function explicitly must deal with broad exceptions.
+ except Exception as exc:
+ if not predicate(exc):
+ raise
+ last_exc = exc
+ if on_error is not None:
+ on_error(exc)
+
+ now = datetime_helpers.utcnow()
+
+ if deadline_datetime is not None:
+ if deadline_datetime <= now:
+ six.raise_from(
+ exceptions.RetryError(
+ "Deadline of {:.1f}s exceeded while calling {}".format(
+ deadline, target
+ ),
+ last_exc,
+ ),
+ last_exc,
+ )
+ else:
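+                # Shorten the final sleep so the last attempt happens at the
+                # deadline instead of overshooting it.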
+ time_to_deadline = (deadline_datetime - now).total_seconds()
+ sleep = min(time_to_deadline, sleep)
+
+ _LOGGER.debug(
+ "Retrying due to {}, sleeping {:.1f}s ...".format(last_exc, sleep)
+ )
+ time.sleep(sleep)
+
+ raise ValueError("Sleep generator stopped yielding sleep values.")
+
+
+@six.python_2_unicode_compatible
+class Retry(object):
+ """Exponential retry decorator.
+
+ This class is a decorator used to add exponential back-off retry behavior
+ to an RPC call.
+
+ Although the default behavior is to retry transient API errors, a
+ different predicate can be provided to retry other exceptions.
+
+ Args:
+ predicate (Callable[Exception]): A callable that should return ``True``
+ if the given exception is retryable.
+        initial (float): The minimum amount of time to delay in seconds. This
+ must be greater than 0.
+        maximum (float): The maximum amount of time to delay in seconds.
+ multiplier (float): The multiplier applied to the delay.
+ deadline (float): How long to keep retrying in seconds. The last sleep
+ period is shortened as necessary, so that the last retry runs at
+ ``deadline`` (and not considerably beyond it).
+ """
+
+ def __init__(
+ self,
+ predicate=if_transient_error,
+ initial=_DEFAULT_INITIAL_DELAY,
+ maximum=_DEFAULT_MAXIMUM_DELAY,
+ multiplier=_DEFAULT_DELAY_MULTIPLIER,
+ deadline=_DEFAULT_DEADLINE,
+ on_error=None,
+ ):
+ self._predicate = predicate
+ self._initial = initial
+ self._multiplier = multiplier
+ self._maximum = maximum
+ self._deadline = deadline
+ self._on_error = on_error
+
+ def __call__(self, func, on_error=None):
+ """Wrap a callable with retry behavior.
+
+ Args:
+ func (Callable): The callable to add retry behavior to.
+ on_error (Callable[Exception]): A function to call while processing
+ a retryable exception. Any error raised by this function will
+ *not* be caught.
+
+ Returns:
+ Callable: A callable that will invoke ``func`` with retry
+ behavior.
+ """
+ if self._on_error is not None:
+ on_error = self._on_error
+
+ @general_helpers.wraps(func)
+ def retry_wrapped_func(*args, **kwargs):
+ """A wrapper that calls target function with retry."""
+ target = functools.partial(func, *args, **kwargs)
+ sleep_generator = exponential_sleep_generator(
+ self._initial, self._maximum, multiplier=self._multiplier
+ )
+ return retry_target(
+ target,
+ self._predicate,
+ sleep_generator,
+ self._deadline,
+ on_error=on_error,
+ )
+
+ return retry_wrapped_func
+
+ @property
+ def deadline(self):
+ return self._deadline
+
+ def with_deadline(self, deadline):
+ """Return a copy of this retry with the given deadline.
+
+ Args:
+ deadline (float): How long to keep retrying.
+
+ Returns:
+ Retry: A new retry instance with the given deadline.
+ """
+ return Retry(
+ predicate=self._predicate,
+ initial=self._initial,
+ maximum=self._maximum,
+ multiplier=self._multiplier,
+ deadline=deadline,
+ on_error=self._on_error,
+ )
+
+ def with_predicate(self, predicate):
+ """Return a copy of this retry with the given predicate.
+
+ Args:
+ predicate (Callable[Exception]): A callable that should return
+ ``True`` if the given exception is retryable.
+
+ Returns:
+ Retry: A new retry instance with the given predicate.
+ """
+ return Retry(
+ predicate=predicate,
+ initial=self._initial,
+ maximum=self._maximum,
+ multiplier=self._multiplier,
+ deadline=self._deadline,
+ on_error=self._on_error,
+ )
+
+ def with_delay(self, initial=None, maximum=None, multiplier=None):
+ """Return a copy of this retry with the given delay options.
+
+ Args:
+ initial (float): The minimum amount of time to delay. This must
+ be greater than 0.
+ maximum (float): The maximum amount of time to delay.
+ multiplier (float): The multiplier applied to the delay.
+
+ Returns:
+            Retry: A new retry instance with the given delay options.
+ """
+ return Retry(
+ predicate=self._predicate,
+ initial=initial if initial is not None else self._initial,
+ maximum=maximum if maximum is not None else self._maximum,
+            multiplier=multiplier if multiplier is not None else self._multiplier,
+ deadline=self._deadline,
+ on_error=self._on_error,
+ )
+
+ def __str__(self):
+ return (
+ "".format(
+ self._predicate,
+ self._initial,
+ self._maximum,
+ self._multiplier,
+ self._deadline,
+ self._on_error,
+ )
+ )
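+
+
+# A hedged composition sketch (``client.get_row`` is a hypothetical RPC):
+# the with_* helpers return modified copies, so policies can be layered.
+#
+#     base = Retry(deadline=120.0)
+#     patient = base.with_deadline(600.0).with_delay(initial=0.5, maximum=30.0)
+#
+#     @patient
+#     def fetch_row():
+#         return client.get_row("row-key")
+#
+#     row = fetch_row()  # retried on transient errors for up to 600 seconds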
diff --git a/venv/Lib/site-packages/google/api_core/retry_async.py b/venv/Lib/site-packages/google/api_core/retry_async.py
new file mode 100644
index 000000000..f925c3d31
--- /dev/null
+++ b/venv/Lib/site-packages/google/api_core/retry_async.py
@@ -0,0 +1,282 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for retrying coroutine functions with exponential back-off.
+
+The :class:`AsyncRetry` decorator shares most functionality and behavior with
+:class:`Retry`, but supports coroutine functions. Please refer to description
+of :class:`Retry` for more details.
+
+By default, this decorator will retry transient
+API errors (see :func:`if_transient_error`). For example:
+
+.. code-block:: python
+
+ @retry_async.AsyncRetry()
+ async def call_flaky_rpc():
+ return await client.flaky_rpc()
+
+ # Will retry flaky_rpc() if it raises transient API errors.
+ result = await call_flaky_rpc()
+
+You can pass a custom predicate to retry on different exceptions, such as
+waiting for an eventually consistent item to be available:
+
+.. code-block:: python
+
+ @retry_async.AsyncRetry(predicate=retry_async.if_exception_type(exceptions.NotFound))
+ async def check_if_exists():
+ return await client.does_thing_exist()
+
+ is_available = await check_if_exists()
+
+Some client library methods apply retry automatically. These methods can accept
+a ``retry`` parameter that allows you to configure the behavior:
+
+.. code-block:: python
+
+ my_retry = retry_async.AsyncRetry(deadline=60)
+ result = await client.some_method(retry=my_retry)
+
+"""
+
+import asyncio
+import datetime
+import functools
+import logging
+
+from google.api_core import datetime_helpers, exceptions
+from google.api_core.retry import (exponential_sleep_generator, # noqa: F401
+ if_exception_type, if_transient_error)
+
+_LOGGER = logging.getLogger(__name__)
+_DEFAULT_INITIAL_DELAY = 1.0 # seconds
+_DEFAULT_MAXIMUM_DELAY = 60.0 # seconds
+_DEFAULT_DELAY_MULTIPLIER = 2.0
+_DEFAULT_DEADLINE = 60.0 * 2.0 # seconds
+
+
+async def retry_target(target, predicate, sleep_generator, deadline, on_error=None):
+ """Call a function and retry if it fails.
+
+ This is the lowest-level retry helper. Generally, you'll use the
+ higher-level retry helper :class:`Retry`.
+
+ Args:
+ target(Callable): The function to call and retry. This must be a
+ nullary function - apply arguments with `functools.partial`.
+ predicate (Callable[Exception]): A callable used to determine if an
+ exception raised by the target should be considered retryable.
+ It should return True to retry or False otherwise.
+ sleep_generator (Iterable[float]): An infinite iterator that determines
+ how long to sleep between retries.
+ deadline (float): How long to keep retrying the target. The last sleep
+ period is shortened as necessary, so that the last retry runs at
+ ``deadline`` (and not considerably beyond it).
+ on_error (Callable[Exception]): A function to call while processing a
+ retryable exception. Any error raised by this function will *not*
+ be caught.
+
+ Returns:
+ Any: the return value of the target function.
+
+ Raises:
+ google.api_core.RetryError: If the deadline is exceeded while retrying.
+ ValueError: If the sleep generator stops yielding values.
+        Exception: If the target raises an exception that isn't retryable.
+ """
+ deadline_dt = (datetime_helpers.utcnow() + datetime.timedelta(seconds=deadline)) if deadline else None
+
+ last_exc = None
+
+ for sleep in sleep_generator:
+ try:
+ if not deadline_dt:
+ return await target()
+ else:
+ return await asyncio.wait_for(
+ target(),
+ timeout=(deadline_dt - datetime_helpers.utcnow()).total_seconds()
+ )
+ # pylint: disable=broad-except
+ # This function explicitly must deal with broad exceptions.
+ except Exception as exc:
+ if not predicate(exc) and not isinstance(exc, asyncio.TimeoutError):
+ raise
+ last_exc = exc
+ if on_error is not None:
+ on_error(exc)
+
+ now = datetime_helpers.utcnow()
+
+ if deadline_dt:
+ if deadline_dt <= now:
+ # Chains the raising RetryError with the root cause error,
+                # which helps observability and debuggability.
+ raise exceptions.RetryError(
+ "Deadline of {:.1f}s exceeded while calling {}".format(
+ deadline, target
+ ),
+ last_exc,
+ ) from last_exc
+ else:
+ time_to_deadline = (deadline_dt - now).total_seconds()
+ sleep = min(time_to_deadline, sleep)
+
+ _LOGGER.debug(
+ "Retrying due to {}, sleeping {:.1f}s ...".format(last_exc, sleep)
+ )
+ await asyncio.sleep(sleep)
+
+ raise ValueError("Sleep generator stopped yielding sleep values.")
+
+
+class AsyncRetry:
+ """Exponential retry decorator for async functions.
+
+ This class is a decorator used to add exponential back-off retry behavior
+ to an RPC call.
+
+ Although the default behavior is to retry transient API errors, a
+ different predicate can be provided to retry other exceptions.
+
+ Args:
+ predicate (Callable[Exception]): A callable that should return ``True``
+ if the given exception is retryable.
+        initial (float): The minimum amount of time to delay in seconds. This
+ must be greater than 0.
+        maximum (float): The maximum amount of time to delay in seconds.
+ multiplier (float): The multiplier applied to the delay.
+ deadline (float): How long to keep retrying in seconds. The last sleep
+ period is shortened as necessary, so that the last retry runs at
+ ``deadline`` (and not considerably beyond it).
+ on_error (Callable[Exception]): A function to call while processing
+ a retryable exception. Any error raised by this function will
+ *not* be caught.
+ """
+
+ def __init__(
+ self,
+ predicate=if_transient_error,
+ initial=_DEFAULT_INITIAL_DELAY,
+ maximum=_DEFAULT_MAXIMUM_DELAY,
+ multiplier=_DEFAULT_DELAY_MULTIPLIER,
+ deadline=_DEFAULT_DEADLINE,
+ on_error=None,
+ ):
+ self._predicate = predicate
+ self._initial = initial
+ self._multiplier = multiplier
+ self._maximum = maximum
+ self._deadline = deadline
+ self._on_error = on_error
+
+ def __call__(self, func, on_error=None):
+ """Wrap a callable with retry behavior.
+
+ Args:
+ func (Callable): The callable to add retry behavior to.
+ on_error (Callable[Exception]): A function to call while processing
+ a retryable exception. Any error raised by this function will
+ *not* be caught.
+
+ Returns:
+ Callable: A callable that will invoke ``func`` with retry
+ behavior.
+ """
+ if self._on_error is not None:
+ on_error = self._on_error
+
+ @functools.wraps(func)
+ async def retry_wrapped_func(*args, **kwargs):
+ """A wrapper that calls target function with retry."""
+ target = functools.partial(func, *args, **kwargs)
+ sleep_generator = exponential_sleep_generator(
+ self._initial, self._maximum, multiplier=self._multiplier
+ )
+ return await retry_target(
+ target,
+ self._predicate,
+ sleep_generator,
+ self._deadline,
+ on_error=on_error,
+ )
+
+ return retry_wrapped_func
+
+ def _replace(self,
+ predicate=None,
+ initial=None,
+ maximum=None,
+ multiplier=None,
+ deadline=None,
+ on_error=None):
+ return AsyncRetry(
+ predicate=predicate or self._predicate,
+ initial=initial or self._initial,
+ maximum=maximum or self._maximum,
+ multiplier=multiplier or self._multiplier,
+ deadline=deadline or self._deadline,
+ on_error=on_error or self._on_error,
+ )
+
+ def with_deadline(self, deadline):
+ """Return a copy of this retry with the given deadline.
+
+ Args:
+ deadline (float): How long to keep retrying.
+
+ Returns:
+ AsyncRetry: A new retry instance with the given deadline.
+ """
+ return self._replace(deadline=deadline)
+
+ def with_predicate(self, predicate):
+ """Return a copy of this retry with the given predicate.
+
+ Args:
+ predicate (Callable[Exception]): A callable that should return
+ ``True`` if the given exception is retryable.
+
+ Returns:
+ AsyncRetry: A new retry instance with the given predicate.
+ """
+ return self._replace(predicate=predicate)
+
+ def with_delay(self, initial=None, maximum=None, multiplier=None):
+ """Return a copy of this retry with the given delay options.
+
+ Args:
+            initial (float): The minimum amount of time to delay. This must
+ be greater than 0.
+            maximum (float): The maximum amount of time to delay.
+ multiplier (float): The multiplier applied to the delay.
+
+ Returns:
+            AsyncRetry: A new retry instance with the given delay options.
+ """
+ return self._replace(initial=initial, maximum=maximum, multiplier=multiplier)
+
+ def __str__(self):
+ return (
+ "".format(
+ self._predicate,
+ self._initial,
+ self._maximum,
+ self._multiplier,
+ self._deadline,
+ self._on_error,
+ )
+ )
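+
+
+# A hedged sketch (``client.get_row`` is a hypothetical coroutine method):
+# AsyncRetry decorates coroutine functions the same way Retry decorates
+# synchronous ones, and the wrapped call itself must be awaited.
+#
+#     @AsyncRetry(initial=0.5, deadline=300.0)
+#     async def fetch_row():
+#         return await client.get_row("row-key")
+#
+#     row = await fetch_row()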
diff --git a/venv/Lib/site-packages/google/api_core/timeout.py b/venv/Lib/site-packages/google/api_core/timeout.py
new file mode 100644
index 000000000..17c1beabb
--- /dev/null
+++ b/venv/Lib/site-packages/google/api_core/timeout.py
@@ -0,0 +1,224 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Decorators for applying timeout arguments to functions.
+
+These decorators are used to wrap API methods to apply either a constant
+or exponential timeout argument.
+
+For example, imagine an API method that can take a while to return results,
+such as one that might block until a resource is ready:
+
+.. code-block:: python
+
+ def is_thing_ready(timeout=None):
+ response = requests.get('https://example.com/is_thing_ready')
+ response.raise_for_status()
+ return response.json()
+
+This module allows a function like this to be wrapped so that timeouts are
+automatically determined, for example:
+
+.. code-block:: python
+
+ timeout_ = timeout.ExponentialTimeout()
+ is_thing_ready_with_timeout = timeout_(is_thing_ready)
+
+ for n in range(10):
+ try:
+            is_thing_ready_with_timeout()
+        except Exception:
+ pass
+
+In this example the first call to ``is_thing_ready`` will have a relatively
+small timeout (like 1 second). If the resource is available and the request
+completes quickly, the loop exits. But, if the resource isn't yet available
+and the request times out, it'll be retried - this time with a larger timeout.
+
+In the broader context these decorators are typically combined with
+:mod:`google.api_core.retry` to implement API methods with a signature that
+matches ``api_method(request, timeout=None, retry=None)``.
+"""
+
+from __future__ import unicode_literals
+
+import datetime
+
+import six
+
+from google.api_core import datetime_helpers
+from google.api_core import general_helpers
+
+_DEFAULT_INITIAL_TIMEOUT = 5.0 # seconds
+_DEFAULT_MAXIMUM_TIMEOUT = 30.0 # seconds
+_DEFAULT_TIMEOUT_MULTIPLIER = 2.0
+# If specified, must be in seconds. If none, deadline is not used in the
+# timeout calculation.
+_DEFAULT_DEADLINE = None
+
+
+@six.python_2_unicode_compatible
+class ConstantTimeout(object):
+ """A decorator that adds a constant timeout argument.
+
+ This is effectively equivalent to
+ ``functools.partial(func, timeout=timeout)``.
+
+ Args:
+ timeout (Optional[float]): the timeout (in seconds) to apply to the
+ wrapped function. If `None`, the target function is expected to
+ never timeout.
+ """
+
+ def __init__(self, timeout=None):
+ self._timeout = timeout
+
+ def __call__(self, func):
+ """Apply the timeout decorator.
+
+ Args:
+ func (Callable): The function to apply the timeout argument to.
+ This function must accept a timeout keyword argument.
+
+ Returns:
+ Callable: The wrapped function.
+ """
+
+ @general_helpers.wraps(func)
+ def func_with_timeout(*args, **kwargs):
+ """Wrapped function that adds timeout."""
+ kwargs["timeout"] = self._timeout
+ return func(*args, **kwargs)
+
+ return func_with_timeout
+
+ def __str__(self):
+ return "".format(self._timeout)
+
+
+def _exponential_timeout_generator(initial, maximum, multiplier, deadline):
+ """A generator that yields exponential timeout values.
+
+ Args:
+ initial (float): The initial timeout.
+ maximum (float): The maximum timeout.
+ multiplier (float): The multiplier applied to the timeout.
+ deadline (float): The overall deadline across all invocations.
+
+ Yields:
+ float: A timeout value.
+ """
+ if deadline is not None:
+ deadline_datetime = datetime_helpers.utcnow() + datetime.timedelta(
+ seconds=deadline
+ )
+ else:
+ deadline_datetime = datetime.datetime.max
+
+ timeout = initial
+ while True:
+ now = datetime_helpers.utcnow()
+ yield min(
+ # The calculated timeout based on invocations.
+ timeout,
+ # The set maximum timeout.
+ maximum,
+ # The remaining time before the deadline is reached.
+ float((deadline_datetime - now).seconds),
+ )
+ timeout = timeout * multiplier
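For intuition, the sequence the generator yields with the module defaults and no deadline:

.. code-block:: python

    gen = _exponential_timeout_generator(
        initial=5.0, maximum=30.0, multiplier=2.0, deadline=None
    )
    # next(gen) yields 5.0, 10.0, 20.0, 30.0, 30.0, ... capped at ``maximum``.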
+
+
+@six.python_2_unicode_compatible
+class ExponentialTimeout(object):
+ """A decorator that adds an exponentially increasing timeout argument.
+
+ This is useful if a function is called multiple times. Each time the
+ function is called this decorator will calculate a new timeout parameter
+ based on the number of times the function has been called.
+
+ Args:
+ initial (float): The initial timeout to pass.
+ maximum (float): The maximum timeout for any one call.
+ multiplier (float): The multiplier applied to the timeout for each
+ invocation.
+ deadline (Optional[float]): The overall deadline across all
+ invocations. This is used to prevent a very large calculated
+ timeout from pushing the overall execution time over the deadline.
+ This is especially useful in conjunction with
+ :mod:`google.api_core.retry`. If ``None``, the timeouts will not
+ be adjusted to accommodate an overall deadline.
+ """
+
+ def __init__(
+ self,
+ initial=_DEFAULT_INITIAL_TIMEOUT,
+ maximum=_DEFAULT_MAXIMUM_TIMEOUT,
+ multiplier=_DEFAULT_TIMEOUT_MULTIPLIER,
+ deadline=_DEFAULT_DEADLINE,
+ ):
+ self._initial = initial
+ self._maximum = maximum
+ self._multiplier = multiplier
+ self._deadline = deadline
+
+ def with_deadline(self, deadline):
+ """Return a copy of this teimout with the given deadline.
+
+ Args:
+ deadline (float): The overall deadline across all invocations.
+
+ Returns:
+ ExponentialTimeout: A new instance with the given deadline.
+ """
+ return ExponentialTimeout(
+ initial=self._initial,
+ maximum=self._maximum,
+ multiplier=self._multiplier,
+ deadline=deadline,
+ )
+
+ def __call__(self, func):
+ """Apply the timeout decorator.
+
+ Args:
+ func (Callable): The function to apply the timeout argument to.
+ This function must accept a timeout keyword argument.
+
+ Returns:
+ Callable: The wrapped function.
+ """
+ timeouts = _exponential_timeout_generator(
+ self._initial, self._maximum, self._multiplier, self._deadline
+ )
+
+ @general_helpers.wraps(func)
+ def func_with_timeout(*args, **kwargs):
+ """Wrapped function that adds timeout."""
+ kwargs["timeout"] = next(timeouts)
+ return func(*args, **kwargs)
+
+ return func_with_timeout
+
+ def __str__(self):
+ return (
+ "".format(
+ self._initial, self._maximum, self._multiplier, self._deadline
+ )
+ )
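A hedged end-to-end sketch (``fetch_status`` is a made-up stand-in for an API call):

.. code-block:: python

    def fetch_status(timeout=None):
        ...  # e.g. an HTTP/RPC call bounded by ``timeout``

    fetch_with_timeout = ExponentialTimeout(deadline=60.0)(fetch_status)

    # Successive calls receive 5.0, 10.0, 20.0, 30.0, ... second timeouts,
    # further clamped so the 60-second overall deadline is respected.
    for _ in range(4):
        fetch_with_timeout()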
diff --git a/venv/Lib/site-packages/google/api_core/version.py b/venv/Lib/site-packages/google/api_core/version.py
new file mode 100644
index 000000000..bce5ed4f6
--- /dev/null
+++ b/venv/Lib/site-packages/google/api_core/version.py
@@ -0,0 +1,15 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+__version__ = "1.22.4"
diff --git a/venv/Lib/site-packages/google/auth/__init__.py b/venv/Lib/site-packages/google/auth/__init__.py
new file mode 100644
index 000000000..22d61c66f
--- /dev/null
+++ b/venv/Lib/site-packages/google/auth/__init__.py
@@ -0,0 +1,24 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Google Auth Library for Python."""
+
+import logging
+
+from google.auth._default import default, load_credentials_from_file
+
+__all__ = ["default", "load_credentials_from_file"]
+
+# Set default logging handler to avoid "No handler found" warnings.
+logging.getLogger(__name__).addHandler(logging.NullHandler())
diff --git a/venv/Lib/site-packages/google/auth/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/google/auth/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..20d077b93
Binary files /dev/null and b/venv/Lib/site-packages/google/auth/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/auth/__pycache__/_cloud_sdk.cpython-36.pyc b/venv/Lib/site-packages/google/auth/__pycache__/_cloud_sdk.cpython-36.pyc
new file mode 100644
index 000000000..75a8766c9
Binary files /dev/null and b/venv/Lib/site-packages/google/auth/__pycache__/_cloud_sdk.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/auth/__pycache__/_credentials_async.cpython-36.pyc b/venv/Lib/site-packages/google/auth/__pycache__/_credentials_async.cpython-36.pyc
new file mode 100644
index 000000000..4165abb9e
Binary files /dev/null and b/venv/Lib/site-packages/google/auth/__pycache__/_credentials_async.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/auth/__pycache__/_default.cpython-36.pyc b/venv/Lib/site-packages/google/auth/__pycache__/_default.cpython-36.pyc
new file mode 100644
index 000000000..60ff8ec02
Binary files /dev/null and b/venv/Lib/site-packages/google/auth/__pycache__/_default.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/auth/__pycache__/_default_async.cpython-36.pyc b/venv/Lib/site-packages/google/auth/__pycache__/_default_async.cpython-36.pyc
new file mode 100644
index 000000000..34fb7a456
Binary files /dev/null and b/venv/Lib/site-packages/google/auth/__pycache__/_default_async.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/auth/__pycache__/_helpers.cpython-36.pyc b/venv/Lib/site-packages/google/auth/__pycache__/_helpers.cpython-36.pyc
new file mode 100644
index 000000000..d1c786eef
Binary files /dev/null and b/venv/Lib/site-packages/google/auth/__pycache__/_helpers.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/auth/__pycache__/_jwt_async.cpython-36.pyc b/venv/Lib/site-packages/google/auth/__pycache__/_jwt_async.cpython-36.pyc
new file mode 100644
index 000000000..437dab084
Binary files /dev/null and b/venv/Lib/site-packages/google/auth/__pycache__/_jwt_async.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/auth/__pycache__/_oauth2client.cpython-36.pyc b/venv/Lib/site-packages/google/auth/__pycache__/_oauth2client.cpython-36.pyc
new file mode 100644
index 000000000..37ae0fdfa
Binary files /dev/null and b/venv/Lib/site-packages/google/auth/__pycache__/_oauth2client.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/auth/__pycache__/_service_account_info.cpython-36.pyc b/venv/Lib/site-packages/google/auth/__pycache__/_service_account_info.cpython-36.pyc
new file mode 100644
index 000000000..8b9a61586
Binary files /dev/null and b/venv/Lib/site-packages/google/auth/__pycache__/_service_account_info.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/auth/__pycache__/app_engine.cpython-36.pyc b/venv/Lib/site-packages/google/auth/__pycache__/app_engine.cpython-36.pyc
new file mode 100644
index 000000000..3c9dfa7b7
Binary files /dev/null and b/venv/Lib/site-packages/google/auth/__pycache__/app_engine.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/auth/__pycache__/credentials.cpython-36.pyc b/venv/Lib/site-packages/google/auth/__pycache__/credentials.cpython-36.pyc
new file mode 100644
index 000000000..2b9b5d2e6
Binary files /dev/null and b/venv/Lib/site-packages/google/auth/__pycache__/credentials.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/auth/__pycache__/environment_vars.cpython-36.pyc b/venv/Lib/site-packages/google/auth/__pycache__/environment_vars.cpython-36.pyc
new file mode 100644
index 000000000..c1eea8064
Binary files /dev/null and b/venv/Lib/site-packages/google/auth/__pycache__/environment_vars.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/auth/__pycache__/exceptions.cpython-36.pyc b/venv/Lib/site-packages/google/auth/__pycache__/exceptions.cpython-36.pyc
new file mode 100644
index 000000000..29368791f
Binary files /dev/null and b/venv/Lib/site-packages/google/auth/__pycache__/exceptions.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/auth/__pycache__/iam.cpython-36.pyc b/venv/Lib/site-packages/google/auth/__pycache__/iam.cpython-36.pyc
new file mode 100644
index 000000000..4d0ea2a15
Binary files /dev/null and b/venv/Lib/site-packages/google/auth/__pycache__/iam.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/auth/__pycache__/impersonated_credentials.cpython-36.pyc b/venv/Lib/site-packages/google/auth/__pycache__/impersonated_credentials.cpython-36.pyc
new file mode 100644
index 000000000..8fe0e5930
Binary files /dev/null and b/venv/Lib/site-packages/google/auth/__pycache__/impersonated_credentials.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/auth/__pycache__/jwt.cpython-36.pyc b/venv/Lib/site-packages/google/auth/__pycache__/jwt.cpython-36.pyc
new file mode 100644
index 000000000..ed226f6f6
Binary files /dev/null and b/venv/Lib/site-packages/google/auth/__pycache__/jwt.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/auth/_cloud_sdk.py b/venv/Lib/site-packages/google/auth/_cloud_sdk.py
new file mode 100644
index 000000000..e772fe964
--- /dev/null
+++ b/venv/Lib/site-packages/google/auth/_cloud_sdk.py
@@ -0,0 +1,152 @@
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for reading the Google Cloud SDK's configuration."""
+
+import json
+import os
+import subprocess
+
+import six
+
+from google.auth import environment_vars
+from google.auth import exceptions
+
+
+# The ~/.config subdirectory containing gcloud credentials.
+_CONFIG_DIRECTORY = "gcloud"
+# Windows systems store config at %APPDATA%\gcloud
+_WINDOWS_CONFIG_ROOT_ENV_VAR = "APPDATA"
+# The name of the file in the Cloud SDK config that contains default
+# credentials.
+_CREDENTIALS_FILENAME = "application_default_credentials.json"
+# The name of the Cloud SDK shell script
+_CLOUD_SDK_POSIX_COMMAND = "gcloud"
+_CLOUD_SDK_WINDOWS_COMMAND = "gcloud.cmd"
+# The command to get the Cloud SDK configuration
+_CLOUD_SDK_CONFIG_COMMAND = ("config", "config-helper", "--format", "json")
+# The command to get google user access token
+_CLOUD_SDK_USER_ACCESS_TOKEN_COMMAND = ("auth", "print-access-token")
+# Cloud SDK's application-default client ID
+CLOUD_SDK_CLIENT_ID = (
+ "764086051850-6qr4p6gpi6hn506pt8ejuq83di341hur.apps.googleusercontent.com"
+)
+
+
+def get_config_path():
+ """Returns the absolute path the the Cloud SDK's configuration directory.
+
+ Returns:
+ str: The Cloud SDK config path.
+ """
+ # If the path is explicitly set, return that.
+ try:
+ return os.environ[environment_vars.CLOUD_SDK_CONFIG_DIR]
+ except KeyError:
+ pass
+
+ # Non-windows systems store this at ~/.config/gcloud
+ if os.name != "nt":
+ return os.path.join(os.path.expanduser("~"), ".config", _CONFIG_DIRECTORY)
+ # Windows systems store config at %APPDATA%\gcloud
+ else:
+ try:
+ return os.path.join(
+ os.environ[_WINDOWS_CONFIG_ROOT_ENV_VAR], _CONFIG_DIRECTORY
+ )
+ except KeyError:
+ # This should never happen unless someone is really
+ # messing with things, but we'll cover the case anyway.
+ drive = os.environ.get("SystemDrive", "C:")
+ return os.path.join(drive, "\\", _CONFIG_DIRECTORY)
+
+
+def get_application_default_credentials_path():
+ """Gets the path to the application default credentials file.
+
+ The path may or may not exist.
+
+ Returns:
+ str: The full path to application default credentials.
+ """
+ config_path = get_config_path()
+ return os.path.join(config_path, _CREDENTIALS_FILENAME)
+
+
+def get_project_id():
+ """Gets the project ID from the Cloud SDK.
+
+ Returns:
+ Optional[str]: The project ID.
+ """
+ if os.name == "nt":
+ command = _CLOUD_SDK_WINDOWS_COMMAND
+ else:
+ command = _CLOUD_SDK_POSIX_COMMAND
+
+ try:
+ output = subprocess.check_output(
+ (command,) + _CLOUD_SDK_CONFIG_COMMAND, stderr=subprocess.STDOUT
+ )
+ except (subprocess.CalledProcessError, OSError, IOError):
+ return None
+
+ try:
+ configuration = json.loads(output.decode("utf-8"))
+ except ValueError:
+ return None
+
+ try:
+ return configuration["configuration"]["properties"]["core"]["project"]
+ except KeyError:
+ return None
+
+
+def get_auth_access_token(account=None):
+ """Load user access token with the ``gcloud auth print-access-token`` command.
+
+ Args:
+ account (Optional[str]): Account to get the access token for. If not
+ specified, the current active account will be used.
+
+ Returns:
+ str: The user access token.
+
+ Raises:
+ google.auth.exceptions.UserAccessTokenError: if failed to get access
+ token from gcloud.
+ """
+ if os.name == "nt":
+ command = _CLOUD_SDK_WINDOWS_COMMAND
+ else:
+ command = _CLOUD_SDK_POSIX_COMMAND
+
+ try:
+ if account:
+ command = (
+ (command,)
+ + _CLOUD_SDK_USER_ACCESS_TOKEN_COMMAND
+ + ("--account=" + account,)
+ )
+ else:
+ command = (command,) + _CLOUD_SDK_USER_ACCESS_TOKEN_COMMAND
+
+ access_token = subprocess.check_output(command, stderr=subprocess.STDOUT)
+ # remove the trailing "\n"
+ return access_token.decode("utf-8").strip()
+ except (subprocess.CalledProcessError, OSError, IOError) as caught_exc:
+ new_exc = exceptions.UserAccessTokenError(
+ "Failed to obtain access token", caught_exc
+ )
+ six.raise_from(new_exc, caught_exc)
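A small sketch of how these helpers fit together; all results depend on the local gcloud installation, so the values are illustrative:

.. code-block:: python

    from google.auth import _cloud_sdk

    config_dir = _cloud_sdk.get_config_path()
    adc_file = _cloud_sdk.get_application_default_credentials_path()
    project = _cloud_sdk.get_project_id()  # None if gcloud is unavailable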
diff --git a/venv/Lib/site-packages/google/auth/_credentials_async.py b/venv/Lib/site-packages/google/auth/_credentials_async.py
new file mode 100644
index 000000000..d4d4e2c0e
--- /dev/null
+++ b/venv/Lib/site-packages/google/auth/_credentials_async.py
@@ -0,0 +1,176 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""Interfaces for credentials."""
+
+import abc
+import inspect
+
+import six
+
+from google.auth import credentials
+
+
+@six.add_metaclass(abc.ABCMeta)
+class Credentials(credentials.Credentials):
+ """Async inherited credentials class from google.auth.credentials.
+ The added functionality is the before_request call which requires
+ async/await syntax.
+ All credentials have a :attr:`token` that is used for authentication and
+ may also optionally set an :attr:`expiry` to indicate when the token will
+ no longer be valid.
+
+ Most credentials will be :attr:`invalid` until :meth:`refresh` is called.
+ Credentials can do this automatically before the first HTTP request in
+ :meth:`before_request`.
+
+ Although the token and expiration will change as the credentials are
+ :meth:`refreshed <refresh>` and used, credentials should be considered
+ immutable. Various credentials will accept configuration such as private
+ keys, scopes, and other options. These options are not changeable after
+ construction. Some classes will provide mechanisms to copy the credentials
+ with modifications such as :meth:`ScopedCredentials.with_scopes`.
+ """
+
+ async def before_request(self, request, method, url, headers):
+ """Performs credential-specific before request logic.
+
+ Refreshes the credentials if necessary, then calls :meth:`apply` to
+ apply the token to the authentication header.
+
+ Args:
+ request (google.auth.transport.Request): The object used to make
+ HTTP requests.
+ method (str): The request's HTTP method or the RPC method being
+ invoked.
+ url (str): The request's URI or the RPC service's URI.
+ headers (Mapping): The request's headers.
+ """
+ # pylint: disable=unused-argument
+ # (Subclasses may use these arguments to ascertain information about
+ # the http request.)
+
+ if not self.valid:
+ if inspect.iscoroutinefunction(self.refresh):
+ await self.refresh(request)
+ else:
+ self.refresh(request)
+ self.apply(headers)
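A hedged sketch of how a transport layer might use this hook (``request`` stands in for a concrete ``google.auth.transport.Request`` implementation):

.. code-block:: python

    async def authed_headers(credentials, request, url):
        headers = {}
        # Refreshes if needed (awaiting async refresh implementations),
        # then injects the Authorization header into ``headers``.
        await credentials.before_request(request, "GET", url, headers)
        return headers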
+
+
+class CredentialsWithQuotaProject(credentials.CredentialsWithQuotaProject):
+ """Abstract base for credentials supporting ``with_quota_project`` factory"""
+
+
+class AnonymousCredentials(credentials.AnonymousCredentials, Credentials):
+ """Credentials that do not provide any authentication information.
+
+ These are useful in the case of services that support anonymous access or
+ local service emulators that do not use credentials. This class inherits
+ from the sync anonymous credentials class and exists so that anonymous
+ credentials can also be used through the async credentials interface.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class ReadOnlyScoped(credentials.ReadOnlyScoped):
+ """Interface for credentials whose scopes can be queried.
+
+ OAuth 2.0-based credentials allow limiting access using scopes as described
+ in `RFC6749 Section 3.3`_.
+ If a credential class implements this interface, then the credentials
+ use scopes in their implementation.
+
+ Some credentials require scopes in order to obtain a token. You can check
+ if scoping is necessary with :attr:`requires_scopes`::
+
+ if credentials.requires_scopes:
+ # Scoping is required.
+ credentials = credentials.with_scopes(scopes=['one', 'two'])
+
+ Credentials that require scopes must either be constructed with scopes::
+
+ credentials = SomeScopedCredentials(scopes=['one', 'two'])
+
+ Or must copy an existing instance using :meth:`with_scopes`::
+
+ scoped_credentials = credentials.with_scopes(scopes=['one', 'two'])
+
+ Some credentials have scopes but do not allow or require scopes to be set,
+ these credentials can be used as-is.
+
+ .. _RFC6749 Section 3.3: https://tools.ietf.org/html/rfc6749#section-3.3
+ """
+
+
+class Scoped(credentials.Scoped):
+ """Interface for credentials whose scopes can be replaced while copying.
+
+ OAuth 2.0-based credentials allow limiting access using scopes as described
+ in `RFC6749 Section 3.3`_.
+ If a credential class implements this interface, then the credentials
+ use scopes in their implementation.
+
+ Some credentials require scopes in order to obtain a token. You can check
+ if scoping is necessary with :attr:`requires_scopes`::
+
+ if credentials.requires_scopes:
+ # Scoping is required.
+ credentials = credentials.with_scopes(['one', 'two'])
+
+ Credentials that require scopes must either be constructed with scopes::
+
+ credentials = SomeScopedCredentials(scopes=['one', 'two'])
+
+ Or must copy an existing instance using :meth:`with_scopes`::
+
+ scoped_credentials = credentials.with_scopes(scopes=['one', 'two'])
+
+ Some credentials have scopes but do not allow or require scopes to be set,
+ these credentials can be used as-is.
+
+ .. _RFC6749 Section 3.3: https://tools.ietf.org/html/rfc6749#section-3.3
+ """
+
+
+def with_scopes_if_required(credentials, scopes):
+ """Creates a copy of the credentials with scopes if scoping is required.
+
+ This helper function is useful when you do not know (or care to know) the
+ specific type of credentials you are using (such as when you use
+ :func:`google.auth.default`). This function will call
+ :meth:`Scoped.with_scopes` if the credentials are scoped credentials and if
+ the credentials require scoping. Otherwise, it will return the credentials
+ as-is.
+
+ Args:
+ credentials (google.auth.credentials.Credentials): The credentials to
+ scope if necessary.
+ scopes (Sequence[str]): The list of scopes to use.
+
+ Returns:
+ google.auth._credentials_async.Credentials: Either a new set of scoped
+ credentials, or the passed in credentials instance if no scoping
+ was required.
+ """
+ if isinstance(credentials, Scoped) and credentials.requires_scopes:
+ return credentials.with_scopes(scopes)
+ else:
+ return credentials
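Typical use, wrapped in a helper so the sketch is self-contained (the scope URL is illustrative):

.. code-block:: python

    def scope_if_needed(credentials):
        scopes = ["https://www.googleapis.com/auth/cloud-platform"]
        # Scoped credentials come back with the scopes applied; credentials
        # that do not require scoping are returned unchanged.
        return with_scopes_if_required(credentials, scopes)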
+
+
+@six.add_metaclass(abc.ABCMeta)
+class Signing(credentials.Signing):
+ """Interface for credentials that can cryptographically sign messages."""
diff --git a/venv/Lib/site-packages/google/auth/_default.py b/venv/Lib/site-packages/google/auth/_default.py
new file mode 100644
index 000000000..de81c5b2c
--- /dev/null
+++ b/venv/Lib/site-packages/google/auth/_default.py
@@ -0,0 +1,354 @@
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Application default credentials.
+
+Implements application default credentials and project ID detection.
+"""
+
+import io
+import json
+import logging
+import os
+import warnings
+
+import six
+
+from google.auth import environment_vars
+from google.auth import exceptions
+import google.auth.transport._http_client
+
+_LOGGER = logging.getLogger(__name__)
+
+# Valid types accepted for file-based credentials.
+_AUTHORIZED_USER_TYPE = "authorized_user"
+_SERVICE_ACCOUNT_TYPE = "service_account"
+_VALID_TYPES = (_AUTHORIZED_USER_TYPE, _SERVICE_ACCOUNT_TYPE)
+
+# Help message when no credentials can be found.
+_HELP_MESSAGE = """\
+Could not automatically determine credentials. Please set {env} or \
+explicitly create credentials and re-run the application. For more \
+information, please see \
+https://cloud.google.com/docs/authentication/getting-started
+""".format(
+ env=environment_vars.CREDENTIALS
+).strip()
+
+# Warning when using Cloud SDK user credentials
+_CLOUD_SDK_CREDENTIALS_WARNING = """\
+Your application has authenticated using end user credentials from Google \
+Cloud SDK without a quota project. You might receive a "quota exceeded" \
+or "API not enabled" error. We recommend you rerun \
+`gcloud auth application-default login` and make sure a quota project is \
+added. Or you can use service accounts instead. For more information \
+about service accounts, see https://cloud.google.com/docs/authentication/"""
+
+
+def _warn_about_problematic_credentials(credentials):
+ """Determines if the credentials are problematic.
+
+ Credentials from the Cloud SDK that are associated with Cloud SDK's project
+ are problematic because they may not have APIs enabled and have limited
+ quota. If this is the case, warn about it.
+ """
+ from google.auth import _cloud_sdk
+
+ if credentials.client_id == _cloud_sdk.CLOUD_SDK_CLIENT_ID:
+ warnings.warn(_CLOUD_SDK_CREDENTIALS_WARNING)
+
+
+def load_credentials_from_file(filename, scopes=None, quota_project_id=None):
+ """Loads Google credentials from a file.
+
+ The credentials file must be a service account key or stored authorized
+ user credentials.
+
+ Args:
+ filename (str): The full path to the credentials file.
+ scopes (Optional[Sequence[str]]): The list of scopes for the credentials. If
+ specified, the credentials will automatically be scoped if
+ necessary
+ quota_project_id (Optional[str]): The project ID used for
+ quota and billing.
+
+ Returns:
+ Tuple[google.auth.credentials.Credentials, Optional[str]]: Loaded
+ credentials and the project ID. Authorized user credentials do not
+ have the project ID information.
+
+ Raises:
+ google.auth.exceptions.DefaultCredentialsError: if the file is in the
+ wrong format or is missing.
+ """
+ if not os.path.exists(filename):
+ raise exceptions.DefaultCredentialsError(
+ "File {} was not found.".format(filename)
+ )
+
+ with io.open(filename, "r") as file_obj:
+ try:
+ info = json.load(file_obj)
+ except ValueError as caught_exc:
+ new_exc = exceptions.DefaultCredentialsError(
+ "File {} is not a valid json file.".format(filename), caught_exc
+ )
+ six.raise_from(new_exc, caught_exc)
+
+ # The type key should indicate that the file is either a service account
+ # credentials file or an authorized user credentials file.
+ credential_type = info.get("type")
+
+ if credential_type == _AUTHORIZED_USER_TYPE:
+ from google.oauth2 import credentials
+
+ try:
+ credentials = credentials.Credentials.from_authorized_user_info(
+ info, scopes=scopes
+ )
+ except ValueError as caught_exc:
+ msg = "Failed to load authorized user credentials from {}".format(filename)
+ new_exc = exceptions.DefaultCredentialsError(msg, caught_exc)
+ six.raise_from(new_exc, caught_exc)
+ if quota_project_id:
+ credentials = credentials.with_quota_project(quota_project_id)
+ if not credentials.quota_project_id:
+ _warn_about_problematic_credentials(credentials)
+ return credentials, None
+
+ elif credential_type == _SERVICE_ACCOUNT_TYPE:
+ from google.oauth2 import service_account
+
+ try:
+ credentials = service_account.Credentials.from_service_account_info(
+ info, scopes=scopes
+ )
+ except ValueError as caught_exc:
+ msg = "Failed to load service account credentials from {}".format(filename)
+ new_exc = exceptions.DefaultCredentialsError(msg, caught_exc)
+ six.raise_from(new_exc, caught_exc)
+ if quota_project_id:
+ credentials = credentials.with_quota_project(quota_project_id)
+ return credentials, info.get("project_id")
+
+ else:
+ raise exceptions.DefaultCredentialsError(
+ "The file {file} does not have a valid type. "
+ "Type is {type}, expected one of {valid_types}.".format(
+ file=filename, type=credential_type, valid_types=_VALID_TYPES
+ )
+ )
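A minimal sketch, assuming ``key.json`` is a service account key on disk (the filename and scope are illustrative):

.. code-block:: python

    import google.auth

    credentials, project_id = google.auth.load_credentials_from_file(
        "key.json",
        scopes=["https://www.googleapis.com/auth/cloud-platform"],
    )
    # project_id comes from the key file; it is None for authorized
    # user credentials.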
+
+
+def _get_gcloud_sdk_credentials():
+ """Gets the credentials and project ID from the Cloud SDK."""
+ from google.auth import _cloud_sdk
+
+ _LOGGER.debug("Checking Cloud SDK credentials as part of auth process...")
+
+ # Check if application default credentials exist.
+ credentials_filename = _cloud_sdk.get_application_default_credentials_path()
+
+ if not os.path.isfile(credentials_filename):
+ _LOGGER.debug("Cloud SDK credentials not found on disk; not using them")
+ return None, None
+
+ credentials, project_id = load_credentials_from_file(credentials_filename)
+
+ if not project_id:
+ project_id = _cloud_sdk.get_project_id()
+
+ return credentials, project_id
+
+
+def _get_explicit_environ_credentials():
+ """Gets credentials from the GOOGLE_APPLICATION_CREDENTIALS environment
+ variable."""
+ explicit_file = os.environ.get(environment_vars.CREDENTIALS)
+
+ _LOGGER.debug(
+ "Checking %s for explicit credentials as part of auth process...", explicit_file
+ )
+
+ if explicit_file is not None:
+ credentials, project_id = load_credentials_from_file(
+ os.environ[environment_vars.CREDENTIALS]
+ )
+
+ return credentials, project_id
+
+ else:
+ return None, None
+
+
+def _get_gae_credentials():
+ """Gets Google App Engine App Identity credentials and project ID."""
+ # While this library is normally bundled with app_engine, there are
+ # some cases where it's not available, so we tolerate ImportError.
+ try:
+ _LOGGER.debug("Checking for App Engine runtime as part of auth process...")
+ import google.auth.app_engine as app_engine
+ except ImportError:
+ _LOGGER.warning("Import of App Engine auth library failed.")
+ return None, None
+
+ try:
+ credentials = app_engine.Credentials()
+ project_id = app_engine.get_project_id()
+ return credentials, project_id
+ except EnvironmentError:
+ _LOGGER.debug(
+ "No App Engine library was found so cannot authentication via App Engine Identity Credentials."
+ )
+ return None, None
+
+
+def _get_gce_credentials(request=None):
+ """Gets credentials and project ID from the GCE Metadata Service."""
+ # Ping requires a transport, but we want application default credentials
+ # to require no arguments. So, we'll use the _http_client transport which
+ # uses http.client. This is only acceptable because the metadata server
+ # doesn't do SSL and never requires proxies.
+
+ # While this library is normally bundled with compute_engine, there are
+ # some cases where it's not available, so we tolerate ImportError.
+ try:
+ from google.auth import compute_engine
+ from google.auth.compute_engine import _metadata
+ except ImportError:
+ _LOGGER.warning("Import of Compute Engine auth library failed.")
+ return None, None
+
+ if request is None:
+ request = google.auth.transport._http_client.Request()
+
+ if _metadata.ping(request=request):
+ # Get the project ID.
+ try:
+ project_id = _metadata.get_project_id(request=request)
+ except exceptions.TransportError:
+ project_id = None
+
+ return compute_engine.Credentials(), project_id
+ else:
+ _LOGGER.warning(
+ "Authentication failed using Compute Engine authentication due to unavailable metadata server."
+ )
+ return None, None
+
+
+def default(scopes=None, request=None, quota_project_id=None):
+ """Gets the default credentials for the current environment.
+
+ `Application Default Credentials`_ provides an easy way to obtain
+ credentials to call Google APIs for server-to-server or local applications.
+ This function acquires credentials from the environment in the following
+ order:
+
+ 1. If the environment variable ``GOOGLE_APPLICATION_CREDENTIALS`` is set
+ to the path of a valid service account JSON private key file, then it is
+ loaded and returned. The project ID returned is the project ID defined
+ in the service account file if available (some older files do not
+ contain project ID information).
+ 2. If the `Google Cloud SDK`_ is installed and has application default
+ credentials set they are loaded and returned.
+
+ To enable application default credentials with the Cloud SDK run::
+
+ gcloud auth application-default login
+
+ If the Cloud SDK has an active project, the project ID is returned. The
+ active project can be set using::
+
+ gcloud config set project
+
+ 3. If the application is running in the `App Engine standard environment`_
+ then the credentials and project ID from the `App Identity Service`_
+ are used.
+ 4. If the application is running in `Compute Engine`_ or the
+ `App Engine flexible environment`_ then the credentials and project ID
+ are obtained from the `Metadata Service`_.
+ 5. If no credentials are found,
+ :class:`~google.auth.exceptions.DefaultCredentialsError` will be raised.
+
+ .. _Application Default Credentials: https://developers.google.com\
+ /identity/protocols/application-default-credentials
+ .. _Google Cloud SDK: https://cloud.google.com/sdk
+ .. _App Engine standard environment: https://cloud.google.com/appengine
+ .. _App Identity Service: https://cloud.google.com/appengine/docs/python\
+ /appidentity/
+ .. _Compute Engine: https://cloud.google.com/compute
+ .. _App Engine flexible environment: https://cloud.google.com\
+ /appengine/flexible
+ .. _Metadata Service: https://cloud.google.com/compute/docs\
+ /storing-retrieving-metadata
+
+ Example::
+
+ import google.auth
+
+ credentials, project_id = google.auth.default()
+
+ Args:
+ scopes (Sequence[str]): The list of scopes for the credentials. If
+ specified, the credentials will automatically be scoped if
+ necessary.
+ request (google.auth.transport.Request): An object used to make
+ HTTP requests. This is used to detect whether the application
+ is running on Compute Engine. If not specified, then it will
+ use the standard library http client to make requests.
+ quota_project_id (Optional[str]): The project ID used for
+ quota and billing.
+ Returns:
+ Tuple[~google.auth.credentials.Credentials, Optional[str]]:
+ the current environment's credentials and project ID. Project ID
+ may be None, which indicates that the Project ID could not be
+ ascertained from the environment.
+
+ Raises:
+ ~google.auth.exceptions.DefaultCredentialsError:
+ If no credentials were found, or if the credentials found were
+ invalid.
+ """
+ from google.auth.credentials import with_scopes_if_required
+
+ explicit_project_id = os.environ.get(
+ environment_vars.PROJECT, os.environ.get(environment_vars.LEGACY_PROJECT)
+ )
+
+ checkers = (
+ _get_explicit_environ_credentials,
+ _get_gcloud_sdk_credentials,
+ _get_gae_credentials,
+ lambda: _get_gce_credentials(request),
+ )
+
+ for checker in checkers:
+ credentials, project_id = checker()
+ if credentials is not None:
+ credentials = with_scopes_if_required(credentials, scopes)
+ if quota_project_id:
+ credentials = credentials.with_quota_project(quota_project_id)
+
+ effective_project_id = explicit_project_id or project_id
+ if not effective_project_id:
+ _LOGGER.warning(
+ "No project ID could be determined. Consider running "
+ "`gcloud config set project` or setting the %s "
+ "environment variable",
+ environment_vars.PROJECT,
+ )
+ return credentials, effective_project_id
+
+ raise exceptions.DefaultCredentialsError(_HELP_MESSAGE)
diff --git a/venv/Lib/site-packages/google/auth/_default_async.py b/venv/Lib/site-packages/google/auth/_default_async.py
new file mode 100644
index 000000000..3347fbfdc
--- /dev/null
+++ b/venv/Lib/site-packages/google/auth/_default_async.py
@@ -0,0 +1,266 @@
+# Copyright 2020 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Application default credentials.
+
+Implements application default credentials and project ID detection.
+"""
+
+import io
+import json
+import os
+
+import six
+
+from google.auth import _default
+from google.auth import environment_vars
+from google.auth import exceptions
+
+
+def load_credentials_from_file(filename, scopes=None, quota_project_id=None):
+ """Loads Google credentials from a file.
+
+ The credentials file must be a service account key or stored authorized
+ user credentials.
+
+ Args:
+ filename (str): The full path to the credentials file.
+ scopes (Optional[Sequence[str]]): The list of scopes for the credentials. If
+ specified, the credentials will automatically be scoped if
+ necessary
+ quota_project_id (Optional[str]): The project ID used for
+ quota and billing.
+
+ Returns:
+ Tuple[google.auth.credentials.Credentials, Optional[str]]: Loaded
+ credentials and the project ID. Authorized user credentials do not
+ have the project ID information.
+
+ Raises:
+ google.auth.exceptions.DefaultCredentialsError: if the file is in the
+ wrong format or is missing.
+ """
+ if not os.path.exists(filename):
+ raise exceptions.DefaultCredentialsError(
+ "File {} was not found.".format(filename)
+ )
+
+ with io.open(filename, "r") as file_obj:
+ try:
+ info = json.load(file_obj)
+ except ValueError as caught_exc:
+ new_exc = exceptions.DefaultCredentialsError(
+ "File {} is not a valid json file.".format(filename), caught_exc
+ )
+ six.raise_from(new_exc, caught_exc)
+
+ # The type key should indicate that the file is either a service account
+ # credentials file or an authorized user credentials file.
+ credential_type = info.get("type")
+
+ if credential_type == _default._AUTHORIZED_USER_TYPE:
+ from google.oauth2 import _credentials_async as credentials
+
+ try:
+ credentials = credentials.Credentials.from_authorized_user_info(
+ info, scopes=scopes
+ ).with_quota_project(quota_project_id)
+ except ValueError as caught_exc:
+ msg = "Failed to load authorized user credentials from {}".format(filename)
+ new_exc = exceptions.DefaultCredentialsError(msg, caught_exc)
+ six.raise_from(new_exc, caught_exc)
+ if not credentials.quota_project_id:
+ _default._warn_about_problematic_credentials(credentials)
+ return credentials, None
+
+ elif credential_type == _default._SERVICE_ACCOUNT_TYPE:
+ from google.oauth2 import _service_account_async as service_account
+
+ try:
+ credentials = service_account.Credentials.from_service_account_info(
+ info, scopes=scopes
+ ).with_quota_project(quota_project_id)
+ except ValueError as caught_exc:
+ msg = "Failed to load service account credentials from {}".format(filename)
+ new_exc = exceptions.DefaultCredentialsError(msg, caught_exc)
+ six.raise_from(new_exc, caught_exc)
+ return credentials, info.get("project_id")
+
+ else:
+ raise exceptions.DefaultCredentialsError(
+ "The file {file} does not have a valid type. "
+ "Type is {type}, expected one of {valid_types}.".format(
+ file=filename, type=credential_type, valid_types=_default._VALID_TYPES
+ )
+ )
+
+
+def _get_gcloud_sdk_credentials():
+ """Gets the credentials and project ID from the Cloud SDK."""
+ from google.auth import _cloud_sdk
+
+ # Check if application default credentials exist.
+ credentials_filename = _cloud_sdk.get_application_default_credentials_path()
+
+ if not os.path.isfile(credentials_filename):
+ return None, None
+
+ credentials, project_id = load_credentials_from_file(credentials_filename)
+
+ if not project_id:
+ project_id = _cloud_sdk.get_project_id()
+
+ return credentials, project_id
+
+
+def _get_explicit_environ_credentials():
+ """Gets credentials from the GOOGLE_APPLICATION_CREDENTIALS environment
+ variable."""
+ explicit_file = os.environ.get(environment_vars.CREDENTIALS)
+
+ if explicit_file is not None:
+ credentials, project_id = load_credentials_from_file(
+ os.environ[environment_vars.CREDENTIALS]
+ )
+
+ return credentials, project_id
+
+ else:
+ return None, None
+
+
+def _get_gae_credentials():
+ """Gets Google App Engine App Identity credentials and project ID."""
+ # While this library is normally bundled with app_engine, there are
+ # some cases where it's not available, so we tolerate ImportError.
+
+ return _default._get_gae_credentials()
+
+
+def _get_gce_credentials(request=None):
+ """Gets credentials and project ID from the GCE Metadata Service."""
+ # Ping requires a transport, but we want application default credentials
+ # to require no arguments. So, we'll use the _http_client transport which
+ # uses http.client. This is only acceptable because the metadata server
+ # doesn't do SSL and never requires proxies.
+
+ # While this library is normally bundled with compute_engine, there are
+ # some cases where it's not available, so we tolerate ImportError.
+
+ return _default._get_gce_credentials(request)
+
+
+def default_async(scopes=None, request=None, quota_project_id=None):
+ """Gets the default credentials for the current environment.
+
+ `Application Default Credentials`_ provides an easy way to obtain
+ credentials to call Google APIs for server-to-server or local applications.
+ This function acquires credentials from the environment in the following
+ order:
+
+ 1. If the environment variable ``GOOGLE_APPLICATION_CREDENTIALS`` is set
+ to the path of a valid service account JSON private key file, then it is
+ loaded and returned. The project ID returned is the project ID defined
+ in the service account file if available (some older files do not
+ contain project ID information).
+ 2. If the `Google Cloud SDK`_ is installed and has application default
+ credentials set they are loaded and returned.
+
+ To enable application default credentials with the Cloud SDK run::
+
+ gcloud auth application-default login
+
+ If the Cloud SDK has an active project, the project ID is returned. The
+ active project can be set using::
+
+ gcloud config set project
+
+ 3. If the application is running in the `App Engine standard environment`_
+ then the credentials and project ID from the `App Identity Service`_
+ are used.
+ 4. If the application is running in `Compute Engine`_ or the
+ `App Engine flexible environment`_ then the credentials and project ID
+ are obtained from the `Metadata Service`_.
+ 5. If no credentials are found,
+ :class:`~google.auth.exceptions.DefaultCredentialsError` will be raised.
+
+ .. _Application Default Credentials: https://developers.google.com\
+ /identity/protocols/application-default-credentials
+ .. _Google Cloud SDK: https://cloud.google.com/sdk
+ .. _App Engine standard environment: https://cloud.google.com/appengine
+ .. _App Identity Service: https://cloud.google.com/appengine/docs/python\
+ /appidentity/
+ .. _Compute Engine: https://cloud.google.com/compute
+ .. _App Engine flexible environment: https://cloud.google.com\
+ /appengine/flexible
+ .. _Metadata Service: https://cloud.google.com/compute/docs\
+ /storing-retrieving-metadata
+
+ Example::
+
+ import google.auth
+
+ credentials, project_id = google.auth.default()
+
+ Args:
+ scopes (Sequence[str]): The list of scopes for the credentials. If
+ specified, the credentials will automatically be scoped if
+ necessary.
+ request (google.auth.transport.Request): An object used to make
+ HTTP requests. This is used to detect whether the application
+ is running on Compute Engine. If not specified, then it will
+ use the standard library http client to make requests.
+ quota_project_id (Optional[str]): The project ID used for
+ quota and billing.
+ Returns:
+ Tuple[~google.auth.credentials.Credentials, Optional[str]]:
+ the current environment's credentials and project ID. Project ID
+ may be None, which indicates that the Project ID could not be
+ ascertained from the environment.
+
+ Raises:
+ ~google.auth.exceptions.DefaultCredentialsError:
+ If no credentials were found, or if the credentials found were
+ invalid.
+ """
+ from google.auth._credentials_async import with_scopes_if_required
+
+ explicit_project_id = os.environ.get(
+ environment_vars.PROJECT, os.environ.get(environment_vars.LEGACY_PROJECT)
+ )
+
+ checkers = (
+ _get_explicit_environ_credentials,
+ _get_gcloud_sdk_credentials,
+ _get_gae_credentials,
+ lambda: _get_gce_credentials(request),
+ )
+
+ for checker in checkers:
+ credentials, project_id = checker()
+ if credentials is not None:
+ credentials = with_scopes_if_required(
+ credentials, scopes
+ ).with_quota_project(quota_project_id)
+ effective_project_id = explicit_project_id or project_id
+ if not effective_project_id:
+ _default._LOGGER.warning(
+ "No project ID could be determined. Consider running "
+ "`gcloud config set project` or setting the %s "
+ "environment variable",
+ environment_vars.PROJECT,
+ )
+ return credentials, effective_project_id
+
+ raise exceptions.DefaultCredentialsError(_default._HELP_MESSAGE)
diff --git a/venv/Lib/site-packages/google/auth/_helpers.py b/venv/Lib/site-packages/google/auth/_helpers.py
new file mode 100644
index 000000000..21c987a73
--- /dev/null
+++ b/venv/Lib/site-packages/google/auth/_helpers.py
@@ -0,0 +1,232 @@
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helper functions for commonly used utilities."""
+
+import base64
+import calendar
+import datetime
+
+import six
+from six.moves import urllib
+
+
+CLOCK_SKEW_SECS = 10 # 10 seconds
+CLOCK_SKEW = datetime.timedelta(seconds=CLOCK_SKEW_SECS)
+
+
+def copy_docstring(source_class):
+ """Decorator that copies a method's docstring from another class.
+
+ Args:
+ source_class (type): The class that has the documented method.
+
+ Returns:
+ Callable: A decorator that will copy the docstring of the same
+ named method in the source class to the decorated method.
+ """
+
+ def decorator(method):
+ """Decorator implementation.
+
+ Args:
+ method (Callable): The method to copy the docstring to.
+
+ Returns:
+ Callable: the same method passed in with an updated docstring.
+
+ Raises:
+ ValueError: if the method already has a docstring.
+ """
+ if method.__doc__:
+ raise ValueError("Method already has a docstring.")
+
+ source_method = getattr(source_class, method.__name__)
+ method.__doc__ = source_method.__doc__
+
+ return method
+
+ return decorator
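A usage sketch with two made-up classes:

.. code-block:: python

    class Base(object):
        def refresh(self, request):
            """Refreshes the token (documented once, here)."""

    class Derived(Base):
        @copy_docstring(Base)
        def refresh(self, request):
            return None  # must not define its own docstring

    assert Derived.refresh.__doc__ == Base.refresh.__doc__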
+
+
+def utcnow():
+ """Returns the current UTC datetime.
+
+ Returns:
+ datetime: The current time in UTC.
+ """
+ return datetime.datetime.utcnow()
+
+
+def datetime_to_secs(value):
+ """Convert a datetime object to the number of seconds since the UNIX epoch.
+
+ Args:
+ value (datetime): The datetime to convert.
+
+ Returns:
+ int: The number of seconds since the UNIX epoch.
+ """
+ return calendar.timegm(value.utctimetuple())
+
+
+def to_bytes(value, encoding="utf-8"):
+ """Converts a string value to bytes, if necessary.
+
+ Unfortunately, ``six.b`` is insufficient for this task in Python 2
+ because it does not modify ``unicode`` objects.
+
+ Args:
+ value (Union[str, bytes]): The value to be converted.
+ encoding (str): The encoding to use to convert unicode to bytes.
+ Defaults to "utf-8".
+
+ Returns:
+ bytes: The original value converted to bytes (if unicode) or as
+ passed in if it started out as bytes.
+
+ Raises:
+ ValueError: If the value could not be converted to bytes.
+ """
+ result = value.encode(encoding) if isinstance(value, six.text_type) else value
+ if isinstance(result, six.binary_type):
+ return result
+ else:
+ raise ValueError("{0!r} could not be converted to bytes".format(value))
+
+
+def from_bytes(value):
+ """Converts bytes to a string value, if necessary.
+
+ Args:
+ value (Union[str, bytes]): The value to be converted.
+
+ Returns:
+ str: The original value converted to unicode (if bytes) or as passed in
+ if it started out as unicode.
+
+ Raises:
+ ValueError: If the value could not be converted to unicode.
+ """
+ result = value.decode("utf-8") if isinstance(value, six.binary_type) else value
+ if isinstance(result, six.text_type):
+ return result
+ else:
+ raise ValueError("{0!r} could not be converted to unicode".format(value))
+
+
+def update_query(url, params, remove=None):
+ """Updates a URL's query parameters.
+
+ Replaces any current values if they are already present in the URL.
+
+ Args:
+ url (str): The URL to update.
+ params (Mapping[str, str]): A mapping of query parameter
+ keys to values.
+ remove (Sequence[str]): Parameters to remove from the query string.
+
+ Returns:
+ str: The URL with updated query parameters.
+
+ Examples:
+
+ >>> url = 'http://example.com?a=1'
+ >>> update_query(url, {'a': '2'})
+ http://example.com?a=2
+ >>> update_query(url, {'b': '3'})
+ http://example.com?a=1&b=3
+ >>> update_query(url, {'b': '3'}, remove=['a'])
+ http://example.com?b=3
+
+ """
+ if remove is None:
+ remove = []
+
+ # Split the URL into parts.
+ parts = urllib.parse.urlparse(url)
+ # Parse the query string.
+ query_params = urllib.parse.parse_qs(parts.query)
+ # Update the query parameters with the new parameters.
+ query_params.update(params)
+ # Remove any values specified in remove.
+ query_params = {
+ key: value for key, value in six.iteritems(query_params) if key not in remove
+ }
+ # Re-encode the query string.
+ new_query = urllib.parse.urlencode(query_params, doseq=True)
+ # Unsplit the url.
+ new_parts = parts._replace(query=new_query)
+ return urllib.parse.urlunparse(new_parts)
+
+
+def scopes_to_string(scopes):
+ """Converts scope value to a string suitable for sending to OAuth 2.0
+ authorization servers.
+
+ Args:
+ scopes (Sequence[str]): The sequence of scopes to convert.
+
+ Returns:
+ str: The scopes formatted as a single string.
+ """
+ return " ".join(scopes)
+
+
+def string_to_scopes(scopes):
+ """Converts stringifed scopes value to a list.
+
+ Args:
+ scopes (Union[Sequence, str]): The string of space-separated scopes
+ to convert.
+ Returns:
+ Sequence(str): The separated scopes.
+ """
+ if not scopes:
+ return []
+
+ return scopes.split(" ")
+
+
+def padded_urlsafe_b64decode(value):
+ """Decodes base64 strings lacking padding characters.
+
+ Google infrastructure tends to omit the base64 padding characters.
+
+ Args:
+ value (Union[str, bytes]): The encoded value.
+
+ Returns:
+ bytes: The decoded value
+ """
+ b64string = to_bytes(value)
+ padded = b64string + b"=" * (-len(b64string) % 4)
+ return base64.urlsafe_b64decode(padded)
+
+
+def unpadded_urlsafe_b64encode(value):
+ """Encodes base64 strings removing any padding characters.
+
+ `rfc 7515`_ defines Base64url to NOT include any padding
+ characters, but the stdlib doesn't do that by default.
+
+ .. _rfc 7515: https://tools.ietf.org/html/rfc7515#page-6
+
+ Args:
+ value (Union[str|bytes]): The bytes-like value to encode
+
+ Returns:
+ Union[str|bytes]: The encoded value
+ """
+ return base64.urlsafe_b64encode(value).rstrip(b"=")
diff --git a/venv/Lib/site-packages/google/auth/_jwt_async.py b/venv/Lib/site-packages/google/auth/_jwt_async.py
new file mode 100644
index 000000000..49e3026e5
--- /dev/null
+++ b/venv/Lib/site-packages/google/auth/_jwt_async.py
@@ -0,0 +1,168 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""JSON Web Tokens
+
+Provides support for creating (encoding) and verifying (decoding) JWTs,
+especially JWTs generated and consumed by Google infrastructure.
+
+See `rfc7519`_ for more details on JWTs.
+
+To encode a JWT use :func:`encode`::
+
+ from google.auth import crypt
+ from google.auth import jwt_async
+
+ signer = crypt.Signer(private_key)
+ payload = {'some': 'payload'}
+ encoded = jwt_async.encode(signer, payload)
+
+To decode a JWT and verify claims use :func:`decode`::
+
+ claims = jwt_async.decode(encoded, certs=public_certs)
+
+You can also skip verification::
+
+ claims = jwt_async.decode(encoded, verify=False)
+
+.. _rfc7519: https://tools.ietf.org/html/rfc7519
+
+
+NOTE: This async support is experimental and marked internal. This surface may
+change in minor releases.
+"""
+
+import google.auth
+from google.auth import jwt
+
+
+def encode(signer, payload, header=None, key_id=None):
+ """Make a signed JWT.
+
+ Args:
+ signer (google.auth.crypt.Signer): The signer used to sign the JWT.
+ payload (Mapping[str, str]): The JWT payload.
+ header (Mapping[str, str]): Additional JWT header payload.
+ key_id (str): The key id to add to the JWT header. If the
+ signer has a key id it will be used as the default. If this is
+ specified it will override the signer's key id.
+
+ Returns:
+ bytes: The encoded JWT.
+ """
+ return jwt.encode(signer, payload, header, key_id)
+
+
+def decode(token, certs=None, verify=True, audience=None):
+ """Decode and verify a JWT.
+
+ Args:
+ token (str): The encoded JWT.
+ certs (Union[str, bytes, Mapping[str, Union[str, bytes]]]): The
+ certificate used to validate the JWT signature. If bytes or string,
+ it must the the public key certificate in PEM format. If a mapping,
+ it must be a mapping of key IDs to public key certificates in PEM
+ format. The mapping must contain the same key ID that's specified
+ in the token's header.
+ verify (bool): Whether to perform signature and claim validation.
+ Verification is done by default.
+ audience (str): The audience claim, 'aud', that this JWT should
+ contain. If None then the JWT's 'aud' parameter is not verified.
+
+ Returns:
+ Mapping[str, str]: The deserialized JSON payload in the JWT.
+
+ Raises:
+ ValueError: if any verification checks failed.
+ """
+
+ return jwt.decode(token, certs, verify, audience)
+
+
+class Credentials(
+ jwt.Credentials,
+ google.auth._credentials_async.Signing,
+ google.auth._credentials_async.Credentials,
+):
+ """Credentials that use a JWT as the bearer token.
+
+ These credentials require an "audience" claim. This claim identifies the
+ intended recipient of the bearer token.
+
+ The constructor arguments determine the claims for the JWT that is
+ sent with requests. Usually, you'll construct these credentials with
+ one of the helper constructors as shown in the next section.
+
+ To create JWT credentials using a Google service account private key
+ JSON file::
+
+ audience = 'https://pubsub.googleapis.com/google.pubsub.v1.Publisher'
+ credentials = jwt_async.Credentials.from_service_account_file(
+ 'service-account.json',
+ audience=audience)
+
+ If you already have the service account file loaded and parsed::
+
+ service_account_info = json.load(open('service_account.json'))
+ credentials = jwt_async.Credentials.from_service_account_info(
+ service_account_info,
+ audience=audience)
+
+ Both helper methods pass on arguments to the constructor, so you can
+ specify the JWT claims::
+
+ credentials = jwt_async.Credentials.from_service_account_file(
+ 'service-account.json',
+ audience=audience,
+ additional_claims={'meta': 'data'})
+
+ You can also construct the credentials directly if you have a
+ :class:`~google.auth.crypt.Signer` instance::
+
+ credentials = jwt_async.Credentials(
+ signer,
+ issuer='your-issuer',
+ subject='your-subject',
+ audience=audience)
+
+ The claims are considered immutable. If you want to modify the claims,
+ you can easily create another instance using :meth:`with_claims`::
+
+ new_audience = (
+ 'https://pubsub.googleapis.com/google.pubsub.v1.Subscriber')
+ new_credentials = credentials.with_claims(audience=new_audience)
+ """
+
+
+class OnDemandCredentials(
+ jwt.OnDemandCredentials,
+ google.auth._credentials_async.Signing,
+ google.auth._credentials_async.Credentials,
+):
+ """On-demand JWT credentials.
+
+ Like :class:`Credentials`, this class uses a JWT as the bearer token for
+ authentication. However, this class does not require the audience at
+ construction time. Instead, it will generate a new token on-demand for
+ each request using the request URI as the audience. It caches tokens
+ so that multiple requests to the same URI do not incur the overhead
+ of generating a new token every time.
+
+ This behavior is especially useful for `gRPC`_ clients. A gRPC service may
+    have multiple audiences, and gRPC clients may not know all of the audiences
+ required for accessing a particular service. With these credentials,
+ no knowledge of the audiences is required ahead of time.
+
+ .. _grpc: http://www.grpc.io/
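+
+    As an illustrative sketch (assuming a local ``service-account.json``
+    key file)::
+
+        credentials = jwt_async.OnDemandCredentials.from_service_account_file(
+            'service-account.json')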
+ """
diff --git a/venv/Lib/site-packages/google/auth/_oauth2client.py b/venv/Lib/site-packages/google/auth/_oauth2client.py
new file mode 100644
index 000000000..95a9876f3
--- /dev/null
+++ b/venv/Lib/site-packages/google/auth/_oauth2client.py
@@ -0,0 +1,169 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for transitioning from oauth2client to google-auth.
+
+.. warning::
+ This module is private as it is intended to assist first-party downstream
+ clients with the transition from oauth2client to google-auth.
+"""
+
+from __future__ import absolute_import
+
+import six
+
+from google.auth import _helpers
+import google.auth.app_engine
+import google.auth.compute_engine
+import google.oauth2.credentials
+import google.oauth2.service_account
+
+try:
+ import oauth2client.client
+ import oauth2client.contrib.gce
+ import oauth2client.service_account
+except ImportError as caught_exc:
+ six.raise_from(ImportError("oauth2client is not installed."), caught_exc)
+
+try:
+ import oauth2client.contrib.appengine # pytype: disable=import-error
+
+ _HAS_APPENGINE = True
+except ImportError:
+ _HAS_APPENGINE = False
+
+
+_CONVERT_ERROR_TMPL = "Unable to convert {} to a google-auth credentials class."
+
+
+def _convert_oauth2_credentials(credentials):
+ """Converts to :class:`google.oauth2.credentials.Credentials`.
+
+ Args:
+ credentials (Union[oauth2client.client.OAuth2Credentials,
+ oauth2client.client.GoogleCredentials]): The credentials to
+ convert.
+
+ Returns:
+ google.oauth2.credentials.Credentials: The converted credentials.
+ """
+ new_credentials = google.oauth2.credentials.Credentials(
+ token=credentials.access_token,
+ refresh_token=credentials.refresh_token,
+ token_uri=credentials.token_uri,
+ client_id=credentials.client_id,
+ client_secret=credentials.client_secret,
+ scopes=credentials.scopes,
+ )
+
+ new_credentials._expires = credentials.token_expiry
+
+ return new_credentials
+
+
+def _convert_service_account_credentials(credentials):
+ """Converts to :class:`google.oauth2.service_account.Credentials`.
+
+ Args:
+ credentials (Union[
+ oauth2client.service_account.ServiceAccountCredentials,
+ oauth2client.service_account._JWTAccessCredentials]): The
+ credentials to convert.
+
+ Returns:
+ google.oauth2.service_account.Credentials: The converted credentials.
+ """
+ info = credentials.serialization_data.copy()
+ info["token_uri"] = credentials.token_uri
+ return google.oauth2.service_account.Credentials.from_service_account_info(info)
+
+
+def _convert_gce_app_assertion_credentials(credentials):
+ """Converts to :class:`google.auth.compute_engine.Credentials`.
+
+ Args:
+ credentials (oauth2client.contrib.gce.AppAssertionCredentials): The
+ credentials to convert.
+
+ Returns:
+ google.oauth2.service_account.Credentials: The converted credentials.
+ """
+ return google.auth.compute_engine.Credentials(
+ service_account_email=credentials.service_account_email
+ )
+
+
+def _convert_appengine_app_assertion_credentials(credentials):
+ """Converts to :class:`google.auth.app_engine.Credentials`.
+
+ Args:
+ credentials (oauth2client.contrib.app_engine.AppAssertionCredentials):
+ The credentials to convert.
+
+ Returns:
+ google.oauth2.service_account.Credentials: The converted credentials.
+ """
+ # pylint: disable=invalid-name
+ return google.auth.app_engine.Credentials(
+ scopes=_helpers.string_to_scopes(credentials.scope),
+ service_account_id=credentials.service_account_id,
+ )
+
+
+_CLASS_CONVERSION_MAP = {
+ oauth2client.client.OAuth2Credentials: _convert_oauth2_credentials,
+ oauth2client.client.GoogleCredentials: _convert_oauth2_credentials,
+ oauth2client.service_account.ServiceAccountCredentials: _convert_service_account_credentials,
+ oauth2client.service_account._JWTAccessCredentials: _convert_service_account_credentials,
+ oauth2client.contrib.gce.AppAssertionCredentials: _convert_gce_app_assertion_credentials,
+}
+
+if _HAS_APPENGINE:
+ _CLASS_CONVERSION_MAP[
+ oauth2client.contrib.appengine.AppAssertionCredentials
+ ] = _convert_appengine_app_assertion_credentials
+
+
+def convert(credentials):
+ """Convert oauth2client credentials to google-auth credentials.
+
+ This class converts:
+
+ - :class:`oauth2client.client.OAuth2Credentials` to
+ :class:`google.oauth2.credentials.Credentials`.
+ - :class:`oauth2client.client.GoogleCredentials` to
+ :class:`google.oauth2.credentials.Credentials`.
+ - :class:`oauth2client.service_account.ServiceAccountCredentials` to
+ :class:`google.oauth2.service_account.Credentials`.
+ - :class:`oauth2client.service_account._JWTAccessCredentials` to
+ :class:`google.oauth2.service_account.Credentials`.
+ - :class:`oauth2client.contrib.gce.AppAssertionCredentials` to
+ :class:`google.auth.compute_engine.Credentials`.
+ - :class:`oauth2client.contrib.appengine.AppAssertionCredentials` to
+ :class:`google.auth.app_engine.Credentials`.
+
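+    For example (an illustrative sketch; assumes oauth2client application
+    default credentials are available)::
+
+        old_credentials = oauth2client.client.GoogleCredentials.get_application_default()
+        new_credentials = convert(old_credentials)
+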
+ Returns:
+ google.auth.credentials.Credentials: The converted credentials.
+
+ Raises:
+ ValueError: If the credentials could not be converted.
+ """
+
+ credentials_class = type(credentials)
+
+ try:
+ return _CLASS_CONVERSION_MAP[credentials_class](credentials)
+ except KeyError as caught_exc:
+ new_exc = ValueError(_CONVERT_ERROR_TMPL.format(credentials_class))
+ six.raise_from(new_exc, caught_exc)
diff --git a/venv/Lib/site-packages/google/auth/_service_account_info.py b/venv/Lib/site-packages/google/auth/_service_account_info.py
new file mode 100644
index 000000000..3d340c78d
--- /dev/null
+++ b/venv/Lib/site-packages/google/auth/_service_account_info.py
@@ -0,0 +1,74 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helper functions for loading data from a Google service account file."""
+
+import io
+import json
+
+import six
+
+from google.auth import crypt
+
+
+def from_dict(data, require=None):
+ """Validates a dictionary containing Google service account data.
+
+ Creates and returns a :class:`google.auth.crypt.Signer` instance from the
+ private key specified in the data.
+
+ Args:
+ data (Mapping[str, str]): The service account data
+ require (Sequence[str]): List of keys required to be present in the
+ info.
+
+ Returns:
+ google.auth.crypt.Signer: A signer created from the private key in the
+ service account file.
+
+ Raises:
+ ValueError: if the data was in the wrong format, or if one of the
+ required keys is missing.
+ """
+ keys_needed = set(require if require is not None else [])
+
+ missing = keys_needed.difference(six.iterkeys(data))
+
+ if missing:
+ raise ValueError(
+ "Service account info was not in the expected format, missing "
+ "fields {}.".format(", ".join(missing))
+ )
+
+ # Create a signer.
+ signer = crypt.RSASigner.from_service_account_info(data)
+
+ return signer
+
+
+def from_filename(filename, require=None):
+ """Reads a Google service account JSON file and returns its parsed info.
+
+ Args:
+ filename (str): The path to the service account .json file.
+ require (Sequence[str]): List of keys required to be present in the
+ info.
+
+ Returns:
+ Tuple[ Mapping[str, str], google.auth.crypt.Signer ]: The verified
+ info and a signer instance.
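+
+    A minimal usage sketch ('service-account.json' is a placeholder path)::
+
+        info, signer = from_filename(
+            'service-account.json', require=['client_email'])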
+ """
+ with io.open(filename, "r", encoding="utf-8") as json_file:
+ data = json.load(json_file)
+ return data, from_dict(data, require=require)
diff --git a/venv/Lib/site-packages/google/auth/app_engine.py b/venv/Lib/site-packages/google/auth/app_engine.py
new file mode 100644
index 000000000..f1d21280e
--- /dev/null
+++ b/venv/Lib/site-packages/google/auth/app_engine.py
@@ -0,0 +1,170 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Google App Engine standard environment support.
+
+This module provides authentication and signing for applications running on App
+Engine in the standard environment using the `App Identity API`_.
+
+
+.. _App Identity API:
+ https://cloud.google.com/appengine/docs/python/appidentity/
+"""
+
+import datetime
+
+from google.auth import _helpers
+from google.auth import credentials
+from google.auth import crypt
+
+# pytype: disable=import-error
+try:
+ from google.appengine.api import app_identity
+except ImportError:
+ app_identity = None
+# pytype: enable=import-error
+
+
+class Signer(crypt.Signer):
+ """Signs messages using the App Engine App Identity service.
+
+ This can be used in place of :class:`google.auth.crypt.Signer` when
+ running in the App Engine standard environment.
+ """
+
+ @property
+ def key_id(self):
+ """Optional[str]: The key ID used to identify this private key.
+
+ .. warning::
+ This is always ``None``. The key ID used by App Engine can not
+ be reliably determined ahead of time.
+ """
+ return None
+
+ @_helpers.copy_docstring(crypt.Signer)
+ def sign(self, message):
+ message = _helpers.to_bytes(message)
+ _, signature = app_identity.sign_blob(message)
+ return signature
+
+
+def get_project_id():
+ """Gets the project ID for the current App Engine application.
+
+ Returns:
+ str: The project ID
+
+ Raises:
+ EnvironmentError: If the App Engine APIs are unavailable.
+ """
+ # pylint: disable=missing-raises-doc
+ # Pylint rightfully thinks EnvironmentError is OSError, but doesn't
+ # realize it's a valid alias.
+ if app_identity is None:
+ raise EnvironmentError("The App Engine APIs are not available.")
+ return app_identity.get_application_id()
+
+
+class Credentials(
+ credentials.Scoped, credentials.Signing, credentials.CredentialsWithQuotaProject
+):
+ """App Engine standard environment credentials.
+
+ These credentials use the App Engine App Identity API to obtain access
+ tokens.
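+
+    An illustrative sketch (this only works inside the App Engine standard
+    runtime, where the App Identity API is available)::
+
+        credentials = Credentials(scopes=['email'])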
+ """
+
+ def __init__(self, scopes=None, service_account_id=None, quota_project_id=None):
+ """
+ Args:
+ scopes (Sequence[str]): Scopes to request from the App Identity
+ API.
+ service_account_id (str): The service account ID passed into
+ :func:`google.appengine.api.app_identity.get_access_token`.
+ If not specified, the default application service account
+ ID will be used.
+ quota_project_id (Optional[str]): The project ID used for quota
+ and billing.
+
+ Raises:
+ EnvironmentError: If the App Engine APIs are unavailable.
+ """
+ # pylint: disable=missing-raises-doc
+ # Pylint rightfully thinks EnvironmentError is OSError, but doesn't
+ # realize it's a valid alias.
+ if app_identity is None:
+ raise EnvironmentError("The App Engine APIs are not available.")
+
+ super(Credentials, self).__init__()
+ self._scopes = scopes
+ self._service_account_id = service_account_id
+ self._signer = Signer()
+ self._quota_project_id = quota_project_id
+
+ @_helpers.copy_docstring(credentials.Credentials)
+ def refresh(self, request):
+ # pylint: disable=unused-argument
+ token, ttl = app_identity.get_access_token(
+ self._scopes, self._service_account_id
+ )
+ expiry = datetime.datetime.utcfromtimestamp(ttl)
+
+ self.token, self.expiry = token, expiry
+
+ @property
+ def service_account_email(self):
+ """The service account email."""
+ if self._service_account_id is None:
+ self._service_account_id = app_identity.get_service_account_name()
+ return self._service_account_id
+
+ @property
+ def requires_scopes(self):
+ """Checks if the credentials requires scopes.
+
+ Returns:
+ bool: True if there are no scopes set otherwise False.
+ """
+ return not self._scopes
+
+ @_helpers.copy_docstring(credentials.Scoped)
+ def with_scopes(self, scopes):
+ return self.__class__(
+ scopes=scopes,
+ service_account_id=self._service_account_id,
+ quota_project_id=self.quota_project_id,
+ )
+
+ @_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
+ def with_quota_project(self, quota_project_id):
+ return self.__class__(
+ scopes=self._scopes,
+ service_account_id=self._service_account_id,
+ quota_project_id=quota_project_id,
+ )
+
+ @_helpers.copy_docstring(credentials.Signing)
+ def sign_bytes(self, message):
+ return self._signer.sign(message)
+
+ @property
+ @_helpers.copy_docstring(credentials.Signing)
+ def signer_email(self):
+ return self.service_account_email
+
+ @property
+ @_helpers.copy_docstring(credentials.Signing)
+ def signer(self):
+ return self._signer
diff --git a/venv/Lib/site-packages/google/auth/compute_engine/__init__.py b/venv/Lib/site-packages/google/auth/compute_engine/__init__.py
new file mode 100644
index 000000000..5c84234e9
--- /dev/null
+++ b/venv/Lib/site-packages/google/auth/compute_engine/__init__.py
@@ -0,0 +1,21 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Google Compute Engine authentication."""
+
+from google.auth.compute_engine.credentials import Credentials
+from google.auth.compute_engine.credentials import IDTokenCredentials
+
+
+__all__ = ["Credentials", "IDTokenCredentials"]
diff --git a/venv/Lib/site-packages/google/auth/compute_engine/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/google/auth/compute_engine/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..19d6ce416
Binary files /dev/null and b/venv/Lib/site-packages/google/auth/compute_engine/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/auth/compute_engine/__pycache__/_metadata.cpython-36.pyc b/venv/Lib/site-packages/google/auth/compute_engine/__pycache__/_metadata.cpython-36.pyc
new file mode 100644
index 000000000..a994e5236
Binary files /dev/null and b/venv/Lib/site-packages/google/auth/compute_engine/__pycache__/_metadata.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/auth/compute_engine/__pycache__/credentials.cpython-36.pyc b/venv/Lib/site-packages/google/auth/compute_engine/__pycache__/credentials.cpython-36.pyc
new file mode 100644
index 000000000..c49944b52
Binary files /dev/null and b/venv/Lib/site-packages/google/auth/compute_engine/__pycache__/credentials.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/auth/compute_engine/_metadata.py b/venv/Lib/site-packages/google/auth/compute_engine/_metadata.py
new file mode 100644
index 000000000..fe821418e
--- /dev/null
+++ b/venv/Lib/site-packages/google/auth/compute_engine/_metadata.py
@@ -0,0 +1,257 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Provides helper methods for talking to the Compute Engine metadata server.
+
+See https://cloud.google.com/compute/docs/metadata for more details.
+"""
+
+import datetime
+import json
+import logging
+import os
+
+import six
+from six.moves import http_client
+from six.moves.urllib import parse as urlparse
+
+from google.auth import _helpers
+from google.auth import environment_vars
+from google.auth import exceptions
+
+_LOGGER = logging.getLogger(__name__)
+
+# The environment variable GCE_METADATA_HOST was originally named
+# GCE_METADATA_ROOT. For compatibility, the new variable is checked
+# first; if it is not set, the code falls back to the old variable.
+_GCE_METADATA_HOST = os.getenv(environment_vars.GCE_METADATA_HOST, None)
+if not _GCE_METADATA_HOST:
+ _GCE_METADATA_HOST = os.getenv(
+ environment_vars.GCE_METADATA_ROOT, "metadata.google.internal"
+ )
+_METADATA_ROOT = "http://{}/computeMetadata/v1/".format(_GCE_METADATA_HOST)
+
+# This is used to ping the metadata server, it avoids the cost of a DNS
+# lookup.
+_METADATA_IP_ROOT = "http://{}".format(
+ os.getenv(environment_vars.GCE_METADATA_IP, "169.254.169.254")
+)
+_METADATA_FLAVOR_HEADER = "metadata-flavor"
+_METADATA_FLAVOR_VALUE = "Google"
+_METADATA_HEADERS = {_METADATA_FLAVOR_HEADER: _METADATA_FLAVOR_VALUE}
+
+# Timeout in seconds to wait for the GCE metadata server when detecting the
+# GCE environment.
+try:
+ _METADATA_DEFAULT_TIMEOUT = int(os.getenv("GCE_METADATA_TIMEOUT", 3))
+except ValueError: # pragma: NO COVER
+ _METADATA_DEFAULT_TIMEOUT = 3
+
+
+def ping(request, timeout=_METADATA_DEFAULT_TIMEOUT, retry_count=3):
+ """Checks to see if the metadata server is available.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+ timeout (int): How long to wait for the metadata server to respond.
+        retry_count (int): How many times to attempt connecting to the
+            metadata server using the above timeout.
+
+ Returns:
+ bool: True if the metadata server is reachable, False otherwise.
+ """
+ # NOTE: The explicit ``timeout`` is a workaround. The underlying
+ # issue is that resolving an unknown host on some networks will take
+ # 20-30 seconds; making this timeout short fixes the issue, but
+ # could lead to false negatives in the event that we are on GCE, but
+ # the metadata resolution was particularly slow. The latter case is
+ # "unlikely".
+ retries = 0
+ while retries < retry_count:
+ try:
+ response = request(
+ url=_METADATA_IP_ROOT,
+ method="GET",
+ headers=_METADATA_HEADERS,
+ timeout=timeout,
+ )
+
+ metadata_flavor = response.headers.get(_METADATA_FLAVOR_HEADER)
+ return (
+ response.status == http_client.OK
+ and metadata_flavor == _METADATA_FLAVOR_VALUE
+ )
+
+ except exceptions.TransportError as e:
+ _LOGGER.warning(
+ "Compute Engine Metadata server unavailable on"
+ "attempt %s of %s. Reason: %s",
+ retries + 1,
+ retry_count,
+ e,
+ )
+ retries += 1
+
+ return False
+
+
+def get(request, path, root=_METADATA_ROOT, recursive=False, retry_count=5):
+ """Fetch a resource from the metadata server.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+ path (str): The resource to retrieve. For example,
+ ``'instance/service-accounts/default'``.
+ root (str): The full path to the metadata server root.
+ recursive (bool): Whether to do a recursive query of metadata. See
+ https://cloud.google.com/compute/docs/metadata#aggcontents for more
+ details.
+        retry_count (int): How many times to attempt connecting to the
+            metadata server.
+
+ Returns:
+ Union[Mapping, str]: If the metadata server returns JSON, a mapping of
+            the decoded JSON is returned. Otherwise, the response content is
+ returned as a string.
+
+ Raises:
+ google.auth.exceptions.TransportError: if an error occurred while
+ retrieving metadata.
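+
+    For example, fetching the default service account's email (an
+    illustrative sketch; ``request`` is any transport request callable)::
+
+        email = get(request, 'instance/service-accounts/default/email')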
+ """
+ base_url = urlparse.urljoin(root, path)
+ query_params = {}
+
+ if recursive:
+ query_params["recursive"] = "true"
+
+ url = _helpers.update_query(base_url, query_params)
+
+ retries = 0
+ while retries < retry_count:
+ try:
+ response = request(url=url, method="GET", headers=_METADATA_HEADERS)
+ break
+
+ except exceptions.TransportError as e:
+ _LOGGER.warning(
+ "Compute Engine Metadata server unavailable on"
+ "attempt %s of %s. Reason: %s",
+ retries + 1,
+ retry_count,
+ e,
+ )
+ retries += 1
+ else:
+ raise exceptions.TransportError(
+ "Failed to retrieve {} from the Google Compute Engine"
+ "metadata service. Compute Engine Metadata server unavailable".format(url)
+ )
+
+ if response.status == http_client.OK:
+ content = _helpers.from_bytes(response.data)
+ if response.headers["content-type"] == "application/json":
+ try:
+ return json.loads(content)
+ except ValueError as caught_exc:
+ new_exc = exceptions.TransportError(
+ "Received invalid JSON from the Google Compute Engine"
+ "metadata service: {:.20}".format(content)
+ )
+ six.raise_from(new_exc, caught_exc)
+ else:
+ return content
+ else:
+ raise exceptions.TransportError(
+ "Failed to retrieve {} from the Google Compute Engine"
+ "metadata service. Status: {} Response:\n{}".format(
+ url, response.status, response.data
+ ),
+ response,
+ )
+
+
+def get_project_id(request):
+ """Get the Google Cloud Project ID from the metadata server.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+
+ Returns:
+ str: The project ID
+
+ Raises:
+ google.auth.exceptions.TransportError: if an error occurred while
+ retrieving metadata.
+ """
+ return get(request, "project/project-id")
+
+
+def get_service_account_info(request, service_account="default"):
+ """Get information about a service account from the metadata server.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+ service_account (str): The string 'default' or a service account email
+            address. This determines the service account for which to acquire
+ information.
+
+ Returns:
+ Mapping: The service account's information, for example::
+
+ {
+ 'email': '...',
+ 'scopes': ['scope', ...],
+ 'aliases': ['default', '...']
+ }
+
+ Raises:
+ google.auth.exceptions.TransportError: if an error occurred while
+ retrieving metadata.
+ """
+ return get(
+ request,
+ "instance/service-accounts/{0}/".format(service_account),
+ recursive=True,
+ )
+
+
+def get_service_account_token(request, service_account="default"):
+ """Get the OAuth 2.0 access token for a service account.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+ service_account (str): The string 'default' or a service account email
+            address. This determines the service account for which to acquire
+ an access token.
+
+ Returns:
+        Tuple[str, datetime]: The access token and its expiration.
+
+ Raises:
+ google.auth.exceptions.TransportError: if an error occurred while
+ retrieving metadata.
+ """
+ token_json = get(
+ request, "instance/service-accounts/{0}/token".format(service_account)
+ )
+ token_expiry = _helpers.utcnow() + datetime.timedelta(
+ seconds=token_json["expires_in"]
+ )
+ return token_json["access_token"], token_expiry
diff --git a/venv/Lib/site-packages/google/auth/compute_engine/credentials.py b/venv/Lib/site-packages/google/auth/compute_engine/credentials.py
new file mode 100644
index 000000000..b7fca1832
--- /dev/null
+++ b/venv/Lib/site-packages/google/auth/compute_engine/credentials.py
@@ -0,0 +1,392 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Google Compute Engine credentials.
+
+This module provides authentication for applications running on Google Compute
+Engine using the Compute Engine metadata server.
+
+"""
+
+import datetime
+
+import six
+
+from google.auth import _helpers
+from google.auth import credentials
+from google.auth import exceptions
+from google.auth import iam
+from google.auth import jwt
+from google.auth.compute_engine import _metadata
+from google.oauth2 import _client
+
+
+class Credentials(credentials.ReadOnlyScoped, credentials.CredentialsWithQuotaProject):
+ """Compute Engine Credentials.
+
+ These credentials use the Google Compute Engine metadata server to obtain
+ OAuth 2.0 access tokens associated with the instance's service account.
+
+ For more information about Compute Engine authentication, including how
+ to configure scopes, see the `Compute Engine authentication
+ documentation`_.
+
+ .. note:: Compute Engine instances can be created with scopes and therefore
+ these credentials are considered to be 'scoped'. However, you can
+ not use :meth:`~google.auth.credentials.ScopedCredentials.with_scopes`
+ because it is not possible to change the scopes that the instance
+ has. Also note that
+ :meth:`~google.auth.credentials.ScopedCredentials.has_scopes` will not
+ work until the credentials have been refreshed.
+
+ .. _Compute Engine authentication documentation:
+ https://cloud.google.com/compute/docs/authentication#using
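+
+    A minimal usage sketch (only meaningful on a GCE instance; the transport
+    request is assumed to come from ``google.auth.transport.requests``)::
+
+        import google.auth.transport.requests
+
+        credentials = Credentials()
+        credentials.refresh(google.auth.transport.requests.Request())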
+ """
+
+ def __init__(self, service_account_email="default", quota_project_id=None):
+ """
+ Args:
+ service_account_email (str): The service account email to use, or
+ 'default'. A Compute Engine instance may have multiple service
+ accounts.
+ quota_project_id (Optional[str]): The project ID used for quota and
+ billing.
+ """
+ super(Credentials, self).__init__()
+ self._service_account_email = service_account_email
+ self._quota_project_id = quota_project_id
+
+ def _retrieve_info(self, request):
+ """Retrieve information about the service account.
+
+ Updates the scopes and retrieves the full service account email.
+
+ Args:
+ request (google.auth.transport.Request): The object used to make
+ HTTP requests.
+ """
+ info = _metadata.get_service_account_info(
+ request, service_account=self._service_account_email
+ )
+
+ self._service_account_email = info["email"]
+ self._scopes = info["scopes"]
+
+ def refresh(self, request):
+ """Refresh the access token and scopes.
+
+ Args:
+ request (google.auth.transport.Request): The object used to make
+ HTTP requests.
+
+ Raises:
+ google.auth.exceptions.RefreshError: If the Compute Engine metadata
+                service can't be reached or if the instance has no
+ credentials.
+ """
+ try:
+ self._retrieve_info(request)
+ self.token, self.expiry = _metadata.get_service_account_token(
+ request, service_account=self._service_account_email
+ )
+ except exceptions.TransportError as caught_exc:
+ new_exc = exceptions.RefreshError(caught_exc)
+ six.raise_from(new_exc, caught_exc)
+
+ @property
+ def service_account_email(self):
+ """The service account email.
+
+ .. note:: This is not guaranteed to be set until :meth:`refresh` has been
+ called.
+ """
+ return self._service_account_email
+
+ @property
+ def requires_scopes(self):
+ """False: Compute Engine credentials can not be scoped."""
+ return False
+
+ @_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
+ def with_quota_project(self, quota_project_id):
+ return self.__class__(
+ service_account_email=self._service_account_email,
+ quota_project_id=quota_project_id,
+ )
+
+
+_DEFAULT_TOKEN_LIFETIME_SECS = 3600 # 1 hour in seconds
+_DEFAULT_TOKEN_URI = "https://www.googleapis.com/oauth2/v4/token"
+
+
+class IDTokenCredentials(credentials.CredentialsWithQuotaProject, credentials.Signing):
+ """Open ID Connect ID Token-based service account credentials.
+
+    These credentials rely on the default service account of a GCE instance.
+
+    An ID token can be requested from the `GCE metadata server identity
+    endpoint`_, the IAM token endpoint, or another token endpoint you specify.
+    If the metadata server identity endpoint is not used, the GCE instance
+    must have been started with a service account that has access to the IAM
+    Cloud API.
+
+ .. _GCE metadata server identity endpoint:
+ https://cloud.google.com/compute/docs/instances/verifying-instance-identity
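+
+    An illustrative sketch using the metadata identity endpoint (GCE only;
+    the audience URL is a placeholder)::
+
+        import google.auth.transport.requests
+
+        request = google.auth.transport.requests.Request()
+        credentials = IDTokenCredentials(
+            request, 'https://service.example.com',
+            use_metadata_identity_endpoint=True)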
+ """
+
+ def __init__(
+ self,
+ request,
+ target_audience,
+ token_uri=None,
+ additional_claims=None,
+ service_account_email=None,
+ signer=None,
+ use_metadata_identity_endpoint=False,
+ quota_project_id=None,
+ ):
+ """
+ Args:
+ request (google.auth.transport.Request): The object used to make
+ HTTP requests.
+ target_audience (str): The intended audience for these credentials,
+ used when requesting the ID Token. The ID Token's ``aud`` claim
+ will be set to this string.
+ token_uri (str): The OAuth 2.0 Token URI.
+ additional_claims (Mapping[str, str]): Any additional claims for
+ the JWT assertion used in the authorization grant.
+ service_account_email (str): Optional explicit service account to
+ use to sign JWT tokens.
+ By default, this is the default GCE service account.
+ signer (google.auth.crypt.Signer): The signer used to sign JWTs.
+ In case the signer is specified, the request argument will be
+ ignored.
+ use_metadata_identity_endpoint (bool): Whether to use GCE metadata
+ identity endpoint. For backward compatibility the default value
+ is False. If set to True, ``token_uri``, ``additional_claims``,
+                ``service_account_email``, and ``signer`` must not be set;
+                otherwise a ValueError will be raised.
+ quota_project_id (Optional[str]): The project ID used for quota and
+ billing.
+
+ Raises:
+ ValueError:
+ If ``use_metadata_identity_endpoint`` is set to True, and one of
+ ``token_uri``, ``additional_claims``, ``service_account_email``,
+ ``signer`` arguments is set.
+ """
+ super(IDTokenCredentials, self).__init__()
+
+ self._quota_project_id = quota_project_id
+ self._use_metadata_identity_endpoint = use_metadata_identity_endpoint
+ self._target_audience = target_audience
+
+ if use_metadata_identity_endpoint:
+ if token_uri or additional_claims or service_account_email or signer:
+ raise ValueError(
+ "If use_metadata_identity_endpoint is set, token_uri, "
+ "additional_claims, service_account_email, signer arguments"
+ " must not be set"
+ )
+ self._token_uri = None
+ self._additional_claims = None
+ self._signer = None
+
+ if service_account_email is None:
+ sa_info = _metadata.get_service_account_info(request)
+ self._service_account_email = sa_info["email"]
+ else:
+ self._service_account_email = service_account_email
+
+ if not use_metadata_identity_endpoint:
+ if signer is None:
+ signer = iam.Signer(
+ request=request,
+ credentials=Credentials(),
+ service_account_email=self._service_account_email,
+ )
+ self._signer = signer
+ self._token_uri = token_uri or _DEFAULT_TOKEN_URI
+
+ if additional_claims is not None:
+ self._additional_claims = additional_claims
+ else:
+ self._additional_claims = {}
+
+ def with_target_audience(self, target_audience):
+ """Create a copy of these credentials with the specified target
+        audience.
+
+        Args:
+            target_audience (str): The intended audience for these credentials,
+                used when requesting the ID Token.
+
+        Returns:
+ google.auth.service_account.IDTokenCredentials: A new credentials
+ instance.
+ """
+ # since the signer is already instantiated,
+ # the request is not needed
+ if self._use_metadata_identity_endpoint:
+ return self.__class__(
+ None,
+ target_audience=target_audience,
+ use_metadata_identity_endpoint=True,
+ quota_project_id=self._quota_project_id,
+ )
+ else:
+ return self.__class__(
+ None,
+ service_account_email=self._service_account_email,
+ token_uri=self._token_uri,
+ target_audience=target_audience,
+ additional_claims=self._additional_claims.copy(),
+ signer=self.signer,
+ use_metadata_identity_endpoint=False,
+ quota_project_id=self._quota_project_id,
+ )
+
+ @_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
+ def with_quota_project(self, quota_project_id):
+
+ # since the signer is already instantiated,
+ # the request is not needed
+ if self._use_metadata_identity_endpoint:
+ return self.__class__(
+ None,
+ target_audience=self._target_audience,
+ use_metadata_identity_endpoint=True,
+ quota_project_id=quota_project_id,
+ )
+ else:
+ return self.__class__(
+ None,
+ service_account_email=self._service_account_email,
+ token_uri=self._token_uri,
+ target_audience=self._target_audience,
+ additional_claims=self._additional_claims.copy(),
+ signer=self.signer,
+ use_metadata_identity_endpoint=False,
+ quota_project_id=quota_project_id,
+ )
+
+ def _make_authorization_grant_assertion(self):
+ """Create the OAuth 2.0 assertion.
+ This assertion is used during the OAuth 2.0 grant to acquire an
+ ID token.
+ Returns:
+ bytes: The authorization grant assertion.
+ """
+ now = _helpers.utcnow()
+ lifetime = datetime.timedelta(seconds=_DEFAULT_TOKEN_LIFETIME_SECS)
+ expiry = now + lifetime
+
+ payload = {
+ "iat": _helpers.datetime_to_secs(now),
+ "exp": _helpers.datetime_to_secs(expiry),
+ # The issuer must be the service account email.
+ "iss": self.service_account_email,
+ # The audience must be the auth token endpoint's URI
+ "aud": self._token_uri,
+ # The target audience specifies which service the ID token is
+ # intended for.
+ "target_audience": self._target_audience,
+ }
+
+ payload.update(self._additional_claims)
+
+ token = jwt.encode(self._signer, payload)
+
+ return token
+
+ def _call_metadata_identity_endpoint(self, request):
+ """Request ID token from metadata identity endpoint.
+
+ Args:
+ request (google.auth.transport.Request): The object used to make
+ HTTP requests.
+
+ Returns:
+ Tuple[str, datetime.datetime]: The ID token and the expiry of the ID token.
+
+ Raises:
+ google.auth.exceptions.RefreshError: If the Compute Engine metadata
+ service can't be reached or if the instance has no credentials.
+ ValueError: If extracting expiry from the obtained ID token fails.
+ """
+ try:
+ id_token = _metadata.get(
+ request,
+ "instance/service-accounts/default/identity?audience={}&format=full".format(
+ self._target_audience
+ ),
+ )
+ except exceptions.TransportError as caught_exc:
+ new_exc = exceptions.RefreshError(caught_exc)
+ six.raise_from(new_exc, caught_exc)
+
+ _, payload, _, _ = jwt._unverified_decode(id_token)
+ return id_token, datetime.datetime.fromtimestamp(payload["exp"])
+
+ def refresh(self, request):
+ """Refreshes the ID token.
+
+ Args:
+ request (google.auth.transport.Request): The object used to make
+ HTTP requests.
+
+ Raises:
+ google.auth.exceptions.RefreshError: If the credentials could
+ not be refreshed.
+ ValueError: If extracting expiry from the obtained ID token fails.
+ """
+ if self._use_metadata_identity_endpoint:
+ self.token, self.expiry = self._call_metadata_identity_endpoint(request)
+ else:
+ assertion = self._make_authorization_grant_assertion()
+ access_token, expiry, _ = _client.id_token_jwt_grant(
+ request, self._token_uri, assertion
+ )
+ self.token = access_token
+ self.expiry = expiry
+
+ @property
+ @_helpers.copy_docstring(credentials.Signing)
+ def signer(self):
+ return self._signer
+
+ def sign_bytes(self, message):
+ """Signs the given message.
+
+ Args:
+ message (bytes): The message to sign.
+
+ Returns:
+ bytes: The message's cryptographic signature.
+
+ Raises:
+ ValueError:
+ Signer is not available if metadata identity endpoint is used.
+ """
+ if self._use_metadata_identity_endpoint:
+ raise ValueError(
+ "Signer is not available if metadata identity endpoint is used"
+ )
+ return self._signer.sign(message)
+
+ @property
+ def service_account_email(self):
+ """The service account email."""
+ return self._service_account_email
+
+ @property
+ def signer_email(self):
+ return self._service_account_email
diff --git a/venv/Lib/site-packages/google/auth/credentials.py b/venv/Lib/site-packages/google/auth/credentials.py
new file mode 100644
index 000000000..bc42546b9
--- /dev/null
+++ b/venv/Lib/site-packages/google/auth/credentials.py
@@ -0,0 +1,351 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""Interfaces for credentials."""
+
+import abc
+
+import six
+
+from google.auth import _helpers
+
+
+@six.add_metaclass(abc.ABCMeta)
+class Credentials(object):
+ """Base class for all credentials.
+
+ All credentials have a :attr:`token` that is used for authentication and
+ may also optionally set an :attr:`expiry` to indicate when the token will
+ no longer be valid.
+
+ Most credentials will be :attr:`invalid` until :meth:`refresh` is called.
+ Credentials can do this automatically before the first HTTP request in
+ :meth:`before_request`.
+
+ Although the token and expiration will change as the credentials are
+    refreshed (see :meth:`refresh`) and used, credentials should be considered
+ immutable. Various credentials will accept configuration such as private
+ keys, scopes, and other options. These options are not changeable after
+ construction. Some classes will provide mechanisms to copy the credentials
+    with modifications such as :meth:`Scoped.with_scopes`.
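+
+    An illustrative sketch of the typical lifecycle (``request`` stands in
+    for a transport request callable)::
+
+        if not credentials.valid:
+            credentials.refresh(request)
+        headers = {}
+        credentials.apply(headers)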
+ """
+
+ def __init__(self):
+ self.token = None
+ """str: The bearer token that can be used in HTTP headers to make
+ authenticated requests."""
+ self.expiry = None
+ """Optional[datetime]: When the token expires and is no longer valid.
+ If this is None, the token is assumed to never expire."""
+ self._quota_project_id = None
+ """Optional[str]: Project to use for quota and billing purposes."""
+
+ @property
+ def expired(self):
+ """Checks if the credentials are expired.
+
+ Note that credentials can be invalid but not expired because
+        credentials with :attr:`expiry` set to None are considered to never
+ expire.
+ """
+ if not self.expiry:
+ return False
+
+ # Remove 5 minutes from expiry to err on the side of reporting
+ # expiration early so that we avoid the 401-refresh-retry loop.
+ skewed_expiry = self.expiry - _helpers.CLOCK_SKEW
+ return _helpers.utcnow() >= skewed_expiry
+
+ @property
+ def valid(self):
+ """Checks the validity of the credentials.
+
+ This is True if the credentials have a :attr:`token` and the token
+ is not :attr:`expired`.
+ """
+ return self.token is not None and not self.expired
+
+ @property
+ def quota_project_id(self):
+ """Project to use for quota and billing purposes."""
+ return self._quota_project_id
+
+ @abc.abstractmethod
+ def refresh(self, request):
+ """Refreshes the access token.
+
+ Args:
+ request (google.auth.transport.Request): The object used to make
+ HTTP requests.
+
+ Raises:
+ google.auth.exceptions.RefreshError: If the credentials could
+ not be refreshed.
+ """
+ # pylint: disable=missing-raises-doc
+ # (pylint doesn't recognize that this is abstract)
+ raise NotImplementedError("Refresh must be implemented")
+
+ def apply(self, headers, token=None):
+ """Apply the token to the authentication header.
+
+ Args:
+ headers (Mapping): The HTTP request headers.
+ token (Optional[str]): If specified, overrides the current access
+ token.
+ """
+ headers["authorization"] = "Bearer {}".format(
+ _helpers.from_bytes(token or self.token)
+ )
+ if self.quota_project_id:
+ headers["x-goog-user-project"] = self.quota_project_id
+
+ def before_request(self, request, method, url, headers):
+ """Performs credential-specific before request logic.
+
+ Refreshes the credentials if necessary, then calls :meth:`apply` to
+ apply the token to the authentication header.
+
+ Args:
+ request (google.auth.transport.Request): The object used to make
+ HTTP requests.
+ method (str): The request's HTTP method or the RPC method being
+ invoked.
+ url (str): The request's URI or the RPC service's URI.
+ headers (Mapping): The request's headers.
+ """
+ # pylint: disable=unused-argument
+ # (Subclasses may use these arguments to ascertain information about
+ # the http request.)
+ if not self.valid:
+ self.refresh(request)
+ self.apply(headers)
+
+
+class CredentialsWithQuotaProject(Credentials):
+ """Abstract base for credentials supporting ``with_quota_project`` factory"""
+
+ def with_quota_project(self, quota_project_id):
+ """Returns a copy of these credentials with a modified quota project.
+
+ Args:
+ quota_project_id (str): The project to use for quota and
+ billing purposes
+
+ Returns:
+            google.auth.credentials.Credentials: A new credentials instance.
+ """
+ raise NotImplementedError("This credential does not support quota project.")
+
+
+class AnonymousCredentials(Credentials):
+ """Credentials that do not provide any authentication information.
+
+ These are useful in the case of services that support anonymous access or
+ local service emulators that do not use credentials.
+ """
+
+ @property
+ def expired(self):
+ """Returns `False`, anonymous credentials never expire."""
+ return False
+
+ @property
+ def valid(self):
+ """Returns `True`, anonymous credentials are always valid."""
+ return True
+
+ def refresh(self, request):
+ """Raises :class:`ValueError``, anonymous credentials cannot be
+ refreshed."""
+ raise ValueError("Anonymous credentials cannot be refreshed.")
+
+ def apply(self, headers, token=None):
+ """Anonymous credentials do nothing to the request.
+
+ The optional ``token`` argument is not supported.
+
+ Raises:
+ ValueError: If a token was specified.
+ """
+ if token is not None:
+ raise ValueError("Anonymous credentials don't support tokens.")
+
+ def before_request(self, request, method, url, headers):
+ """Anonymous credentials do nothing to the request."""
+
+
+@six.add_metaclass(abc.ABCMeta)
+class ReadOnlyScoped(object):
+ """Interface for credentials whose scopes can be queried.
+
+ OAuth 2.0-based credentials allow limiting access using scopes as described
+ in `RFC6749 Section 3.3`_.
+    If a credential class implements this interface, then the credentials
+    use scopes in their implementation.
+
+ Some credentials require scopes in order to obtain a token. You can check
+ if scoping is necessary with :attr:`requires_scopes`::
+
+ if credentials.requires_scopes:
+ # Scoping is required.
+ credentials = credentials.with_scopes(scopes=['one', 'two'])
+
+ Credentials that require scopes must either be constructed with scopes::
+
+ credentials = SomeScopedCredentials(scopes=['one', 'two'])
+
+ Or must copy an existing instance using :meth:`with_scopes`::
+
+ scoped_credentials = credentials.with_scopes(scopes=['one', 'two'])
+
+ Some credentials have scopes but do not allow or require scopes to be set,
+ these credentials can be used as-is.
+
+ .. _RFC6749 Section 3.3: https://tools.ietf.org/html/rfc6749#section-3.3
+ """
+
+ def __init__(self):
+ super(ReadOnlyScoped, self).__init__()
+ self._scopes = None
+
+ @property
+ def scopes(self):
+ """Sequence[str]: the credentials' current set of scopes."""
+ return self._scopes
+
+ @abc.abstractproperty
+ def requires_scopes(self):
+ """True if these credentials require scopes to obtain an access token.
+ """
+ return False
+
+ def has_scopes(self, scopes):
+ """Checks if the credentials have the given scopes.
+
+        .. warning:: This method is not guaranteed to be accurate if the
+ credentials are :attr:`~Credentials.invalid`.
+
+ Args:
+ scopes (Sequence[str]): The list of scopes to check.
+
+ Returns:
+ bool: True if the credentials have the given scopes.
+ """
+ return set(scopes).issubset(set(self._scopes or []))
+
+
+class Scoped(ReadOnlyScoped):
+ """Interface for credentials whose scopes can be replaced while copying.
+
+ OAuth 2.0-based credentials allow limiting access using scopes as described
+ in `RFC6749 Section 3.3`_.
+    If a credential class implements this interface, then the credentials
+    use scopes in their implementation.
+
+ Some credentials require scopes in order to obtain a token. You can check
+ if scoping is necessary with :attr:`requires_scopes`::
+
+ if credentials.requires_scopes:
+ # Scoping is required.
+            credentials = credentials.with_scopes(scopes=['one', 'two'])
+
+ Credentials that require scopes must either be constructed with scopes::
+
+ credentials = SomeScopedCredentials(scopes=['one', 'two'])
+
+ Or must copy an existing instance using :meth:`with_scopes`::
+
+ scoped_credentials = credentials.with_scopes(scopes=['one', 'two'])
+
+ Some credentials have scopes but do not allow or require scopes to be set,
+ these credentials can be used as-is.
+
+ .. _RFC6749 Section 3.3: https://tools.ietf.org/html/rfc6749#section-3.3
+ """
+
+ @abc.abstractmethod
+ def with_scopes(self, scopes):
+ """Create a copy of these credentials with the specified scopes.
+
+ Args:
+ scopes (Sequence[str]): The list of scopes to attach to the
+ current credentials.
+
+ Raises:
+ NotImplementedError: If the credentials' scopes can not be changed.
+ This can be avoided by checking :attr:`requires_scopes` before
+ calling this method.
+ """
+ raise NotImplementedError("This class does not require scoping.")
+
+
+def with_scopes_if_required(credentials, scopes):
+ """Creates a copy of the credentials with scopes if scoping is required.
+
+ This helper function is useful when you do not know (or care to know) the
+ specific type of credentials you are using (such as when you use
+ :func:`google.auth.default`). This function will call
+ :meth:`Scoped.with_scopes` if the credentials are scoped credentials and if
+ the credentials require scoping. Otherwise, it will return the credentials
+ as-is.
+
+ Args:
+ credentials (google.auth.credentials.Credentials): The credentials to
+ scope if necessary.
+ scopes (Sequence[str]): The list of scopes to use.
+
+ Returns:
+ google.auth.credentials.Credentials: Either a new set of scoped
+ credentials, or the passed in credentials instance if no scoping
+ was required.
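+
+    For example (illustrative; assumes ``credentials`` came from
+    :func:`google.auth.default`)::
+
+        scoped = with_scopes_if_required(
+            credentials, ['https://www.googleapis.com/auth/cloud-platform'])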
+ """
+ if isinstance(credentials, Scoped) and credentials.requires_scopes:
+ return credentials.with_scopes(scopes)
+ else:
+ return credentials
+
+
+@six.add_metaclass(abc.ABCMeta)
+class Signing(object):
+ """Interface for credentials that can cryptographically sign messages."""
+
+ @abc.abstractmethod
+ def sign_bytes(self, message):
+ """Signs the given message.
+
+ Args:
+ message (bytes): The message to sign.
+
+ Returns:
+ bytes: The message's cryptographic signature.
+ """
+ # pylint: disable=missing-raises-doc,redundant-returns-doc
+ # (pylint doesn't recognize that this is abstract)
+ raise NotImplementedError("Sign bytes must be implemented.")
+
+ @abc.abstractproperty
+ def signer_email(self):
+ """Optional[str]: An email address that identifies the signer."""
+ # pylint: disable=missing-raises-doc
+ # (pylint doesn't recognize that this is abstract)
+ raise NotImplementedError("Signer email must be implemented.")
+
+ @abc.abstractproperty
+ def signer(self):
+ """google.auth.crypt.Signer: The signer used to sign bytes."""
+ # pylint: disable=missing-raises-doc
+ # (pylint doesn't recognize that this is abstract)
+ raise NotImplementedError("Signer must be implemented.")
diff --git a/venv/Lib/site-packages/google/auth/crypt/__init__.py b/venv/Lib/site-packages/google/auth/crypt/__init__.py
new file mode 100644
index 000000000..15ac95068
--- /dev/null
+++ b/venv/Lib/site-packages/google/auth/crypt/__init__.py
@@ -0,0 +1,100 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Cryptography helpers for verifying and signing messages.
+
+The simplest way to verify signatures is using :func:`verify_signature`::
+
+ cert = open('certs.pem').read()
+ valid = crypt.verify_signature(message, signature, cert)
+
+If you're going to verify many messages with the same certificate, you can use
+:class:`RSAVerifier`::
+
+ cert = open('certs.pem').read()
+ verifier = crypt.RSAVerifier.from_string(cert)
+ valid = verifier.verify(message, signature)
+
+To sign messages use :class:`RSASigner` with a private key::
+
+ private_key = open('private_key.pem').read()
+ signer = crypt.RSASigner.from_string(private_key)
+ signature = signer.sign(message)
+
+The code above also works for :class:`ES256Signer` and :class:`ES256Verifier`.
+Note that these two classes are only available if your `cryptography` dependency
+version is at least 1.4.0.
+"""
+
+import six
+
+from google.auth.crypt import base
+from google.auth.crypt import rsa
+
+try:
+ from google.auth.crypt import es256
+except ImportError: # pragma: NO COVER
+ es256 = None
+
+if es256 is not None: # pragma: NO COVER
+ __all__ = [
+ "ES256Signer",
+ "ES256Verifier",
+ "RSASigner",
+ "RSAVerifier",
+ "Signer",
+ "Verifier",
+ ]
+else: # pragma: NO COVER
+ __all__ = ["RSASigner", "RSAVerifier", "Signer", "Verifier"]
+
+
+# Aliases to maintain the v1.0.0 interface, as the crypt module was split
+# into submodules.
+Signer = base.Signer
+Verifier = base.Verifier
+RSASigner = rsa.RSASigner
+RSAVerifier = rsa.RSAVerifier
+
+if es256 is not None: # pragma: NO COVER
+ ES256Signer = es256.ES256Signer
+ ES256Verifier = es256.ES256Verifier
+
+
+def verify_signature(message, signature, certs, verifier_cls=rsa.RSAVerifier):
+ """Verify an RSA or ECDSA cryptographic signature.
+
+    Checks that the provided ``signature`` was generated from ``message``
+    using the private key associated with ``certs``.
+
+ Args:
+ message (Union[str, bytes]): The plaintext message.
+ signature (Union[str, bytes]): The cryptographic signature to check.
+ certs (Union[Sequence, str, bytes]): The certificate or certificates
+ to use to check the signature.
+        verifier_cls (Optional[~google.auth.crypt.base.Verifier]): Which verifier
+ class to use for verification. This can be used to select different
+ algorithms, such as RSA or ECDSA. Default value is :class:`RSAVerifier`.
+
+ Returns:
+ bool: True if the signature is valid, otherwise False.
+ """
+ if isinstance(certs, (six.text_type, six.binary_type)):
+ certs = [certs]
+
+ for cert in certs:
+ verifier = verifier_cls.from_string(cert)
+ if verifier.verify(message, signature):
+ return True
+ return False
diff --git a/venv/Lib/site-packages/google/auth/crypt/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/google/auth/crypt/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..fcccc5d5c
Binary files /dev/null and b/venv/Lib/site-packages/google/auth/crypt/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/auth/crypt/__pycache__/_cryptography_rsa.cpython-36.pyc b/venv/Lib/site-packages/google/auth/crypt/__pycache__/_cryptography_rsa.cpython-36.pyc
new file mode 100644
index 000000000..ae9cf582a
Binary files /dev/null and b/venv/Lib/site-packages/google/auth/crypt/__pycache__/_cryptography_rsa.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/auth/crypt/__pycache__/_helpers.cpython-36.pyc b/venv/Lib/site-packages/google/auth/crypt/__pycache__/_helpers.cpython-36.pyc
new file mode 100644
index 000000000..9bae584e1
Binary files /dev/null and b/venv/Lib/site-packages/google/auth/crypt/__pycache__/_helpers.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/auth/crypt/__pycache__/_python_rsa.cpython-36.pyc b/venv/Lib/site-packages/google/auth/crypt/__pycache__/_python_rsa.cpython-36.pyc
new file mode 100644
index 000000000..b1f2625c3
Binary files /dev/null and b/venv/Lib/site-packages/google/auth/crypt/__pycache__/_python_rsa.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/auth/crypt/__pycache__/base.cpython-36.pyc b/venv/Lib/site-packages/google/auth/crypt/__pycache__/base.cpython-36.pyc
new file mode 100644
index 000000000..e2c44b2a7
Binary files /dev/null and b/venv/Lib/site-packages/google/auth/crypt/__pycache__/base.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/auth/crypt/__pycache__/es256.cpython-36.pyc b/venv/Lib/site-packages/google/auth/crypt/__pycache__/es256.cpython-36.pyc
new file mode 100644
index 000000000..f50a95b08
Binary files /dev/null and b/venv/Lib/site-packages/google/auth/crypt/__pycache__/es256.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/auth/crypt/__pycache__/rsa.cpython-36.pyc b/venv/Lib/site-packages/google/auth/crypt/__pycache__/rsa.cpython-36.pyc
new file mode 100644
index 000000000..5371e9934
Binary files /dev/null and b/venv/Lib/site-packages/google/auth/crypt/__pycache__/rsa.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/auth/crypt/_cryptography_rsa.py b/venv/Lib/site-packages/google/auth/crypt/_cryptography_rsa.py
new file mode 100644
index 000000000..e94bc681e
--- /dev/null
+++ b/venv/Lib/site-packages/google/auth/crypt/_cryptography_rsa.py
@@ -0,0 +1,149 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""RSA verifier and signer that use the ``cryptography`` library.
+
+This is a much faster implementation than the default (in
+``google.auth.crypt._python_rsa``), which depends on the pure-Python
+``rsa`` library.
+"""
+
+import cryptography.exceptions
+from cryptography.hazmat import backends
+from cryptography.hazmat.primitives import hashes
+from cryptography.hazmat.primitives import serialization
+from cryptography.hazmat.primitives.asymmetric import padding
+import cryptography.x509
+import pkg_resources
+
+from google.auth import _helpers
+from google.auth.crypt import base
+
+_IMPORT_ERROR_MSG = (
+ "cryptography>=1.4.0 is required to use cryptography-based RSA " "implementation."
+)
+
+try: # pragma: NO COVER
+ release = pkg_resources.get_distribution("cryptography").parsed_version
+ if release < pkg_resources.parse_version("1.4.0"):
+ raise ImportError(_IMPORT_ERROR_MSG)
+except pkg_resources.DistributionNotFound: # pragma: NO COVER
+ raise ImportError(_IMPORT_ERROR_MSG)
+
+
+_CERTIFICATE_MARKER = b"-----BEGIN CERTIFICATE-----"
+_BACKEND = backends.default_backend()
+_PADDING = padding.PKCS1v15()
+_SHA256 = hashes.SHA256()
+
+
+class RSAVerifier(base.Verifier):
+ """Verifies RSA cryptographic signatures using public keys.
+
+ Args:
+ public_key (
+ cryptography.hazmat.primitives.asymmetric.rsa.RSAPublicKey):
+ The public key used to verify signatures.
+ """
+
+ def __init__(self, public_key):
+ self._pubkey = public_key
+
+ @_helpers.copy_docstring(base.Verifier)
+ def verify(self, message, signature):
+ message = _helpers.to_bytes(message)
+ try:
+ self._pubkey.verify(signature, message, _PADDING, _SHA256)
+ return True
+ except (ValueError, cryptography.exceptions.InvalidSignature):
+ return False
+
+ @classmethod
+ def from_string(cls, public_key):
+ """Construct an Verifier instance from a public key or public
+ certificate string.
+
+ Args:
+ public_key (Union[str, bytes]): The public key in PEM format or the
+ x509 public key certificate.
+
+ Returns:
+ Verifier: The constructed verifier.
+
+ Raises:
+ ValueError: If the public key can't be parsed.
+ """
+ public_key_data = _helpers.to_bytes(public_key)
+
+ if _CERTIFICATE_MARKER in public_key_data:
+ cert = cryptography.x509.load_pem_x509_certificate(
+ public_key_data, _BACKEND
+ )
+ pubkey = cert.public_key()
+
+ else:
+ pubkey = serialization.load_pem_public_key(public_key_data, _BACKEND)
+
+ return cls(pubkey)
+
+
+class RSASigner(base.Signer, base.FromServiceAccountMixin):
+ """Signs messages with an RSA private key.
+
+ Args:
+ private_key (
+ cryptography.hazmat.primitives.asymmetric.rsa.RSAPrivateKey):
+ The private key to sign with.
+ key_id (str): Optional key ID used to identify this private key. This
+ can be useful to associate the private key with its associated
+ public key or certificate.
+ """
+
+ def __init__(self, private_key, key_id=None):
+ self._key = private_key
+ self._key_id = key_id
+
+ @property
+ @_helpers.copy_docstring(base.Signer)
+ def key_id(self):
+ return self._key_id
+
+ @_helpers.copy_docstring(base.Signer)
+ def sign(self, message):
+ message = _helpers.to_bytes(message)
+ return self._key.sign(message, _PADDING, _SHA256)
+
+ @classmethod
+ def from_string(cls, key, key_id=None):
+ """Construct a RSASigner from a private key in PEM format.
+
+ Args:
+ key (Union[bytes, str]): Private key in PEM format.
+ key_id (str): An optional key id used to identify the private key.
+
+ Returns:
+ google.auth.crypt._cryptography_rsa.RSASigner: The
+ constructed signer.
+
+ Raises:
+ ValueError: If ``key`` is not ``bytes`` or ``str`` (unicode).
+ UnicodeDecodeError: If ``key`` is ``bytes`` but cannot be decoded
+ into a UTF-8 ``str``.
+ ValueError: If ``cryptography`` "Could not deserialize key data."
+ """
+ key = _helpers.to_bytes(key)
+ private_key = serialization.load_pem_private_key(
+ key, password=None, backend=_BACKEND
+ )
+ return cls(private_key, key_id=key_id)
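+
+# Illustrative usage (not part of the library): a sign/verify round trip with
+# a PEM-encoded RSA key pair. ``private_pem`` and ``public_pem`` are
+# placeholders for key material you supply yourself.
+#
+#     signer = RSASigner.from_string(private_pem)
+#     signature = signer.sign(b"message")
+#     verifier = RSAVerifier.from_string(public_pem)
+#     assert verifier.verify(b"message", signature)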
diff --git a/venv/Lib/site-packages/google/auth/crypt/_helpers.py b/venv/Lib/site-packages/google/auth/crypt/_helpers.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/venv/Lib/site-packages/google/auth/crypt/_python_rsa.py b/venv/Lib/site-packages/google/auth/crypt/_python_rsa.py
new file mode 100644
index 000000000..e288c5016
--- /dev/null
+++ b/venv/Lib/site-packages/google/auth/crypt/_python_rsa.py
@@ -0,0 +1,173 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Pure-Python RSA cryptography implementation.
+
+Uses the ``rsa``, ``pyasn1`` and ``pyasn1_modules`` packages
+to parse PEM files storing PKCS#1 or PKCS#8 keys as well as
+certificates. There is no support for p12 files.
+"""
+
+from __future__ import absolute_import
+
+from pyasn1.codec.der import decoder
+from pyasn1_modules import pem
+from pyasn1_modules.rfc2459 import Certificate
+from pyasn1_modules.rfc5208 import PrivateKeyInfo
+import rsa
+import six
+
+from google.auth import _helpers
+from google.auth.crypt import base
+
+_POW2 = (128, 64, 32, 16, 8, 4, 2, 1)
+_CERTIFICATE_MARKER = b"-----BEGIN CERTIFICATE-----"
+_PKCS1_MARKER = ("-----BEGIN RSA PRIVATE KEY-----", "-----END RSA PRIVATE KEY-----")
+_PKCS8_MARKER = ("-----BEGIN PRIVATE KEY-----", "-----END PRIVATE KEY-----")
+_PKCS8_SPEC = PrivateKeyInfo()
+
+
+def _bit_list_to_bytes(bit_list):
+ """Converts an iterable of 1s and 0s to bytes.
+
+ Combines the list 8 at a time, treating each group of 8 bits
+ as a single byte.
+
+ Args:
+ bit_list (Sequence): Sequence of 1s and 0s.
+
+ Returns:
+ bytes: The decoded bytes.
+ """
+ num_bits = len(bit_list)
+ byte_vals = bytearray()
+ for start in six.moves.xrange(0, num_bits, 8):
+ curr_bits = bit_list[start : start + 8]
+ char_val = sum(val * digit for val, digit in six.moves.zip(_POW2, curr_bits))
+ byte_vals.append(char_val)
+ return bytes(byte_vals)
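+
+# For example, the eight bits 0,1,0,0,0,0,0,1 combine to 64 + 1 = 65 (0x41),
+# so _bit_list_to_bytes([0, 1, 0, 0, 0, 0, 0, 1]) returns b"A".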
+
+
+class RSAVerifier(base.Verifier):
+ """Verifies RSA cryptographic signatures using public keys.
+
+ Args:
+ public_key (rsa.key.PublicKey): The public key used to verify
+ signatures.
+ """
+
+ def __init__(self, public_key):
+ self._pubkey = public_key
+
+ @_helpers.copy_docstring(base.Verifier)
+ def verify(self, message, signature):
+ message = _helpers.to_bytes(message)
+ try:
+ return rsa.pkcs1.verify(message, signature, self._pubkey)
+ except (ValueError, rsa.pkcs1.VerificationError):
+ return False
+
+ @classmethod
+ def from_string(cls, public_key):
+ """Construct an Verifier instance from a public key or public
+ certificate string.
+
+ Args:
+ public_key (Union[str, bytes]): The public key in PEM format or the
+ x509 public key certificate.
+
+ Returns:
+ Verifier: The constructed verifier.
+
+ Raises:
+ ValueError: If the public_key can't be parsed.
+ """
+ public_key = _helpers.to_bytes(public_key)
+ is_x509_cert = _CERTIFICATE_MARKER in public_key
+
+ # If this is a certificate, extract the public key info.
+ if is_x509_cert:
+ der = rsa.pem.load_pem(public_key, "CERTIFICATE")
+ asn1_cert, remaining = decoder.decode(der, asn1Spec=Certificate())
+ if remaining != b"":
+ raise ValueError("Unused bytes", remaining)
+
+ cert_info = asn1_cert["tbsCertificate"]["subjectPublicKeyInfo"]
+ key_bytes = _bit_list_to_bytes(cert_info["subjectPublicKey"])
+ pubkey = rsa.PublicKey.load_pkcs1(key_bytes, "DER")
+ else:
+ pubkey = rsa.PublicKey.load_pkcs1(public_key, "PEM")
+ return cls(pubkey)
+
+
+class RSASigner(base.Signer, base.FromServiceAccountMixin):
+ """Signs messages with an RSA private key.
+
+ Args:
+ private_key (rsa.key.PrivateKey): The private key to sign with.
+ key_id (str): Optional key ID used to identify this private key. This
+ can be useful to associate the private key with its associated
+ public key or certificate.
+ """
+
+ def __init__(self, private_key, key_id=None):
+ self._key = private_key
+ self._key_id = key_id
+
+ @property
+ @_helpers.copy_docstring(base.Signer)
+ def key_id(self):
+ return self._key_id
+
+ @_helpers.copy_docstring(base.Signer)
+ def sign(self, message):
+ message = _helpers.to_bytes(message)
+ return rsa.pkcs1.sign(message, self._key, "SHA-256")
+
+ @classmethod
+ def from_string(cls, key, key_id=None):
+ """Construct an Signer instance from a private key in PEM format.
+
+ Args:
+ key (str): Private key in PEM format.
+ key_id (str): An optional key id used to identify the private key.
+
+ Returns:
+ google.auth.crypt.Signer: The constructed signer.
+
+ Raises:
+ ValueError: If the key cannot be parsed as PKCS#1 or PKCS#8 in
+ PEM format.
+ """
+ key = _helpers.from_bytes(key) # PEM expects str in Python 3
+ marker_id, key_bytes = pem.readPemBlocksFromFile(
+ six.StringIO(key), _PKCS1_MARKER, _PKCS8_MARKER
+ )
+
+ # Key is in pkcs1 format.
+ if marker_id == 0:
+ private_key = rsa.key.PrivateKey.load_pkcs1(key_bytes, format="DER")
+ # Key is in pkcs8.
+ elif marker_id == 1:
+ key_info, remaining = decoder.decode(key_bytes, asn1Spec=_PKCS8_SPEC)
+ if remaining != b"":
+ raise ValueError("Unused bytes", remaining)
+ private_key_info = key_info.getComponentByName("privateKey")
+ private_key = rsa.key.PrivateKey.load_pkcs1(
+ private_key_info.asOctets(), format="DER"
+ )
+ else:
+ raise ValueError("No key could be detected.")
+
+ return cls(private_key, key_id=key_id)
diff --git a/venv/Lib/site-packages/google/auth/crypt/base.py b/venv/Lib/site-packages/google/auth/crypt/base.py
new file mode 100644
index 000000000..c98d5bf64
--- /dev/null
+++ b/venv/Lib/site-packages/google/auth/crypt/base.py
@@ -0,0 +1,131 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Base classes for cryptographic signers and verifiers."""
+
+import abc
+import io
+import json
+
+import six
+
+
+_JSON_FILE_PRIVATE_KEY = "private_key"
+_JSON_FILE_PRIVATE_KEY_ID = "private_key_id"
+
+
+@six.add_metaclass(abc.ABCMeta)
+class Verifier(object):
+ """Abstract base class for crytographic signature verifiers."""
+
+ @abc.abstractmethod
+ def verify(self, message, signature):
+ """Verifies a message against a cryptographic signature.
+
+ Args:
+ message (Union[str, bytes]): The message to verify.
+ signature (Union[str, bytes]): The cryptography signature to check.
+
+ Returns:
+ bool: True if message was signed by the private key associated
+ with the public key that this object was constructed with.
+ """
+ # pylint: disable=missing-raises-doc,redundant-returns-doc
+ # (pylint doesn't recognize that this is abstract)
+ raise NotImplementedError("Verify must be implemented")
+
+
+@six.add_metaclass(abc.ABCMeta)
+class Signer(object):
+ """Abstract base class for cryptographic signers."""
+
+ @abc.abstractproperty
+ def key_id(self):
+ """Optional[str]: The key ID used to identify this private key."""
+ raise NotImplementedError("Key id must be implemented")
+
+ @abc.abstractmethod
+ def sign(self, message):
+ """Signs a message.
+
+ Args:
+ message (Union[str, bytes]): The message to be signed.
+
+ Returns:
+ bytes: The signature of the message.
+ """
+ # pylint: disable=missing-raises-doc,redundant-returns-doc
+ # (pylint doesn't recognize that this is abstract)
+ raise NotImplementedError("Sign must be implemented")
+
+
+@six.add_metaclass(abc.ABCMeta)
+class FromServiceAccountMixin(object):
+ """Mix-in to enable factory constructors for a Signer."""
+
+ @abc.abstractmethod
+ def from_string(cls, key, key_id=None):
+ """Construct an Signer instance from a private key string.
+
+ Args:
+ key (str): Private key as a string.
+ key_id (str): An optional key id used to identify the private key.
+
+ Returns:
+ google.auth.crypt.Signer: The constructed signer.
+
+ Raises:
+ ValueError: If the key cannot be parsed.
+ """
+ raise NotImplementedError("from_string must be implemented")
+
+ @classmethod
+ def from_service_account_info(cls, info):
+ """Creates a Signer instance instance from a dictionary containing
+ service account info in Google format.
+
+ Args:
+ info (Mapping[str, str]): The service account info in Google
+ format.
+
+ Returns:
+ google.auth.crypt.Signer: The constructed signer.
+
+ Raises:
+ ValueError: If the info is not in the expected format.
+ """
+ if _JSON_FILE_PRIVATE_KEY not in info:
+ raise ValueError(
+ "The private_key field was not found in the service account " "info."
+ )
+
+ return cls.from_string(
+ info[_JSON_FILE_PRIVATE_KEY], info.get(_JSON_FILE_PRIVATE_KEY_ID)
+ )
+
+ @classmethod
+ def from_service_account_file(cls, filename):
+ """Creates a Signer instance from a service account .json file
+ in Google format.
+
+ Args:
+ filename (str): The path to the service account .json file.
+
+ Returns:
+ google.auth.crypt.Signer: The constructed signer.
+ """
+ with io.open(filename, "r", encoding="utf-8") as json_file:
+ data = json.load(json_file)
+
+ return cls.from_service_account_info(data)
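+
+# Illustrative usage (not part of the library): any Signer that mixes in
+# FromServiceAccountMixin can be built straight from a service account JSON
+# file; the path below is a placeholder.
+#
+#     from google.auth.crypt import rsa
+#     signer = rsa.RSASigner.from_service_account_file("service_account.json")
+#     signature = signer.sign(b"message")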
diff --git a/venv/Lib/site-packages/google/auth/crypt/es256.py b/venv/Lib/site-packages/google/auth/crypt/es256.py
new file mode 100644
index 000000000..6955efcc5
--- /dev/null
+++ b/venv/Lib/site-packages/google/auth/crypt/es256.py
@@ -0,0 +1,160 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""ECDSA (ES256) verifier and signer that use the ``cryptography`` library.
+"""
+
+from cryptography import utils
+import cryptography.exceptions
+from cryptography.hazmat import backends
+from cryptography.hazmat.primitives import hashes
+from cryptography.hazmat.primitives import serialization
+from cryptography.hazmat.primitives.asymmetric import ec
+from cryptography.hazmat.primitives.asymmetric import padding
+from cryptography.hazmat.primitives.asymmetric.utils import decode_dss_signature
+from cryptography.hazmat.primitives.asymmetric.utils import encode_dss_signature
+import cryptography.x509
+import pkg_resources
+
+from google.auth import _helpers
+from google.auth.crypt import base
+
+_IMPORT_ERROR_MSG = (
+ "cryptography>=1.4.0 is required to use cryptography-based ECDSA " "algorithms"
+)
+
+try: # pragma: NO COVER
+ release = pkg_resources.get_distribution("cryptography").parsed_version
+ if release < pkg_resources.parse_version("1.4.0"):
+ raise ImportError(_IMPORT_ERROR_MSG)
+except pkg_resources.DistributionNotFound: # pragma: NO COVER
+ raise ImportError(_IMPORT_ERROR_MSG)
+
+
+_CERTIFICATE_MARKER = b"-----BEGIN CERTIFICATE-----"
+_BACKEND = backends.default_backend()
+_PADDING = padding.PKCS1v15()
+
+
+class ES256Verifier(base.Verifier):
+ """Verifies ECDSA cryptographic signatures using public keys.
+
+ Args:
+ public_key (
+ cryptography.hazmat.primitives.asymmetric.ec.ECDSAPublicKey):
+ The public key used to verify signatures.
+ """
+
+ def __init__(self, public_key):
+ self._pubkey = public_key
+
+ @_helpers.copy_docstring(base.Verifier)
+ def verify(self, message, signature):
+ # First convert (r||s) raw signature to ASN1 encoded signature.
+ sig_bytes = _helpers.to_bytes(signature)
+ if len(sig_bytes) != 64:
+ return False
+ r = utils.int_from_bytes(sig_bytes[:32], byteorder="big")
+ s = utils.int_from_bytes(sig_bytes[32:], byteorder="big")
+ asn1_sig = encode_dss_signature(r, s)
+
+ message = _helpers.to_bytes(message)
+ try:
+ self._pubkey.verify(asn1_sig, message, ec.ECDSA(hashes.SHA256()))
+ return True
+ except (ValueError, cryptography.exceptions.InvalidSignature):
+ return False
+
+ @classmethod
+ def from_string(cls, public_key):
+ """Construct an Verifier instance from a public key or public
+ certificate string.
+
+ Args:
+ public_key (Union[str, bytes]): The public key in PEM format or the
+ x509 public key certificate.
+
+ Returns:
+ Verifier: The constructed verifier.
+
+ Raises:
+ ValueError: If the public key can't be parsed.
+ """
+ public_key_data = _helpers.to_bytes(public_key)
+
+ if _CERTIFICATE_MARKER in public_key_data:
+ cert = cryptography.x509.load_pem_x509_certificate(
+ public_key_data, _BACKEND
+ )
+ pubkey = cert.public_key()
+
+ else:
+ pubkey = serialization.load_pem_public_key(public_key_data, _BACKEND)
+
+ return cls(pubkey)
+
+
+class ES256Signer(base.Signer, base.FromServiceAccountMixin):
+ """Signs messages with an ECDSA private key.
+
+ Args:
+ private_key (
+ cryptography.hazmat.primitives.asymmetric.ec.ECDSAPrivateKey):
+ The private key to sign with.
+ key_id (str): Optional key ID used to identify this private key. This
+ can be useful to associate the private key with its associated
+ public key or certificate.
+ """
+
+ def __init__(self, private_key, key_id=None):
+ self._key = private_key
+ self._key_id = key_id
+
+ @property
+ @_helpers.copy_docstring(base.Signer)
+ def key_id(self):
+ return self._key_id
+
+ @_helpers.copy_docstring(base.Signer)
+ def sign(self, message):
+ message = _helpers.to_bytes(message)
+ asn1_signature = self._key.sign(message, ec.ECDSA(hashes.SHA256()))
+
+ # Convert ASN1 encoded signature to (r||s) raw signature.
+ (r, s) = decode_dss_signature(asn1_signature)
+ return utils.int_to_bytes(r, 32) + utils.int_to_bytes(s, 32)
+
+ @classmethod
+ def from_string(cls, key, key_id=None):
+ """Construct a RSASigner from a private key in PEM format.
+
+ Args:
+ key (Union[bytes, str]): Private key in PEM format.
+ key_id (str): An optional key id used to identify the private key.
+
+ Returns:
+            google.auth.crypt.es256.ES256Signer: The
+                constructed signer.
+
+ Raises:
+ ValueError: If ``key`` is not ``bytes`` or ``str`` (unicode).
+ UnicodeDecodeError: If ``key`` is ``bytes`` but cannot be decoded
+ into a UTF-8 ``str``.
+ ValueError: If ``cryptography`` "Could not deserialize key data."
+ """
+ key = _helpers.to_bytes(key)
+ private_key = serialization.load_pem_private_key(
+ key, password=None, backend=_BACKEND
+ )
+ return cls(private_key, key_id=key_id)
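+
+# Illustrative check (not part of the library): ES256Signer.sign() returns the
+# raw 64-byte (r||s) signature that JWS expects, rather than the ASN.1 DER
+# encoding produced by ``cryptography`` itself. ``private_pem`` is a
+# placeholder for your own P-256 key.
+#
+#     signer = ES256Signer.from_string(private_pem)
+#     signature = signer.sign(b"message")
+#     assert len(signature) == 64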
diff --git a/venv/Lib/site-packages/google/auth/crypt/rsa.py b/venv/Lib/site-packages/google/auth/crypt/rsa.py
new file mode 100644
index 000000000..8b2d64c10
--- /dev/null
+++ b/venv/Lib/site-packages/google/auth/crypt/rsa.py
@@ -0,0 +1,30 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""RSA cryptography signer and verifier."""
+
+
+try:
+    # Prefer the cryptography-based RSA implementation.
+ from google.auth.crypt import _cryptography_rsa
+
+ RSASigner = _cryptography_rsa.RSASigner
+ RSAVerifier = _cryptography_rsa.RSAVerifier
+except ImportError: # pragma: NO COVER
+ # Fallback to pure-python RSA implementation if cryptography is
+ # unavailable.
+ from google.auth.crypt import _python_rsa
+
+ RSASigner = _python_rsa.RSASigner
+ RSAVerifier = _python_rsa.RSAVerifier
diff --git a/venv/Lib/site-packages/google/auth/environment_vars.py b/venv/Lib/site-packages/google/auth/environment_vars.py
new file mode 100644
index 000000000..46a892664
--- /dev/null
+++ b/venv/Lib/site-packages/google/auth/environment_vars.py
@@ -0,0 +1,61 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Environment variables used by :mod:`google.auth`."""
+
+
+PROJECT = "GOOGLE_CLOUD_PROJECT"
+"""Environment variable defining default project.
+
+This is used by :func:`google.auth.default` to explicitly set a project ID. This
+environment variable is also used by the Google Cloud Python Library.
+"""
+
+LEGACY_PROJECT = "GCLOUD_PROJECT"
+"""Previously used environment variable defining the default project.
+
+This environment variable is used instead of the current one in some
+situations (such as Google App Engine).
+"""
+
+CREDENTIALS = "GOOGLE_APPLICATION_CREDENTIALS"
+"""Environment variable defining the location of Google application default
+credentials."""
+
+# The environment variable name which can replace ~/.config if set.
+CLOUD_SDK_CONFIG_DIR = "CLOUDSDK_CONFIG"
+"""Environment variable defines the location of Google Cloud SDK's config
+files."""
+
+# These two variables allow for customization of the addresses used when
+# contacting the GCE metadata service.
+GCE_METADATA_HOST = "GCE_METADATA_HOST"
+GCE_METADATA_ROOT = "GCE_METADATA_ROOT"
+"""Environment variable providing an alternate hostname or host:port to be
+used for GCE metadata requests.
+
+This environment variable was originally named GCE_METADATA_ROOT. The new
+name is checked first; if it has no value, the system falls back to the
+old variable.
+"""
+
+GCE_METADATA_IP = "GCE_METADATA_IP"
+"""Environment variable providing an alternate ip:port to be used for ip-only
+GCE metadata requests."""
+
+GOOGLE_API_USE_CLIENT_CERTIFICATE = "GOOGLE_API_USE_CLIENT_CERTIFICATE"
+"""Environment variable controlling whether to use client certificate or not.
+
+The default value is false. Users have to explicitly set this value to true
+in order to use client certificate to establish a mutual TLS channel."""
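+
+# Illustrative usage (not part of the library): these names are meant to be
+# read and set through os.environ, e.g. to point application default
+# credentials at a key file (the path is a placeholder).
+#
+#     import os
+#     os.environ[CREDENTIALS] = "/path/to/service_account.json"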
diff --git a/venv/Lib/site-packages/google/auth/exceptions.py b/venv/Lib/site-packages/google/auth/exceptions.py
new file mode 100644
index 000000000..da06d8696
--- /dev/null
+++ b/venv/Lib/site-packages/google/auth/exceptions.py
@@ -0,0 +1,45 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Exceptions used in the google.auth package."""
+
+
+class GoogleAuthError(Exception):
+ """Base class for all google.auth errors."""
+
+
+class TransportError(GoogleAuthError):
+ """Used to indicate an error occurred during an HTTP request."""
+
+
+class RefreshError(GoogleAuthError):
+ """Used to indicate that an refreshing the credentials' access token
+ failed."""
+
+
+class UserAccessTokenError(GoogleAuthError):
+ """Used to indicate ``gcloud auth print-access-token`` command failed."""
+
+
+class DefaultCredentialsError(GoogleAuthError):
+ """Used to indicate that acquiring default credentials failed."""
+
+
+class MutualTLSChannelError(GoogleAuthError):
+ """Used to indicate that mutual TLS channel creation is failed, or mutual
+ TLS channel credentials is missing or invalid."""
+
+
+class ClientCertError(GoogleAuthError):
+ """Used to indicate that client certificate is missing or invalid."""
diff --git a/venv/Lib/site-packages/google/auth/iam.py b/venv/Lib/site-packages/google/auth/iam.py
new file mode 100644
index 000000000..5e88a0435
--- /dev/null
+++ b/venv/Lib/site-packages/google/auth/iam.py
@@ -0,0 +1,100 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Tools for using the Google `Cloud Identity and Access Management (IAM)
+API`_'s auth-related functionality.
+
+.. _Cloud Identity and Access Management (IAM) API:
+ https://cloud.google.com/iam/docs/
+"""
+
+import base64
+import json
+
+from six.moves import http_client
+
+from google.auth import _helpers
+from google.auth import crypt
+from google.auth import exceptions
+
+_IAM_API_ROOT_URI = "https://iamcredentials.googleapis.com/v1"
+_SIGN_BLOB_URI = _IAM_API_ROOT_URI + "/projects/-/serviceAccounts/{}:signBlob?alt=json"
+
+
+class Signer(crypt.Signer):
+ """Signs messages using the IAM `signBlob API`_.
+
+ This is useful when you need to sign bytes but do not have access to the
+ credential's private key file.
+
+ .. _signBlob API:
+ https://cloud.google.com/iam/reference/rest/v1/projects.serviceAccounts
+ /signBlob
+ """
+
+ def __init__(self, request, credentials, service_account_email):
+ """
+ Args:
+ request (google.auth.transport.Request): The object used to make
+ HTTP requests.
+ credentials (google.auth.credentials.Credentials): The credentials
+ that will be used to authenticate the request to the IAM API.
+                The credentials must have one of the following scopes:
+
+ - https://www.googleapis.com/auth/iam
+ - https://www.googleapis.com/auth/cloud-platform
+ service_account_email (str): The service account email identifying
+ which service account to use to sign bytes. Often, this can
+ be the same as the service account email in the given
+ credentials.
+ """
+ self._request = request
+ self._credentials = credentials
+ self._service_account_email = service_account_email
+
+ def _make_signing_request(self, message):
+ """Makes a request to the API signBlob API."""
+ message = _helpers.to_bytes(message)
+
+ method = "POST"
+ url = _SIGN_BLOB_URI.format(self._service_account_email)
+ headers = {"Content-Type": "application/json"}
+ body = json.dumps(
+ {"payload": base64.b64encode(message).decode("utf-8")}
+ ).encode("utf-8")
+
+ self._credentials.before_request(self._request, method, url, headers)
+ response = self._request(url=url, method=method, body=body, headers=headers)
+
+ if response.status != http_client.OK:
+ raise exceptions.TransportError(
+ "Error calling the IAM signBytes API: {}".format(response.data)
+ )
+
+ return json.loads(response.data.decode("utf-8"))
+
+ @property
+ def key_id(self):
+ """Optional[str]: The key ID used to identify this private key.
+
+ .. warning::
+ This is always ``None``. The key ID used by IAM can not
+ be reliably determined ahead of time.
+ """
+ return None
+
+ @_helpers.copy_docstring(crypt.Signer)
+ def sign(self, message):
+ response = self._make_signing_request(message)
+ return base64.b64decode(response["signedBlob"])
diff --git a/venv/Lib/site-packages/google/auth/impersonated_credentials.py b/venv/Lib/site-packages/google/auth/impersonated_credentials.py
new file mode 100644
index 000000000..d2c5ded1c
--- /dev/null
+++ b/venv/Lib/site-packages/google/auth/impersonated_credentials.py
@@ -0,0 +1,398 @@
+# Copyright 2018 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Google Cloud Impersonated credentials.
+
+This module provides authentication for applications where local credentials
+impersonate a remote service account using the `IAM Credentials API`_.
+
+This class can be used to impersonate a service account as long as the original
+Credential object has the "Service Account Token Creator" role on the target
+service account.
+
+ .. _IAM Credentials API:
+ https://cloud.google.com/iam/credentials/reference/rest/
+"""
+
+import base64
+import copy
+from datetime import datetime
+import json
+
+import six
+from six.moves import http_client
+
+from google.auth import _helpers
+from google.auth import credentials
+from google.auth import exceptions
+from google.auth import jwt
+from google.auth.transport.requests import AuthorizedSession
+
+_DEFAULT_TOKEN_LIFETIME_SECS = 3600 # 1 hour in seconds
+
+_IAM_SCOPE = ["https://www.googleapis.com/auth/iam"]
+
+_IAM_ENDPOINT = (
+ "https://iamcredentials.googleapis.com/v1/projects/-"
+ + "/serviceAccounts/{}:generateAccessToken"
+)
+
+_IAM_SIGN_ENDPOINT = (
+ "https://iamcredentials.googleapis.com/v1/projects/-"
+ + "/serviceAccounts/{}:signBlob"
+)
+
+_IAM_IDTOKEN_ENDPOINT = (
+ "https://iamcredentials.googleapis.com/v1/"
+ + "projects/-/serviceAccounts/{}:generateIdToken"
+)
+
+_REFRESH_ERROR = "Unable to acquire impersonated credentials"
+
+_DEFAULT_TOKEN_URI = "https://oauth2.googleapis.com/token"
+
+
+def _make_iam_token_request(request, principal, headers, body):
+ """Makes a request to the Google Cloud IAM service for an access token.
+ Args:
+ request (Request): The Request object to use.
+ principal (str): The principal to request an access token for.
+ headers (Mapping[str, str]): Map of headers to transmit.
+ body (Mapping[str, str]): JSON Payload body for the iamcredentials
+ API call.
+
+ Raises:
+ google.auth.exceptions.TransportError: Raised if there is an underlying
+ HTTP connection error
+ google.auth.exceptions.RefreshError: Raised if the impersonated
+ credentials are not available. Common reasons are
+ `iamcredentials.googleapis.com` is not enabled or the
+ `Service Account Token Creator` is not assigned
+ """
+ iam_endpoint = _IAM_ENDPOINT.format(principal)
+
+ body = json.dumps(body).encode("utf-8")
+
+ response = request(url=iam_endpoint, method="POST", headers=headers, body=body)
+
+ # support both string and bytes type response.data
+ response_body = (
+ response.data.decode("utf-8")
+ if hasattr(response.data, "decode")
+ else response.data
+ )
+
+ if response.status != http_client.OK:
+        raise exceptions.RefreshError(_REFRESH_ERROR, response_body)
+
+ try:
+ token_response = json.loads(response_body)
+ token = token_response["accessToken"]
+ expiry = datetime.strptime(token_response["expireTime"], "%Y-%m-%dT%H:%M:%SZ")
+
+ return token, expiry
+
+ except (KeyError, ValueError) as caught_exc:
+ new_exc = exceptions.RefreshError(
+ "{}: No access token or invalid expiration in response.".format(
+ _REFRESH_ERROR
+ ),
+ response_body,
+ )
+ six.raise_from(new_exc, caught_exc)
+
+
+class Credentials(credentials.CredentialsWithQuotaProject, credentials.Signing):
+ """This module defines impersonated credentials which are essentially
+ impersonated identities.
+
+ Impersonated Credentials allows credentials issued to a user or
+ service account to impersonate another. The target service account must
+ grant the originating credential principal the
+ `Service Account Token Creator`_ IAM role:
+
+ For more information about Token Creator IAM role and
+ IAMCredentials API, see
+ `Creating Short-Lived Service Account Credentials`_.
+
+ .. _Service Account Token Creator:
+ https://cloud.google.com/iam/docs/service-accounts#the_service_account_token_creator_role
+
+ .. _Creating Short-Lived Service Account Credentials:
+ https://cloud.google.com/iam/docs/creating-short-lived-service-account-credentials
+
+ Usage:
+
+ First grant source_credentials the `Service Account Token Creator`
+ role on the target account to impersonate. In this example, the
+ service account represented by svc_account.json has the
+ token creator role on
+ `impersonated-account@_project_.iam.gserviceaccount.com`.
+
+ Enable the IAMCredentials API on the source project:
+ `gcloud services enable iamcredentials.googleapis.com`.
+
+ Initialize a source credential which does not have access to
+ list bucket::
+
+            from google.oauth2 import service_account
+
+ target_scopes = [
+ 'https://www.googleapis.com/auth/devstorage.read_only']
+
+ source_credentials = (
+ service_account.Credentials.from_service_account_file(
+ '/path/to/svc_account.json',
+ scopes=target_scopes))
+
+ Now use the source credentials to acquire credentials to impersonate
+ another service account::
+
+ from google.auth import impersonated_credentials
+
+ target_credentials = impersonated_credentials.Credentials(
+ source_credentials=source_credentials,
+ target_principal='impersonated-account@_project_.iam.gserviceaccount.com',
+ target_scopes = target_scopes,
+ lifetime=500)
+
+ Resource access is granted::
+
+ client = storage.Client(credentials=target_credentials)
+ buckets = client.list_buckets(project='your_project')
+ for bucket in buckets:
+ print(bucket.name)
+ """
+
+ def __init__(
+ self,
+ source_credentials,
+ target_principal,
+ target_scopes,
+ delegates=None,
+ lifetime=_DEFAULT_TOKEN_LIFETIME_SECS,
+ quota_project_id=None,
+ ):
+ """
+ Args:
+ source_credentials (google.auth.Credentials): The source credential
+                used to acquire the impersonated credentials.
+ target_principal (str): The service account to impersonate.
+ target_scopes (Sequence[str]): Scopes to request during the
+ authorization grant.
+ delegates (Sequence[str]): The chained list of delegates required
+ to grant the final access_token. If set, the sequence of
+ identities must have "Service Account Token Creator" capability
+                granted to the preceding identity. For example, if set to
+ [serviceAccountB, serviceAccountC], the source_credential
+ must have the Token Creator role on serviceAccountB.
+ serviceAccountB must have the Token Creator on
+ serviceAccountC.
+ Finally, C must have Token Creator on target_principal.
+ If left unset, source_credential must have that role on
+ target_principal.
+ lifetime (int): Number of seconds the delegated credential should
+                be valid for (up to 3600).
+ quota_project_id (Optional[str]): The project ID used for quota and billing.
+ This project may be different from the project used to
+ create the credentials.
+ """
+
+ super(Credentials, self).__init__()
+
+ self._source_credentials = copy.copy(source_credentials)
+ # Service account source credentials must have the _IAM_SCOPE
+ # added to refresh correctly. User credentials cannot have
+ # their original scopes modified.
+ if isinstance(self._source_credentials, credentials.Scoped):
+ self._source_credentials = self._source_credentials.with_scopes(_IAM_SCOPE)
+ self._target_principal = target_principal
+ self._target_scopes = target_scopes
+ self._delegates = delegates
+ self._lifetime = lifetime
+ self.token = None
+ self.expiry = _helpers.utcnow()
+ self._quota_project_id = quota_project_id
+
+ @_helpers.copy_docstring(credentials.Credentials)
+ def refresh(self, request):
+ self._update_token(request)
+
+ def _update_token(self, request):
+ """Updates credentials with a new access_token representing
+ the impersonated account.
+
+ Args:
+ request (google.auth.transport.requests.Request): Request object
+ to use for refreshing credentials.
+ """
+
+        # Refresh the source credentials if they are not valid.
+ if not self._source_credentials.valid:
+ self._source_credentials.refresh(request)
+
+ body = {
+ "delegates": self._delegates,
+ "scope": self._target_scopes,
+ "lifetime": str(self._lifetime) + "s",
+ }
+
+ headers = {"Content-Type": "application/json"}
+
+ # Apply the source credentials authentication info.
+ self._source_credentials.apply(headers)
+
+ self.token, self.expiry = _make_iam_token_request(
+ request=request,
+ principal=self._target_principal,
+ headers=headers,
+ body=body,
+ )
+
+ def sign_bytes(self, message):
+
+ iam_sign_endpoint = _IAM_SIGN_ENDPOINT.format(self._target_principal)
+
+ body = {
+ "payload": base64.b64encode(message).decode("utf-8"),
+ "delegates": self._delegates,
+ }
+
+ headers = {"Content-Type": "application/json"}
+
+ authed_session = AuthorizedSession(self._source_credentials)
+
+ response = authed_session.post(
+ url=iam_sign_endpoint, headers=headers, json=body
+ )
+
+ return base64.b64decode(response.json()["signedBlob"])
+
+ @property
+ def signer_email(self):
+ return self._target_principal
+
+ @property
+ def service_account_email(self):
+ return self._target_principal
+
+ @property
+ def signer(self):
+ return self
+
+ @_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
+ def with_quota_project(self, quota_project_id):
+ return self.__class__(
+ self._source_credentials,
+ target_principal=self._target_principal,
+ target_scopes=self._target_scopes,
+ delegates=self._delegates,
+ lifetime=self._lifetime,
+ quota_project_id=quota_project_id,
+ )
+
+
+class IDTokenCredentials(credentials.CredentialsWithQuotaProject):
+ """Open ID Connect ID Token-based service account credentials.
+
+ """
+
+ def __init__(
+ self,
+ target_credentials,
+ target_audience=None,
+ include_email=False,
+ quota_project_id=None,
+ ):
+ """
+ Args:
+ target_credentials (google.auth.Credentials): The target
+                credentials for which to acquire ID tokens.
+ target_audience (string): Audience to issue the token for.
+ include_email (bool): Include email in IdToken
+ quota_project_id (Optional[str]): The project ID used for
+ quota and billing.
+ """
+ super(IDTokenCredentials, self).__init__()
+
+ if not isinstance(target_credentials, Credentials):
+ raise exceptions.GoogleAuthError(
+ "Provided Credential must be " "impersonated_credentials"
+ )
+ self._target_credentials = target_credentials
+ self._target_audience = target_audience
+ self._include_email = include_email
+ self._quota_project_id = quota_project_id
+
+ def from_credentials(self, target_credentials, target_audience=None):
+ return self.__class__(
+ target_credentials=self._target_credentials,
+ target_audience=target_audience,
+ quota_project_id=self._quota_project_id,
+ )
+
+ def with_target_audience(self, target_audience):
+ return self.__class__(
+ target_credentials=self._target_credentials,
+ target_audience=target_audience,
+ quota_project_id=self._quota_project_id,
+ )
+
+ def with_include_email(self, include_email):
+ return self.__class__(
+ target_credentials=self._target_credentials,
+ target_audience=self._target_audience,
+ include_email=include_email,
+ quota_project_id=self._quota_project_id,
+ )
+
+ @_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
+ def with_quota_project(self, quota_project_id):
+ return self.__class__(
+ target_credentials=self._target_credentials,
+ target_audience=self._target_audience,
+ include_email=self._include_email,
+ quota_project_id=quota_project_id,
+ )
+
+ @_helpers.copy_docstring(credentials.Credentials)
+ def refresh(self, request):
+
+ iam_sign_endpoint = _IAM_IDTOKEN_ENDPOINT.format(
+ self._target_credentials.signer_email
+ )
+
+ body = {
+ "audience": self._target_audience,
+ "delegates": self._target_credentials._delegates,
+ "includeEmail": self._include_email,
+ }
+
+ headers = {"Content-Type": "application/json"}
+
+ authed_session = AuthorizedSession(
+ self._target_credentials._source_credentials, auth_request=request
+ )
+
+ response = authed_session.post(
+ url=iam_sign_endpoint,
+ headers=headers,
+ data=json.dumps(body).encode("utf-8"),
+ )
+
+ id_token = response.json()["token"]
+ self.token = id_token
+ self.expiry = datetime.fromtimestamp(jwt.decode(id_token, verify=False)["exp"])
diff --git a/venv/Lib/site-packages/google/auth/jwt.py b/venv/Lib/site-packages/google/auth/jwt.py
new file mode 100644
index 000000000..a4f04f529
--- /dev/null
+++ b/venv/Lib/site-packages/google/auth/jwt.py
@@ -0,0 +1,844 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""JSON Web Tokens
+
+Provides support for creating (encoding) and verifying (decoding) JWTs,
+especially JWTs generated and consumed by Google infrastructure.
+
+See `rfc7519`_ for more details on JWTs.
+
+To encode a JWT use :func:`encode`::
+
+ from google.auth import crypt
+ from google.auth import jwt
+
+ signer = crypt.Signer(private_key)
+ payload = {'some': 'payload'}
+ encoded = jwt.encode(signer, payload)
+
+To decode a JWT and verify claims use :func:`decode`::
+
+ claims = jwt.decode(encoded, certs=public_certs)
+
+You can also skip verification::
+
+ claims = jwt.decode(encoded, verify=False)
+
+.. _rfc7519: https://tools.ietf.org/html/rfc7519
+
+"""
+
+try:
+ from collections.abc import Mapping
+# Python 2.7 compatibility
+except ImportError: # pragma: NO COVER
+ from collections import Mapping
+import copy
+import datetime
+import json
+
+import cachetools
+import six
+from six.moves import urllib
+
+from google.auth import _helpers
+from google.auth import _service_account_info
+from google.auth import crypt
+from google.auth import exceptions
+import google.auth.credentials
+
+try:
+ from google.auth.crypt import es256
+except ImportError: # pragma: NO COVER
+ es256 = None
+
+_DEFAULT_TOKEN_LIFETIME_SECS = 3600 # 1 hour in seconds
+_DEFAULT_MAX_CACHE_SIZE = 10
+_ALGORITHM_TO_VERIFIER_CLASS = {"RS256": crypt.RSAVerifier}
+_CRYPTOGRAPHY_BASED_ALGORITHMS = frozenset(["ES256"])
+
+if es256 is not None: # pragma: NO COVER
+ _ALGORITHM_TO_VERIFIER_CLASS["ES256"] = es256.ES256Verifier
+
+
+def encode(signer, payload, header=None, key_id=None):
+ """Make a signed JWT.
+
+ Args:
+ signer (google.auth.crypt.Signer): The signer used to sign the JWT.
+ payload (Mapping[str, str]): The JWT payload.
+ header (Mapping[str, str]): Additional JWT header payload.
+ key_id (str): The key id to add to the JWT header. If the
+ signer has a key id it will be used as the default. If this is
+ specified it will override the signer's key id.
+
+ Returns:
+ bytes: The encoded JWT.
+ """
+ if header is None:
+ header = {}
+
+ if key_id is None:
+ key_id = signer.key_id
+
+ header.update({"typ": "JWT"})
+
+ if es256 is not None and isinstance(signer, es256.ES256Signer):
+ header.update({"alg": "ES256"})
+ else:
+ header.update({"alg": "RS256"})
+
+ if key_id is not None:
+ header["kid"] = key_id
+
+ segments = [
+ _helpers.unpadded_urlsafe_b64encode(json.dumps(header).encode("utf-8")),
+ _helpers.unpadded_urlsafe_b64encode(json.dumps(payload).encode("utf-8")),
+ ]
+
+ signing_input = b".".join(segments)
+ signature = signer.sign(signing_input)
+ segments.append(_helpers.unpadded_urlsafe_b64encode(signature))
+
+ return b".".join(segments)
+
+
+def _decode_jwt_segment(encoded_section):
+ """Decodes a single JWT segment."""
+ section_bytes = _helpers.padded_urlsafe_b64decode(encoded_section)
+ try:
+ return json.loads(section_bytes.decode("utf-8"))
+ except ValueError as caught_exc:
+ new_exc = ValueError("Can't parse segment: {0}".format(section_bytes))
+ six.raise_from(new_exc, caught_exc)
+
+
+def _unverified_decode(token):
+ """Decodes a token and does no verification.
+
+ Args:
+ token (Union[str, bytes]): The encoded JWT.
+
+ Returns:
+ Tuple[str, str, str, str]: header, payload, signed_section, and
+ signature.
+
+ Raises:
+ ValueError: if there are an incorrect amount of segments in the token.
+ """
+ token = _helpers.to_bytes(token)
+
+ if token.count(b".") != 2:
+ raise ValueError("Wrong number of segments in token: {0}".format(token))
+
+ encoded_header, encoded_payload, signature = token.split(b".")
+ signed_section = encoded_header + b"." + encoded_payload
+ signature = _helpers.padded_urlsafe_b64decode(signature)
+
+ # Parse segments
+ header = _decode_jwt_segment(encoded_header)
+ payload = _decode_jwt_segment(encoded_payload)
+
+ return header, payload, signed_section, signature
+
+
+def decode_header(token):
+ """Return the decoded header of a token.
+
+ No verification is done. This is useful to extract the key id from
+ the header in order to acquire the appropriate certificate to verify
+ the token.
+
+ Args:
+ token (Union[str, bytes]): the encoded JWT.
+
+ Returns:
+ Mapping: The decoded JWT header.
+ """
+ header, _, _, _ = _unverified_decode(token)
+ return header
+
+
+def _verify_iat_and_exp(payload):
+ """Verifies the ``iat`` (Issued At) and ``exp`` (Expires) claims in a token
+ payload.
+
+ Args:
+ payload (Mapping[str, str]): The JWT payload.
+
+ Raises:
+ ValueError: if any checks failed.
+ """
+ now = _helpers.datetime_to_secs(_helpers.utcnow())
+
+ # Make sure the iat and exp claims are present.
+ for key in ("iat", "exp"):
+ if key not in payload:
+ raise ValueError("Token does not contain required claim {}".format(key))
+
+ # Make sure the token wasn't issued in the future.
+ iat = payload["iat"]
+ # Err on the side of accepting a token that is slightly early to account
+ # for clock skew.
+ earliest = iat - _helpers.CLOCK_SKEW_SECS
+ if now < earliest:
+ raise ValueError("Token used too early, {} < {}".format(now, iat))
+
+    # Make sure the token hasn't expired.
+ exp = payload["exp"]
+ # Err on the side of accepting a token that is slightly out of date
+    # to account for clock skew.
+ latest = exp + _helpers.CLOCK_SKEW_SECS
+ if latest < now:
+ raise ValueError("Token expired, {} < {}".format(latest, now))
+
+
+def decode(token, certs=None, verify=True, audience=None):
+ """Decode and verify a JWT.
+
+ Args:
+ token (str): The encoded JWT.
+ certs (Union[str, bytes, Mapping[str, Union[str, bytes]]]): The
+ certificate used to validate the JWT signature. If bytes or string,
+            it must be the public key certificate in PEM format. If a mapping,
+ it must be a mapping of key IDs to public key certificates in PEM
+ format. The mapping must contain the same key ID that's specified
+ in the token's header.
+ verify (bool): Whether to perform signature and claim validation.
+ Verification is done by default.
+ audience (str): The audience claim, 'aud', that this JWT should
+ contain. If None then the JWT's 'aud' parameter is not verified.
+
+ Returns:
+ Mapping[str, str]: The deserialized JSON payload in the JWT.
+
+ Raises:
+ ValueError: if any verification checks failed.
+ """
+ header, payload, signed_section, signature = _unverified_decode(token)
+
+ if not verify:
+ return payload
+
+ # Pluck the key id and algorithm from the header and make sure we have
+ # a verifier that can support it.
+ key_alg = header.get("alg")
+ key_id = header.get("kid")
+
+ try:
+ verifier_cls = _ALGORITHM_TO_VERIFIER_CLASS[key_alg]
+ except KeyError as exc:
+ if key_alg in _CRYPTOGRAPHY_BASED_ALGORITHMS:
+ six.raise_from(
+ ValueError(
+ "The key algorithm {} requires the cryptography package "
+ "to be installed.".format(key_alg)
+ ),
+ exc,
+ )
+ else:
+ six.raise_from(
+ ValueError("Unsupported signature algorithm {}".format(key_alg)), exc
+ )
+
+ # If certs is specified as a dictionary of key IDs to certificates, then
+ # use the certificate identified by the key ID in the token header.
+ if isinstance(certs, Mapping):
+ if key_id:
+ if key_id not in certs:
+ raise ValueError("Certificate for key id {} not found.".format(key_id))
+ certs_to_check = [certs[key_id]]
+ # If there's no key id in the header, check against all of the certs.
+ else:
+ certs_to_check = certs.values()
+ else:
+ certs_to_check = certs
+
+ # Verify that the signature matches the message.
+ if not crypt.verify_signature(
+ signed_section, signature, certs_to_check, verifier_cls
+ ):
+ raise ValueError("Could not verify token signature.")
+
+    # Verify the issued-at and expiration times in the payload.
+ _verify_iat_and_exp(payload)
+
+ # Check audience.
+ if audience is not None:
+ claim_audience = payload.get("aud")
+ if audience != claim_audience:
+ raise ValueError(
+ "Token has wrong audience {}, expected {}".format(
+ claim_audience, audience
+ )
+ )
+
+ return payload
+
+
+class Credentials(
+ google.auth.credentials.Signing, google.auth.credentials.CredentialsWithQuotaProject
+):
+ """Credentials that use a JWT as the bearer token.
+
+ These credentials require an "audience" claim. This claim identifies the
+ intended recipient of the bearer token.
+
+ The constructor arguments determine the claims for the JWT that is
+ sent with requests. Usually, you'll construct these credentials with
+ one of the helper constructors as shown in the next section.
+
+ To create JWT credentials using a Google service account private key
+ JSON file::
+
+ audience = 'https://pubsub.googleapis.com/google.pubsub.v1.Publisher'
+ credentials = jwt.Credentials.from_service_account_file(
+ 'service-account.json',
+ audience=audience)
+
+ If you already have the service account file loaded and parsed::
+
+ service_account_info = json.load(open('service_account.json'))
+ credentials = jwt.Credentials.from_service_account_info(
+ service_account_info,
+ audience=audience)
+
+ Both helper methods pass on arguments to the constructor, so you can
+ specify the JWT claims::
+
+ credentials = jwt.Credentials.from_service_account_file(
+ 'service-account.json',
+ audience=audience,
+ additional_claims={'meta': 'data'})
+
+ You can also construct the credentials directly if you have a
+ :class:`~google.auth.crypt.Signer` instance::
+
+ credentials = jwt.Credentials(
+ signer,
+ issuer='your-issuer',
+ subject='your-subject',
+ audience=audience)
+
+ The claims are considered immutable. If you want to modify the claims,
+ you can easily create another instance using :meth:`with_claims`::
+
+ new_audience = (
+ 'https://pubsub.googleapis.com/google.pubsub.v1.Subscriber')
+ new_credentials = credentials.with_claims(audience=new_audience)
+ """
+
+ def __init__(
+ self,
+ signer,
+ issuer,
+ subject,
+ audience,
+ additional_claims=None,
+ token_lifetime=_DEFAULT_TOKEN_LIFETIME_SECS,
+ quota_project_id=None,
+ ):
+ """
+ Args:
+ signer (google.auth.crypt.Signer): The signer used to sign JWTs.
+ issuer (str): The `iss` claim.
+ subject (str): The `sub` claim.
+ audience (str): the `aud` claim. The intended audience for the
+ credentials.
+ additional_claims (Mapping[str, str]): Any additional claims for
+ the JWT payload.
+ token_lifetime (int): The amount of time in seconds for
+ which the token is valid. Defaults to 1 hour.
+ quota_project_id (Optional[str]): The project ID used for quota
+ and billing.
+ """
+ super(Credentials, self).__init__()
+ self._signer = signer
+ self._issuer = issuer
+ self._subject = subject
+ self._audience = audience
+ self._token_lifetime = token_lifetime
+ self._quota_project_id = quota_project_id
+
+ if additional_claims is None:
+ additional_claims = {}
+
+ self._additional_claims = additional_claims
+
+ @classmethod
+ def _from_signer_and_info(cls, signer, info, **kwargs):
+ """Creates a Credentials instance from a signer and service account
+ info.
+
+ Args:
+ signer (google.auth.crypt.Signer): The signer used to sign JWTs.
+ info (Mapping[str, str]): The service account info.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.jwt.Credentials: The constructed credentials.
+
+ Raises:
+ ValueError: If the info is not in the expected format.
+ """
+ kwargs.setdefault("subject", info["client_email"])
+ kwargs.setdefault("issuer", info["client_email"])
+ return cls(signer, **kwargs)
+
+ @classmethod
+ def from_service_account_info(cls, info, **kwargs):
+ """Creates an Credentials instance from a dictionary.
+
+ Args:
+ info (Mapping[str, str]): The service account info in Google
+ format.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.jwt.Credentials: The constructed credentials.
+
+ Raises:
+ ValueError: If the info is not in the expected format.
+ """
+ signer = _service_account_info.from_dict(info, require=["client_email"])
+ return cls._from_signer_and_info(signer, info, **kwargs)
+
+ @classmethod
+ def from_service_account_file(cls, filename, **kwargs):
+ """Creates a Credentials instance from a service account .json file
+ in Google format.
+
+ Args:
+ filename (str): The path to the service account .json file.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.jwt.Credentials: The constructed credentials.
+ """
+ info, signer = _service_account_info.from_filename(
+ filename, require=["client_email"]
+ )
+ return cls._from_signer_and_info(signer, info, **kwargs)
+
+ @classmethod
+ def from_signing_credentials(cls, credentials, audience, **kwargs):
+ """Creates a new :class:`google.auth.jwt.Credentials` instance from an
+ existing :class:`google.auth.credentials.Signing` instance.
+
+ The new instance will use the same signer as the existing instance and
+ will use the existing instance's signer email as the issuer and
+ subject by default.
+
+ Example::
+
+ svc_creds = service_account.Credentials.from_service_account_file(
+ 'service_account.json')
+ audience = (
+ 'https://pubsub.googleapis.com/google.pubsub.v1.Publisher')
+ jwt_creds = jwt.Credentials.from_signing_credentials(
+ svc_creds, audience=audience)
+
+ Args:
+ credentials (google.auth.credentials.Signing): The credentials to
+ use to construct the new credentials.
+ audience (str): the `aud` claim. The intended audience for the
+ credentials.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.jwt.Credentials: A new Credentials instance.
+ """
+ kwargs.setdefault("issuer", credentials.signer_email)
+ kwargs.setdefault("subject", credentials.signer_email)
+ return cls(credentials.signer, audience=audience, **kwargs)
+
+ def with_claims(
+ self, issuer=None, subject=None, audience=None, additional_claims=None
+ ):
+ """Returns a copy of these credentials with modified claims.
+
+ Args:
+ issuer (str): The `iss` claim. If unspecified the current issuer
+ claim will be used.
+ subject (str): The `sub` claim. If unspecified the current subject
+ claim will be used.
+ audience (str): the `aud` claim. If unspecified the current
+ audience claim will be used.
+ additional_claims (Mapping[str, str]): Any additional claims for
+ the JWT payload. This will be merged with the current
+ additional claims.
+
+ Returns:
+ google.auth.jwt.Credentials: A new credentials instance.
+ """
+ new_additional_claims = copy.deepcopy(self._additional_claims)
+ new_additional_claims.update(additional_claims or {})
+
+ return self.__class__(
+ self._signer,
+ issuer=issuer if issuer is not None else self._issuer,
+ subject=subject if subject is not None else self._subject,
+ audience=audience if audience is not None else self._audience,
+ additional_claims=new_additional_claims,
+ quota_project_id=self._quota_project_id,
+ )
+
+ @_helpers.copy_docstring(google.auth.credentials.CredentialsWithQuotaProject)
+ def with_quota_project(self, quota_project_id):
+ return self.__class__(
+ self._signer,
+ issuer=self._issuer,
+ subject=self._subject,
+ audience=self._audience,
+ additional_claims=self._additional_claims,
+ quota_project_id=quota_project_id,
+ )
+
+ def _make_jwt(self):
+ """Make a signed JWT.
+
+ Returns:
+ Tuple[bytes, datetime]: The encoded JWT and the expiration.
+ """
+ now = _helpers.utcnow()
+ lifetime = datetime.timedelta(seconds=self._token_lifetime)
+ expiry = now + lifetime
+
+ payload = {
+ "iss": self._issuer,
+ "sub": self._subject,
+ "iat": _helpers.datetime_to_secs(now),
+ "exp": _helpers.datetime_to_secs(expiry),
+ "aud": self._audience,
+ }
+
+ payload.update(self._additional_claims)
+
+ jwt = encode(self._signer, payload)
+
+ return jwt, expiry
+
+ def refresh(self, request):
+ """Refreshes the access token.
+
+ Args:
+ request (Any): Unused.
+ """
+ # pylint: disable=unused-argument
+ # (pylint doesn't correctly recognize overridden methods.)
+ self.token, self.expiry = self._make_jwt()
+
+ @_helpers.copy_docstring(google.auth.credentials.Signing)
+ def sign_bytes(self, message):
+ return self._signer.sign(message)
+
+ @property
+ @_helpers.copy_docstring(google.auth.credentials.Signing)
+ def signer_email(self):
+ return self._issuer
+
+ @property
+ @_helpers.copy_docstring(google.auth.credentials.Signing)
+ def signer(self):
+ return self._signer
+
+
+class OnDemandCredentials(
+ google.auth.credentials.Signing, google.auth.credentials.CredentialsWithQuotaProject
+):
+ """On-demand JWT credentials.
+
+ Like :class:`Credentials`, this class uses a JWT as the bearer token for
+ authentication. However, this class does not require the audience at
+ construction time. Instead, it will generate a new token on-demand for
+ each request using the request URI as the audience. It caches tokens
+ so that multiple requests to the same URI do not incur the overhead
+ of generating a new token every time.
+
+ This behavior is especially useful for `gRPC`_ clients. A gRPC service may
+ have multiple audiences, and gRPC clients may not know all of the audiences
+ required for accessing a particular service. With these credentials,
+ no knowledge of the audiences is required ahead of time.
+
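+ Example (a hedged sketch; the cached-audience URL shown is illustrative)::
+
+ credentials = jwt.OnDemandCredentials.from_service_account_file(
+ 'service_account.json')
+ # The first request to a URI mints (and caches) a JWT whose `aud`
+ # is that URI, e.g. 'https://pubsub.googleapis.com/v1/topics'.
+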
+ .. _grpc: http://www.grpc.io/
+ """
+
+ def __init__(
+ self,
+ signer,
+ issuer,
+ subject,
+ additional_claims=None,
+ token_lifetime=_DEFAULT_TOKEN_LIFETIME_SECS,
+ max_cache_size=_DEFAULT_MAX_CACHE_SIZE,
+ quota_project_id=None,
+ ):
+ """
+ Args:
+ signer (google.auth.crypt.Signer): The signer used to sign JWTs.
+ issuer (str): The `iss` claim.
+ subject (str): The `sub` claim.
+ additional_claims (Mapping[str, str]): Any additional claims for
+ the JWT payload.
+ token_lifetime (int): The amount of time in seconds for
+ which the token is valid. Defaults to 1 hour.
+ max_cache_size (int): The maximum number of JWT tokens to keep in
+ cache. Tokens are cached using :class:`cachetools.LRUCache`.
+ quota_project_id (Optional[str]): The project ID used for quota
+ and billing.
+
+ """
+ super(OnDemandCredentials, self).__init__()
+ self._signer = signer
+ self._issuer = issuer
+ self._subject = subject
+ self._token_lifetime = token_lifetime
+ self._quota_project_id = quota_project_id
+
+ if additional_claims is None:
+ additional_claims = {}
+
+ self._additional_claims = additional_claims
+ self._cache = cachetools.LRUCache(maxsize=max_cache_size)
+
+ @classmethod
+ def _from_signer_and_info(cls, signer, info, **kwargs):
+ """Creates an OnDemandCredentials instance from a signer and service
+ account info.
+
+ Args:
+ signer (google.auth.crypt.Signer): The signer used to sign JWTs.
+ info (Mapping[str, str]): The service account info.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.jwt.OnDemandCredentials: The constructed credentials.
+
+ Raises:
+ ValueError: If the info is not in the expected format.
+ """
+ kwargs.setdefault("subject", info["client_email"])
+ kwargs.setdefault("issuer", info["client_email"])
+ return cls(signer, **kwargs)
+
+ @classmethod
+ def from_service_account_info(cls, info, **kwargs):
+ """Creates an OnDemandCredentials instance from a dictionary.
+
+ Args:
+ info (Mapping[str, str]): The service account info in Google
+ format.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.jwt.OnDemandCredentials: The constructed credentials.
+
+ Raises:
+ ValueError: If the info is not in the expected format.
+ """
+ signer = _service_account_info.from_dict(info, require=["client_email"])
+ return cls._from_signer_and_info(signer, info, **kwargs)
+
+ @classmethod
+ def from_service_account_file(cls, filename, **kwargs):
+ """Creates an OnDemandCredentials instance from a service account .json
+ file in Google format.
+
+ Args:
+ filename (str): The path to the service account .json file.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.jwt.OnDemandCredentials: The constructed credentials.
+ """
+ info, signer = _service_account_info.from_filename(
+ filename, require=["client_email"]
+ )
+ return cls._from_signer_and_info(signer, info, **kwargs)
+
+ @classmethod
+ def from_signing_credentials(cls, credentials, **kwargs):
+ """Creates a new :class:`google.auth.jwt.OnDemandCredentials` instance
+ from an existing :class:`google.auth.credentials.Signing` instance.
+
+ The new instance will use the same signer as the existing instance and
+ will use the existing instance's signer email as the issuer and
+ subject by default.
+
+ Example::
+
+ svc_creds = service_account.Credentials.from_service_account_file(
+ 'service_account.json')
+ jwt_creds = jwt.OnDemandCredentials.from_signing_credentials(
+ svc_creds)
+
+ Args:
+ credentials (google.auth.credentials.Signing): The credentials to
+ use to construct the new credentials.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.jwt.OnDemandCredentials: A new credentials instance.
+ """
+ kwargs.setdefault("issuer", credentials.signer_email)
+ kwargs.setdefault("subject", credentials.signer_email)
+ return cls(credentials.signer, **kwargs)
+
+ def with_claims(self, issuer=None, subject=None, additional_claims=None):
+ """Returns a copy of these credentials with modified claims.
+
+ Args:
+ issuer (str): The `iss` claim. If unspecified the current issuer
+ claim will be used.
+ subject (str): The `sub` claim. If unspecified the current subject
+ claim will be used.
+ additional_claims (Mapping[str, str]): Any additional claims for
+ the JWT payload. This will be merged with the current
+ additional claims.
+
+ Returns:
+ google.auth.jwt.OnDemandCredentials: A new credentials instance.
+ """
+ new_additional_claims = copy.deepcopy(self._additional_claims)
+ new_additional_claims.update(additional_claims or {})
+
+ return self.__class__(
+ self._signer,
+ issuer=issuer if issuer is not None else self._issuer,
+ subject=subject if subject is not None else self._subject,
+ additional_claims=new_additional_claims,
+ max_cache_size=self._cache.maxsize,
+ quota_project_id=self._quota_project_id,
+ )
+
+ @_helpers.copy_docstring(google.auth.credentials.CredentialsWithQuotaProject)
+ def with_quota_project(self, quota_project_id):
+ return self.__class__(
+ self._signer,
+ issuer=self._issuer,
+ subject=self._subject,
+ additional_claims=self._additional_claims,
+ max_cache_size=self._cache.maxsize,
+ quota_project_id=quota_project_id,
+ )
+
+ @property
+ def valid(self):
+ """Checks the validity of the credentials.
+
+ These credentials are always valid because they generate tokens on
+ demand.
+ """
+ return True
+
+ def _make_jwt_for_audience(self, audience):
+ """Make a new JWT for the given audience.
+
+ Args:
+ audience (str): The intended audience.
+
+ Returns:
+ Tuple[bytes, datetime]: The encoded JWT and the expiration.
+ """
+ now = _helpers.utcnow()
+ lifetime = datetime.timedelta(seconds=self._token_lifetime)
+ expiry = now + lifetime
+
+ payload = {
+ "iss": self._issuer,
+ "sub": self._subject,
+ "iat": _helpers.datetime_to_secs(now),
+ "exp": _helpers.datetime_to_secs(expiry),
+ "aud": audience,
+ }
+
+ payload.update(self._additional_claims)
+
+ jwt = encode(self._signer, payload)
+
+ return jwt, expiry
+
+ def _get_jwt_for_audience(self, audience):
+ """Get a JWT For a given audience.
+
+ If there is already an existing, non-expired token in the cache for
+ the audience, that token is used. Otherwise, a new token will be
+ created.
+
+ Args:
+ audience (str): The intended audience.
+
+ Returns:
+ bytes: The encoded JWT.
+ """
+ token, expiry = self._cache.get(audience, (None, None))
+
+ if token is None or expiry < _helpers.utcnow():
+ token, expiry = self._make_jwt_for_audience(audience)
+ self._cache[audience] = token, expiry
+
+ return token
+
+ def refresh(self, request):
+ """Raises an exception, these credentials can not be directly
+ refreshed.
+
+ Args:
+ request (Any): Unused.
+
+ Raises:
+ google.auth.RefreshError
+ """
+ # pylint: disable=unused-argument
+ # (pylint doesn't correctly recognize overridden methods.)
+ raise exceptions.RefreshError(
+ "OnDemandCredentials can not be directly refreshed."
+ )
+
+ def before_request(self, request, method, url, headers):
+ """Performs credential-specific before request logic.
+
+ Args:
+ request (Any): Unused. JWT credentials do not need to make an
+ HTTP request to refresh.
+ method (str): The request's HTTP method.
+ url (str): The request's URI. This is used as the audience claim
+ when generating the JWT.
+ headers (Mapping): The request's headers.
+ """
+ # pylint: disable=unused-argument
+ # (pylint doesn't correctly recognize overridden methods.)
+ parts = urllib.parse.urlsplit(url)
+ # Strip query string and fragment
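+ # (e.g. an illustrative 'https://example.googleapis.com/v1/x?alt=json'
+ # becomes the audience 'https://example.googleapis.com/v1/x')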
+ audience = urllib.parse.urlunsplit(
+ (parts.scheme, parts.netloc, parts.path, "", "")
+ )
+ token = self._get_jwt_for_audience(audience)
+ self.apply(headers, token=token)
+
+ @_helpers.copy_docstring(google.auth.credentials.Signing)
+ def sign_bytes(self, message):
+ return self._signer.sign(message)
+
+ @property
+ @_helpers.copy_docstring(google.auth.credentials.Signing)
+ def signer_email(self):
+ return self._issuer
+
+ @property
+ @_helpers.copy_docstring(google.auth.credentials.Signing)
+ def signer(self):
+ return self._signer
diff --git a/venv/Lib/site-packages/google/auth/transport/__init__.py b/venv/Lib/site-packages/google/auth/transport/__init__.py
new file mode 100644
index 000000000..374e7b4d7
--- /dev/null
+++ b/venv/Lib/site-packages/google/auth/transport/__init__.py
@@ -0,0 +1,97 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Transport - HTTP client library support.
+
+:mod:`google.auth` is designed to work with various HTTP client libraries such
+as urllib3 and requests. In order to work across these libraries with different
+interfaces some abstraction is needed.
+
+This module provides two interfaces that are implemented by transport adapters
+to support HTTP libraries. :class:`Request` defines the interface expected by
+:mod:`google.auth` to make requests. :class:`Response` defines the interface
+for the return value of :class:`Request`.
+"""
+
+import abc
+
+import six
+from six.moves import http_client
+
+DEFAULT_REFRESH_STATUS_CODES = (http_client.UNAUTHORIZED,)
+"""Sequence[int]: Which HTTP status code indicate that credentials should be
+refreshed and a request should be retried.
+"""
+
+DEFAULT_MAX_REFRESH_ATTEMPTS = 2
+"""int: How many times to refresh the credentials and retry a request."""
+
+
+@six.add_metaclass(abc.ABCMeta)
+class Response(object):
+ """HTTP Response data."""
+
+ @abc.abstractproperty
+ def status(self):
+ """int: The HTTP status code."""
+ raise NotImplementedError("status must be implemented.")
+
+ @abc.abstractproperty
+ def headers(self):
+ """Mapping[str, str]: The HTTP response headers."""
+ raise NotImplementedError("headers must be implemented.")
+
+ @abc.abstractproperty
+ def data(self):
+ """bytes: The response body."""
+ raise NotImplementedError("data must be implemented.")
+
+
+@six.add_metaclass(abc.ABCMeta)
+class Request(object):
+ """Interface for a callable that makes HTTP requests.
+
+ Specific transport implementations should provide an implementation of
+ this that adapts their specific request / response API.
+
+ .. automethod:: __call__
+ """
+
+ @abc.abstractmethod
+ def __call__(
+ self, url, method="GET", body=None, headers=None, timeout=None, **kwargs
+ ):
+ """Make an HTTP request.
+
+ Args:
+ url (str): The URI to be requested.
+ method (str): The HTTP method to use for the request. Defaults
+ to 'GET'.
+ body (bytes): The payload / body in HTTP request.
+ headers (Mapping[str, str]): Request headers.
+ timeout (Optional[int]): The number of seconds to wait for a
+ response from the server. If not specified or if None, the
+ transport-specific default timeout will be used.
+ kwargs: Additional arguments passed on to the transport's
+ request method.
+
+ Returns:
+ Response: The HTTP response.
+
+ Raises:
+ google.auth.exceptions.TransportError: If any exception occurred.
+ """
+ # pylint: disable=redundant-returns-doc, missing-raises-doc
+ # (pylint doesn't play well with abstract docstrings.)
+ raise NotImplementedError("__call__ must be implemented.")
diff --git a/venv/Lib/site-packages/google/auth/transport/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/google/auth/transport/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..8d00f398a
Binary files /dev/null and b/venv/Lib/site-packages/google/auth/transport/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/auth/transport/__pycache__/_aiohttp_requests.cpython-36.pyc b/venv/Lib/site-packages/google/auth/transport/__pycache__/_aiohttp_requests.cpython-36.pyc
new file mode 100644
index 000000000..2f6861c0e
Binary files /dev/null and b/venv/Lib/site-packages/google/auth/transport/__pycache__/_aiohttp_requests.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/auth/transport/__pycache__/_http_client.cpython-36.pyc b/venv/Lib/site-packages/google/auth/transport/__pycache__/_http_client.cpython-36.pyc
new file mode 100644
index 000000000..84283b704
Binary files /dev/null and b/venv/Lib/site-packages/google/auth/transport/__pycache__/_http_client.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/auth/transport/__pycache__/_mtls_helper.cpython-36.pyc b/venv/Lib/site-packages/google/auth/transport/__pycache__/_mtls_helper.cpython-36.pyc
new file mode 100644
index 000000000..452acd87e
Binary files /dev/null and b/venv/Lib/site-packages/google/auth/transport/__pycache__/_mtls_helper.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/auth/transport/__pycache__/grpc.cpython-36.pyc b/venv/Lib/site-packages/google/auth/transport/__pycache__/grpc.cpython-36.pyc
new file mode 100644
index 000000000..d94a816e4
Binary files /dev/null and b/venv/Lib/site-packages/google/auth/transport/__pycache__/grpc.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/auth/transport/__pycache__/mtls.cpython-36.pyc b/venv/Lib/site-packages/google/auth/transport/__pycache__/mtls.cpython-36.pyc
new file mode 100644
index 000000000..8107fa978
Binary files /dev/null and b/venv/Lib/site-packages/google/auth/transport/__pycache__/mtls.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/auth/transport/__pycache__/requests.cpython-36.pyc b/venv/Lib/site-packages/google/auth/transport/__pycache__/requests.cpython-36.pyc
new file mode 100644
index 000000000..082adab65
Binary files /dev/null and b/venv/Lib/site-packages/google/auth/transport/__pycache__/requests.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/auth/transport/__pycache__/urllib3.cpython-36.pyc b/venv/Lib/site-packages/google/auth/transport/__pycache__/urllib3.cpython-36.pyc
new file mode 100644
index 000000000..65b8c2d1a
Binary files /dev/null and b/venv/Lib/site-packages/google/auth/transport/__pycache__/urllib3.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/auth/transport/_aiohttp_requests.py b/venv/Lib/site-packages/google/auth/transport/_aiohttp_requests.py
new file mode 100644
index 000000000..aaf4e2c0b
--- /dev/null
+++ b/venv/Lib/site-packages/google/auth/transport/_aiohttp_requests.py
@@ -0,0 +1,384 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Transport adapter for Async HTTP (aiohttp).
+
+NOTE: This async support is experimental and marked internal. This surface may
+change in minor releases.
+"""
+
+from __future__ import absolute_import
+
+import asyncio
+import functools
+
+import aiohttp
+import six
+import urllib3
+
+from google.auth import exceptions
+from google.auth import transport
+from google.auth.transport import requests
+
+# The default timeout for the async transport. It can be adjusted to suit
+# async requirements; it is currently 60s longer than the sync default.
+_DEFAULT_TIMEOUT = 180 # in seconds
+
+
+class _CombinedResponse(transport.Response):
+ """
+ In order to more closely resemble the `requests` interface, where the raw
+ and the decompressed content can both be accessed, this class lazily reads
+ the stream in `transport.Response` so that both forms can be used.
+
+ Because the ClientSession is created with ``auto_decompress=False``, the
+ gzip and deflate transfer-encodings are not decoded automatically. This
+ wrapper therefore gives access to both the raw and the decoded response
+ bodies, mirroring the sync implementation.
+ """
+
+ def __init__(self, response):
+ self._response = response
+ self._raw_content = None
+
+ def _is_compressed(self):
+ headers = self._response.headers
+ return "Content-Encoding" in headers and (
+ headers["Content-Encoding"] == "gzip"
+ or headers["Content-Encoding"] == "deflate"
+ )
+
+ @property
+ def status(self):
+ return self._response.status
+
+ @property
+ def headers(self):
+ return self._response.headers
+
+ @property
+ def data(self):
+ return self._response.content
+
+ async def raw_content(self):
+ if self._raw_content is None:
+ self._raw_content = await self._response.content.read()
+ return self._raw_content
+
+ async def content(self):
+ # Load raw_content if necessary
+ await self.raw_content()
+ if self._is_compressed():
+ decoder = urllib3.response.MultiDecoder(
+ self._response.headers["Content-Encoding"]
+ )
+ decompressed = decoder.decompress(self._raw_content)
+ return decompressed
+
+ return self._raw_content
+
+
+class _Response(transport.Response):
+ """
+ Requests transport response adapter.
+
+ Args:
+ response (requests.Response): The raw Requests response.
+ """
+
+ def __init__(self, response):
+ self._response = response
+
+ @property
+ def status(self):
+ return self._response.status
+
+ @property
+ def headers(self):
+ return self._response.headers
+
+ @property
+ def data(self):
+ return self._response.content
+
+
+class Request(transport.Request):
+ """Requests request adapter.
+
+ This class is used internally for making requests using asyncio transports
+ in a consistent way. If you use :class:`AuthorizedSession` you do not need
+ to construct or use this class directly.
+
+ This class can be useful if you want to manually refresh a
+ :class:`~google.auth.credentials.Credentials` instance::
+
+ import google.auth.transport.aiohttp_requests
+
+ request = google.auth.transport.aiohttp_requests.Request()
+
+ credentials.refresh(request)
+
+ Args:
+ session (aiohttp.ClientSession): An instance of :class:`aiohttp.ClientSession` used
+ to make HTTP requests. If not specified, a session will be created.
+
+ .. automethod:: __call__
+ """
+
+ def __init__(self, session=None):
+ # Honor a caller-provided session; otherwise one is created lazily
+ # in __call__.
+ self.session = session
+
+ async def __call__(
+ self,
+ url,
+ method="GET",
+ body=None,
+ headers=None,
+ timeout=_DEFAULT_TIMEOUT,
+ **kwargs,
+ ):
+ """
+ Make an HTTP request using aiohttp.
+
+ Args:
+ url (str): The URL to be requested.
+ method (str): The HTTP method to use for the request. Defaults
+ to 'GET'.
+ body (bytes): The payload / body in HTTP request.
+ headers (Mapping[str, str]): Request headers.
+ timeout (Optional[int]): The number of seconds to wait for a
+ response from the server. If not specified or if None, the
+ requests default timeout will be used.
+ kwargs: Additional arguments passed through to the underlying
+ requests :meth:`~requests.Session.request` method.
+
+ Returns:
+ google.auth.transport.Response: The HTTP response.
+
+ Raises:
+ google.auth.exceptions.TransportError: If any exception occurred.
+ """
+
+ try:
+ if self.session is None: # pragma: NO COVER
+ self.session = aiohttp.ClientSession(
+ auto_decompress=False
+ ) # pragma: NO COVER
+ requests._LOGGER.debug("Making request: %s %s", method, url)
+ response = await self.session.request(
+ method, url, data=body, headers=headers, timeout=timeout, **kwargs
+ )
+ return _CombinedResponse(response)
+
+ except aiohttp.ClientError as caught_exc:
+ new_exc = exceptions.TransportError(caught_exc)
+ six.raise_from(new_exc, caught_exc)
+
+ except asyncio.TimeoutError as caught_exc:
+ new_exc = exceptions.TransportError(caught_exc)
+ six.raise_from(new_exc, caught_exc)
+
+
+class AuthorizedSession(aiohttp.ClientSession):
+ """This is an async implementation of the Authorized Session class. We utilize an
+ aiohttp transport instance, and the interface mirrors the google.auth.transport.requests
+ Authorized Session class, except for the change in the transport used in the async use case.
+
+ A Requests Session class with credentials.
+
+ This class is used to perform requests to API endpoints that require
+ authorization::
+
+ from google.auth.transport import aiohttp_requests
+
+ async with aiohttp_requests.AuthorizedSession(credentials) as authed_session:
+ response = await authed_session.request(
+ 'GET', 'https://www.googleapis.com/storage/v1/b')
+
+ The underlying :meth:`request` implementation handles adding the
+ credentials' headers to the request and refreshing credentials as needed.
+
+ Args:
+ credentials (google.auth._credentials_async.Credentials): The credentials to
+ add to the request.
+ refresh_status_codes (Sequence[int]): Which HTTP status codes indicate
+ that credentials should be refreshed and the request should be
+ retried.
+ max_refresh_attempts (int): The maximum number of times to attempt to
+ refresh the credentials and retry the request.
+ refresh_timeout (Optional[int]): The timeout value in seconds for
+ credential refresh HTTP requests.
+ auth_request (google.auth.transport.aiohttp_requests.Request):
+ (Optional) An instance of
+ :class:`~google.auth.transport.aiohttp_requests.Request` used when
+ refreshing credentials. If not passed,
+ an instance of :class:`~google.auth.transport.aiohttp_requests.Request`
+ is created.
+ """
+
+ def __init__(
+ self,
+ credentials,
+ refresh_status_codes=transport.DEFAULT_REFRESH_STATUS_CODES,
+ max_refresh_attempts=transport.DEFAULT_MAX_REFRESH_ATTEMPTS,
+ refresh_timeout=None,
+ auth_request=None,
+ auto_decompress=False,
+ ):
+ super(AuthorizedSession, self).__init__()
+ self.credentials = credentials
+ self._refresh_status_codes = refresh_status_codes
+ self._max_refresh_attempts = max_refresh_attempts
+ self._refresh_timeout = refresh_timeout
+ self._is_mtls = False
+ self._auth_request = auth_request
+ self._auth_request_session = None
+ self._loop = asyncio.get_event_loop()
+ self._refresh_lock = asyncio.Lock()
+ self._auto_decompress = auto_decompress
+
+ async def request(
+ self,
+ method,
+ url,
+ data=None,
+ headers=None,
+ max_allowed_time=None,
+ timeout=_DEFAULT_TIMEOUT,
+ auto_decompress=False,
+ **kwargs,
+ ):
+
+ """Implementation of Authorized Session aiohttp request.
+
+ Args:
+ method: The http request method used (e.g. GET, PUT, DELETE)
+
+ url: The url at which the http request is sent.
+
+ data, headers: These fields parallel the data and headers fields of a
+ regular http request; the aiohttp client session passes them through
+ to the underlying request.
+
+ timeout (Optional[Union[float, aiohttp.ClientTimeout]]):
+ The amount of time in seconds to wait for the server response
+ with each individual request.
+
+ Can also be passed as an `aiohttp.ClientTimeout` object.
+
+ max_allowed_time (Optional[float]):
+ If the method runs longer than this, a ``Timeout`` exception is
+ automatically raised. Unlike the ``timeout`` parameter, this
+ value applies to the total method execution time, even if
+ multiple requests are made under the hood.
+
+ Note that it is not guaranteed that the timeout error is
+ raised at ``max_allowed_time``. It might take longer, for
+ example, if an underlying request takes a lot of time but the
+ request itself does not time out, e.g. if a large file is
+ being transmitted. The timeout error will be raised after
+ such a request completes.
+ """
+ # Headers may come in as bytes, which is not the expected behavior: the
+ # resumable media libraries in some cases expect str header values, but
+ # some operations return them as bytes. Decode defensively here.
+ if headers:
+ for key in headers.keys():
+ if type(headers[key]) is bytes:
+ headers[key] = headers[key].decode("utf-8")
+
+ async with aiohttp.ClientSession(
+ auto_decompress=self._auto_decompress
+ ) as self._auth_request_session:
+ auth_request = Request(self._auth_request_session)
+ self._auth_request = auth_request
+
+ # Use a kwarg for this instead of an attribute to maintain
+ # thread-safety.
+ _credential_refresh_attempt = kwargs.pop("_credential_refresh_attempt", 0)
+ # Make a copy of the headers. They will be modified by the credentials
+ # and we want to pass the original headers if we recurse.
+ request_headers = headers.copy() if headers is not None else {}
+
+ # Do not apply the timeout unconditionally in order to not override the
+ # _auth_request's default timeout.
+ auth_request = (
+ self._auth_request
+ if timeout is None
+ else functools.partial(self._auth_request, timeout=timeout)
+ )
+
+ remaining_time = max_allowed_time
+
+ with requests.TimeoutGuard(remaining_time, asyncio.TimeoutError) as guard:
+ await self.credentials.before_request(
+ auth_request, method, url, request_headers
+ )
+
+ with requests.TimeoutGuard(remaining_time, asyncio.TimeoutError) as guard:
+ response = await super(AuthorizedSession, self).request(
+ method,
+ url,
+ data=data,
+ headers=request_headers,
+ timeout=timeout,
+ **kwargs,
+ )
+
+ remaining_time = guard.remaining_timeout
+
+ if (
+ response.status in self._refresh_status_codes
+ and _credential_refresh_attempt < self._max_refresh_attempts
+ ):
+
+ requests._LOGGER.info(
+ "Refreshing credentials due to a %s response. Attempt %s/%s.",
+ response.status,
+ _credential_refresh_attempt + 1,
+ self._max_refresh_attempts,
+ )
+
+ # Do not apply the timeout unconditionally in order to not override the
+ # _auth_request's default timeout.
+ auth_request = (
+ self._auth_request
+ if timeout is None
+ else functools.partial(self._auth_request, timeout=timeout)
+ )
+
+ with requests.TimeoutGuard(
+ remaining_time, asyncio.TimeoutError
+ ) as guard:
+ async with self._refresh_lock:
+ await self._loop.run_in_executor(
+ None, self.credentials.refresh, auth_request
+ )
+
+ remaining_time = guard.remaining_timeout
+
+ return await self.request(
+ method,
+ url,
+ data=data,
+ headers=headers,
+ max_allowed_time=remaining_time,
+ timeout=timeout,
+ _credential_refresh_attempt=_credential_refresh_attempt + 1,
+ **kwargs,
+ )
+
+ return response
diff --git a/venv/Lib/site-packages/google/auth/transport/_http_client.py b/venv/Lib/site-packages/google/auth/transport/_http_client.py
new file mode 100644
index 000000000..c153763ef
--- /dev/null
+++ b/venv/Lib/site-packages/google/auth/transport/_http_client.py
@@ -0,0 +1,115 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Transport adapter for http.client, for internal use only."""
+
+import logging
+import socket
+
+import six
+from six.moves import http_client
+from six.moves import urllib
+
+from google.auth import exceptions
+from google.auth import transport
+
+_LOGGER = logging.getLogger(__name__)
+
+
+class Response(transport.Response):
+ """http.client transport response adapter.
+
+ Args:
+ response (http.client.HTTPResponse): The raw http client response.
+ """
+
+ def __init__(self, response):
+ self._status = response.status
+ self._headers = {key.lower(): value for key, value in response.getheaders()}
+ self._data = response.read()
+
+ @property
+ def status(self):
+ return self._status
+
+ @property
+ def headers(self):
+ return self._headers
+
+ @property
+ def data(self):
+ return self._data
+
+
+class Request(transport.Request):
+ """http.client transport request adapter."""
+
+ def __call__(
+ self, url, method="GET", body=None, headers=None, timeout=None, **kwargs
+ ):
+ """Make an HTTP request using http.client.
+
+ Args:
+ url (str): The URI to be requested.
+ method (str): The HTTP method to use for the request. Defaults
+ to 'GET'.
+ body (bytes): The payload / body in HTTP request.
+ headers (Mapping): Request headers.
+ timeout (Optional[int]): The number of seconds to wait for a
+ response from the server. If not specified or if None, the
+ socket global default timeout will be used.
+ kwargs: Additional arguments passed through to the underlying
+ :meth:`~http.client.HTTPConnection.request` method.
+
+ Returns:
+ Response: The HTTP response.
+
+ Raises:
+ google.auth.exceptions.TransportError: If any exception occurred.
+ """
+ # socket._GLOBAL_DEFAULT_TIMEOUT is the default in http.client.
+ if timeout is None:
+ timeout = socket._GLOBAL_DEFAULT_TIMEOUT
+
+ # http.client doesn't allow None as the headers argument.
+ if headers is None:
+ headers = {}
+
+ # http.client needs the host and path parts specified separately.
+ parts = urllib.parse.urlsplit(url)
+ path = urllib.parse.urlunsplit(
+ ("", "", parts.path, parts.query, parts.fragment)
+ )
+
+ if parts.scheme != "http":
+ raise exceptions.TransportError(
+ "http.client transport only supports the http scheme, {}"
+ "was specified".format(parts.scheme)
+ )
+
+ connection = http_client.HTTPConnection(parts.netloc, timeout=timeout)
+
+ try:
+ _LOGGER.debug("Making request: %s %s", method, url)
+
+ connection.request(method, path, body=body, headers=headers, **kwargs)
+ response = connection.getresponse()
+ return Response(response)
+
+ except (http_client.HTTPException, socket.error) as caught_exc:
+ new_exc = exceptions.TransportError(caught_exc)
+ six.raise_from(new_exc, caught_exc)
+
+ finally:
+ connection.close()
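+
+
+# Usage sketch (hedged; the URL is illustrative and must use plain http):
+#
+#     request = Request()
+#     response = request("http://example.com/", method="GET")
+#     print(response.status, response.data)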
diff --git a/venv/Lib/site-packages/google/auth/transport/_mtls_helper.py b/venv/Lib/site-packages/google/auth/transport/_mtls_helper.py
new file mode 100644
index 000000000..388ae3c15
--- /dev/null
+++ b/venv/Lib/site-packages/google/auth/transport/_mtls_helper.py
@@ -0,0 +1,250 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helper functions for getting mTLS cert and key."""
+
+import json
+import logging
+from os import path
+import re
+import subprocess
+
+import six
+
+from google.auth import exceptions
+
+CONTEXT_AWARE_METADATA_PATH = "~/.secureConnect/context_aware_metadata.json"
+_CERT_PROVIDER_COMMAND = "cert_provider_command"
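+# Expected shape of the context aware metadata file (a hedged example; the
+# command path is illustrative):
+#
+#     {
+#         "cert_provider_command": ["/path/to/cert_provider", "--some_flag"]
+#     }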
+_CERT_REGEX = re.compile(
+ b"-----BEGIN CERTIFICATE-----.+-----END CERTIFICATE-----\r?\n?", re.DOTALL
+)
+
+# support various format of key files, e.g.
+# "-----BEGIN PRIVATE KEY-----...",
+# "-----BEGIN EC PRIVATE KEY-----...",
+# "-----BEGIN RSA PRIVATE KEY-----..."
+# "-----BEGIN ENCRYPTED PRIVATE KEY-----"
+_KEY_REGEX = re.compile(
+ b"-----BEGIN [A-Z ]*PRIVATE KEY-----.+-----END [A-Z ]*PRIVATE KEY-----\r?\n?",
+ re.DOTALL,
+)
+
+_LOGGER = logging.getLogger(__name__)
+
+
+_PASSPHRASE_REGEX = re.compile(
+ b"-----BEGIN PASSPHRASE-----(.+)-----END PASSPHRASE-----", re.DOTALL
+)
+
+
+def _check_dca_metadata_path(metadata_path):
+ """Checks for context aware metadata. If it exists, returns the absolute path;
+ otherwise returns None.
+
+ Args:
+ metadata_path (str): context aware metadata path.
+
+ Returns:
+ str: absolute path if exists and None otherwise.
+ """
+ metadata_path = path.expanduser(metadata_path)
+ if not path.exists(metadata_path):
+ _LOGGER.debug("%s is not found, skip client SSL authentication.", metadata_path)
+ return None
+ return metadata_path
+
+
+def _read_dca_metadata_file(metadata_path):
+ """Loads context aware metadata from the given path.
+
+ Args:
+ metadata_path (str): context aware metadata path.
+
+ Returns:
+ Dict[str, str]: The metadata.
+
+ Raises:
+ google.auth.exceptions.ClientCertError: If failed to parse metadata as JSON.
+ """
+ try:
+ with open(metadata_path) as f:
+ metadata = json.load(f)
+ except ValueError as caught_exc:
+ new_exc = exceptions.ClientCertError(caught_exc)
+ six.raise_from(new_exc, caught_exc)
+
+ return metadata
+
+
+def _run_cert_provider_command(command, expect_encrypted_key=False):
+ """Run the provided command, and return client side mTLS cert, key and
+ passphrase.
+
+ Args:
+ command (List[str]): cert provider command.
+ expect_encrypted_key (bool): If encrypted private key is expected.
+
+ Returns:
+ Tuple[bytes, bytes, bytes]: client certificate bytes in PEM format, key
+ bytes in PEM format and passphrase bytes.
+
+ Raises:
+ google.auth.exceptions.ClientCertError: if problems occur when running
+ the cert provider command or generating cert, key and passphrase.
+ """
+ try:
+ process = subprocess.Popen(
+ command, stdout=subprocess.PIPE, stderr=subprocess.PIPE
+ )
+ stdout, stderr = process.communicate()
+ except OSError as caught_exc:
+ new_exc = exceptions.ClientCertError(caught_exc)
+ six.raise_from(new_exc, caught_exc)
+
+ # Check cert provider command execution error.
+ if process.returncode != 0:
+ raise exceptions.ClientCertError(
+ "Cert provider command returns non-zero status code %s" % process.returncode
+ )
+
+ # Extract certificate (chain), key and passphrase.
+ cert_match = re.findall(_CERT_REGEX, stdout)
+ if len(cert_match) != 1:
+ raise exceptions.ClientCertError("Client SSL certificate is missing or invalid")
+ key_match = re.findall(_KEY_REGEX, stdout)
+ if len(key_match) != 1:
+ raise exceptions.ClientCertError("Client SSL key is missing or invalid")
+ passphrase_match = re.findall(_PASSPHRASE_REGEX, stdout)
+
+ if expect_encrypted_key:
+ if len(passphrase_match) != 1:
+ raise exceptions.ClientCertError("Passphrase is missing or invalid")
+ if b"ENCRYPTED" not in key_match[0]:
+ raise exceptions.ClientCertError("Encrypted private key is expected")
+ return cert_match[0], key_match[0], passphrase_match[0].strip()
+
+ if b"ENCRYPTED" in key_match[0]:
+ raise exceptions.ClientCertError("Encrypted private key is not expected")
+ if len(passphrase_match) > 0:
+ raise exceptions.ClientCertError("Passphrase is not expected")
+ return cert_match[0], key_match[0], None
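+
+# For reference (abbreviated, illustrative): the provider's stdout is
+# expected to contain PEM blocks matching the regexes above, e.g.
+#     -----BEGIN CERTIFICATE-----...-----END CERTIFICATE-----
+#     -----BEGIN PRIVATE KEY-----...-----END PRIVATE KEY-----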
+
+
+def get_client_ssl_credentials(generate_encrypted_key=False):
+ """Returns the client side certificate, private key and passphrase.
+
+ Args:
+ generate_encrypted_key (bool): If set to True, encrypted private key
+ and passphrase will be generated; otherwise, unencrypted private key
+ will be generated and passphrase will be None.
+
+ Returns:
+ Tuple[bool, bytes, bytes, bytes]:
+ A boolean indicating if cert, key and passphrase are obtained, the
+ cert bytes and key bytes both in PEM format, and passphrase bytes.
+
+ Raises:
+ google.auth.exceptions.ClientCertError: if problems occur when getting
+ the cert, key and passphrase.
+ """
+ metadata_path = _check_dca_metadata_path(CONTEXT_AWARE_METADATA_PATH)
+
+ if metadata_path:
+ metadata_json = _read_dca_metadata_file(metadata_path)
+
+ if _CERT_PROVIDER_COMMAND not in metadata_json:
+ raise exceptions.ClientCertError("Cert provider command is not found")
+
+ command = metadata_json[_CERT_PROVIDER_COMMAND]
+
+ if generate_encrypted_key and "--with_passphrase" not in command:
+ command.append("--with_passphrase")
+
+ # Execute the command.
+ cert, key, passphrase = _run_cert_provider_command(
+ command, expect_encrypted_key=generate_encrypted_key
+ )
+ return True, cert, key, passphrase
+
+ return False, None, None, None
+
+
+def get_client_cert_and_key(client_cert_callback=None):
+ """Returns the client side certificate and private key. The function first
+ tries to get certificate and key from client_cert_callback; if the callback
+ is None or doesn't provide certificate and key, the function tries application
+ default SSL credentials.
+
+ Args:
+ client_cert_callback (Optional[Callable[[], (bytes, bytes)]]): An
+ optional callback which returns client certificate bytes and private
+ key bytes both in PEM format.
+
+ Returns:
+ Tuple[bool, bytes, bytes]:
+ A boolean indicating if cert and key are obtained, the cert bytes
+ and key bytes both in PEM format.
+
+ Raises:
+ google.auth.exceptions.ClientCertError: if problems occur when getting
+ the cert and key.
+ """
+ if client_cert_callback:
+ cert, key = client_cert_callback()
+ return True, cert, key
+
+ has_cert, cert, key, _ = get_client_ssl_credentials(generate_encrypted_key=False)
+ return has_cert, cert, key
+
+
+def decrypt_private_key(key, passphrase):
+ """A helper function to decrypt the private key with the given passphrase.
+ The google-auth library doesn't support passphrase-protected private keys
+ for mutual TLS channels. This helper function can be used to decrypt a
+ passphrase-protected private key in order to establish a mutual TLS
+ channel.
+
+ For example, if you have a function which produces client cert, passphrase
+ protected private key and passphrase, you can convert it to a client cert
+ callback function accepted by google-auth::
+
+ from google.auth.transport import _mtls_helper
+
+ def your_client_cert_function():
+ return cert, encrypted_key, passphrase
+
+ # callback accepted by google-auth for mutual TLS channel.
+ def client_cert_callback():
+ cert, encrypted_key, passphrase = your_client_cert_function()
+ decrypted_key = _mtls_helper.decrypt_private_key(encrypted_key,
+ passphrase)
+ return cert, decrypted_key
+
+ Args:
+ key (bytes): The private key bytes in PEM format.
+ passphrase (bytes): The passphrase bytes.
+
+ Returns:
+ bytes: The decrypted private key in PEM format.
+
+ Raises:
+ ImportError: If pyOpenSSL is not installed.
+ OpenSSL.crypto.Error: If there is any problem decrypting the private key.
+ """
+ from OpenSSL import crypto
+
+ # First convert encrypted_key_bytes to PKey object
+ pkey = crypto.load_privatekey(crypto.FILETYPE_PEM, key, passphrase=passphrase)
+
+ # Then dump the decrypted key bytes
+ return crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey)
diff --git a/venv/Lib/site-packages/google/auth/transport/grpc.py b/venv/Lib/site-packages/google/auth/transport/grpc.py
new file mode 100644
index 000000000..ab7d0dbf8
--- /dev/null
+++ b/venv/Lib/site-packages/google/auth/transport/grpc.py
@@ -0,0 +1,334 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Authorization support for gRPC."""
+
+from __future__ import absolute_import
+
+import logging
+import os
+
+import six
+
+from google.auth import environment_vars
+from google.auth import exceptions
+from google.auth.transport import _mtls_helper
+
+try:
+ import grpc
+except ImportError as caught_exc: # pragma: NO COVER
+ six.raise_from(
+ ImportError(
+ "gRPC is not installed, please install the grpcio package "
+ "to use the gRPC transport."
+ ),
+ caught_exc,
+ )
+
+_LOGGER = logging.getLogger(__name__)
+
+
+class AuthMetadataPlugin(grpc.AuthMetadataPlugin):
+ """A `gRPC AuthMetadataPlugin`_ that inserts the credentials into each
+ request.
+
+ .. _gRPC AuthMetadataPlugin:
+ http://www.grpc.io/grpc/python/grpc.html#grpc.AuthMetadataPlugin
+
+ Args:
+ credentials (google.auth.credentials.Credentials): The credentials to
+ add to requests.
+ request (google.auth.transport.Request): A HTTP transport request
+ object used to refresh credentials as needed.
+ """
+
+ def __init__(self, credentials, request):
+ # pylint: disable=no-value-for-parameter
+ # pylint doesn't realize that the super method takes no arguments
+ # because this class is the same name as the superclass.
+ super(AuthMetadataPlugin, self).__init__()
+ self._credentials = credentials
+ self._request = request
+
+ def _get_authorization_headers(self, context):
+ """Gets the authorization headers for a request.
+
+ Returns:
+ Sequence[Tuple[str, str]]: A list of request headers (key, value)
+ to add to the request.
+ """
+ headers = {}
+ self._credentials.before_request(
+ self._request, context.method_name, context.service_url, headers
+ )
+
+ return list(six.iteritems(headers))
+
+ def __call__(self, context, callback):
+ """Passes authorization metadata into the given callback.
+
+ Args:
+ context (grpc.AuthMetadataContext): The RPC context.
+ callback (grpc.AuthMetadataPluginCallback): The callback that will
+ be invoked to pass in the authorization metadata.
+ """
+ callback(self._get_authorization_headers(context), None)
+
+
+def secure_authorized_channel(
+ credentials,
+ request,
+ target,
+ ssl_credentials=None,
+ client_cert_callback=None,
+ **kwargs
+):
+ """Creates a secure authorized gRPC channel.
+
+ This creates a channel with SSL and :class:`AuthMetadataPlugin`. This
+ channel can be used to create a stub that can make authorized requests.
+ Users can configure a client certificate or rely on device certificates to
+ establish a mutual TLS channel, if the `GOOGLE_API_USE_CLIENT_CERTIFICATE`
+ environment variable is explicitly set to `true`.
+
+ Example::
+
+ import google.auth
+ import google.auth.transport.grpc
+ import google.auth.transport.requests
+ from google.cloud.speech.v1 import cloud_speech_pb2
+
+ # Get credentials.
+ credentials, _ = google.auth.default()
+
+ # Get an HTTP request function to refresh credentials.
+ request = google.auth.transport.requests.Request()
+
+ # Create a channel.
+ channel = google.auth.transport.grpc.secure_authorized_channel(
+ credentials, regular_endpoint, request,
+ ssl_credentials=grpc.ssl_channel_credentials())
+
+ # Use the channel to create a stub.
+ cloud_speech_pb2.create_Speech_stub(channel)
+
+ Usage:
+
+ There are several options for creating a channel, depending on whether you
+ want a regular TLS channel or a mutual TLS channel.
+
+ First let's list the endpoints (regular vs mutual TLS) to choose from::
+
+ regular_endpoint = 'speech.googleapis.com:443'
+ mtls_endpoint = 'speech.mtls.googleapis.com:443'
+
+ Option 1: create a regular (non-mutual) TLS channel by explicitly setting
+ the ssl_credentials::
+
+ regular_ssl_credentials = grpc.ssl_channel_credentials()
+
+ channel = google.auth.transport.grpc.secure_authorized_channel(
+ credentials, regular_endpoint, request,
+ ssl_credentials=regular_ssl_credentials)
+
+ Option 2: create a mutual TLS channel by calling a callback which returns
+ the client side certificate and the key (Note that
+ `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable must be explicitly
+ set to `true`)::
+
+ def my_client_cert_callback():
+ code_to_load_client_cert_and_key()
+ if loaded:
+ return (pem_cert_bytes, pem_key_bytes)
+ raise MyClientCertFailureException()
+
+ try:
+ channel = google.auth.transport.grpc.secure_authorized_channel(
+ credentials, mtls_endpoint, request,
+ client_cert_callback=my_client_cert_callback)
+ except MyClientCertFailureException:
+ # handle the exception
+
+ Option 3: use application default SSL credentials. It searches and uses
+ the command in a context aware metadata file, which is available on devices
+ with endpoint verification support (Note that
+ `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable must be explicitly
+ set to `true`).
+ See https://cloud.google.com/endpoint-verification/docs/overview::
+
+ try:
+ default_ssl_credentials = SslCredentials()
+ except:
+ # Exception can be raised if the context aware metadata is malformed.
+ # See :class:`SslCredentials` for the possible exceptions.
+
+ # Choose the endpoint based on the SSL credentials type.
+ if default_ssl_credentials.is_mtls:
+ endpoint_to_use = mtls_endpoint
+ else:
+ endpoint_to_use = regular_endpoint
+ channel = google.auth.transport.grpc.secure_authorized_channel(
+ credentials, endpoint_to_use, request,
+ ssl_credentials=default_ssl_credentials)
+
+ Option 4: not setting ssl_credentials and client_cert_callback. For devices
+ without endpoint verification support, or if the
+ `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not `true`, a
+ regular TLS channel is created; otherwise, a mutual TLS channel is created.
+ The call should be wrapped in a try/except block in case the context aware
+ metadata is malformed.
+
+ The following code uses regular_endpoint; it works the same whether the
+ created channel is regular or mutual TLS. A regular endpoint ignores the
+ client certificate and key::
+
+ channel = google.auth.transport.grpc.secure_authorized_channel(
+ credentials, regular_endpoint, request)
+
+ The following code uses mtls_endpoint. If the created channel is regular,
+ and the API mtls_endpoint is configured to require client SSL credentials,
+ API calls using this channel will be rejected::
+
+ channel = google.auth.transport.grpc.secure_authorized_channel(
+ credentials, mtls_endpoint, request)
+
+ Args:
+ credentials (google.auth.credentials.Credentials): The credentials to
+ add to requests.
+ request (google.auth.transport.Request): A HTTP transport request
+ object used to refresh credentials as needed. Even though gRPC
+ is a separate transport, there's no way to refresh the credentials
+ without using a standard http transport.
+ target (str): The host and port of the service.
+ ssl_credentials (grpc.ChannelCredentials): Optional SSL channel
+ credentials. This can be used to specify different certificates.
+ This argument is mutually exclusive with client_cert_callback;
+ providing both will raise an exception.
+ If ssl_credentials and client_cert_callback are None, application
+ default SSL credentials are used if `GOOGLE_API_USE_CLIENT_CERTIFICATE`
+ environment variable is explicitly set to `true`, otherwise one way TLS
+ SSL credentials are used.
+ client_cert_callback (Callable[[], (bytes, bytes)]): Optional
+ callback function to obtain client certificate and key for mutual TLS
+ connection. This argument is mutually exclusive with
+ ssl_credentials; providing both will raise an exception.
+ This argument does nothing unless `GOOGLE_API_USE_CLIENT_CERTIFICATE`
+ environment variable is explicitly set to `true`.
+ kwargs: Additional arguments to pass to :func:`grpc.secure_channel`.
+
+ Returns:
+ grpc.Channel: The created gRPC channel.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS channel
+ creation failed for any reason.
+ """
+ # Create the metadata plugin for inserting the authorization header.
+ metadata_plugin = AuthMetadataPlugin(credentials, request)
+
+ # Create a set of grpc.CallCredentials using the metadata plugin.
+ google_auth_credentials = grpc.metadata_call_credentials(metadata_plugin)
+
+ if ssl_credentials and client_cert_callback:
+ raise ValueError(
+ "Received both ssl_credentials and client_cert_callback; "
+ "these are mutually exclusive."
+ )
+
+ # If SSL credentials are not explicitly set, try client_cert_callback and ADC.
+ if not ssl_credentials:
+ use_client_cert = os.getenv(
+ environment_vars.GOOGLE_API_USE_CLIENT_CERTIFICATE, "false"
+ )
+ if use_client_cert == "true" and client_cert_callback:
+ # Use the callback if provided.
+ cert, key = client_cert_callback()
+ ssl_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ elif use_client_cert == "true":
+ # Use application default SSL credentials.
+ adc_ssl_credentials = SslCredentials()
+ ssl_credentials = adc_ssl_credentials.ssl_credentials
+ else:
+ ssl_credentials = grpc.ssl_channel_credentials()
+
+ # Combine the ssl credentials and the authorization credentials.
+ composite_credentials = grpc.composite_channel_credentials(
+ ssl_credentials, google_auth_credentials
+ )
+
+ return grpc.secure_channel(target, composite_credentials, **kwargs)
+
+
+class SslCredentials:
+ """Class for application default SSL credentials.
+
+ The behavior is controlled by `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment
+ variable whose default value is `false`. Client certificate will not be used
+ unless the environment variable is explicitly set to `true`. See
+ https://google.aip.dev/auth/4114
+
+ If the environment variable is `true`, then for devices with endpoint verification
+ support, a device certificate will be automatically loaded and mutual TLS will
+ be established.
+ See https://cloud.google.com/endpoint-verification/docs/overview.
+ """
+
+ def __init__(self):
+ use_client_cert = os.getenv(
+ environment_vars.GOOGLE_API_USE_CLIENT_CERTIFICATE, "false"
+ )
+ if use_client_cert != "true":
+ self._is_mtls = False
+ else:
+ # Load client SSL credentials.
+ metadata_path = _mtls_helper._check_dca_metadata_path(
+ _mtls_helper.CONTEXT_AWARE_METADATA_PATH
+ )
+ self._is_mtls = metadata_path is not None
+
+ @property
+ def ssl_credentials(self):
+ """Get the created SSL channel credentials.
+
+ For devices with endpoint verification support, if the device certificate
+ loading has any problems, corresponding exceptions will be raised. For
+ a device without endpoint verification support, no exceptions will be
+ raised.
+
+ Returns:
+ grpc.ChannelCredentials: The created grpc channel credentials.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS channel
+ creation failed for any reason.
+ """
+ if self._is_mtls:
+ try:
+ _, cert, key, _ = _mtls_helper.get_client_ssl_credentials()
+ self._ssl_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ except exceptions.ClientCertError as caught_exc:
+ new_exc = exceptions.MutualTLSChannelError(caught_exc)
+ six.raise_from(new_exc, caught_exc)
+ else:
+ self._ssl_credentials = grpc.ssl_channel_credentials()
+
+ return self._ssl_credentials
+
+ @property
+ def is_mtls(self):
+ """Indicates if the created SSL channel credentials is mutual TLS."""
+ return self._is_mtls
diff --git a/venv/Lib/site-packages/google/auth/transport/mtls.py b/venv/Lib/site-packages/google/auth/transport/mtls.py
new file mode 100644
index 000000000..b40bfbedf
--- /dev/null
+++ b/venv/Lib/site-packages/google/auth/transport/mtls.py
@@ -0,0 +1,105 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Utilites for mutual TLS."""
+
+import six
+
+from google.auth import exceptions
+from google.auth.transport import _mtls_helper
+
+
+def has_default_client_cert_source():
+ """Check if default client SSL credentials exists on the device.
+
+ Returns:
+ bool: indicating if the default client cert source exists.
+ """
+ metadata_path = _mtls_helper._check_dca_metadata_path(
+ _mtls_helper.CONTEXT_AWARE_METADATA_PATH
+ )
+ return metadata_path is not None
+
+
+def default_client_cert_source():
+ """Get a callback which returns the default client SSL credentials.
+
+ Returns:
+ Callable[[], [bytes, bytes]]: A callback which returns the default
+ client certificate bytes and private key bytes, both in PEM format.
+
+ Raises:
+ google.auth.exceptions.DefaultClientCertSourceError: If the default
+ client SSL credentials don't exist or are malformed.
+ """
+ if not has_default_client_cert_source():
+ raise exceptions.MutualTLSChannelError(
+ "Default client cert source doesn't exist"
+ )
+
+ def callback():
+ try:
+ _, cert_bytes, key_bytes = _mtls_helper.get_client_cert_and_key()
+ except (OSError, RuntimeError, ValueError) as caught_exc:
+ new_exc = exceptions.MutualTLSChannelError(caught_exc)
+ six.raise_from(new_exc, caught_exc)
+
+ return cert_bytes, key_bytes
+
+ return callback
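+
+# Usage sketch (hedged; assumes a device with endpoint verification support):
+#
+#     if has_default_client_cert_source():
+#         callback = default_client_cert_source()
+#         cert_bytes, key_bytes = callback()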
+
+
+def default_client_encrypted_cert_source(cert_path, key_path):
+ """Get a callback which returns the default encrpyted client SSL credentials.
+
+ Args:
+ cert_path (str): The cert file path. The default client certificate will
+ be written to this file when the returned callback is called.
+ key_path (str): The key file path. The default encrypted client key will
+ be written to this file when the returned callback is called.
+
+ Returns:
+ Callable[[], [str, str, bytes]]: A callback which generates the default
+ client certificate, encrypted private key and passphrase. It writes
+ the certificate and private key into the cert_path and key_path, and
+ returns the cert_path, key_path and passphrase bytes.
+
+ Raises:
+ google.auth.exceptions.DefaultClientCertSourceError: If any problem
+ occurs when loading or saving the client certificate and key.
+ """
+ if not has_default_client_cert_source():
+ raise exceptions.MutualTLSChannelError(
+ "Default client encrypted cert source doesn't exist"
+ )
+
+ def callback():
+ try:
+ (
+ _,
+ cert_bytes,
+ key_bytes,
+ passphrase_bytes,
+ ) = _mtls_helper.get_client_ssl_credentials(generate_encrypted_key=True)
+ with open(cert_path, "wb") as cert_file:
+ cert_file.write(cert_bytes)
+ with open(key_path, "wb") as key_file:
+ key_file.write(key_bytes)
+ except (exceptions.ClientCertError, OSError) as caught_exc:
+ new_exc = exceptions.MutualTLSChannelError(caught_exc)
+ six.raise_from(new_exc, caught_exc)
+
+ return cert_path, key_path, passphrase_bytes
+
+ return callback
diff --git a/venv/Lib/site-packages/google/auth/transport/requests.py b/venv/Lib/site-packages/google/auth/transport/requests.py
new file mode 100644
index 000000000..9a2f3afc7
--- /dev/null
+++ b/venv/Lib/site-packages/google/auth/transport/requests.py
@@ -0,0 +1,521 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Transport adapter for Requests."""
+
+from __future__ import absolute_import
+
+import functools
+import logging
+import numbers
+import os
+import time
+
+try:
+ import requests
+except ImportError as caught_exc: # pragma: NO COVER
+ import six
+
+ six.raise_from(
+ ImportError(
+ "The requests library is not installed, please install the "
+ "requests package to use the requests transport."
+ ),
+ caught_exc,
+ )
+import requests.adapters # pylint: disable=ungrouped-imports
+import requests.exceptions # pylint: disable=ungrouped-imports
+from requests.packages.urllib3.util.ssl_ import (
+ create_urllib3_context,
+) # pylint: disable=ungrouped-imports
+import six # pylint: disable=ungrouped-imports
+
+from google.auth import environment_vars
+from google.auth import exceptions
+from google.auth import transport
+import google.auth.transport._mtls_helper
+
+_LOGGER = logging.getLogger(__name__)
+
+_DEFAULT_TIMEOUT = 120 # in seconds
+
+
+class _Response(transport.Response):
+ """Requests transport response adapter.
+
+ Args:
+ response (requests.Response): The raw Requests response.
+ """
+
+ def __init__(self, response):
+ self._response = response
+
+ @property
+ def status(self):
+ return self._response.status_code
+
+ @property
+ def headers(self):
+ return self._response.headers
+
+ @property
+ def data(self):
+ return self._response.content
+
+
+class TimeoutGuard(object):
+ """A context manager raising an error if the suite execution took too long.
+
+ Args:
+ timeout (Union[None, float, Tuple[float, float]]):
+ The maximum number of seconds a suite can run without the context
+ manager raising a timeout exception on exit. If passed as a tuple,
+ the smaller of the values is taken as a timeout. If ``None``, a
+ timeout error is never raised.
+ timeout_error_type (Optional[Exception]):
+ The type of the error to raise on timeout. Defaults to
+ :class:`requests.exceptions.Timeout`.
+ """
+
+ def __init__(self, timeout, timeout_error_type=requests.exceptions.Timeout):
+ self._timeout = timeout
+ self.remaining_timeout = timeout
+ self._timeout_error_type = timeout_error_type
+
+ def __enter__(self):
+ self._start = time.time()
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ if exc_value:
+ return # let the error bubble up automatically
+
+ if self._timeout is None:
+ return # nothing to do, the timeout was not specified
+
+ elapsed = time.time() - self._start
+ deadline_hit = False
+
+ if isinstance(self._timeout, numbers.Number):
+ self.remaining_timeout = self._timeout - elapsed
+ deadline_hit = self.remaining_timeout <= 0
+ else:
+ self.remaining_timeout = tuple(x - elapsed for x in self._timeout)
+ deadline_hit = min(self.remaining_timeout) <= 0
+
+ if deadline_hit:
+ raise self._timeout_error_type()
+
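A short sketch of how ``TimeoutGuard`` threads one deadline through several
sequential steps, as :meth:`AuthorizedSession.request` does further below; the
budget value is illustrative::

    import time

    remaining = 5.0  # total budget in seconds
    with TimeoutGuard(remaining) as guard:
        time.sleep(1)  # first step consumes part of the budget
    remaining = guard.remaining_timeout

    with TimeoutGuard(remaining) as guard:
        time.sleep(1)  # second step runs against what is left
    remaining = guard.remaining_timeout
    # Once the budget is exhausted, __exit__ raises requests.exceptions.Timeout.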
+
+class Request(transport.Request):
+ """Requests request adapter.
+
+ This class is used internally for making requests using various transports
+ in a consistent way. If you use :class:`AuthorizedSession` you do not need
+ to construct or use this class directly.
+
+ This class can be useful if you want to manually refresh a
+ :class:`~google.auth.credentials.Credentials` instance::
+
+ import google.auth.transport.requests
+ import requests
+
+ request = google.auth.transport.requests.Request()
+
+ credentials.refresh(request)
+
+ Args:
+ session (requests.Session): An instance of :class:`requests.Session` used
+ to make HTTP requests. If not specified, a session will be created.
+
+ .. automethod:: __call__
+ """
+
+ def __init__(self, session=None):
+ if not session:
+ session = requests.Session()
+
+ self.session = session
+
+ def __call__(
+ self,
+ url,
+ method="GET",
+ body=None,
+ headers=None,
+ timeout=_DEFAULT_TIMEOUT,
+ **kwargs
+ ):
+ """Make an HTTP request using requests.
+
+ Args:
+ url (str): The URI to be requested.
+ method (str): The HTTP method to use for the request. Defaults
+ to 'GET'.
+ body (bytes): The payload / body in HTTP request.
+ headers (Mapping[str, str]): Request headers.
+ timeout (Optional[int]): The number of seconds to wait for a
+ response from the server. If not specified or if None, the
+ requests default timeout will be used.
+ kwargs: Additional arguments passed through to the underlying
+ requests :meth:`~requests.Session.request` method.
+
+ Returns:
+ google.auth.transport.Response: The HTTP response.
+
+ Raises:
+ google.auth.exceptions.TransportError: If any exception occurred.
+ """
+ try:
+ _LOGGER.debug("Making request: %s %s", method, url)
+ response = self.session.request(
+ method, url, data=body, headers=headers, timeout=timeout, **kwargs
+ )
+ return _Response(response)
+ except requests.exceptions.RequestException as caught_exc:
+ new_exc = exceptions.TransportError(caught_exc)
+ six.raise_from(new_exc, caught_exc)
+
+
+class _MutualTlsAdapter(requests.adapters.HTTPAdapter):
+ """
+ A TransportAdapter that enables mutual TLS.
+
+ Args:
+ cert (bytes): client certificate in PEM format
+ key (bytes): client private key in PEM format
+
+ Raises:
+ ImportError: if certifi or pyOpenSSL is not installed
+ OpenSSL.crypto.Error: if client cert or key is invalid
+ """
+
+ def __init__(self, cert, key):
+ import certifi
+ from OpenSSL import crypto
+ import urllib3.contrib.pyopenssl
+
+ urllib3.contrib.pyopenssl.inject_into_urllib3()
+
+ pkey = crypto.load_privatekey(crypto.FILETYPE_PEM, key)
+ x509 = crypto.load_certificate(crypto.FILETYPE_PEM, cert)
+
+ ctx_poolmanager = create_urllib3_context()
+ ctx_poolmanager.load_verify_locations(cafile=certifi.where())
+ ctx_poolmanager._ctx.use_certificate(x509)
+ ctx_poolmanager._ctx.use_privatekey(pkey)
+ self._ctx_poolmanager = ctx_poolmanager
+
+ ctx_proxymanager = create_urllib3_context()
+ ctx_proxymanager.load_verify_locations(cafile=certifi.where())
+ ctx_proxymanager._ctx.use_certificate(x509)
+ ctx_proxymanager._ctx.use_privatekey(pkey)
+ self._ctx_proxymanager = ctx_proxymanager
+
+ super(_MutualTlsAdapter, self).__init__()
+
+ def init_poolmanager(self, *args, **kwargs):
+ kwargs["ssl_context"] = self._ctx_poolmanager
+ super(_MutualTlsAdapter, self).init_poolmanager(*args, **kwargs)
+
+ def proxy_manager_for(self, *args, **kwargs):
+ kwargs["ssl_context"] = self._ctx_proxymanager
+ return super(_MutualTlsAdapter, self).proxy_manager_for(*args, **kwargs)
+
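A hedged sketch of mounting the adapter by hand (normally
:meth:`AuthorizedSession.configure_mtls_channel` does this for you); the PEM
file names are assumptions::

    import requests

    with open("client.crt", "rb") as f:
        cert_bytes = f.read()
    with open("client.key", "rb") as f:
        key_bytes = f.read()

    session = requests.Session()
    # Every HTTPS request through this session now offers the client cert.
    session.mount("https://", _MutualTlsAdapter(cert_bytes, key_bytes))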
+
+class AuthorizedSession(requests.Session):
+ """A Requests Session class with credentials.
+
+ This class is used to perform requests to API endpoints that require
+ authorization::
+
+ from google.auth.transport.requests import AuthorizedSession
+
+ authed_session = AuthorizedSession(credentials)
+
+ response = authed_session.request(
+ 'GET', 'https://www.googleapis.com/storage/v1/b')
+
+ The underlying :meth:`request` implementation handles adding the
+ credentials' headers to the request and refreshing credentials as needed.
+
+ This class also supports mutual TLS via the :meth:`configure_mtls_channel`
+ method. In order to use this method, the `GOOGLE_API_USE_CLIENT_CERTIFICATE`
+ environment variable must be explicitly set to `true`; otherwise the method does
+ nothing. Assuming the environment variable is set to `true`, the method behaves
+ in the following manner:
+ If client_cert_callback is provided, client certificate and private
+ key are loaded using the callback; if client_cert_callback is None,
+ application default SSL credentials will be used. Exceptions are raised if
+ there are problems with the certificate, private key, or the loading process,
+ so it should be called within a try/except block.
+
+ First we set the environment variable to `true`, then create an :class:`AuthorizedSession`
+ instance and specify the endpoints::
+
+ regular_endpoint = 'https://pubsub.googleapis.com/v1/projects/{my_project_id}/topics'
+ mtls_endpoint = 'https://pubsub.mtls.googleapis.com/v1/projects/{my_project_id}/topics'
+
+ authed_session = AuthorizedSession(credentials)
+
+ Now we can pass a callback to :meth:`configure_mtls_channel`::
+
+ def my_cert_callback():
+ # some code to load client cert bytes and private key bytes, both in
+ # PEM format.
+ some_code_to_load_client_cert_and_key()
+ if loaded:
+ return cert, key
+ raise MyClientCertFailureException()
+
+ # Always call configure_mtls_channel within a try/except block.
+ try:
+ authed_session.configure_mtls_channel(my_cert_callback)
+ except:
+ # handle exceptions.
+
+ if authed_session.is_mtls:
+ response = authed_session.request('GET', mtls_endpoint)
+ else:
+ response = authed_session.request('GET', regular_endpoint)
+
+ You can alternatively use application default SSL credentials like this::
+
+ try:
+ authed_session.configure_mtls_channel()
+ except:
+ # handle exceptions.
+
+ Args:
+ credentials (google.auth.credentials.Credentials): The credentials to
+ add to the request.
+ refresh_status_codes (Sequence[int]): Which HTTP status codes indicate
+ that credentials should be refreshed and the request should be
+ retried.
+ max_refresh_attempts (int): The maximum number of times to attempt to
+ refresh the credentials and retry the request.
+ refresh_timeout (Optional[int]): The timeout value in seconds for
+ credential refresh HTTP requests.
+ auth_request (google.auth.transport.requests.Request):
+ (Optional) An instance of
+ :class:`~google.auth.transport.requests.Request` used when
+ refreshing credentials. If not passed,
+ an instance of :class:`~google.auth.transport.requests.Request`
+ is created.
+ """
+
+ def __init__(
+ self,
+ credentials,
+ refresh_status_codes=transport.DEFAULT_REFRESH_STATUS_CODES,
+ max_refresh_attempts=transport.DEFAULT_MAX_REFRESH_ATTEMPTS,
+ refresh_timeout=None,
+ auth_request=None,
+ ):
+ super(AuthorizedSession, self).__init__()
+ self.credentials = credentials
+ self._refresh_status_codes = refresh_status_codes
+ self._max_refresh_attempts = max_refresh_attempts
+ self._refresh_timeout = refresh_timeout
+ self._is_mtls = False
+
+ if auth_request is None:
+ auth_request_session = requests.Session()
+
+ # Using an adapter to make HTTP requests robust to network errors.
+ # This adapter retries HTTP requests when network errors occur
+ # and the request seems safely retryable.
+ retry_adapter = requests.adapters.HTTPAdapter(max_retries=3)
+ auth_request_session.mount("https://", retry_adapter)
+
+ # Do not pass `self` as the session here, as it can lead to
+ # infinite recursion.
+ auth_request = Request(auth_request_session)
+
+ # Request instance used by internal methods (for example,
+ # credentials.refresh).
+ self._auth_request = auth_request
+
+ def configure_mtls_channel(self, client_cert_callback=None):
+ """Configure the client certificate and key for SSL connection.
+
+ The function does nothing unless `GOOGLE_API_USE_CLIENT_CERTIFICATE` is
+ explicitly set to `true`. In this case if client certificate and key are
+ successfully obtained (from the given client_cert_callback or from application
+ default SSL credentials), a :class:`_MutualTlsAdapter` instance will be mounted
+ to "https://" prefix.
+
+ Args:
+ client_cert_callback (Optional[Callable[[], (bytes, bytes)]]):
+ The optional callback returns the client certificate and private
+ key bytes both in PEM format.
+ If the callback is None, application default SSL credentials
+ will be used.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS channel
+ creation failed for any reason.
+ """
+ use_client_cert = os.getenv(
+ environment_vars.GOOGLE_API_USE_CLIENT_CERTIFICATE, "false"
+ )
+ if use_client_cert != "true":
+ self._is_mtls = False
+ return
+
+ try:
+ import OpenSSL
+ except ImportError as caught_exc:
+ new_exc = exceptions.MutualTLSChannelError(caught_exc)
+ six.raise_from(new_exc, caught_exc)
+
+ try:
+ (
+ self._is_mtls,
+ cert,
+ key,
+ ) = google.auth.transport._mtls_helper.get_client_cert_and_key(
+ client_cert_callback
+ )
+
+ if self._is_mtls:
+ mtls_adapter = _MutualTlsAdapter(cert, key)
+ self.mount("https://", mtls_adapter)
+ except (
+ exceptions.ClientCertError,
+ ImportError,
+ OpenSSL.crypto.Error,
+ ) as caught_exc:
+ new_exc = exceptions.MutualTLSChannelError(caught_exc)
+ six.raise_from(new_exc, caught_exc)
+
+ def request(
+ self,
+ method,
+ url,
+ data=None,
+ headers=None,
+ max_allowed_time=None,
+ timeout=_DEFAULT_TIMEOUT,
+ **kwargs
+ ):
+ """Implementation of Requests' request.
+
+ Args:
+ timeout (Optional[Union[float, Tuple[float, float]]]):
+ The amount of time in seconds to wait for the server response
+ with each individual request.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ max_allowed_time (Optional[float]):
+ If the method runs longer than this, a ``Timeout`` exception is
+ automatically raised. Unlike the ``timeout`` parameter, this
+ value applies to the total method execution time, even if
+ multiple requests are made under the hood.
+
+ Mind that it is not guaranteed that the timeout error is raised
+ at ``max_allowed_time``. It might take longer, for example, if
+ an underlying request takes a lot of time, but the request
+ itself does not time out, e.g. if a large file is being
+ transmitted. The timeout error will be raised after such a
+ request completes.
+ """
+ # pylint: disable=arguments-differ
+ # Requests has a ton of arguments to request, but only two
+ # (method, url) are required. We pass through all of the other
+ # arguments to super, so no need to exhaustively list them here.
+
+ # Use a kwarg for this instead of an attribute to maintain
+ # thread-safety.
+ _credential_refresh_attempt = kwargs.pop("_credential_refresh_attempt", 0)
+
+ # Make a copy of the headers. They will be modified by the credentials
+ # and we want to pass the original headers if we recurse.
+ request_headers = headers.copy() if headers is not None else {}
+
+ # Do not apply the timeout unconditionally in order to not override the
+ # _auth_request's default timeout.
+ auth_request = (
+ self._auth_request
+ if timeout is None
+ else functools.partial(self._auth_request, timeout=timeout)
+ )
+
+ remaining_time = max_allowed_time
+
+ with TimeoutGuard(remaining_time) as guard:
+ self.credentials.before_request(auth_request, method, url, request_headers)
+ remaining_time = guard.remaining_timeout
+
+ with TimeoutGuard(remaining_time) as guard:
+ response = super(AuthorizedSession, self).request(
+ method,
+ url,
+ data=data,
+ headers=request_headers,
+ timeout=timeout,
+ **kwargs
+ )
+ remaining_time = guard.remaining_timeout
+
+ # If the response indicated that the credentials needed to be
+ # refreshed, then refresh the credentials and re-attempt the
+ # request.
+ # A stored token may expire between the time it is retrieved and
+ # the time the request is made, so we may need to try twice.
+ if (
+ response.status_code in self._refresh_status_codes
+ and _credential_refresh_attempt < self._max_refresh_attempts
+ ):
+
+ _LOGGER.info(
+ "Refreshing credentials due to a %s response. Attempt %s/%s.",
+ response.status_code,
+ _credential_refresh_attempt + 1,
+ self._max_refresh_attempts,
+ )
+
+ # Do not apply the timeout unconditionally in order to not override the
+ # _auth_request's default timeout.
+ auth_request = (
+ self._auth_request
+ if timeout is None
+ else functools.partial(self._auth_request, timeout=timeout)
+ )
+
+ with TimeoutGuard(remaining_time) as guard:
+ self.credentials.refresh(auth_request)
+ remaining_time = guard.remaining_timeout
+
+ # Recurse. Pass in the original headers, not our modified set, but
+ # do pass the adjusted max allowed time (i.e. the remaining total time).
+ return self.request(
+ method,
+ url,
+ data=data,
+ headers=headers,
+ max_allowed_time=remaining_time,
+ timeout=timeout,
+ _credential_refresh_attempt=_credential_refresh_attempt + 1,
+ **kwargs
+ )
+
+ return response
+
+ @property
+ def is_mtls(self):
+ """Indicates if the created SSL channel is mutual TLS."""
+ return self._is_mtls
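To illustrate the two timeout knobs documented above, a sketch that caps each
individual attempt at 10 seconds while bounding the whole call, credential
refreshes included, at 30; it assumes ``google.auth.default()`` can resolve
credentials in the environment::

    import google.auth
    from google.auth.transport.requests import AuthorizedSession

    credentials, _ = google.auth.default()
    authed_session = AuthorizedSession(credentials)

    response = authed_session.request(
        "GET",
        "https://www.googleapis.com/storage/v1/b",
        timeout=10,           # per-attempt socket timeout
        max_allowed_time=30,  # total budget across refreshes and retries
    )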
diff --git a/venv/Lib/site-packages/google/auth/transport/urllib3.py b/venv/Lib/site-packages/google/auth/transport/urllib3.py
new file mode 100644
index 000000000..209fc51bc
--- /dev/null
+++ b/venv/Lib/site-packages/google/auth/transport/urllib3.py
@@ -0,0 +1,426 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Transport adapter for urllib3."""
+
+from __future__ import absolute_import
+
+import logging
+import os
+import warnings
+
+# Certifi is Mozilla's certificate bundle. Urllib3 needs a certificate bundle
+# to verify HTTPS requests, and certifi is the recommended and most reliable
+# way to get a root certificate bundle. See
+# http://urllib3.readthedocs.io/en/latest/user-guide.html\
+# #certificate-verification
+# for more details.
+try:
+ import certifi
+except ImportError: # pragma: NO COVER
+ certifi = None
+
+try:
+ import urllib3
+except ImportError as caught_exc: # pragma: NO COVER
+ import six
+
+ six.raise_from(
+ ImportError(
+ "The urllib3 library is not installed, please install the "
+ "urllib3 package to use the urllib3 transport."
+ ),
+ caught_exc,
+ )
+import six
+import urllib3.exceptions # pylint: disable=ungrouped-imports
+
+from google.auth import environment_vars
+from google.auth import exceptions
+from google.auth import transport
+
+_LOGGER = logging.getLogger(__name__)
+
+
+class _Response(transport.Response):
+ """urllib3 transport response adapter.
+
+ Args:
+ response (urllib3.response.HTTPResponse): The raw urllib3 response.
+ """
+
+ def __init__(self, response):
+ self._response = response
+
+ @property
+ def status(self):
+ return self._response.status
+
+ @property
+ def headers(self):
+ return self._response.headers
+
+ @property
+ def data(self):
+ return self._response.data
+
+
+class Request(transport.Request):
+ """urllib3 request adapter.
+
+ This class is used internally for making requests using various transports
+ in a consistent way. If you use :class:`AuthorizedHttp` you do not need
+ to construct or use this class directly.
+
+ This class can be useful if you want to manually refresh a
+ :class:`~google.auth.credentials.Credentials` instance::
+
+ import google.auth.transport.urllib3
+ import urllib3
+
+ http = urllib3.PoolManager()
+ request = google.auth.transport.urllib3.Request(http)
+
+ credentials.refresh(request)
+
+ Args:
+ http (urllib3.request.RequestMethods): An instance of any urllib3
+ class that implements :class:`~urllib3.request.RequestMethods`,
+ usually :class:`urllib3.PoolManager`.
+
+ .. automethod:: __call__
+ """
+
+ def __init__(self, http):
+ self.http = http
+
+ def __call__(
+ self, url, method="GET", body=None, headers=None, timeout=None, **kwargs
+ ):
+ """Make an HTTP request using urllib3.
+
+ Args:
+ url (str): The URI to be requested.
+ method (str): The HTTP method to use for the request. Defaults
+ to 'GET'.
+ body (bytes): The payload / body in HTTP request.
+ headers (Mapping[str, str]): Request headers.
+ timeout (Optional[int]): The number of seconds to wait for a
+ response from the server. If not specified or if None, the
+ urllib3 default timeout will be used.
+ kwargs: Additional arguments passed through to the underlying
+ urllib3 :meth:`urlopen` method.
+
+ Returns:
+ google.auth.transport.Response: The HTTP response.
+
+ Raises:
+ google.auth.exceptions.TransportError: If any exception occurred.
+ """
+ # urllib3 uses a sentinel default value for timeout, so only set it if
+ # specified.
+ if timeout is not None:
+ kwargs["timeout"] = timeout
+
+ try:
+ _LOGGER.debug("Making request: %s %s", method, url)
+ response = self.http.request(
+ method, url, body=body, headers=headers, **kwargs
+ )
+ return _Response(response)
+ except urllib3.exceptions.HTTPError as caught_exc:
+ new_exc = exceptions.TransportError(caught_exc)
+ six.raise_from(new_exc, caught_exc)
+
+
+def _make_default_http():
+ if certifi is not None:
+ return urllib3.PoolManager(cert_reqs="CERT_REQUIRED", ca_certs=certifi.where())
+ else:
+ return urllib3.PoolManager()
+
+
+def _make_mutual_tls_http(cert, key):
+ """Create a mutual TLS HTTP connection with the given client cert and key.
+ See https://github.com/urllib3/urllib3/issues/474#issuecomment-253168415
+
+ Args:
+ cert (bytes): client certificate in PEM format
+ key (bytes): client private key in PEM format
+
+ Returns:
+ urllib3.PoolManager: Mutual TLS HTTP connection.
+
+ Raises:
+ ImportError: If certifi or pyOpenSSL is not installed.
+ OpenSSL.crypto.Error: If the cert or key is invalid.
+ """
+ import certifi
+ from OpenSSL import crypto
+ import urllib3.contrib.pyopenssl
+
+ urllib3.contrib.pyopenssl.inject_into_urllib3()
+ ctx = urllib3.util.ssl_.create_urllib3_context()
+ ctx.load_verify_locations(cafile=certifi.where())
+
+ pkey = crypto.load_privatekey(crypto.FILETYPE_PEM, key)
+ x509 = crypto.load_certificate(crypto.FILETYPE_PEM, cert)
+
+ ctx._ctx.use_certificate(x509)
+ ctx._ctx.use_privatekey(pkey)
+
+ http = urllib3.PoolManager(ssl_context=ctx)
+ return http
+
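A minimal sketch of calling the helper above, assuming PEM-encoded client
credentials are already on disk (the file names and endpoint are illustrative)::

    with open("client.crt", "rb") as f:
        cert = f.read()
    with open("client.key", "rb") as f:
        key = f.read()

    http = _make_mutual_tls_http(cert, key)  # urllib3.PoolManager speaking mTLS
    response = http.request("GET", "https://pubsub.mtls.googleapis.com/")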
+
+class AuthorizedHttp(urllib3.request.RequestMethods):
+ """A urllib3 HTTP class with credentials.
+
+ This class is used to perform requests to API endpoints that require
+ authorization::
+
+ from google.auth.transport.urllib3 import AuthorizedHttp
+
+ authed_http = AuthorizedHttp(credentials)
+
+ response = authed_http.request(
+ 'GET', 'https://www.googleapis.com/storage/v1/b')
+
+ This class implements :class:`urllib3.request.RequestMethods` and can be
+ used just like any other :class:`urllib3.PoolManager`.
+
+ The underlying :meth:`urlopen` implementation handles adding the
+ credentials' headers to the request and refreshing credentials as needed.
+
+ This class also supports mutual TLS via the :meth:`configure_mtls_channel`
+ method. In order to use this method, the `GOOGLE_API_USE_CLIENT_CERTIFICATE`
+ environment variable must be explicitly set to `true`; otherwise the method does
+ nothing. Assuming the environment variable is set to `true`, the method behaves
+ in the following manner:
+ If client_cert_callback is provided, client certificate and private
+ key are loaded using the callback; if client_cert_callback is None,
+ application default SSL credentials will be used. Exceptions are raised if
+ there are problems with the certificate, private key, or the loading process,
+ so it should be called within a try/except block.
+
+ First we set the environment variable to `true`, then create an :class:`AuthorizedHttp`
+ instance and specify the endpoints::
+
+ regular_endpoint = 'https://pubsub.googleapis.com/v1/projects/{my_project_id}/topics'
+ mtls_endpoint = 'https://pubsub.mtls.googleapis.com/v1/projects/{my_project_id}/topics'
+
+ authed_http = AuthorizedHttp(credentials)
+
+ Now we can pass a callback to :meth:`configure_mtls_channel`::
+
+ def my_cert_callback():
+ # some code to load client cert bytes and private key bytes, both in
+ # PEM format.
+ some_code_to_load_client_cert_and_key()
+ if loaded:
+ return cert, key
+ raise MyClientCertFailureException()
+
+ # Always call configure_mtls_channel within a try/except block.
+ try:
+ is_mtls = authed_http.configure_mtls_channel(my_cert_callback)
+ except:
+ # handle exceptions.
+
+ if is_mtls:
+ response = authed_http.request('GET', mtls_endpoint)
+ else:
+ response = authed_http.request('GET', regular_endpoint)
+
+ You can alternatively use application default SSL credentials like this::
+
+ try:
+ is_mtls = authed_http.configure_mtls_channel()
+ except:
+ # handle exceptions.
+
+ Args:
+ credentials (google.auth.credentials.Credentials): The credentials to
+ add to the request.
+ http (urllib3.PoolManager): The underlying HTTP object to
+ use to make requests. If not specified, a
+ :class:`urllib3.PoolManager` instance will be constructed with
+ sane defaults.
+ refresh_status_codes (Sequence[int]): Which HTTP status codes indicate
+ that credentials should be refreshed and the request should be
+ retried.
+ max_refresh_attempts (int): The maximum number of times to attempt to
+ refresh the credentials and retry the request.
+ """
+
+ def __init__(
+ self,
+ credentials,
+ http=None,
+ refresh_status_codes=transport.DEFAULT_REFRESH_STATUS_CODES,
+ max_refresh_attempts=transport.DEFAULT_MAX_REFRESH_ATTEMPTS,
+ ):
+ if http is None:
+ self.http = _make_default_http()
+ self._has_user_provided_http = False
+ else:
+ self.http = http
+ self._has_user_provided_http = True
+
+ self.credentials = credentials
+ self._refresh_status_codes = refresh_status_codes
+ self._max_refresh_attempts = max_refresh_attempts
+ # Request instance used by internal methods (for example,
+ # credentials.refresh).
+ self._request = Request(self.http)
+
+ super(AuthorizedHttp, self).__init__()
+
+ def configure_mtls_channel(self, client_cert_callback=None):
+ """Configures mutual TLS channel using the given client_cert_callback or
+ application default SSL credentials. The behavior is controlled by
+ `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable.
+ (1) If the environment variable value is `true`, the function returns True
+ if the channel is mutual TLS and False otherwise. The `http` provided
+ in the constructor will be overwritten.
+ (2) If the environment variable is not set or `false`, the function does
+ nothing and it always return False.
+
+ Args:
+ client_cert_callback (Optional[Callable[[], (bytes, bytes)]]):
+ The optional callback returns the client certificate and private
+ key bytes both in PEM format.
+ If the callback is None, application default SSL credentials
+ will be used.
+
+ Returns:
+ True if the channel is mutual TLS and False otherwise.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS channel
+ creation failed for any reason.
+ """
+ use_client_cert = os.getenv(
+ environment_vars.GOOGLE_API_USE_CLIENT_CERTIFICATE, "false"
+ )
+ if use_client_cert != "true":
+ return False
+
+ try:
+ import OpenSSL
+ except ImportError as caught_exc:
+ new_exc = exceptions.MutualTLSChannelError(caught_exc)
+ six.raise_from(new_exc, caught_exc)
+
+ try:
+ found_cert_key, cert, key = transport._mtls_helper.get_client_cert_and_key(
+ client_cert_callback
+ )
+
+ if found_cert_key:
+ self.http = _make_mutual_tls_http(cert, key)
+ else:
+ self.http = _make_default_http()
+ except (
+ exceptions.ClientCertError,
+ ImportError,
+ OpenSSL.crypto.Error,
+ ) as caught_exc:
+ new_exc = exceptions.MutualTLSChannelError(caught_exc)
+ six.raise_from(new_exc, caught_exc)
+
+ if self._has_user_provided_http:
+ self._has_user_provided_http = False
+ warnings.warn(
+ "`http` provided in the constructor is overwritten", UserWarning
+ )
+
+ return found_cert_key
+
+ def urlopen(self, method, url, body=None, headers=None, **kwargs):
+ """Implementation of urllib3's urlopen."""
+ # pylint: disable=arguments-differ
+ # We use kwargs to collect additional args that we don't need to
+ # introspect here. However, we do explicitly collect the two
+ # positional arguments.
+
+ # Use a kwarg for this instead of an attribute to maintain
+ # thread-safety.
+ _credential_refresh_attempt = kwargs.pop("_credential_refresh_attempt", 0)
+
+ if headers is None:
+ headers = self.headers
+
+ # Make a copy of the headers. They will be modified by the credentials
+ # and we want to pass the original headers if we recurse.
+ request_headers = headers.copy()
+
+ self.credentials.before_request(self._request, method, url, request_headers)
+
+ response = self.http.urlopen(
+ method, url, body=body, headers=request_headers, **kwargs
+ )
+
+ # If the response indicated that the credentials needed to be
+ # refreshed, then refresh the credentials and re-attempt the
+ # request.
+ # A stored token may expire between the time it is retrieved and
+ # the time the request is made, so we may need to try twice.
+ # The reason urllib3's retries aren't used is because they
+ # don't allow you to modify the request headers. :/
+ if (
+ response.status in self._refresh_status_codes
+ and _credential_refresh_attempt < self._max_refresh_attempts
+ ):
+
+ _LOGGER.info(
+ "Refreshing credentials due to a %s response. Attempt %s/%s.",
+ response.status,
+ _credential_refresh_attempt + 1,
+ self._max_refresh_attempts,
+ )
+
+ self.credentials.refresh(self._request)
+
+ # Recurse. Pass in the original headers, not our modified set.
+ return self.urlopen(
+ method,
+ url,
+ body=body,
+ headers=headers,
+ _credential_refresh_attempt=_credential_refresh_attempt + 1,
+ **kwargs
+ )
+
+ return response
+
+ # Proxy methods for compliance with the urllib3.PoolManager interface
+
+ def __enter__(self):
+ """Proxy to ``self.http``."""
+ return self.http.__enter__()
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ """Proxy to ``self.http``."""
+ return self.http.__exit__(exc_type, exc_val, exc_tb)
+
+ @property
+ def headers(self):
+ """Proxy to ``self.http``."""
+ return self.http.headers
+
+ @headers.setter
+ def headers(self, value):
+ """Proxy to ``self.http``."""
+ self.http.headers = value
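The proxy methods above let an :class:`AuthorizedHttp` stand in for a plain
:class:`urllib3.PoolManager` in code that reads or sets ``headers``; a short
sketch, again assuming ``google.auth.default()`` resolves credentials::

    import google.auth
    from google.auth.transport.urllib3 import AuthorizedHttp

    credentials, _ = google.auth.default()
    authed_http = AuthorizedHttp(credentials)
    authed_http.headers["X-Demo-Header"] = "demo"  # proxied to the pool manager
    response = authed_http.request("GET", "https://www.googleapis.com/storage/v1/b")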
diff --git a/venv/Lib/site-packages/google/cloud/__pycache__/_helpers.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/__pycache__/_helpers.cpython-36.pyc
new file mode 100644
index 000000000..852744063
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/__pycache__/_helpers.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/__pycache__/_http.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/__pycache__/_http.cpython-36.pyc
new file mode 100644
index 000000000..3242b5ecf
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/__pycache__/_http.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/__pycache__/_testing.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/__pycache__/_testing.cpython-36.pyc
new file mode 100644
index 000000000..73b399e18
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/__pycache__/_testing.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/__pycache__/client.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/__pycache__/client.cpython-36.pyc
new file mode 100644
index 000000000..e45792fc3
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/__pycache__/client.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/__pycache__/environment_vars.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/__pycache__/environment_vars.cpython-36.pyc
new file mode 100644
index 000000000..fd6f47825
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/__pycache__/environment_vars.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/__pycache__/exceptions.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/__pycache__/exceptions.cpython-36.pyc
new file mode 100644
index 000000000..ef995e326
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/__pycache__/exceptions.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/__pycache__/firestore.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/__pycache__/firestore.cpython-36.pyc
new file mode 100644
index 000000000..74cf20eca
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/__pycache__/firestore.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/__pycache__/obsolete.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/__pycache__/obsolete.cpython-36.pyc
new file mode 100644
index 000000000..86d4ce6c6
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/__pycache__/obsolete.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/__pycache__/operation.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/__pycache__/operation.cpython-36.pyc
new file mode 100644
index 000000000..965d5f907
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/__pycache__/operation.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/__pycache__/version.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/__pycache__/version.cpython-36.pyc
new file mode 100644
index 000000000..e419daadb
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/__pycache__/version.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/_helpers.py b/venv/Lib/site-packages/google/cloud/_helpers.py
new file mode 100644
index 000000000..52a96b80e
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/_helpers.py
@@ -0,0 +1,636 @@
+# Copyright 2014 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Shared helpers for Google Cloud packages.
+
+This module is not part of the public API surface.
+"""
+
+from __future__ import absolute_import
+
+import calendar
+import datetime
+import os
+import re
+from threading import local as Local
+
+import six
+from six.moves import http_client
+
+import google.auth
+import google.auth.transport.requests
+from google.protobuf import duration_pb2
+from google.protobuf import timestamp_pb2
+
+try:
+ import grpc
+ import google.auth.transport.grpc
+except ImportError: # pragma: NO COVER
+ grpc = None
+
+
+_NOW = datetime.datetime.utcnow # To be replaced by tests.
+_RFC3339_MICROS = "%Y-%m-%dT%H:%M:%S.%fZ"
+_RFC3339_NO_FRACTION = "%Y-%m-%dT%H:%M:%S"
+_TIMEONLY_W_MICROS = "%H:%M:%S.%f"
+_TIMEONLY_NO_FRACTION = "%H:%M:%S"
+# datetime.strptime cannot handle nanosecond precision: parse w/ regex
+_RFC3339_NANOS = re.compile(
+ r"""
+ (?P<no_fraction>
+ \d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2} # YYYY-MM-DDTHH:MM:SS
+ )
+ ( # Optional decimal part
+ \. # decimal point
+ (?P<nanos>\d{1,9}) # nanoseconds, maybe truncated
+ )?
+ Z # Zulu
+""",
+ re.VERBOSE,
+)
+# NOTE: Catching this ImportError is a workaround for GAE not supporting the
+# "pwd" module which is imported lazily when "expanduser" is called.
+try:
+ _USER_ROOT = os.path.expanduser("~")
+except ImportError: # pragma: NO COVER
+ _USER_ROOT = None
+_GCLOUD_CONFIG_FILE = os.path.join("gcloud", "configurations", "config_default")
+_GCLOUD_CONFIG_SECTION = "core"
+_GCLOUD_CONFIG_KEY = "project"
+
+
+class _LocalStack(Local):
+ """Manage a thread-local LIFO stack of resources.
+
+ Intended for use in :class:`google.cloud.datastore.batch.Batch.__enter__`,
+ :class:`google.cloud.storage.batch.Batch.__enter__`, etc.
+ """
+
+ def __init__(self):
+ super(_LocalStack, self).__init__()
+ self._stack = []
+
+ def __iter__(self):
+ """Iterate the stack in LIFO order.
+ """
+ return iter(reversed(self._stack))
+
+ def push(self, resource):
+ """Push a resource onto our stack.
+ """
+ self._stack.append(resource)
+
+ def pop(self):
+ """Pop a resource from our stack.
+
+ :rtype: object
+ :returns: the top-most resource, after removing it.
+ :raises IndexError: if the stack is empty.
+ """
+ return self._stack.pop()
+
+ @property
+ def top(self):
+ """Get the top-most resource
+
+ :rtype: object
+ :returns: the top-most item, or None if the stack is empty.
+ """
+ if self._stack:
+ return self._stack[-1]
+
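A small sketch of the stack's LIFO behavior; each thread sees its own,
independent stack because the class derives from ``threading.local``::

    stack = _LocalStack()
    stack.push("batch-1")
    stack.push("batch-2")
    assert stack.top == "batch-2"                 # most recently pushed
    assert list(stack) == ["batch-2", "batch-1"]  # iteration is LIFO
    assert stack.pop() == "batch-2"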
+
+class _UTC(datetime.tzinfo):
+ """Basic UTC implementation.
+
+ Implementing a small surface area to avoid depending on ``pytz``.
+ """
+
+ _dst = datetime.timedelta(0)
+ _tzname = "UTC"
+ _utcoffset = _dst
+
+ def dst(self, dt): # pylint: disable=unused-argument
+ """Daylight savings time offset."""
+ return self._dst
+
+ def fromutc(self, dt):
+ """Convert a timestamp from (naive) UTC to this timezone."""
+ if dt.tzinfo is None:
+ return dt.replace(tzinfo=self)
+ return super(_UTC, self).fromutc(dt)
+
+ def tzname(self, dt): # pylint: disable=unused-argument
+ """Get the name of this timezone."""
+ return self._tzname
+
+ def utcoffset(self, dt): # pylint: disable=unused-argument
+ """UTC offset of this timezone."""
+ return self._utcoffset
+
+ def __repr__(self):
+ return "<%s>" % (self._tzname,)
+
+ def __str__(self):
+ return self._tzname
+
+
+def _ensure_tuple_or_list(arg_name, tuple_or_list):
+ """Ensures an input is a tuple or list.
+
+ This effectively reduces the iterable types allowed to a very short
+ whitelist: list and tuple.
+
+ :type arg_name: str
+ :param arg_name: Name of argument to use in error message.
+
+ :type tuple_or_list: sequence of str
+ :param tuple_or_list: Sequence to be verified.
+
+ :rtype: list of str
+ :returns: The ``tuple_or_list`` passed in cast to a ``list``.
+ :raises TypeError: if the ``tuple_or_list`` is not a tuple or list.
+ """
+ if not isinstance(tuple_or_list, (tuple, list)):
+ raise TypeError(
+ "Expected %s to be a tuple or list. "
+ "Received %r" % (arg_name, tuple_or_list)
+ )
+ return list(tuple_or_list)
+
+
+def _determine_default_project(project=None):
+ """Determine default project ID explicitly or implicitly as fall-back.
+
+ See :func:`google.auth.default` for details on how the default project
+ is determined.
+
+ :type project: str
+ :param project: Optional. The project name to use as default.
+
+ :rtype: str or ``NoneType``
+ :returns: Default project if it can be determined.
+ """
+ if project is None:
+ _, project = google.auth.default()
+ return project
+
+
+def _millis(when):
+ """Convert a zone-aware datetime to integer milliseconds.
+
+ :type when: :class:`datetime.datetime`
+ :param when: the datetime to convert
+
+ :rtype: int
+ :returns: milliseconds since epoch for ``when``
+ """
+ micros = _microseconds_from_datetime(when)
+ return micros // 1000
+
+
+def _datetime_from_microseconds(value):
+ """Convert timestamp to datetime, assuming UTC.
+
+ :type value: float
+ :param value: The timestamp to convert
+
+ :rtype: :class:`datetime.datetime`
+ :returns: The datetime object created from the value.
+ """
+ return _EPOCH + datetime.timedelta(microseconds=value)
+
+
+def _microseconds_from_datetime(value):
+ """Convert non-none datetime to microseconds.
+
+ :type value: :class:`datetime.datetime`
+ :param value: The timestamp to convert.
+
+ :rtype: int
+ :returns: The timestamp, in microseconds.
+ """
+ if not value.tzinfo:
+ value = value.replace(tzinfo=UTC)
+ # Regardless of what timezone is on the value, convert it to UTC.
+ value = value.astimezone(UTC)
+ # Convert the datetime to a microsecond timestamp.
+ return int(calendar.timegm(value.timetuple()) * 1e6) + value.microsecond
+
+
+def _millis_from_datetime(value):
+ """Convert non-none datetime to timestamp, assuming UTC.
+
+ :type value: :class:`datetime.datetime`
+ :param value: (Optional) the timestamp
+
+ :rtype: int, or ``NoneType``
+ :returns: the timestamp, in milliseconds, or None
+ """
+ if value is not None:
+ return _millis(value)
+
+
+def _date_from_iso8601_date(value):
+ """Convert a ISO8601 date string to native datetime date
+
+ :type value: str
+ :param value: The date string to convert
+
+ :rtype: :class:`datetime.date`
+ :returns: A datetime date object created from the string
+
+ """
+ return datetime.datetime.strptime(value, "%Y-%m-%d").date()
+
+
+def _time_from_iso8601_time_naive(value):
+ """Convert a zoneless ISO8601 time string to naive datetime time
+
+ :type value: str
+ :param value: The time string to convert
+
+ :rtype: :class:`datetime.time`
+ :returns: A datetime time object created from the string
+ :raises ValueError: if the value does not match a known format.
+ """
+ if len(value) == 8: # HH:MM:SS
+ fmt = _TIMEONLY_NO_FRACTION
+ elif len(value) == 15: # HH:MM:SS.micros
+ fmt = _TIMEONLY_W_MICROS
+ else:
+ raise ValueError("Unknown time format: {}".format(value))
+ return datetime.datetime.strptime(value, fmt).time()
+
+
+def _rfc3339_to_datetime(dt_str):
+ """Convert a microsecond-precision timestamp to a native datetime.
+
+ :type dt_str: str
+ :param dt_str: The string to convert.
+
+ :rtype: :class:`datetime.datetime`
+ :returns: The datetime object created from the string.
+ """
+ return datetime.datetime.strptime(dt_str, _RFC3339_MICROS).replace(tzinfo=UTC)
+
+
+def _rfc3339_nanos_to_datetime(dt_str):
+ """Convert a nanosecond-precision timestamp to a native datetime.
+
+ .. note::
+
+ Python datetimes do not support nanosecond precision; this function
+ therefore truncates such values to microseconds.
+
+ :type dt_str: str
+ :param dt_str: The string to convert.
+
+ :rtype: :class:`datetime.datetime`
+ :returns: The datetime object created from the string.
+ :raises ValueError: If the timestamp does not match the RFC 3339
+ regular expression.
+ """
+ with_nanos = _RFC3339_NANOS.match(dt_str)
+ if with_nanos is None:
+ raise ValueError(
+ "Timestamp: %r, does not match pattern: %r"
+ % (dt_str, _RFC3339_NANOS.pattern)
+ )
+ bare_seconds = datetime.datetime.strptime(
+ with_nanos.group("no_fraction"), _RFC3339_NO_FRACTION
+ )
+ fraction = with_nanos.group("nanos")
+ if fraction is None:
+ micros = 0
+ else:
+ scale = 9 - len(fraction)
+ nanos = int(fraction) * (10 ** scale)
+ micros = nanos // 1000
+ return bare_seconds.replace(microsecond=micros, tzinfo=UTC)
+
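A worked example of the truncation described in the note above: nine digits of
fraction survive only to microsecond precision::

    dt = _rfc3339_nanos_to_datetime("2020-01-02T03:04:05.123456789Z")
    assert dt.microsecond == 123456  # 123456789 ns truncated to 123456 us
    assert dt.tzinfo is UTC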
+
+def _datetime_to_rfc3339(value, ignore_zone=True):
+ """Convert a timestamp to a string.
+
+ :type value: :class:`datetime.datetime`
+ :param value: The datetime object to be converted to a string.
+
+ :type ignore_zone: bool
+ :param ignore_zone: If True, then the timezone (if any) of the datetime
+ object is ignored.
+
+ :rtype: str
+ :returns: The string representing the datetime stamp.
+ """
+ if not ignore_zone and value.tzinfo is not None:
+ # Convert to UTC and remove the time zone info.
+ value = value.replace(tzinfo=None) - value.utcoffset()
+
+ return value.strftime(_RFC3339_MICROS)
+
+
+def _to_bytes(value, encoding="ascii"):
+ """Converts a string value to bytes, if necessary.
+
+ Unfortunately, ``six.b`` is insufficient for this task since in
+ Python2 it does not modify ``unicode`` objects.
+
+ :type value: str / bytes or unicode
+ :param value: The string/bytes value to be converted.
+
+ :type encoding: str
+ :param encoding: The encoding to use to convert unicode to bytes. Defaults
+ to "ascii", which will not allow any characters from
+ ordinals larger than 127. Other useful values are
+ "latin-1", which which will only allows byte ordinals
+ (up to 255) and "utf-8", which will encode any unicode
+ that needs to be.
+
+ :rtype: str / bytes
+ :returns: The original value converted to bytes (if unicode) or as passed
+ in if it started out as bytes.
+ :raises TypeError: if the value could not be converted to bytes.
+ """
+ result = value.encode(encoding) if isinstance(value, six.text_type) else value
+ if isinstance(result, six.binary_type):
+ return result
+ else:
+ raise TypeError("%r could not be converted to bytes" % (value,))
+
+
+def _bytes_to_unicode(value):
+ """Converts bytes to a unicode value, if necessary.
+
+ :type value: bytes
+ :param value: bytes value to attempt string conversion on.
+
+ :rtype: str
+ :returns: The original value converted to unicode (if bytes) or as passed
+ in if it started out as unicode.
+
+ :raises ValueError: if the value could not be converted to unicode.
+ """
+ result = value.decode("utf-8") if isinstance(value, six.binary_type) else value
+ if isinstance(result, six.text_type):
+ return result
+ else:
+ raise ValueError("%r could not be converted to unicode" % (value,))
+
+
+def _from_any_pb(pb_type, any_pb):
+ """Converts an Any protobuf to the specified message type
+
+ Args:
+ pb_type (type): the type of the message that any_pb stores an instance
+ of.
+ any_pb (google.protobuf.any_pb2.Any): the object to be converted.
+
+ Returns:
+ pb_type: An instance of the pb_type message.
+
+ Raises:
+ TypeError: if the message could not be converted.
+ """
+ msg = pb_type()
+ if not any_pb.Unpack(msg):
+ raise TypeError(
+ "Could not convert {} to {}".format(
+ any_pb.__class__.__name__, pb_type.__name__
+ )
+ )
+
+ return msg
+
+
+def _pb_timestamp_to_datetime(timestamp_pb):
+ """Convert a Timestamp protobuf to a datetime object.
+
+ :type timestamp_pb: :class:`google.protobuf.timestamp_pb2.Timestamp`
+ :param timestamp_pb: A Google returned timestamp protobuf.
+
+ :rtype: :class:`datetime.datetime`
+ :returns: A UTC datetime object converted from a protobuf timestamp.
+ """
+ return _EPOCH + datetime.timedelta(
+ seconds=timestamp_pb.seconds, microseconds=(timestamp_pb.nanos / 1000.0)
+ )
+
+
+def _pb_timestamp_to_rfc3339(timestamp_pb):
+ """Convert a Timestamp protobuf to an RFC 3339 string.
+
+ :type timestamp_pb: :class:`google.protobuf.timestamp_pb2.Timestamp`
+ :param timestamp_pb: A Google returned timestamp protobuf.
+
+ :rtype: str
+ :returns: An RFC 3339 formatted timestamp string.
+ """
+ timestamp = _pb_timestamp_to_datetime(timestamp_pb)
+ return _datetime_to_rfc3339(timestamp)
+
+
+def _datetime_to_pb_timestamp(when):
+ """Convert a datetime object to a Timestamp protobuf.
+
+ :type when: :class:`datetime.datetime`
+ :param when: the datetime to convert
+
+ :rtype: :class:`google.protobuf.timestamp_pb2.Timestamp`
+ :returns: A timestamp protobuf corresponding to the object.
+ """
+ ms_value = _microseconds_from_datetime(when)
+ seconds, micros = divmod(ms_value, 10 ** 6)
+ nanos = micros * 10 ** 3
+ return timestamp_pb2.Timestamp(seconds=seconds, nanos=nanos)
+
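A quick check of the ``divmod`` arithmetic above: one microsecond past the
first epoch second becomes ``seconds=1, nanos=1000``::

    import datetime

    when = datetime.datetime(1970, 1, 1, 0, 0, 1, 1, tzinfo=UTC)
    ts = _datetime_to_pb_timestamp(when)
    assert (ts.seconds, ts.nanos) == (1, 1000)  # 1 us == 1000 ns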
+
+def _timedelta_to_duration_pb(timedelta_val):
+ """Convert a Python timedelta object to a duration protobuf.
+
+ .. note::
+
+ The Python timedelta has a granularity of microseconds while
+ the protobuf duration type has a granularity of nanoseconds.
+
+ :type timedelta_val: :class:`datetime.timedelta`
+ :param timedelta_val: A timedelta object.
+
+ :rtype: :class:`google.protobuf.duration_pb2.Duration`
+ :returns: A duration object equivalent to the time delta.
+ """
+ duration_pb = duration_pb2.Duration()
+ duration_pb.FromTimedelta(timedelta_val)
+ return duration_pb
+
+
+def _duration_pb_to_timedelta(duration_pb):
+ """Convert a duration protobuf to a Python timedelta object.
+
+ .. note::
+
+ The Python timedelta has a granularity of microseconds while
+ the protobuf duration type has a granularity of nanoseconds.
+
+ :type duration_pb: :class:`google.protobuf.duration_pb2.Duration`
+ :param duration_pb: A protobuf duration object.
+
+ :rtype: :class:`datetime.timedelta`
+ :returns: The converted timedelta object.
+ """
+ return datetime.timedelta(
+ seconds=duration_pb.seconds, microseconds=(duration_pb.nanos / 1000.0)
+ )
+
+
+def _name_from_project_path(path, project, template):
+ """Validate a URI path and get the leaf object's name.
+
+ :type path: str
+ :param path: URI path containing the name.
+
+ :type project: str
+ :param project: (Optional) The project associated with the request. It is
+ included for validation purposes. If passed as None,
+ validation is disabled.
+
+ :type template: str
+ :param template: Template regex describing the expected form of the path.
+ The regex must have two named groups, 'project' and
+ 'name'.
+
+ :rtype: str
+ :returns: Name parsed from ``path``.
+ :raises ValueError: if the ``path`` is ill-formed or if the project from
+ the ``path`` does not agree with the ``project``
+ passed in.
+ """
+ if isinstance(template, str):
+ template = re.compile(template)
+
+ match = template.match(path)
+
+ if not match:
+ raise ValueError(
+ 'path "%s" did not match expected pattern "%s"' % (path, template.pattern)
+ )
+
+ if project is not None:
+ found_project = match.group("project")
+ if found_project != project:
+ raise ValueError(
+ "Project from client (%s) should agree with "
+ "project from resource(%s)." % (project, found_project)
+ )
+
+ return match.group("name")
+
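An illustrative call with a hypothetical template; the regex must expose
``project`` and ``name`` named groups for the validation above to work::

    template = r"projects/(?P<project>[^/]+)/topics/(?P<name>[^/]+)"
    name = _name_from_project_path(
        "projects/demo-project/topics/my-topic", "demo-project", template
    )
    assert name == "my-topic"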
+
+def make_secure_channel(credentials, user_agent, host, extra_options=()):
+ """Makes a secure channel for an RPC service.
+
+ Uses / depends on gRPC.
+
+ :type credentials: :class:`google.auth.credentials.Credentials`
+ :param credentials: The OAuth2 Credentials to use for creating
+ access tokens.
+
+ :type user_agent: str
+ :param user_agent: The user agent to be used with API requests.
+
+ :type host: str
+ :param host: The host for the service.
+
+ :type extra_options: tuple
+ :param extra_options: (Optional) Extra gRPC options used when creating the
+ channel.
+
+ :rtype: :class:`grpc._channel.Channel`
+ :returns: gRPC secure channel with credentials attached.
+ """
+ target = "%s:%d" % (host, http_client.HTTPS_PORT)
+ http_request = google.auth.transport.requests.Request()
+
+ user_agent_option = ("grpc.primary_user_agent", user_agent)
+ options = (user_agent_option,) + extra_options
+ return google.auth.transport.grpc.secure_authorized_channel(
+ credentials, http_request, target, options=options
+ )
+
+
+def make_secure_stub(credentials, user_agent, stub_class, host, extra_options=()):
+ """Makes a secure stub for an RPC service.
+
+ Uses / depends on gRPC.
+
+ :type credentials: :class:`google.auth.credentials.Credentials`
+ :param credentials: The OAuth2 Credentials to use for creating
+ access tokens.
+
+ :type user_agent: str
+ :param user_agent: The user agent to be used with API requests.
+
+ :type stub_class: type
+ :param stub_class: A gRPC stub type for a given service.
+
+ :type host: str
+ :param host: The host for the service.
+
+ :type extra_options: tuple
+ :param extra_options: (Optional) Extra gRPC options passed when creating
+ the channel.
+
+ :rtype: object, instance of ``stub_class``
+ :returns: The stub object used to make gRPC requests to a given API.
+ """
+ channel = make_secure_channel(
+ credentials, user_agent, host, extra_options=extra_options
+ )
+ return stub_class(channel)
+
+
+def make_insecure_stub(stub_class, host, port=None):
+ """Makes an insecure stub for an RPC service.
+
+ Uses / depends on gRPC.
+
+ :type stub_class: type
+ :param stub_class: A gRPC stub type for a given service.
+
+ :type host: str
+ :param host: The host for the service. May also include the port
+ if ``port`` is unspecified.
+
+ :type port: int
+ :param port: (Optional) The port for the service.
+
+ :rtype: object, instance of ``stub_class``
+ :returns: The stub object used to make gRPC requests to a given API.
+ """
+ if port is None:
+ target = host
+ else:
+ # NOTE: This assumes port != http_client.HTTPS_PORT:
+ target = "%s:%d" % (host, port)
+ channel = grpc.insecure_channel(target)
+ return stub_class(channel)
+
+
+try:
+ from pytz import UTC # pylint: disable=unused-import,wrong-import-order
+except ImportError: # pragma: NO COVER
+ UTC = _UTC() # Singleton instance to be used throughout.
+
+# Need to define _EPOCH at the end of module since it relies on UTC.
+_EPOCH = datetime.datetime.utcfromtimestamp(0).replace(tzinfo=UTC)
diff --git a/venv/Lib/site-packages/google/cloud/_http.py b/venv/Lib/site-packages/google/cloud/_http.py
new file mode 100644
index 000000000..806262289
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/_http.py
@@ -0,0 +1,440 @@
+# Copyright 2014 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Shared implementation of connections to API servers."""
+
+import collections
+import json
+import platform
+import warnings
+
+from six.moves import collections_abc
+from six.moves.urllib.parse import urlencode
+
+from google.api_core.client_info import ClientInfo
+from google.cloud import exceptions
+from google.cloud import version
+
+
+API_BASE_URL = "https://www.googleapis.com"
+"""The base of the API call URL."""
+
+DEFAULT_USER_AGENT = "gcloud-python/{0}".format(version.__version__)
+"""The user agent for google-cloud-python requests."""
+
+CLIENT_INFO_HEADER = "X-Goog-API-Client"
+CLIENT_INFO_TEMPLATE = "gl-python/" + platform.python_version() + " gccl/{}"
+
+_USER_AGENT_ALL_CAPS_DEPRECATED = """\
+The 'USER_AGENT' class-level attribute is deprecated. Please use
+'user_agent' instead.
+"""
+
+_EXTRA_HEADERS_ALL_CAPS_DEPRECATED = """\
+The '_EXTRA_HEADERS' class-level attribute is deprecated. Please use
+'extra_headers' instead.
+"""
+
+_DEFAULT_TIMEOUT = 60 # in seconds
+
+
+class Connection(object):
+ """A generic connection to Google Cloud Platform.
+
+ :type client: :class:`~google.cloud.client.Client`
+ :param client: The client that owns the current connection.
+
+ :type client_info: :class:`~google.api_core.client_info.ClientInfo`
+ :param client_info: (Optional) instance used to generate user agent.
+ """
+
+ _user_agent = DEFAULT_USER_AGENT
+
+ def __init__(self, client, client_info=None):
+ self._client = client
+
+ if client_info is None:
+ client_info = ClientInfo()
+
+ self._client_info = client_info
+ self._extra_headers = {}
+
+ @property
+ def USER_AGENT(self):
+ """Deprecated: get / set user agent sent by connection.
+
+ :rtype: str
+ :returns: user agent
+ """
+ warnings.warn(
+ _USER_AGENT_ALL_CAPS_DEPRECATED, DeprecationWarning, stacklevel=2)
+ return self.user_agent
+
+ @USER_AGENT.setter
+ def USER_AGENT(self, value):
+ warnings.warn(
+ _USER_AGENT_ALL_CAPS_DEPRECATED, DeprecationWarning, stacklevel=2)
+ self.user_agent = value
+
+ @property
+ def user_agent(self):
+ """Get / set user agent sent by connection.
+
+ :rtype: str
+ :returns: user agent
+ """
+ return self._client_info.to_user_agent()
+
+ @user_agent.setter
+ def user_agent(self, value):
+ self._client_info.user_agent = value
+
+ @property
+ def _EXTRA_HEADERS(self):
+ """Deprecated: get / set extra headers sent by connection.
+
+ :rtype: dict
+ :returns: header keys / values
+ """
+ warnings.warn(
+ _EXTRA_HEADERS_ALL_CAPS_DEPRECATED, DeprecationWarning, stacklevel=2)
+ return self.extra_headers
+
+ @_EXTRA_HEADERS.setter
+ def _EXTRA_HEADERS(self, value):
+ warnings.warn(
+ _EXTRA_HEADERS_ALL_CAPS_DEPRECATED, DeprecationWarning, stacklevel=2)
+ self.extra_headers = value
+
+ @property
+ def extra_headers(self):
+ """Get / set extra headers sent by connection.
+
+ :rtype: dict
+ :returns: header keys / values
+ """
+ return self._extra_headers
+
+ @extra_headers.setter
+ def extra_headers(self, value):
+ self._extra_headers = value
+
+ @property
+ def credentials(self):
+ """Getter for current credentials.
+
+ :rtype: :class:`google.auth.credentials.Credentials` or
+ :class:`NoneType`
+ :returns: The credentials object associated with this connection.
+ """
+ return self._client._credentials
+
+ @property
+ def http(self):
+ """A getter for the HTTP transport used in talking to the API.
+
+ Returns:
+ google.auth.transport.requests.AuthorizedSession:
+ A :class:`requests.Session` instance.
+ """
+ return self._client._http
+
+
+class JSONConnection(Connection):
+ """A connection to a Google JSON-based API.
+
+ These APIs are discovery based. For reference:
+
+ https://developers.google.com/discovery/
+
+ This defines :meth:`api_request` for making a generic JSON
+ API request; concrete API requests are created elsewhere.
+
+ * :attr:`API_BASE_URL`
+ * :attr:`API_VERSION`
+ * :attr:`API_URL_TEMPLATE`
+
+ must be updated by subclasses.
+ """
+
+ API_BASE_URL = None
+ """The base of the API call URL."""
+
+ API_VERSION = None
+ """The version of the API, used in building the API call's URL."""
+
+ API_URL_TEMPLATE = None
+ """A template for the URL of a particular API call."""
+
+ def build_api_url(
+ self, path, query_params=None, api_base_url=None, api_version=None
+ ):
+ """Construct an API url given a few components, some optional.
+
+ Typically, you shouldn't need to use this method.
+
+ :type path: str
+ :param path: The path to the resource (ie, ``'/b/bucket-name'``).
+
+ :type query_params: dict or list
+ :param query_params: A dictionary of keys and values (or list of
+ key-value pairs) to insert into the query
+ string of the URL.
+
+ :type api_base_url: str
+ :param api_base_url: The base URL for the API endpoint.
+ Typically you won't have to provide this.
+
+ :type api_version: str
+ :param api_version: The version of the API to call.
+ Typically you shouldn't provide this and instead
+ use the default for the library.
+
+ :rtype: str
+ :returns: The URL assembled from the pieces provided.
+ """
+ url = self.API_URL_TEMPLATE.format(
+ api_base_url=(api_base_url or self.API_BASE_URL),
+ api_version=(api_version or self.API_VERSION),
+ path=path,
+ )
+
+ query_params = query_params or {}
+
+ if isinstance(query_params, collections_abc.Mapping):
+ query_params = query_params.copy()
+ else:
+ query_params_dict = collections.defaultdict(list)
+ for key, value in query_params:
+ query_params_dict[key].append(value)
+ query_params = query_params_dict
+
+ query_params.setdefault("prettyPrint", "false")
+
+ url += "?" + urlencode(query_params, doseq=True)
+
+ return url
+
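A hedged sketch of what :meth:`build_api_url` produces for a hypothetical
subclass that fills in the three required class attributes::

    class _DemoConnection(JSONConnection):
        API_BASE_URL = "https://www.googleapis.com"
        API_VERSION = "v1"
        API_URL_TEMPLATE = "{api_base_url}/demo/{api_version}{path}"

    conn = _DemoConnection(client=None)
    url = conn.build_api_url("/b/bucket-name", query_params={"fields": "items"})
    # -> https://www.googleapis.com/demo/v1/b/bucket-name?fields=items&prettyPrint=false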
+ def _make_request(
+ self,
+ method,
+ url,
+ data=None,
+ content_type=None,
+ headers=None,
+ target_object=None,
+ timeout=_DEFAULT_TIMEOUT,
+ ):
+ """A low level method to send a request to the API.
+
+ Typically, you shouldn't need to use this method.
+
+ :type method: str
+ :param method: The HTTP method to use in the request.
+
+ :type url: str
+ :param url: The URL to send the request to.
+
+ :type data: str
+ :param data: The data to send as the body of the request.
+
+ :type content_type: str
+ :param content_type: The proper MIME type of the data provided.
+
+ :type headers: dict
+ :param headers: (Optional) A dictionary of HTTP headers to send with
+ the request. If passed, will be modified directly
+ here with added headers.
+
+ :type target_object: object
+ :param target_object:
+ (Optional) Argument to be used by library callers. This can allow
+ custom behavior, for example, to defer an HTTP request and complete
+ initialization of the object at a later time.
+
+ :type timeout: float or tuple
+        :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :rtype: :class:`requests.Response`
+ :returns: The HTTP response.
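+
+        Example (illustrative; ``conn`` is a configured connection and
+        the URL is hypothetical):
+
+            >>> response = conn._make_request(
+            ...     'GET', 'https://example.googleapis.com/v1/b/bucket-name')
+            >>> response.status_code
+            200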
+ """
+ headers = headers or {}
+ headers.update(self.extra_headers)
+ headers["Accept-Encoding"] = "gzip"
+
+ if content_type:
+ headers["Content-Type"] = content_type
+
+ headers[CLIENT_INFO_HEADER] = self.user_agent
+ headers["User-Agent"] = self.user_agent
+
+ return self._do_request(
+ method, url, headers, data, target_object, timeout=timeout
+ )
+
+ def _do_request(
+ self, method, url, headers, data, target_object, timeout=_DEFAULT_TIMEOUT
+ ): # pylint: disable=unused-argument
+ """Low-level helper: perform the actual API request over HTTP.
+
+ Allows batch context managers to override and defer a request.
+
+ :type method: str
+ :param method: The HTTP method to use in the request.
+
+ :type url: str
+ :param url: The URL to send the request to.
+
+ :type headers: dict
+ :param headers: A dictionary of HTTP headers to send with the request.
+
+ :type data: str
+ :param data: The data to send as the body of the request.
+
+ :type target_object: object
+ :param target_object:
+ (Optional) Unused ``target_object`` here but may be used by a
+ superclass.
+
+ :type timeout: float or tuple
+        :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :rtype: :class:`requests.Response`
+ :returns: The HTTP response.
+ """
+ return self.http.request(
+ url=url, method=method, headers=headers, data=data, timeout=timeout
+ )
+
+ def api_request(
+ self,
+ method,
+ path,
+ query_params=None,
+ data=None,
+ content_type=None,
+ headers=None,
+ api_base_url=None,
+ api_version=None,
+ expect_json=True,
+ _target_object=None,
+ timeout=_DEFAULT_TIMEOUT,
+ ):
+ """Make a request over the HTTP transport to the API.
+
+ You shouldn't need to use this method, but if you plan to
+ interact with the API using these primitives, this is the
+ correct one to use.
+
+ :type method: str
+        :param method: The HTTP method name (i.e., ``GET``, ``POST``, etc.).
+ Required.
+
+ :type path: str
+        :param path: The path to the resource (i.e., ``'/b/bucket-name'``).
+ Required.
+
+ :type query_params: dict or list
+ :param query_params: A dictionary of keys and values (or list of
+ key-value pairs) to insert into the query
+ string of the URL.
+
+ :type data: str
+ :param data: The data to send as the body of the request. Default is
+ the empty string.
+
+ :type content_type: str
+ :param content_type: The proper MIME type of the data provided. Default
+ is None.
+
+ :type headers: dict
+ :param headers: extra HTTP headers to be sent with the request.
+
+ :type api_base_url: str
+ :param api_base_url: The base URL for the API endpoint.
+ Typically you won't have to provide this.
+ Default is the standard API base URL.
+
+ :type api_version: str
+ :param api_version: The version of the API to call. Typically
+ you shouldn't provide this and instead use
+ the default for the library. Default is the
+ latest API version supported by
+ google-cloud-python.
+
+ :type expect_json: bool
+ :param expect_json: If True, this method will try to parse the
+ response as JSON and raise an exception if
+ that cannot be done. Default is True.
+
+ :type _target_object: :class:`object`
+ :param _target_object:
+ (Optional) Protected argument to be used by library callers. This
+ can allow custom behavior, for example, to defer an HTTP request
+ and complete initialization of the object at a later time.
+
+ :type timeout: float or tuple
+        :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :raises ~google.cloud.exceptions.GoogleCloudError: if the response code
+ is not 200 OK.
+ :raises ValueError: if the response content type is not JSON.
+ :rtype: dict or str
+ :returns: The API response payload, either as a raw string or
+ a dictionary if the response is valid JSON.
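+
+        Example (illustrative; ``conn`` is a configured connection and
+        the resource path is hypothetical):
+
+            >>> payload = conn.api_request('GET', '/b/bucket-name')
+            >>> payload['name']
+            'bucket-name'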
+ """
+ url = self.build_api_url(
+ path=path,
+ query_params=query_params,
+ api_base_url=api_base_url,
+ api_version=api_version,
+ )
+
+ # Making the executive decision that any dictionary
+ # data will be sent properly as JSON.
+ if data and isinstance(data, dict):
+ data = json.dumps(data)
+ content_type = "application/json"
+
+ response = self._make_request(
+ method=method,
+ url=url,
+ data=data,
+ content_type=content_type,
+ headers=headers,
+ target_object=_target_object,
+ timeout=timeout,
+ )
+
+ if not 200 <= response.status_code < 300:
+ raise exceptions.from_http_response(response)
+
+ if expect_json and response.content:
+ return response.json()
+ else:
+ return response.content
diff --git a/venv/Lib/site-packages/google/cloud/_testing.py b/venv/Lib/site-packages/google/cloud/_testing.py
new file mode 100644
index 000000000..04f0dba4c
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/_testing.py
@@ -0,0 +1,126 @@
+# Copyright 2014 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Shared testing utilities."""
+
+from __future__ import absolute_import
+
+
+class _Monkey(object):
+ """Context-manager for replacing module names in the scope of a test."""
+
+ def __init__(self, module, **kw):
+ self.module = module
+ if not kw: # pragma: NO COVER
+ raise ValueError("_Monkey was used with nothing to monkey-patch")
+ self.to_restore = {key: getattr(module, key) for key in kw}
+ for key, value in kw.items():
+ setattr(module, key, value)
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ for key, value in self.to_restore.items():
+ setattr(self.module, key, value)
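+
+    # Example usage (an illustrative sketch; ``some_module`` and
+    # ``real_func`` are hypothetical names):
+    #
+    #     with _Monkey(some_module, real_func=lambda: 'stubbed'):
+    #         assert some_module.real_func() == 'stubbed'
+    #     # the original attribute is restored on exit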
+
+
+class _NamedTemporaryFile(object):
+ def __init__(self, suffix=""):
+ import os
+ import tempfile
+
+ filehandle, self.name = tempfile.mkstemp(suffix=suffix)
+ os.close(filehandle)
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ import os
+
+ os.remove(self.name)
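+
+    # Example usage (illustrative):
+    #
+    #     with _NamedTemporaryFile(suffix='.json') as temp:
+    #         with open(temp.name, 'w') as fh:
+    #             fh.write('{}')
+    #     # the file is removed on exit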
+
+
+def _tempdir_maker():
+ import contextlib
+ import shutil
+ import tempfile
+
+ @contextlib.contextmanager
+ def _tempdir_mgr():
+ temp_dir = tempfile.mkdtemp()
+ yield temp_dir
+ shutil.rmtree(temp_dir)
+
+ return _tempdir_mgr
+
+
+# pylint: disable=invalid-name
+# Retain _tempdir as a constant for backwards compatibility despite
+# being an invalid name.
+_tempdir = _tempdir_maker()
+del _tempdir_maker
+# pylint: enable=invalid-name
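+
+# Example usage (illustrative):
+#
+#     with _tempdir() as temp_dir:
+#         pass  # create scratch files under ``temp_dir``
+#     # the directory tree is removed on exit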
+
+
+class _GAXBaseAPI(object):
+
+ _random_gax_error = False
+
+ def __init__(self, **kw):
+ self.__dict__.update(kw)
+
+ @staticmethod
+ def _make_grpc_error(status_code, trailing=None):
+ from grpc._channel import _RPCState
+ from google.cloud.exceptions import GrpcRendezvous
+
+ details = "Some error details."
+ exc_state = _RPCState((), None, trailing, status_code, details)
+ return GrpcRendezvous(exc_state, None, None, None)
+
+ def _make_grpc_not_found(self):
+ from grpc import StatusCode
+
+ return self._make_grpc_error(StatusCode.NOT_FOUND)
+
+ def _make_grpc_failed_precondition(self):
+ from grpc import StatusCode
+
+ return self._make_grpc_error(StatusCode.FAILED_PRECONDITION)
+
+ def _make_grpc_already_exists(self):
+ from grpc import StatusCode
+
+ return self._make_grpc_error(StatusCode.ALREADY_EXISTS)
+
+ def _make_grpc_deadline_exceeded(self):
+ from grpc import StatusCode
+
+ return self._make_grpc_error(StatusCode.DEADLINE_EXCEEDED)
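+
+    # Example usage in a test (illustrative):
+    #
+    #     api = _GAXBaseAPI()
+    #     error = api._make_grpc_not_found()
+    #     # ``error`` is a GrpcRendezvous carrying StatusCode.NOT_FOUND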
+
+
+class _GAXPageIterator(object):
+ def __init__(self, *pages, **kwargs):
+ self._pages = iter(pages)
+ self.page_token = kwargs.get("page_token")
+
+ def next(self):
+ """Iterate to the next page."""
+ import six
+
+ return six.next(self._pages)
+
+ __next__ = next
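+
+
+# Example usage (illustrative):
+#
+#     iterator = _GAXPageIterator(['a', 'b'], ['c'], page_token='TOKEN')
+#     next(iterator)  # -> ['a', 'b']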
diff --git a/venv/Lib/site-packages/google/cloud/client.py b/venv/Lib/site-packages/google/cloud/client.py
new file mode 100644
index 000000000..6b9117f0c
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/client.py
@@ -0,0 +1,250 @@
+# Copyright 2015 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Base classes for client used to interact with Google Cloud APIs."""
+
+import io
+import json
+from pickle import PicklingError
+
+import six
+
+import google.api_core.client_options
+import google.api_core.exceptions
+import google.auth
+import google.auth.credentials
+import google.auth.transport.requests
+from google.cloud._helpers import _determine_default_project
+from google.oauth2 import service_account
+
+
+_GOOGLE_AUTH_CREDENTIALS_HELP = (
+ "This library only supports credentials from google-auth-library-python. "
+ "See https://google-auth.readthedocs.io/en/latest/ "
+ "for help on authentication with this library."
+)
+
+# Default timeout for auth requests.
+_CREDENTIALS_REFRESH_TIMEOUT = 300
+
+
+class _ClientFactoryMixin(object):
+ """Mixin to allow factories that create credentials.
+
+ .. note::
+
+ This class is virtual.
+ """
+
+ _SET_PROJECT = False
+
+ @classmethod
+ def from_service_account_json(cls, json_credentials_path, *args, **kwargs):
+ """Factory to retrieve JSON credentials while creating client.
+
+ :type json_credentials_path: str
+ :param json_credentials_path: The path to a private key file (this file
+ was given to you when you created the
+ service account). This file must contain
+ a JSON object with a private key and
+ other credentials information (downloaded
+ from the Google APIs console).
+
+ :type args: tuple
+ :param args: Remaining positional arguments to pass to constructor.
+
+ :param kwargs: Remaining keyword arguments to pass to constructor.
+
+ :rtype: :class:`_ClientFactoryMixin`
+ :returns: The client created with the retrieved JSON credentials.
+ :raises TypeError: if there is a conflict with the kwargs
+ and the credentials created by the factory.
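+
+        Example (illustrative; ``MyClient`` is a hypothetical subclass
+        and the key file path is made up):
+
+            >>> client = MyClient.from_service_account_json(
+            ...     '/path/to/service-account.json')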
+ """
+ if "credentials" in kwargs:
+ raise TypeError("credentials must not be in keyword arguments")
+ with io.open(json_credentials_path, "r", encoding="utf-8") as json_fi:
+ credentials_info = json.load(json_fi)
+ credentials = service_account.Credentials.from_service_account_info(
+ credentials_info
+ )
+ if cls._SET_PROJECT:
+ if "project" not in kwargs:
+ kwargs["project"] = credentials_info.get("project_id")
+
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+
+class Client(_ClientFactoryMixin):
+ """Client to bundle configuration needed for API requests.
+
+ Stores ``credentials`` and an HTTP object so that subclasses
+ can pass them along to a connection class.
+
+ If no value is passed in for ``_http``, a :class:`requests.Session` object
+    will be created and authorized with the ``credentials``. Otherwise, the
+    ``credentials`` and ``_http`` need not be related.
+
+ Callers and subclasses may seek to use the private key from
+ ``credentials`` to sign data.
+
+ Args:
+ credentials (google.auth.credentials.Credentials):
+ (Optional) The OAuth2 Credentials to use for this client. If not
+ passed (and if no ``_http`` object is passed), falls back to the
+ default inferred from the environment.
+ client_options (google.api_core.client_options.ClientOptions):
+ (Optional) Custom options for the client.
+ _http (requests.Session):
+ (Optional) HTTP object to make requests. Can be any object that
+ defines ``request()`` with the same interface as
+ :meth:`requests.Session.request`. If not passed, an ``_http``
+ object is created that is bound to the ``credentials`` for the
+ current object.
+ This parameter should be considered private, and could change in
+ the future.
+
+ Raises:
+ google.auth.exceptions.DefaultCredentialsError:
+ Raised if ``credentials`` is not specified and the library fails
+ to acquire default credentials.
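+
+    Example:
+        An illustrative sketch, assuming default credentials are
+        available in the environment::
+
+            client = Client(
+                client_options={"quota_project_id": "my-project"})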
+ """
+
+ SCOPE = None
+ """The scopes required for authenticating with a service.
+
+ Needs to be set by subclasses.
+ """
+
+ def __init__(self, credentials=None, _http=None, client_options=None):
+ if isinstance(client_options, dict):
+ client_options = google.api_core.client_options.from_dict(client_options)
+ if client_options is None:
+ client_options = google.api_core.client_options.ClientOptions()
+
+ if credentials and client_options.credentials_file:
+ raise google.api_core.exceptions.DuplicateCredentialArgs(
+ "'credentials' and 'client_options.credentials_file' are mutually exclusive.")
+
+ if credentials and not isinstance(credentials, google.auth.credentials.Credentials):
+ raise ValueError(_GOOGLE_AUTH_CREDENTIALS_HELP)
+
+ scopes = client_options.scopes or self.SCOPE
+
+ # if no http is provided, credentials must exist
+ if not _http and credentials is None:
+ if client_options.credentials_file:
+ credentials, _ = google.auth.load_credentials_from_file(
+ client_options.credentials_file, scopes=scopes)
+ else:
+ credentials, _ = google.auth.default(scopes=scopes)
+
+ self._credentials = google.auth.credentials.with_scopes_if_required(
+ credentials, scopes=scopes)
+
+ if client_options.quota_project_id:
+ self._credentials = self._credentials.with_quota_project(client_options.quota_project_id)
+
+ self._http_internal = _http
+
+ def __getstate__(self):
+ """Explicitly state that clients are not pickleable."""
+ raise PicklingError(
+ "\n".join(
+ [
+ "Pickling client objects is explicitly not supported.",
+ "Clients have non-trivial state that is local and unpickleable.",
+ ]
+ )
+ )
+
+ @property
+ def _http(self):
+ """Getter for object used for HTTP transport.
+
+ :rtype: :class:`~requests.Session`
+ :returns: An HTTP object.
+ """
+ if self._http_internal is None:
+ self._http_internal = google.auth.transport.requests.AuthorizedSession(
+ self._credentials,
+ refresh_timeout=_CREDENTIALS_REFRESH_TIMEOUT,
+ )
+ return self._http_internal
+
+
+class _ClientProjectMixin(object):
+ """Mixin to allow setting the project on the client.
+
+ :type project: str
+ :param project: the project which the client acts on behalf of. If not
+ passed falls back to the default inferred from the
+ environment.
+
+ :raises: :class:`EnvironmentError` if the project is neither passed in nor
+ set in the environment. :class:`ValueError` if the project value
+ is invalid.
+ """
+
+ def __init__(self, project=None):
+ project = self._determine_default(project)
+ if project is None:
+ raise EnvironmentError(
+ "Project was not passed and could not be "
+ "determined from the environment."
+ )
+ if isinstance(project, six.binary_type):
+ project = project.decode("utf-8")
+ if not isinstance(project, six.string_types):
+ raise ValueError("Project must be a string.")
+ self.project = project
+
+ @staticmethod
+ def _determine_default(project):
+ """Helper: use default project detection."""
+ return _determine_default_project(project)
+
+
+class ClientWithProject(Client, _ClientProjectMixin):
+ """Client that also stores a project.
+
+ :type project: str
+ :param project: the project which the client acts on behalf of. If not
+ passed falls back to the default inferred from the
+ environment.
+
+ :type credentials: :class:`~google.auth.credentials.Credentials`
+ :param credentials: (Optional) The OAuth2 Credentials to use for this
+ client. If not passed (and if no ``_http`` object is
+ passed), falls back to the default inferred from the
+ environment.
+
+ :type _http: :class:`~requests.Session`
+ :param _http: (Optional) HTTP object to make requests. Can be any object
+ that defines ``request()`` with the same interface as
+ :meth:`~requests.Session.request`. If not passed, an
+ ``_http`` object is created that is bound to the
+ ``credentials`` for the current object.
+ This parameter should be considered private, and could
+ change in the future.
+
+ :raises: :class:`ValueError` if the project is neither passed in nor
+ set in the environment.
+ """
+
+ _SET_PROJECT = True # Used by from_service_account_json()
+
+ def __init__(self, project=None, credentials=None, client_options=None, _http=None):
+ _ClientProjectMixin.__init__(self, project=project)
+ Client.__init__(self, credentials=credentials, client_options=client_options, _http=_http)
diff --git a/venv/Lib/site-packages/google/cloud/environment_vars.py b/venv/Lib/site-packages/google/cloud/environment_vars.py
new file mode 100644
index 000000000..d84afca35
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/environment_vars.py
@@ -0,0 +1,38 @@
+# Copyright 2015 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Comprehensive list of environment variables used in google-cloud.
+
+These enable many types of implicit behavior in both production
+and tests.
+"""
+
+GCD_DATASET = "DATASTORE_DATASET"
+"""Environment variable defining default dataset ID under GCD."""
+
+GCD_HOST = "DATASTORE_EMULATOR_HOST"
+"""Environment variable defining host for GCD dataset server."""
+
+PUBSUB_EMULATOR = "PUBSUB_EMULATOR_HOST"
+"""Environment variable defining host for Pub/Sub emulator."""
+
+BIGTABLE_EMULATOR = "BIGTABLE_EMULATOR_HOST"
+"""Environment variable defining host for Bigtable emulator."""
+
+DISABLE_GRPC = "GOOGLE_CLOUD_DISABLE_GRPC"
+"""Environment variable acting as flag to disable gRPC.
+
+To be used for APIs where both an HTTP and gRPC implementation
+exist.
+"""
diff --git a/venv/Lib/site-packages/google/cloud/exceptions.py b/venv/Lib/site-packages/google/cloud/exceptions.py
new file mode 100644
index 000000000..36ee6d14f
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/exceptions.py
@@ -0,0 +1,59 @@
+# Copyright 2014 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# pylint: disable=invalid-name
+# pylint recognizes all of these aliases as constants and thinks they have
+# invalid names.
+
+"""Custom exceptions for :mod:`google.cloud` package."""
+
+# Avoid the grpc and google.cloud.grpc collision.
+from __future__ import absolute_import
+
+from google.api_core import exceptions
+
+try:
+ from grpc._channel import _Rendezvous
+except ImportError: # pragma: NO COVER
+ _Rendezvous = None
+
+GrpcRendezvous = _Rendezvous
+"""Exception class raised by gRPC stable."""
+
+# Aliases to moved classes.
+GoogleCloudError = exceptions.GoogleAPICallError
+Redirection = exceptions.Redirection
+MovedPermanently = exceptions.MovedPermanently
+NotModified = exceptions.NotModified
+TemporaryRedirect = exceptions.TemporaryRedirect
+ResumeIncomplete = exceptions.ResumeIncomplete
+ClientError = exceptions.ClientError
+BadRequest = exceptions.BadRequest
+Unauthorized = exceptions.Unauthorized
+Forbidden = exceptions.Forbidden
+NotFound = exceptions.NotFound
+MethodNotAllowed = exceptions.MethodNotAllowed
+Conflict = exceptions.Conflict
+LengthRequired = exceptions.LengthRequired
+PreconditionFailed = exceptions.PreconditionFailed
+RequestRangeNotSatisfiable = exceptions.RequestRangeNotSatisfiable
+TooManyRequests = exceptions.TooManyRequests
+ServerError = exceptions.ServerError
+InternalServerError = exceptions.InternalServerError
+MethodNotImplemented = exceptions.MethodNotImplemented
+BadGateway = exceptions.BadGateway
+ServiceUnavailable = exceptions.ServiceUnavailable
+GatewayTimeout = exceptions.GatewayTimeout
+from_http_status = exceptions.from_http_status
+from_http_response = exceptions.from_http_response
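+
+# Example (illustrative; ``client.get_bucket`` is a hypothetical call):
+#
+#     try:
+#         client.get_bucket('no-such-bucket')
+#     except NotFound:
+#         pass  # handle the missing resource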
diff --git a/venv/Lib/site-packages/google/cloud/firestore.py b/venv/Lib/site-packages/google/cloud/firestore.py
new file mode 100644
index 000000000..3bdb9af56
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore.py
@@ -0,0 +1,69 @@
+# Copyright 2017 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Python idiomatic client for Google Cloud Firestore."""
+
+
+from google.cloud.firestore_v1 import __version__
+from google.cloud.firestore_v1 import ArrayRemove
+from google.cloud.firestore_v1 import ArrayUnion
+from google.cloud.firestore_v1 import Client
+from google.cloud.firestore_v1 import CollectionReference
+from google.cloud.firestore_v1 import DELETE_FIELD
+from google.cloud.firestore_v1 import DocumentReference
+from google.cloud.firestore_v1 import DocumentSnapshot
+from google.cloud.firestore_v1 import enums
+from google.cloud.firestore_v1 import ExistsOption
+from google.cloud.firestore_v1 import GeoPoint
+from google.cloud.firestore_v1 import Increment
+from google.cloud.firestore_v1 import LastUpdateOption
+from google.cloud.firestore_v1 import Maximum
+from google.cloud.firestore_v1 import Minimum
+from google.cloud.firestore_v1 import Query
+from google.cloud.firestore_v1 import ReadAfterWriteError
+from google.cloud.firestore_v1 import SERVER_TIMESTAMP
+from google.cloud.firestore_v1 import Transaction
+from google.cloud.firestore_v1 import transactional
+from google.cloud.firestore_v1 import types
+from google.cloud.firestore_v1 import Watch
+from google.cloud.firestore_v1 import WriteBatch
+from google.cloud.firestore_v1 import WriteOption
+
+
+__all__ = [
+ "__version__",
+ "ArrayRemove",
+ "ArrayUnion",
+ "Client",
+ "CollectionReference",
+ "DELETE_FIELD",
+ "DocumentReference",
+ "DocumentSnapshot",
+ "enums",
+ "ExistsOption",
+ "GeoPoint",
+ "Increment",
+ "LastUpdateOption",
+ "Maximum",
+ "Minimum",
+ "Query",
+ "ReadAfterWriteError",
+ "SERVER_TIMESTAMP",
+ "Transaction",
+ "transactional",
+ "types",
+ "Watch",
+ "WriteBatch",
+ "WriteOption",
+]
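+
+
+# Example (illustrative; assumes default credentials and an existing
+# Firestore database):
+#
+#     from google.cloud import firestore
+#     db = firestore.Client()
+#     db.collection('users').document('alice').set({'active': True})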
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/__init__.py b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/__init__.py
new file mode 100644
index 000000000..dc284bb96
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/__init__.py
@@ -0,0 +1,45 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from __future__ import absolute_import
+import sys
+import warnings
+
+from google.cloud.firestore_admin_v1 import types
+from google.cloud.firestore_admin_v1.gapic import enums
+from google.cloud.firestore_admin_v1.gapic import firestore_admin_client
+
+
+if sys.version_info[:2] == (2, 7):
+ message = (
+ "A future version of this library will drop support for Python 2.7. "
+ "More details about Python 2 support for Google Cloud Client Libraries "
+ "can be found at https://cloud.google.com/python/docs/python2-sunset/"
+ )
+ warnings.warn(message, DeprecationWarning)
+
+
+class FirestoreAdminClient(firestore_admin_client.FirestoreAdminClient):
+ __doc__ = firestore_admin_client.FirestoreAdminClient.__doc__
+ enums = enums
+
+
+__all__ = (
+ "enums",
+ "types",
+ "FirestoreAdminClient",
+)
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..582268de6
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/__pycache__/types.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/__pycache__/types.cpython-36.pyc
new file mode 100644
index 000000000..305c08cf5
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/__pycache__/types.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/gapic/__init__.py b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/gapic/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/gapic/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/gapic/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..2084d428d
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/gapic/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/gapic/__pycache__/enums.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/gapic/__pycache__/enums.cpython-36.pyc
new file mode 100644
index 000000000..01ac0b60b
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/gapic/__pycache__/enums.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/gapic/__pycache__/firestore_admin_client.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/gapic/__pycache__/firestore_admin_client.cpython-36.pyc
new file mode 100644
index 000000000..582fd655d
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/gapic/__pycache__/firestore_admin_client.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/gapic/__pycache__/firestore_admin_client_config.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/gapic/__pycache__/firestore_admin_client_config.cpython-36.pyc
new file mode 100644
index 000000000..fe2f6abe4
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/gapic/__pycache__/firestore_admin_client_config.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/gapic/enums.py b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/gapic/enums.py
new file mode 100644
index 000000000..0f162f179
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/gapic/enums.py
@@ -0,0 +1,142 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Wrappers for protocol buffer enum types."""
+
+import enum
+
+
+class OperationState(enum.IntEnum):
+ """
+ Describes the state of the operation.
+
+ Attributes:
+ OPERATION_STATE_UNSPECIFIED (int): Unspecified.
+ INITIALIZING (int): Request is being prepared for processing.
+ PROCESSING (int): Request is actively being processed.
+        CANCELLING (int): Request is in the process of being cancelled after the
+            user called google.longrunning.Operations.CancelOperation on the operation.
+ FINALIZING (int): Request has been processed and is in its finalization stage.
+ SUCCESSFUL (int): Request has completed successfully.
+ FAILED (int): Request has finished being processed, but encountered an error.
+        CANCELLED (int): Request has finished being cancelled after the user
+            called google.longrunning.Operations.CancelOperation.
+ """
+
+ OPERATION_STATE_UNSPECIFIED = 0
+ INITIALIZING = 1
+ PROCESSING = 2
+ CANCELLING = 3
+ FINALIZING = 4
+ SUCCESSFUL = 5
+ FAILED = 6
+ CANCELLED = 7
+
+
+class FieldOperationMetadata(object):
+ class IndexConfigDelta(object):
+ class ChangeType(enum.IntEnum):
+ """
+ Specifies how the index is changing.
+
+ Attributes:
+ CHANGE_TYPE_UNSPECIFIED (int): The type of change is not specified or known.
+ ADD (int): The single field index is being added.
+ REMOVE (int): The single field index is being removed.
+ """
+
+ CHANGE_TYPE_UNSPECIFIED = 0
+ ADD = 1
+ REMOVE = 2
+
+
+class Index(object):
+ class QueryScope(enum.IntEnum):
+ """
+ Query Scope defines the scope at which a query is run. This is
+ specified on a StructuredQuery's ``from`` field.
+
+ Attributes:
+ QUERY_SCOPE_UNSPECIFIED (int): The query scope is unspecified. Not a valid option.
+ COLLECTION (int): Indexes with a collection query scope specified allow queries
+ against a collection that is the child of a specific document, specified
+ at query time, and that has the collection id specified by the index.
+ COLLECTION_GROUP (int): Indexes with a collection group query scope specified allow queries
+                against all collections that have the collection id specified by the
+ index.
+ """
+
+ QUERY_SCOPE_UNSPECIFIED = 0
+ COLLECTION = 1
+ COLLECTION_GROUP = 2
+
+ class State(enum.IntEnum):
+ """
+ The state of an index. During index creation, an index will be in
+ the ``CREATING`` state. If the index is created successfully, it will
+ transition to the ``READY`` state. If the index creation encounters a
+ problem, the index will transition to the ``NEEDS_REPAIR`` state.
+
+ Attributes:
+ STATE_UNSPECIFIED (int): The state is unspecified.
+ CREATING (int): The index is being created.
+ There is an active long-running operation for the index.
+ The index is updated when writing a document.
+ Some index data may exist.
+ READY (int): The index is ready to be used.
+ The index is updated when writing a document.
+ The index is fully populated from all stored documents it applies to.
+ NEEDS_REPAIR (int): The index was being created, but something went wrong.
+ There is no active long-running operation for the index,
+ and the most recently finished long-running operation failed.
+ The index is not updated when writing a document.
+ Some index data may exist.
+ Use the google.longrunning.Operations API to determine why the operation
+ that last attempted to create this index failed, then re-create the
+ index.
+ """
+
+ STATE_UNSPECIFIED = 0
+ CREATING = 1
+ READY = 2
+ NEEDS_REPAIR = 3
+
+ class IndexField(object):
+ class ArrayConfig(enum.IntEnum):
+ """
+ The supported array value configurations.
+
+ Attributes:
+ ARRAY_CONFIG_UNSPECIFIED (int): The index does not support additional array queries.
+ CONTAINS (int): The index supports array containment queries.
+ """
+
+ ARRAY_CONFIG_UNSPECIFIED = 0
+ CONTAINS = 1
+
+ class Order(enum.IntEnum):
+ """
+ The supported orderings.
+
+ Attributes:
+ ORDER_UNSPECIFIED (int): The ordering is unspecified. Not a valid option.
+ ASCENDING (int): The field is ordered by ascending field value.
+ DESCENDING (int): The field is ordered by descending field value.
+ """
+
+ ORDER_UNSPECIFIED = 0
+ ASCENDING = 1
+ DESCENDING = 2
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/gapic/firestore_admin_client.py b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/gapic/firestore_admin_client.py
new file mode 100644
index 000000000..b009aa2ff
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/gapic/firestore_admin_client.py
@@ -0,0 +1,1089 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Accesses the google.firestore.admin.v1 FirestoreAdmin API."""
+
+import functools
+import pkg_resources
+import warnings
+
+from google.oauth2 import service_account
+import google.api_core.client_options
+import google.api_core.gapic_v1.client_info
+import google.api_core.gapic_v1.config
+import google.api_core.gapic_v1.method
+import google.api_core.gapic_v1.routing_header
+import google.api_core.grpc_helpers
+import google.api_core.operation
+import google.api_core.operations_v1
+import google.api_core.page_iterator
+import google.api_core.path_template
+import grpc
+
+from google.cloud.firestore_admin_v1.gapic import enums
+from google.cloud.firestore_admin_v1.gapic import firestore_admin_client_config
+from google.cloud.firestore_admin_v1.gapic.transports import (
+ firestore_admin_grpc_transport,
+)
+from google.cloud.firestore_admin_v1.proto import field_pb2
+from google.cloud.firestore_admin_v1.proto import firestore_admin_pb2
+from google.cloud.firestore_admin_v1.proto import firestore_admin_pb2_grpc
+from google.cloud.firestore_admin_v1.proto import index_pb2
+from google.cloud.firestore_admin_v1.proto import operation_pb2
+from google.longrunning import operations_pb2
+from google.protobuf import empty_pb2
+from google.protobuf import field_mask_pb2
+
+
+_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(
+ "google-cloud-firestore",
+).version
+
+
+class FirestoreAdminClient(object):
+ """
+ Operations are created by service ``FirestoreAdmin``, but are
+ accessed via service ``google.longrunning.Operations``.
+ """
+
+ SERVICE_ADDRESS = "firestore.googleapis.com:443"
+ """The default address of the service."""
+
+ # The name of the interface for this client. This is the key used to
+ # find the method configuration in the client_config dictionary.
+ _INTERFACE_NAME = "google.firestore.admin.v1.FirestoreAdmin"
+
+ @classmethod
+ def from_service_account_file(cls, filename, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ FirestoreAdminClient: The constructed client.
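+
+        Example (illustrative; the key file path is hypothetical):
+            >>> client = FirestoreAdminClient.from_service_account_file(
+            ...     '/path/to/keyfile.json')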
+ """
+ credentials = service_account.Credentials.from_service_account_file(filename)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ from_service_account_json = from_service_account_file
+
+ @classmethod
+ def collection_group_path(cls, project, database, collection):
+ """Return a fully-qualified collection_group string."""
+ return google.api_core.path_template.expand(
+ "projects/{project}/databases/{database}/collectionGroups/{collection}",
+ project=project,
+ database=database,
+ collection=collection,
+ )
+
+ @classmethod
+ def database_path(cls, project, database):
+ """Return a fully-qualified database string."""
+ return google.api_core.path_template.expand(
+ "projects/{project}/databases/{database}",
+ project=project,
+ database=database,
+ )
+
+ @classmethod
+ def field_path(cls, project, database, collection, field):
+ """Return a fully-qualified field string."""
+ return google.api_core.path_template.expand(
+ "projects/{project}/databases/{database}/collectionGroups/{collection}/fields/{field}",
+ project=project,
+ database=database,
+ collection=collection,
+ field=field,
+ )
+
+ @classmethod
+ def index_path(cls, project, database, collection, index):
+ """Return a fully-qualified index string."""
+ return google.api_core.path_template.expand(
+ "projects/{project}/databases/{database}/collectionGroups/{collection}/indexes/{index}",
+ project=project,
+ database=database,
+ collection=collection,
+ index=index,
+ )
+
+ @classmethod
+ def parent_path(cls, project, database, collection_id):
+ """Return a fully-qualified parent string."""
+ return google.api_core.path_template.expand(
+ "projects/{project}/databases/{database}/collectionGroups/{collection_id}",
+ project=project,
+ database=database,
+ collection_id=collection_id,
+ )
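+
+    # Example (illustrative) of the path helpers above:
+    #
+    #     >>> FirestoreAdminClient.database_path('my-project', '(default)')
+    #     'projects/my-project/databases/(default)'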
+
+ def __init__(
+ self,
+ transport=None,
+ channel=None,
+ credentials=None,
+ client_config=None,
+ client_info=None,
+ client_options=None,
+ ):
+ """Constructor.
+
+ Args:
+ transport (Union[~.FirestoreAdminGrpcTransport,
+ Callable[[~.Credentials, type], ~.FirestoreAdminGrpcTransport]): A transport
+ instance, responsible for actually making the API calls.
+ The default transport uses the gRPC protocol.
+ This argument may also be a callable which returns a
+ transport instance. Callables will be sent the credentials
+ as the first argument and the default transport class as
+ the second argument.
+ channel (grpc.Channel): DEPRECATED. A ``Channel`` instance
+ through which to make calls. This argument is mutually exclusive
+ with ``credentials``; providing both will raise an exception.
+ credentials (google.auth.credentials.Credentials): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is mutually exclusive with providing a
+ transport instance to ``transport``; doing so will raise
+ an exception.
+ client_config (dict): DEPRECATED. A dictionary of call options for
+ each method. If not specified, the default configuration is used.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+ client_options (Union[dict, google.api_core.client_options.ClientOptions]):
+                Client options used to set user options on the client. The API
+                endpoint should be set through ``client_options``.
+ """
+ # Raise deprecation warnings for things we want to go away.
+ if client_config is not None:
+ warnings.warn(
+ "The `client_config` argument is deprecated.",
+ PendingDeprecationWarning,
+ stacklevel=2,
+ )
+ else:
+ client_config = firestore_admin_client_config.config
+
+ if channel:
+ warnings.warn(
+ "The `channel` argument is deprecated; use " "`transport` instead.",
+ PendingDeprecationWarning,
+ stacklevel=2,
+ )
+
+ api_endpoint = self.SERVICE_ADDRESS
+ if client_options:
+            if isinstance(client_options, dict):
+ client_options = google.api_core.client_options.from_dict(
+ client_options
+ )
+ if client_options.api_endpoint:
+ api_endpoint = client_options.api_endpoint
+
+ # Instantiate the transport.
+ # The transport is responsible for handling serialization and
+ # deserialization and actually sending data to the service.
+ if transport:
+ if callable(transport):
+ self.transport = transport(
+ credentials=credentials,
+ default_class=firestore_admin_grpc_transport.FirestoreAdminGrpcTransport,
+ address=api_endpoint,
+ )
+ else:
+ if credentials:
+ raise ValueError(
+ "Received both a transport instance and "
+ "credentials; these are mutually exclusive."
+ )
+ self.transport = transport
+ else:
+ self.transport = firestore_admin_grpc_transport.FirestoreAdminGrpcTransport(
+ address=api_endpoint, channel=channel, credentials=credentials,
+ )
+
+ if client_info is None:
+ client_info = google.api_core.gapic_v1.client_info.ClientInfo(
+ gapic_version=_GAPIC_LIBRARY_VERSION,
+ )
+ else:
+ client_info.gapic_version = _GAPIC_LIBRARY_VERSION
+ self._client_info = client_info
+
+ # Parse out the default settings for retry and timeout for each RPC
+ # from the client configuration.
+ # (Ordinarily, these are the defaults specified in the `*_config.py`
+ # file next to this one.)
+ self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
+ client_config["interfaces"][self._INTERFACE_NAME],
+ )
+
+ # Save a dictionary of cached API call functions.
+ # These are the actual callables which invoke the proper
+ # transport methods, wrapped with `wrap_method` to add retry,
+ # timeout, and the like.
+ self._inner_api_calls = {}
+
+ # Service calls
+ def delete_index(
+ self,
+ name,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Deletes a composite index.
+
+ Example:
+ >>> from google.cloud import firestore_admin_v1
+ >>>
+ >>> client = firestore_admin_v1.FirestoreAdminClient()
+ >>>
+ >>> name = client.index_path('[PROJECT]', '[DATABASE]', '[COLLECTION]', '[INDEX]')
+ >>>
+ >>> client.delete_index(name)
+
+ Args:
+ name (str): Required. A name of the form
+ ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}``
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "delete_index" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "delete_index"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.delete_index,
+ default_retry=self._method_configs["DeleteIndex"].retry,
+ default_timeout=self._method_configs["DeleteIndex"].timeout,
+ client_info=self._client_info,
+ )
+
+ request = firestore_admin_pb2.DeleteIndexRequest(name=name,)
+ if metadata is None:
+ metadata = []
+ metadata = list(metadata)
+ try:
+ routing_header = [("name", name)]
+ except AttributeError:
+ pass
+ else:
+ routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
+ routing_header
+ )
+ metadata.append(routing_metadata)
+
+ self._inner_api_calls["delete_index"](
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
+
+ def update_field(
+ self,
+ field,
+ update_mask=None,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Updates a field configuration. Currently, field updates apply only
+ to single field index configuration. However, calls to
+ ``FirestoreAdmin.UpdateField`` should provide a field mask to avoid
+ changing any configuration that the caller isn't aware of. The field
+ mask should be specified as: ``{ paths: "index_config" }``.
+
+ This call returns a ``google.longrunning.Operation`` which may be used
+ to track the status of the field update. The metadata for the operation
+ will be the type ``FieldOperationMetadata``.
+
+ To configure the default field settings for the database, use the
+ special ``Field`` with resource name:
+ ``projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*``.
+
+ Example:
+ >>> from google.cloud import firestore_admin_v1
+ >>>
+ >>> client = firestore_admin_v1.FirestoreAdminClient()
+ >>>
+ >>> # TODO: Initialize `field`:
+ >>> field = {}
+ >>>
+ >>> response = client.update_field(field)
+ >>>
+ >>> def callback(operation_future):
+ ... # Handle result.
+ ... result = operation_future.result()
+ >>>
+ >>> response.add_done_callback(callback)
+ >>>
+ >>> # Handle metadata.
+ >>> metadata = response.metadata()
+
+ Args:
+ field (Union[dict, ~google.cloud.firestore_admin_v1.types.Field]): Required. The field to be updated.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_admin_v1.types.Field`
+ update_mask (Union[dict, ~google.cloud.firestore_admin_v1.types.FieldMask]): A mask, relative to the field. If specified, only configuration
+ specified by this field_mask will be updated in the field.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_admin_v1.types.FieldMask`
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ A :class:`~google.cloud.firestore_admin_v1.types._OperationFuture` instance.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "update_field" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "update_field"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.update_field,
+ default_retry=self._method_configs["UpdateField"].retry,
+ default_timeout=self._method_configs["UpdateField"].timeout,
+ client_info=self._client_info,
+ )
+
+ request = firestore_admin_pb2.UpdateFieldRequest(
+ field=field, update_mask=update_mask,
+ )
+ if metadata is None:
+ metadata = []
+ metadata = list(metadata)
+ try:
+ routing_header = [("field.name", field.name)]
+ except AttributeError:
+ pass
+ else:
+ routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
+ routing_header
+ )
+ metadata.append(routing_metadata)
+
+ operation = self._inner_api_calls["update_field"](
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
+ return google.api_core.operation.from_gapic(
+ operation,
+ self.transport._operations_client,
+ field_pb2.Field,
+ metadata_type=operation_pb2.FieldOperationMetadata,
+ )
+
+ def create_index(
+ self,
+ parent,
+ index,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Creates a composite index. This returns a
+ ``google.longrunning.Operation`` which may be used to track the status
+ of the creation. The metadata for the operation will be the type
+ ``IndexOperationMetadata``.
+
+ Example:
+ >>> from google.cloud import firestore_admin_v1
+ >>>
+ >>> client = firestore_admin_v1.FirestoreAdminClient()
+ >>>
+ >>> parent = client.collection_group_path('[PROJECT]', '[DATABASE]', '[COLLECTION]')
+ >>>
+ >>> # TODO: Initialize `index`:
+ >>> index = {}
+ >>>
+ >>> response = client.create_index(parent, index)
+ >>>
+ >>> def callback(operation_future):
+ ... # Handle result.
+ ... result = operation_future.result()
+ >>>
+ >>> response.add_done_callback(callback)
+ >>>
+ >>> # Handle metadata.
+ >>> metadata = response.metadata()
+
+ Args:
+ parent (str): Required. A parent name of the form
+ ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}``
+ index (Union[dict, ~google.cloud.firestore_admin_v1.types.Index]): Required. The composite index to create.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_admin_v1.types.Index`
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ A :class:`~google.cloud.firestore_admin_v1.types._OperationFuture` instance.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "create_index" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "create_index"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.create_index,
+ default_retry=self._method_configs["CreateIndex"].retry,
+ default_timeout=self._method_configs["CreateIndex"].timeout,
+ client_info=self._client_info,
+ )
+
+ request = firestore_admin_pb2.CreateIndexRequest(parent=parent, index=index,)
+ if metadata is None:
+ metadata = []
+ metadata = list(metadata)
+ try:
+ routing_header = [("parent", parent)]
+ except AttributeError:
+ pass
+ else:
+ routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
+ routing_header
+ )
+ metadata.append(routing_metadata)
+
+ operation = self._inner_api_calls["create_index"](
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
+ return google.api_core.operation.from_gapic(
+ operation,
+ self.transport._operations_client,
+ index_pb2.Index,
+ metadata_type=operation_pb2.IndexOperationMetadata,
+ )
+
+ def list_indexes(
+ self,
+ parent,
+ filter_=None,
+ page_size=None,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Lists composite indexes.
+
+ Example:
+ >>> from google.cloud import firestore_admin_v1
+ >>>
+ >>> client = firestore_admin_v1.FirestoreAdminClient()
+ >>>
+ >>> parent = client.collection_group_path('[PROJECT]', '[DATABASE]', '[COLLECTION]')
+ >>>
+ >>> # Iterate over all results
+ >>> for element in client.list_indexes(parent):
+ ... # process element
+ ... pass
+ >>>
+ >>>
+ >>> # Alternatively:
+ >>>
+ >>> # Iterate over results one page at a time
+ >>> for page in client.list_indexes(parent).pages:
+ ... for element in page:
+ ... # process element
+ ... pass
+
+ Args:
+ parent (str): Required. A parent name of the form
+ ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}``
+ filter_ (str): The filter to apply to list results.
+ page_size (int): The maximum number of resources contained in the
+ underlying API response. If page streaming is performed per-
+ resource, this parameter does not affect the return value. If page
+ streaming is performed per-page, this determines the maximum number
+ of resources in a page.
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ A :class:`~google.api_core.page_iterator.PageIterator` instance.
+ An iterable of :class:`~google.cloud.firestore_admin_v1.types.Index` instances.
+ You can also iterate over the pages of the response
+ using its `pages` property.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "list_indexes" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "list_indexes"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.list_indexes,
+ default_retry=self._method_configs["ListIndexes"].retry,
+ default_timeout=self._method_configs["ListIndexes"].timeout,
+ client_info=self._client_info,
+ )
+
+ request = firestore_admin_pb2.ListIndexesRequest(
+ parent=parent, filter=filter_, page_size=page_size,
+ )
+ if metadata is None:
+ metadata = []
+ metadata = list(metadata)
+ try:
+ routing_header = [("parent", parent)]
+ except AttributeError:
+ pass
+ else:
+ routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
+ routing_header
+ )
+ metadata.append(routing_metadata)
+
+ iterator = google.api_core.page_iterator.GRPCIterator(
+ client=None,
+ method=functools.partial(
+ self._inner_api_calls["list_indexes"],
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ ),
+ request=request,
+ items_field="indexes",
+ request_token_field="page_token",
+ response_token_field="next_page_token",
+ )
+ return iterator
+
+ def get_index(
+ self,
+ name,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Gets a composite index.
+
+ Example:
+ >>> from google.cloud import firestore_admin_v1
+ >>>
+ >>> client = firestore_admin_v1.FirestoreAdminClient()
+ >>>
+ >>> name = client.index_path('[PROJECT]', '[DATABASE]', '[COLLECTION]', '[INDEX]')
+ >>>
+ >>> response = client.get_index(name)
+
+ Args:
+ name (str): Required. A name of the form
+ ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}``
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ A :class:`~google.cloud.firestore_admin_v1.types.Index` instance.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "get_index" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "get_index"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.get_index,
+ default_retry=self._method_configs["GetIndex"].retry,
+ default_timeout=self._method_configs["GetIndex"].timeout,
+ client_info=self._client_info,
+ )
+
+ request = firestore_admin_pb2.GetIndexRequest(name=name,)
+ if metadata is None:
+ metadata = []
+ metadata = list(metadata)
+ try:
+ routing_header = [("name", name)]
+ except AttributeError:
+ pass
+ else:
+ routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
+ routing_header
+ )
+ metadata.append(routing_metadata)
+
+ return self._inner_api_calls["get_index"](
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
+
+ def get_field(
+ self,
+ name,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Gets the metadata and configuration for a Field.
+
+ Example:
+ >>> from google.cloud import firestore_admin_v1
+ >>>
+ >>> client = firestore_admin_v1.FirestoreAdminClient()
+ >>>
+ >>> name = client.field_path('[PROJECT]', '[DATABASE]', '[COLLECTION]', '[FIELD]')
+ >>>
+ >>> response = client.get_field(name)
+
+ Args:
+ name (str): Required. A name of the form
+ ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_id}``
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ A :class:`~google.cloud.firestore_admin_v1.types.Field` instance.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "get_field" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "get_field"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.get_field,
+ default_retry=self._method_configs["GetField"].retry,
+ default_timeout=self._method_configs["GetField"].timeout,
+ client_info=self._client_info,
+ )
+
+ request = firestore_admin_pb2.GetFieldRequest(name=name,)
+ if metadata is None:
+ metadata = []
+ metadata = list(metadata)
+ try:
+ routing_header = [("name", name)]
+ except AttributeError:
+ pass
+ else:
+ routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
+ routing_header
+ )
+ metadata.append(routing_metadata)
+
+ return self._inner_api_calls["get_field"](
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
+
+ def list_fields(
+ self,
+ parent,
+ filter_=None,
+ page_size=None,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Lists the field configuration and metadata for this database.
+
+ Currently, ``FirestoreAdmin.ListFields`` only supports listing fields
+ that have been explicitly overridden. To issue this query, call
+ ``FirestoreAdmin.ListFields`` with the filter set to
+ ``indexConfig.usesAncestorConfig:false``.
+
+ Example:
+ >>> from google.cloud import firestore_admin_v1
+ >>>
+ >>> client = firestore_admin_v1.FirestoreAdminClient()
+ >>>
+ >>> parent = client.collection_group_path('[PROJECT]', '[DATABASE]', '[COLLECTION]')
+ >>>
+ >>> # Iterate over all results
+ >>> for element in client.list_fields(parent):
+ ... # process element
+ ... pass
+ >>>
+ >>>
+ >>> # Alternatively:
+ >>>
+ >>> # Iterate over results one page at a time
+ >>> for page in client.list_fields(parent).pages:
+ ... for element in page:
+ ... # process element
+ ... pass
+
+ Args:
+ parent (str): Required. A parent name of the form
+ ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}``
+ filter_ (str): The filter to apply to list results. Currently,
+ ``FirestoreAdmin.ListFields`` only supports listing fields that have
+ been explicitly overridden. To issue this query, call
+ ``FirestoreAdmin.ListFields`` with the filter set to
+ ``indexConfig.usesAncestorConfig:false``.
+ page_size (int): The maximum number of resources contained in the
+ underlying API response. If page streaming is performed per-
+ resource, this parameter does not affect the return value. If page
+ streaming is performed per-page, this determines the maximum number
+ of resources in a page.
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ A :class:`~google.api_core.page_iterator.PageIterator` instance.
+ An iterable of :class:`~google.cloud.firestore_admin_v1.types.Field` instances.
+ You can also iterate over the pages of the response
+ using its `pages` property.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "list_fields" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "list_fields"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.list_fields,
+ default_retry=self._method_configs["ListFields"].retry,
+ default_timeout=self._method_configs["ListFields"].timeout,
+ client_info=self._client_info,
+ )
+
+ request = firestore_admin_pb2.ListFieldsRequest(
+ parent=parent, filter=filter_, page_size=page_size,
+ )
+ if metadata is None:
+ metadata = []
+ metadata = list(metadata)
+ try:
+ routing_header = [("parent", parent)]
+ except AttributeError:
+ pass
+ else:
+ routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
+ routing_header
+ )
+ metadata.append(routing_metadata)
+
+ iterator = google.api_core.page_iterator.GRPCIterator(
+ client=None,
+ method=functools.partial(
+ self._inner_api_calls["list_fields"],
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ ),
+ request=request,
+ items_field="fields",
+ request_token_field="page_token",
+ response_token_field="next_page_token",
+ )
+ return iterator
+
+ def export_documents(
+ self,
+ name,
+ collection_ids=None,
+ output_uri_prefix=None,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Exports a copy of all or a subset of documents from Google Cloud Firestore
+ to another storage system, such as Google Cloud Storage. Recent updates to
+ documents may not be reflected in the export. The export occurs in the
+ background and its progress can be monitored and managed via the
+ Operation resource that is created. The output of an export may only be
+ used once the associated operation is done. If an export operation is
+ cancelled before completion it may leave partial data behind in Google
+ Cloud Storage.
+
+ Example:
+ >>> from google.cloud import firestore_admin_v1
+ >>>
+ >>> client = firestore_admin_v1.FirestoreAdminClient()
+ >>>
+ >>> name = client.database_path('[PROJECT]', '[DATABASE]')
+ >>>
+ >>> response = client.export_documents(name)
+ >>>
+ >>> def callback(operation_future):
+ ... # Handle result.
+ ... result = operation_future.result()
+ >>>
+ >>> response.add_done_callback(callback)
+ >>>
+ >>> # Handle metadata.
+ >>> metadata = response.metadata()
+
+ Args:
+ name (str): Required. Database to export. Should be of the form:
+ ``projects/{project_id}/databases/{database_id}``.
+ collection_ids (list[str]): Which collection ids to export. Unspecified means all collections.
+ output_uri_prefix (str): The output URI. Currently only supports Google Cloud Storage URIs of
+ the form: ``gs://BUCKET_NAME[/NAMESPACE_PATH]``, where ``BUCKET_NAME``
+ is the name of the Google Cloud Storage bucket and ``NAMESPACE_PATH`` is
+ an optional Google Cloud Storage namespace path. When choosing a name,
+ be sure to consider Google Cloud Storage naming guidelines:
+ https://cloud.google.com/storage/docs/naming. If the URI is a bucket
+ (without a namespace path), a prefix will be generated based on the
+ start time.
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+            A :class:`~google.api_core.operation.Operation` instance.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "export_documents" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "export_documents"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.export_documents,
+ default_retry=self._method_configs["ExportDocuments"].retry,
+ default_timeout=self._method_configs["ExportDocuments"].timeout,
+ client_info=self._client_info,
+ )
+
+ request = firestore_admin_pb2.ExportDocumentsRequest(
+ name=name,
+ collection_ids=collection_ids,
+ output_uri_prefix=output_uri_prefix,
+ )
+ if metadata is None:
+ metadata = []
+ metadata = list(metadata)
+ try:
+ routing_header = [("name", name)]
+ except AttributeError:
+ pass
+ else:
+ routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
+ routing_header
+ )
+ metadata.append(routing_metadata)
+
+ operation = self._inner_api_calls["export_documents"](
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
+ return google.api_core.operation.from_gapic(
+ operation,
+ self.transport._operations_client,
+ operation_pb2.ExportDocumentsResponse,
+ metadata_type=operation_pb2.ExportDocumentsMetadata,
+ )
+
+ def import_documents(
+ self,
+ name,
+ collection_ids=None,
+ input_uri_prefix=None,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Imports documents into Google Cloud Firestore. Existing documents with the
+ same name are overwritten. The import occurs in the background and its
+ progress can be monitored and managed via the Operation resource that is
+ created. If an ImportDocuments operation is cancelled, it is possible
+ that a subset of the data has already been imported to Cloud Firestore.
+
+ Example:
+ >>> from google.cloud import firestore_admin_v1
+ >>>
+ >>> client = firestore_admin_v1.FirestoreAdminClient()
+ >>>
+ >>> name = client.database_path('[PROJECT]', '[DATABASE]')
+ >>>
+ >>> response = client.import_documents(name)
+ >>>
+ >>> def callback(operation_future):
+ ... # Handle result.
+ ... result = operation_future.result()
+ >>>
+ >>> response.add_done_callback(callback)
+ >>>
+ >>> # Handle metadata.
+ >>> metadata = response.metadata()
+
+ Args:
+ name (str): Required. Database to import into. Should be of the form:
+ ``projects/{project_id}/databases/{database_id}``.
+ collection_ids (list[str]): Which collection ids to import. Unspecified means all collections included
+ in the import.
+ input_uri_prefix (str): Location of the exported files. This must match the
+ output_uri_prefix of an ExportDocumentsResponse from an export that has
+ completed successfully. See:
+ ``google.firestore.admin.v1.ExportDocumentsResponse.output_uri_prefix``.
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+            A :class:`~google.api_core.operation.Operation` instance.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "import_documents" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "import_documents"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.import_documents,
+ default_retry=self._method_configs["ImportDocuments"].retry,
+ default_timeout=self._method_configs["ImportDocuments"].timeout,
+ client_info=self._client_info,
+ )
+
+ request = firestore_admin_pb2.ImportDocumentsRequest(
+ name=name, collection_ids=collection_ids, input_uri_prefix=input_uri_prefix,
+ )
+ if metadata is None:
+ metadata = []
+ metadata = list(metadata)
+ try:
+ routing_header = [("name", name)]
+ except AttributeError:
+ pass
+ else:
+ routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
+ routing_header
+ )
+ metadata.append(routing_metadata)
+
+ operation = self._inner_api_calls["import_documents"](
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
+ return google.api_core.operation.from_gapic(
+ operation,
+ self.transport._operations_client,
+ empty_pb2.Empty,
+ metadata_type=operation_pb2.ImportDocumentsMetadata,
+ )
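
A minimal usage sketch of the client methods added above, assuming application-default credentials are available; `my-project`, `(default)`, `users`, and the export bucket are placeholder ids, not values from this change:

from google.cloud import firestore_admin_v1

client = firestore_admin_v1.FirestoreAdminClient()

# list_indexes returns a page iterator; iterating it follows
# next_page_token transparently.
parent = client.collection_group_path("my-project", "(default)", "users")
for index in client.list_indexes(parent):
    print(index.name)

# export_documents returns a long-running operation; result() blocks
# until the backend finishes and yields an ExportDocumentsResponse.
database = client.database_path("my-project", "(default)")
operation = client.export_documents(
    database, output_uri_prefix="gs://my-export-bucket"  # placeholder bucket
)
print(operation.result().output_uri_prefix)
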
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/gapic/firestore_admin_client_config.py b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/gapic/firestore_admin_client_config.py
new file mode 100644
index 000000000..c9d1077c5
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/gapic/firestore_admin_client_config.py
@@ -0,0 +1,69 @@
+config = {
+ "interfaces": {
+ "google.firestore.admin.v1.FirestoreAdmin": {
+ "retry_codes": {
+ "idempotent": ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"],
+ "non_idempotent": [],
+ "idempotent2": ["DEADLINE_EXCEEDED", "UNAVAILABLE"],
+ },
+ "retry_params": {
+ "default": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 60000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 60000,
+ "total_timeout_millis": 600000,
+ }
+ },
+ "methods": {
+ "DeleteIndex": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "default",
+ },
+ "UpdateField": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "non_idempotent",
+ "retry_params_name": "default",
+ },
+ "CreateIndex": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "non_idempotent",
+ "retry_params_name": "default",
+ },
+ "ListIndexes": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "idempotent2",
+ "retry_params_name": "default",
+ },
+ "GetIndex": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "idempotent2",
+ "retry_params_name": "default",
+ },
+ "GetField": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "idempotent2",
+ "retry_params_name": "default",
+ },
+ "ListFields": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "idempotent2",
+ "retry_params_name": "default",
+ },
+ "ExportDocuments": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "non_idempotent",
+ "retry_params_name": "default",
+ },
+ "ImportDocuments": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "non_idempotent",
+ "retry_params_name": "default",
+ },
+ },
+ }
+ }
+}
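
This config is what `wrap_method` consumes to pick default retry codes and backoff parameters per RPC; both can still be overridden per call. A sketch, assuming the standard `google.api_core` retry helpers, that reproduces the `idempotent2` codes and `default` retry_params explicitly (millisecond values in the config become seconds here; the resource ids are placeholders):

from google.api_core import exceptions, retry
from google.cloud import firestore_admin_v1

custom_retry = retry.Retry(
    predicate=retry.if_exception_type(
        exceptions.DeadlineExceeded,    # DEADLINE_EXCEEDED
        exceptions.ServiceUnavailable,  # UNAVAILABLE
    ),
    initial=0.1,     # initial_retry_delay_millis = 100
    multiplier=1.3,  # retry_delay_multiplier = 1.3
    maximum=60.0,    # max_retry_delay_millis = 60000
    deadline=600.0,  # total_timeout_millis = 600000
)

client = firestore_admin_v1.FirestoreAdminClient()
parent = client.collection_group_path("my-project", "(default)", "users")
indexes = list(client.list_indexes(parent, retry=custom_retry, timeout=60.0))
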
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/gapic/transports/__init__.py b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/gapic/transports/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/gapic/transports/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/gapic/transports/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..ee99ebc49
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/gapic/transports/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/gapic/transports/__pycache__/firestore_admin_grpc_transport.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/gapic/transports/__pycache__/firestore_admin_grpc_transport.cpython-36.pyc
new file mode 100644
index 000000000..edda414fb
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/gapic/transports/__pycache__/firestore_admin_grpc_transport.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/gapic/transports/firestore_admin_grpc_transport.py b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/gapic/transports/firestore_admin_grpc_transport.py
new file mode 100644
index 000000000..ff9e62f44
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/gapic/transports/firestore_admin_grpc_transport.py
@@ -0,0 +1,269 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import google.api_core.grpc_helpers
+import google.api_core.operations_v1
+
+from google.cloud.firestore_admin_v1.proto import firestore_admin_pb2_grpc
+
+
+class FirestoreAdminGrpcTransport(object):
+ """gRPC transport class providing stubs for
+ google.firestore.admin.v1 FirestoreAdmin API.
+
+ The transport provides access to the raw gRPC stubs,
+ which can be used to take advantage of advanced
+ features of gRPC.
+ """
+
+ # The scopes needed to make gRPC calls to all of the methods defined
+ # in this service.
+ _OAUTH_SCOPES = (
+ "https://www.googleapis.com/auth/cloud-platform",
+ "https://www.googleapis.com/auth/datastore",
+ )
+
+ def __init__(
+ self, channel=None, credentials=None, address="firestore.googleapis.com:443"
+ ):
+ """Instantiate the transport class.
+
+ Args:
+ channel (grpc.Channel): A ``Channel`` instance through
+ which to make calls. This argument is mutually exclusive
+ with ``credentials``; providing both will raise an exception.
+ credentials (google.auth.credentials.Credentials): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ address (str): The address where the service is hosted.
+ """
+ # If both `channel` and `credentials` are specified, raise an
+ # exception (channels come with credentials baked in already).
+ if channel is not None and credentials is not None:
+ raise ValueError(
+ "The `channel` and `credentials` arguments are mutually " "exclusive.",
+ )
+
+ # Create the channel.
+ if channel is None:
+ channel = self.create_channel(
+ address=address,
+ credentials=credentials,
+ options={
+ "grpc.max_send_message_length": -1,
+ "grpc.max_receive_message_length": -1,
+ }.items(),
+ )
+
+ self._channel = channel
+
+ # gRPC uses objects called "stubs" that are bound to the
+ # channel and provide a basic method for each RPC.
+ self._stubs = {
+ "firestore_admin_stub": firestore_admin_pb2_grpc.FirestoreAdminStub(
+ channel
+ ),
+ }
+
+ # Because this API includes a method that returns a
+ # long-running operation (proto: google.longrunning.Operation),
+ # instantiate an LRO client.
+ self._operations_client = google.api_core.operations_v1.OperationsClient(
+ channel
+ )
+
+ @classmethod
+ def create_channel(
+ cls, address="firestore.googleapis.com:443", credentials=None, **kwargs
+ ):
+ """Create and return a gRPC channel object.
+
+ Args:
+ address (str): The host for the channel to use.
+ credentials (~.Credentials): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ kwargs (dict): Keyword arguments, which are passed to the
+ channel creation.
+
+ Returns:
+ grpc.Channel: A gRPC channel object.
+ """
+ return google.api_core.grpc_helpers.create_channel(
+ address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs
+ )
+
+ @property
+ def channel(self):
+ """The gRPC channel used by the transport.
+
+ Returns:
+ grpc.Channel: A gRPC channel object.
+ """
+ return self._channel
+
+ @property
+ def delete_index(self):
+ """Return the gRPC stub for :meth:`FirestoreAdminClient.delete_index`.
+
+ Deletes a composite index.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["firestore_admin_stub"].DeleteIndex
+
+ @property
+ def update_field(self):
+ """Return the gRPC stub for :meth:`FirestoreAdminClient.update_field`.
+
+ Updates a field configuration. Currently, field updates apply only
+ to single field index configuration. However, calls to
+ ``FirestoreAdmin.UpdateField`` should provide a field mask to avoid
+ changing any configuration that the caller isn't aware of. The field
+ mask should be specified as: ``{ paths: "index_config" }``.
+
+ This call returns a ``google.longrunning.Operation`` which may be used
+ to track the status of the field update. The metadata for the operation
+ will be the type ``FieldOperationMetadata``.
+
+ To configure the default field settings for the database, use the
+ special ``Field`` with resource name:
+ ``projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*``.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["firestore_admin_stub"].UpdateField
+
+ @property
+ def create_index(self):
+ """Return the gRPC stub for :meth:`FirestoreAdminClient.create_index`.
+
+ Creates a composite index. This returns a
+ ``google.longrunning.Operation`` which may be used to track the status
+ of the creation. The metadata for the operation will be the type
+ ``IndexOperationMetadata``.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["firestore_admin_stub"].CreateIndex
+
+ @property
+ def list_indexes(self):
+ """Return the gRPC stub for :meth:`FirestoreAdminClient.list_indexes`.
+
+ Lists composite indexes.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["firestore_admin_stub"].ListIndexes
+
+ @property
+ def get_index(self):
+ """Return the gRPC stub for :meth:`FirestoreAdminClient.get_index`.
+
+ Gets a composite index.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["firestore_admin_stub"].GetIndex
+
+ @property
+ def get_field(self):
+ """Return the gRPC stub for :meth:`FirestoreAdminClient.get_field`.
+
+ Gets the metadata and configuration for a Field.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["firestore_admin_stub"].GetField
+
+ @property
+ def list_fields(self):
+ """Return the gRPC stub for :meth:`FirestoreAdminClient.list_fields`.
+
+ Lists the field configuration and metadata for this database.
+
+ Currently, ``FirestoreAdmin.ListFields`` only supports listing fields
+ that have been explicitly overridden. To issue this query, call
+ ``FirestoreAdmin.ListFields`` with the filter set to
+ ``indexConfig.usesAncestorConfig:false``.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["firestore_admin_stub"].ListFields
+
+ @property
+ def export_documents(self):
+ """Return the gRPC stub for :meth:`FirestoreAdminClient.export_documents`.
+
+ Exports a copy of all or a subset of documents from Google Cloud Firestore
+ to another storage system, such as Google Cloud Storage. Recent updates to
+ documents may not be reflected in the export. The export occurs in the
+ background and its progress can be monitored and managed via the
+ Operation resource that is created. The output of an export may only be
+ used once the associated operation is done. If an export operation is
+ cancelled before completion it may leave partial data behind in Google
+ Cloud Storage.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["firestore_admin_stub"].ExportDocuments
+
+ @property
+ def import_documents(self):
+ """Return the gRPC stub for :meth:`FirestoreAdminClient.import_documents`.
+
+ Imports documents into Google Cloud Firestore. Existing documents with the
+ same name are overwritten. The import occurs in the background and its
+ progress can be monitored and managed via the Operation resource that is
+ created. If an ImportDocuments operation is cancelled, it is possible
+ that a subset of the data has already been imported to Cloud Firestore.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["firestore_admin_stub"].ImportDocuments
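
The transport can also be constructed up front, which is the hook for custom channel options or credentials. A sketch, assuming the generated client accepts a prebuilt transport via a `transport` keyword (the usual pattern for GAPIC clients of this generation, not shown in this diff):

from google.cloud import firestore_admin_v1
from google.cloud.firestore_admin_v1.gapic.transports import (
    firestore_admin_grpc_transport,
)

# Build the channel explicitly so gRPC options can be customized,
# then wrap it in the transport and hand that to the client.
channel = firestore_admin_grpc_transport.FirestoreAdminGrpcTransport.create_channel(
    address="firestore.googleapis.com:443"
)
transport = firestore_admin_grpc_transport.FirestoreAdminGrpcTransport(
    channel=channel
)
client = firestore_admin_v1.FirestoreAdminClient(transport=transport)
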
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/__init__.py b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..1391e9045
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/__pycache__/field_pb2.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/__pycache__/field_pb2.cpython-36.pyc
new file mode 100644
index 000000000..e9da6410a
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/__pycache__/field_pb2.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/__pycache__/field_pb2_grpc.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/__pycache__/field_pb2_grpc.cpython-36.pyc
new file mode 100644
index 000000000..6de28811f
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/__pycache__/field_pb2_grpc.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/__pycache__/firestore_admin_pb2.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/__pycache__/firestore_admin_pb2.cpython-36.pyc
new file mode 100644
index 000000000..738dbf2f9
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/__pycache__/firestore_admin_pb2.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/__pycache__/firestore_admin_pb2_grpc.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/__pycache__/firestore_admin_pb2_grpc.cpython-36.pyc
new file mode 100644
index 000000000..7a9756746
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/__pycache__/firestore_admin_pb2_grpc.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/__pycache__/index_pb2.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/__pycache__/index_pb2.cpython-36.pyc
new file mode 100644
index 000000000..89b2b8829
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/__pycache__/index_pb2.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/__pycache__/index_pb2_grpc.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/__pycache__/index_pb2_grpc.cpython-36.pyc
new file mode 100644
index 000000000..ace3b9df5
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/__pycache__/index_pb2_grpc.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/__pycache__/location_pb2.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/__pycache__/location_pb2.cpython-36.pyc
new file mode 100644
index 000000000..d370dc4ea
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/__pycache__/location_pb2.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/__pycache__/location_pb2_grpc.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/__pycache__/location_pb2_grpc.cpython-36.pyc
new file mode 100644
index 000000000..174343f8b
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/__pycache__/location_pb2_grpc.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/__pycache__/operation_pb2.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/__pycache__/operation_pb2.cpython-36.pyc
new file mode 100644
index 000000000..be3ab4d5a
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/__pycache__/operation_pb2.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/__pycache__/operation_pb2_grpc.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/__pycache__/operation_pb2_grpc.cpython-36.pyc
new file mode 100644
index 000000000..f3653c3f6
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/__pycache__/operation_pb2_grpc.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/field.proto b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/field.proto
new file mode 100644
index 000000000..1b9b99cf3
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/field.proto
@@ -0,0 +1,100 @@
+// Copyright 2019 Google LLC.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+syntax = "proto3";
+
+package google.firestore.admin.v1;
+
+import "google/api/resource.proto";
+import "google/firestore/admin/v1/index.proto";
+import "google/api/annotations.proto";
+
+option csharp_namespace = "Google.Cloud.Firestore.Admin.V1";
+option go_package = "google.golang.org/genproto/googleapis/firestore/admin/v1;admin";
+option java_multiple_files = true;
+option java_outer_classname = "FieldProto";
+option java_package = "com.google.firestore.admin.v1";
+option objc_class_prefix = "GCFS";
+option php_namespace = "Google\\Cloud\\Firestore\\Admin\\V1";
+option ruby_package = "Google::Cloud::Firestore::Admin::V1";
+
+// Represents a single field in the database.
+//
+// Fields are grouped by their "Collection Group", which represents all
+// collections in the database with the same id.
+message Field {
+ option (google.api.resource) = {
+ type: "firestore.googleapis.com/Field"
+ pattern: "projects/{project}/databases/{database}/collectionGroups/{collection}/fields/{field}"
+ };
+
+ // The index configuration for this field.
+ message IndexConfig {
+ // The indexes supported for this field.
+ repeated Index indexes = 1;
+
+ // Output only. When true, the `Field`'s index configuration is set from the
+ // configuration specified by the `ancestor_field`.
+ // When false, the `Field`'s index configuration is defined explicitly.
+ bool uses_ancestor_config = 2;
+
+ // Output only. Specifies the resource name of the `Field` from which this field's
+ // index configuration is set (when `uses_ancestor_config` is true),
+ // or from which it *would* be set if this field had no index configuration
+ // (when `uses_ancestor_config` is false).
+ string ancestor_field = 3;
+
+    // Output only.
+ // When true, the `Field`'s index configuration is in the process of being
+ // reverted. Once complete, the index config will transition to the same
+ // state as the field specified by `ancestor_field`, at which point
+ // `uses_ancestor_config` will be `true` and `reverting` will be `false`.
+ bool reverting = 4;
+ }
+
+ // A field name of the form
+ // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_path}`
+ //
+ // A field path may be a simple field name, e.g. `address` or a path to fields
+  // within map_value, e.g. `address.city`,
+ // or a special field path. The only valid special field is `*`, which
+ // represents any field.
+ //
+ // Field paths may be quoted using ` (backtick). The only character that needs
+ // to be escaped within a quoted field path is the backtick character itself,
+ // escaped using a backslash. Special characters in field paths that
+ // must be quoted include: `*`, `.`,
+ // ``` (backtick), `[`, `]`, as well as any ascii symbolic characters.
+ //
+ // Examples:
+ // (Note: Comments here are written in markdown syntax, so there is an
+ // additional layer of backticks to represent a code block)
+ // `\`address.city\`` represents a field named `address.city`, not the map key
+ // `city` in the field `address`.
+ // `\`*\`` represents a field named `*`, not any field.
+ //
+ // A special `Field` contains the default indexing settings for all fields.
+ // This field's resource name is:
+ // `projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*`
+ // Indexes defined on this `Field` will be applied to all fields which do not
+ // have their own `Field` index configuration.
+ string name = 1;
+
+ // The index configuration for this field. If unset, field indexing will
+ // revert to the configuration defined by the `ancestor_field`. To
+ // explicitly remove all indexes for this field, specify an index config
+ // with an empty list of indexes.
+ IndexConfig index_config = 2;
+}
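
Per the comments above, sending an `index_config` with an empty index list explicitly removes all indexes for a field, and updates should carry a field mask of `{ paths: "index_config" }` so nothing else is touched. A sketch against the generated admin client, with placeholder resource ids:

from google.cloud import firestore_admin_v1
from google.protobuf import field_mask_pb2

client = firestore_admin_v1.FirestoreAdminClient()

# Disable all single-field indexes for one field; the mask keeps the
# update from changing configuration the caller isn't aware of.
field = {
    "name": client.field_path("my-project", "(default)", "posts", "description"),
    "index_config": {"indexes": []},
}
operation = client.update_field(
    field, update_mask=field_mask_pb2.FieldMask(paths=["index_config"])
)
operation.result()  # long-running; its metadata is FieldOperationMetadata
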
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/field_pb2.py b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/field_pb2.py
new file mode 100644
index 000000000..667863351
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/field_pb2.py
@@ -0,0 +1,285 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/cloud/firestore_admin_v1/proto/field.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.api import resource_pb2 as google_dot_api_dot_resource__pb2
+from google.cloud.firestore_admin_v1.proto import (
+ index_pb2 as google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_index__pb2,
+)
+from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name="google/cloud/firestore_admin_v1/proto/field.proto",
+ package="google.firestore.admin.v1",
+ syntax="proto3",
+ serialized_options=b"\n\035com.google.firestore.admin.v1B\nFieldProtoP\001Z>google.golang.org/genproto/googleapis/firestore/admin/v1;admin\242\002\004GCFS\252\002\037Google.Cloud.Firestore.Admin.V1\312\002\037Google\\Cloud\\Firestore\\Admin\\V1\352\002#Google::Cloud::Firestore::Admin::V1",
+ create_key=_descriptor._internal_create_key,
+ serialized_pb=b'\n1google/cloud/firestore_admin_v1/proto/field.proto\x12\x19google.firestore.admin.v1\x1a\x19google/api/resource.proto\x1a\x31google/cloud/firestore_admin_v1/proto/index.proto\x1a\x1cgoogle/api/annotations.proto"\xe0\x02\n\x05\x46ield\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x42\n\x0cindex_config\x18\x02 \x01(\x0b\x32,.google.firestore.admin.v1.Field.IndexConfig\x1a\x89\x01\n\x0bIndexConfig\x12\x31\n\x07indexes\x18\x01 \x03(\x0b\x32 .google.firestore.admin.v1.Index\x12\x1c\n\x14uses_ancestor_config\x18\x02 \x01(\x08\x12\x16\n\x0e\x61ncestor_field\x18\x03 \x01(\t\x12\x11\n\treverting\x18\x04 \x01(\x08:y\xea\x41v\n\x1e\x66irestore.googleapis.com/Field\x12Tprojects/{project}/databases/{database}/collectionGroups/{collection}/fields/{field}B\xde\x01\n\x1d\x63om.google.firestore.admin.v1B\nFieldProtoP\x01Z>google.golang.org/genproto/googleapis/firestore/admin/v1;admin\xa2\x02\x04GCFS\xaa\x02\x1fGoogle.Cloud.Firestore.Admin.V1\xca\x02\x1fGoogle\\Cloud\\Firestore\\Admin\\V1\xea\x02#Google::Cloud::Firestore::Admin::V1b\x06proto3',
+ dependencies=[
+ google_dot_api_dot_resource__pb2.DESCRIPTOR,
+ google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_index__pb2.DESCRIPTOR,
+ google_dot_api_dot_annotations__pb2.DESCRIPTOR,
+ ],
+)
+
+
+_FIELD_INDEXCONFIG = _descriptor.Descriptor(
+ name="IndexConfig",
+ full_name="google.firestore.admin.v1.Field.IndexConfig",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="indexes",
+ full_name="google.firestore.admin.v1.Field.IndexConfig.indexes",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="uses_ancestor_config",
+ full_name="google.firestore.admin.v1.Field.IndexConfig.uses_ancestor_config",
+ index=1,
+ number=2,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="ancestor_field",
+ full_name="google.firestore.admin.v1.Field.IndexConfig.ancestor_field",
+ index=2,
+ number=3,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="reverting",
+ full_name="google.firestore.admin.v1.Field.IndexConfig.reverting",
+ index=3,
+ number=4,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=281,
+ serialized_end=418,
+)
+
+_FIELD = _descriptor.Descriptor(
+ name="Field",
+ full_name="google.firestore.admin.v1.Field",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="name",
+ full_name="google.firestore.admin.v1.Field.name",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="index_config",
+ full_name="google.firestore.admin.v1.Field.index_config",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[_FIELD_INDEXCONFIG,],
+ enum_types=[],
+ serialized_options=b"\352Av\n\036firestore.googleapis.com/Field\022Tprojects/{project}/databases/{database}/collectionGroups/{collection}/fields/{field}",
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=189,
+ serialized_end=541,
+)
+
+_FIELD_INDEXCONFIG.fields_by_name[
+ "indexes"
+].message_type = (
+ google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_index__pb2._INDEX
+)
+_FIELD_INDEXCONFIG.containing_type = _FIELD
+_FIELD.fields_by_name["index_config"].message_type = _FIELD_INDEXCONFIG
+DESCRIPTOR.message_types_by_name["Field"] = _FIELD
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+Field = _reflection.GeneratedProtocolMessageType(
+ "Field",
+ (_message.Message,),
+ {
+ "IndexConfig": _reflection.GeneratedProtocolMessageType(
+ "IndexConfig",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _FIELD_INDEXCONFIG,
+ "__module__": "google.cloud.firestore_admin_v1.proto.field_pb2",
+ "__doc__": """The index configuration for this field.
+
+ Attributes:
+ indexes:
+ The indexes supported for this field.
+ uses_ancestor_config:
+ Output only. When true, the ``Field``\ ’s index configuration
+ is set from the configuration specified by the
+ ``ancestor_field``. When false, the ``Field``\ ’s index
+ configuration is defined explicitly.
+ ancestor_field:
+ Output only. Specifies the resource name of the ``Field`` from
+ which this field’s index configuration is set (when
+ ``uses_ancestor_config`` is true), or from which it *would* be
+ set if this field had no index configuration (when
+ ``uses_ancestor_config`` is false).
+ reverting:
+          Output only. When true, the ``Field``\ ’s index configuration
+ is in the process of being reverted. Once complete, the index
+ config will transition to the same state as the field
+ specified by ``ancestor_field``, at which point
+ ``uses_ancestor_config`` will be ``true`` and ``reverting``
+ will be ``false``.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.Field.IndexConfig)
+ },
+ ),
+ "DESCRIPTOR": _FIELD,
+ "__module__": "google.cloud.firestore_admin_v1.proto.field_pb2",
+ "__doc__": """Represents a single field in the database. Fields are grouped by
+  their “Collection Group”, which represents all collections in the
+ database with the same id.
+
+ Attributes:
+ name:
+ A field name of the form ``projects/{project_id}/databases/{da
+ tabase_id}/collectionGroups/{collection_id}/fields/{field_path
+ }`` A field path may be a simple field name, e.g. ``address``
+          or a path to fields within map_value, e.g. ``address.city``,
+          or a special field path. The only valid special field is
+          ``*``, which represents any field. Field paths may be quoted
+          using a backtick (`). The only character that needs to be
+          escaped within a quoted field path is the backtick character
+          itself, escaped using a backslash. Special characters in
+          field paths that must be quoted include: ``*``, ``.``, the
+          backtick character itself, ``[`` and ``]``, as well as any
+          ascii symbolic characters. Examples (written in markdown
+          syntax, so there is an additional layer of backticks to
+          represent a code block): ``\`address.city\`` represents a
+          field named ``address.city``, not the map key ``city`` in
+          the field ``address``, and ``\`*\`` represents a field
+          named ``*``, not any field. A special ``Field``
+ contains the default indexing settings for all fields. This
+ field’s resource name is: ``projects/{project_id}/databases/{d
+ atabase_id}/collectionGroups/__default__/fields/*`` Indexes
+ defined on this ``Field`` will be applied to all fields which
+ do not have their own ``Field`` index configuration.
+ index_config:
+ The index configuration for this field. If unset, field
+ indexing will revert to the configuration defined by the
+ ``ancestor_field``. To explicitly remove all indexes for this
+ field, specify an index config with an empty list of indexes.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.Field)
+ },
+)
+_sym_db.RegisterMessage(Field)
+_sym_db.RegisterMessage(Field.IndexConfig)
+
+
+DESCRIPTOR._options = None
+_FIELD._options = None
+# @@protoc_insertion_point(module_scope)
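
The module registers `Field` and its nested `Field.IndexConfig` with the protobuf symbol database, so they behave like any other generated message types. A quick round-trip sketch (the resource name is a placeholder):

from google.cloud.firestore_admin_v1.proto import field_pb2

f = field_pb2.Field(
    name="projects/my-project/databases/(default)"
    "/collectionGroups/posts/fields/description"
)
f.index_config.uses_ancestor_config = True

# Serialization and parsing come for free from the registered descriptor.
data = f.SerializeToString()
assert field_pb2.Field.FromString(data) == f
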
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/field_pb2_grpc.py b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/field_pb2_grpc.py
new file mode 100644
index 000000000..8a9393943
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/field_pb2_grpc.py
@@ -0,0 +1,3 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/firestore_admin.proto b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/firestore_admin.proto
new file mode 100644
index 000000000..c3eb58f9c
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/firestore_admin.proto
@@ -0,0 +1,355 @@
+// Copyright 2019 Google LLC.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+syntax = "proto3";
+
+package google.firestore.admin.v1;
+
+import "google/api/annotations.proto";
+import "google/api/client.proto";
+import "google/api/field_behavior.proto";
+import "google/api/resource.proto";
+import "google/firestore/admin/v1/field.proto";
+import "google/firestore/admin/v1/index.proto";
+import "google/longrunning/operations.proto";
+import "google/protobuf/empty.proto";
+import "google/protobuf/field_mask.proto";
+
+option csharp_namespace = "Google.Cloud.Firestore.Admin.V1";
+option go_package = "google.golang.org/genproto/googleapis/firestore/admin/v1;admin";
+option java_multiple_files = true;
+option java_outer_classname = "FirestoreAdminProto";
+option java_package = "com.google.firestore.admin.v1";
+option objc_class_prefix = "GCFS";
+option php_namespace = "Google\\Cloud\\Firestore\\Admin\\V1";
+option ruby_package = "Google::Cloud::Firestore::Admin::V1";
+option (google.api.resource_definition) = {
+ type: "firestore.googleapis.com/Database"
+ pattern: "projects/{project}/databases/{database}"
+};
+option (google.api.resource_definition) = {
+ type: "firestore.googleapis.com/CollectionGroup"
+ pattern: "projects/{project}/databases/{database}/collectionGroups/{collection}"
+};
+
+// Operations are created by service `FirestoreAdmin`, but are accessed via
+// service `google.longrunning.Operations`.
+service FirestoreAdmin {
+ option (google.api.default_host) = "firestore.googleapis.com";
+ option (google.api.oauth_scopes) =
+ "https://www.googleapis.com/auth/cloud-platform,"
+ "https://www.googleapis.com/auth/datastore";
+
+ // Creates a composite index. This returns a [google.longrunning.Operation][google.longrunning.Operation]
+ // which may be used to track the status of the creation. The metadata for
+ // the operation will be the type [IndexOperationMetadata][google.firestore.admin.v1.IndexOperationMetadata].
+ rpc CreateIndex(CreateIndexRequest) returns (google.longrunning.Operation) {
+ option (google.api.http) = {
+ post: "/v1/{parent=projects/*/databases/*/collectionGroups/*}/indexes"
+ body: "index"
+ };
+ option (google.api.method_signature) = "parent,index";
+ option (google.longrunning.operation_info) = {
+ response_type: "Index"
+ metadata_type: "IndexOperationMetadata"
+ };
+ }
+
+ // Lists composite indexes.
+ rpc ListIndexes(ListIndexesRequest) returns (ListIndexesResponse) {
+ option (google.api.http) = {
+ get: "/v1/{parent=projects/*/databases/*/collectionGroups/*}/indexes"
+ };
+ option (google.api.method_signature) = "parent";
+ }
+
+ // Gets a composite index.
+ rpc GetIndex(GetIndexRequest) returns (Index) {
+ option (google.api.http) = {
+ get: "/v1/{name=projects/*/databases/*/collectionGroups/*/indexes/*}"
+ };
+ option (google.api.method_signature) = "name";
+ }
+
+ // Deletes a composite index.
+ rpc DeleteIndex(DeleteIndexRequest) returns (google.protobuf.Empty) {
+ option (google.api.http) = {
+ delete: "/v1/{name=projects/*/databases/*/collectionGroups/*/indexes/*}"
+ };
+ option (google.api.method_signature) = "name";
+ }
+
+ // Gets the metadata and configuration for a Field.
+ rpc GetField(GetFieldRequest) returns (Field) {
+ option (google.api.http) = {
+ get: "/v1/{name=projects/*/databases/*/collectionGroups/*/fields/*}"
+ };
+ option (google.api.method_signature) = "name";
+ }
+
+ // Updates a field configuration. Currently, field updates apply only to
+ // single field index configuration. However, calls to
+ // [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField] should provide a field mask to avoid
+ // changing any configuration that the caller isn't aware of. The field mask
+ // should be specified as: `{ paths: "index_config" }`.
+ //
+ // This call returns a [google.longrunning.Operation][google.longrunning.Operation] which may be used to
+ // track the status of the field update. The metadata for
+ // the operation will be the type [FieldOperationMetadata][google.firestore.admin.v1.FieldOperationMetadata].
+ //
+ // To configure the default field settings for the database, use
+ // the special `Field` with resource name:
+ // `projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*`.
+ rpc UpdateField(UpdateFieldRequest) returns (google.longrunning.Operation) {
+ option (google.api.http) = {
+ patch: "/v1/{field.name=projects/*/databases/*/collectionGroups/*/fields/*}"
+ body: "field"
+ };
+ option (google.api.method_signature) = "field";
+ option (google.longrunning.operation_info) = {
+ response_type: "Field"
+ metadata_type: "FieldOperationMetadata"
+ };
+ }
+
+ // Lists the field configuration and metadata for this database.
+ //
+ // Currently, [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] only supports listing fields
+ // that have been explicitly overridden. To issue this query, call
+ // [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] with the filter set to
+ // `indexConfig.usesAncestorConfig:false`.
+ rpc ListFields(ListFieldsRequest) returns (ListFieldsResponse) {
+ option (google.api.http) = {
+ get: "/v1/{parent=projects/*/databases/*/collectionGroups/*}/fields"
+ };
+ option (google.api.method_signature) = "parent";
+ }
+
+ // Exports a copy of all or a subset of documents from Google Cloud Firestore
+ // to another storage system, such as Google Cloud Storage. Recent updates to
+ // documents may not be reflected in the export. The export occurs in the
+ // background and its progress can be monitored and managed via the
+ // Operation resource that is created. The output of an export may only be
+ // used once the associated operation is done. If an export operation is
+ // cancelled before completion it may leave partial data behind in Google
+ // Cloud Storage.
+ rpc ExportDocuments(ExportDocumentsRequest) returns (google.longrunning.Operation) {
+ option (google.api.http) = {
+ post: "/v1/{name=projects/*/databases/*}:exportDocuments"
+ body: "*"
+ };
+ option (google.api.method_signature) = "name";
+ option (google.longrunning.operation_info) = {
+ response_type: "ExportDocumentsResponse"
+ metadata_type: "ExportDocumentsMetadata"
+ };
+ }
+
+ // Imports documents into Google Cloud Firestore. Existing documents with the
+ // same name are overwritten. The import occurs in the background and its
+ // progress can be monitored and managed via the Operation resource that is
+ // created. If an ImportDocuments operation is cancelled, it is possible
+ // that a subset of the data has already been imported to Cloud Firestore.
+ rpc ImportDocuments(ImportDocumentsRequest) returns (google.longrunning.Operation) {
+ option (google.api.http) = {
+ post: "/v1/{name=projects/*/databases/*}:importDocuments"
+ body: "*"
+ };
+ option (google.api.method_signature) = "name";
+ option (google.longrunning.operation_info) = {
+ response_type: "google.protobuf.Empty"
+ metadata_type: "ImportDocumentsMetadata"
+ };
+ }
+}
+
+// The request for [FirestoreAdmin.CreateIndex][google.firestore.admin.v1.FirestoreAdmin.CreateIndex].
+message CreateIndexRequest {
+ // Required. A parent name of the form
+ // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}`
+ string parent = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "firestore.googleapis.com/CollectionGroup"
+ }
+ ];
+
+ // Required. The composite index to create.
+ Index index = 2 [(google.api.field_behavior) = REQUIRED];
+}
+
+// The request for [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes].
+message ListIndexesRequest {
+ // Required. A parent name of the form
+ // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}`
+ string parent = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "firestore.googleapis.com/CollectionGroup"
+ }
+ ];
+
+ // The filter to apply to list results.
+ string filter = 2;
+
+ // The number of results to return.
+ int32 page_size = 3;
+
+ // A page token, returned from a previous call to
+ // [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes], that may be used to get the next
+ // page of results.
+ string page_token = 4;
+}
+
+// The response for [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes].
+message ListIndexesResponse {
+ // The requested indexes.
+ repeated Index indexes = 1;
+
+ // A page token that may be used to request another page of results. If blank,
+ // this is the last page.
+ string next_page_token = 2;
+}
+
+// The request for [FirestoreAdmin.GetIndex][google.firestore.admin.v1.FirestoreAdmin.GetIndex].
+message GetIndexRequest {
+ // Required. A name of the form
+ // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}`
+ string name = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "firestore.googleapis.com/Index"
+ }
+ ];
+}
+
+// The request for [FirestoreAdmin.DeleteIndex][google.firestore.admin.v1.FirestoreAdmin.DeleteIndex].
+message DeleteIndexRequest {
+ // Required. A name of the form
+ // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}`
+ string name = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "firestore.googleapis.com/Index"
+ }
+ ];
+}
+
+// The request for [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField].
+message UpdateFieldRequest {
+ // Required. The field to be updated.
+ Field field = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // A mask, relative to the field. If specified, only configuration specified
+ // by this field_mask will be updated in the field.
+ google.protobuf.FieldMask update_mask = 2;
+}
+
+// The request for [FirestoreAdmin.GetField][google.firestore.admin.v1.FirestoreAdmin.GetField].
+message GetFieldRequest {
+ // Required. A name of the form
+ // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_id}`
+ string name = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "firestore.googleapis.com/Field"
+ }
+ ];
+}
+
+// The request for [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields].
+message ListFieldsRequest {
+ // Required. A parent name of the form
+ // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}`
+ string parent = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "firestore.googleapis.com/CollectionGroup"
+ }
+ ];
+
+ // The filter to apply to list results. Currently,
+ // [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] only supports listing fields
+ // that have been explicitly overridden. To issue this query, call
+ // [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] with the filter set to
+ // `indexConfig.usesAncestorConfig:false`.
+ string filter = 2;
+
+ // The number of results to return.
+ int32 page_size = 3;
+
+ // A page token, returned from a previous call to
+ // [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields], that may be used to get the next
+ // page of results.
+ string page_token = 4;
+}
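+
+// Editor's note (illustrative only): applying the filter documented above, a
+// request that lists just the explicitly overridden fields would carry, for a
+// hypothetical parent:
+//
+//   parent: "projects/my-project/databases/(default)/collectionGroups/Users"
+//   filter: "indexConfig.usesAncestorConfig:false"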
+
+// The response for [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields].
+message ListFieldsResponse {
+ // The requested fields.
+ repeated Field fields = 1;
+
+ // A page token that may be used to request another page of results. If blank,
+ // this is the last page.
+ string next_page_token = 2;
+}
+
+// The request for [FirestoreAdmin.ExportDocuments][google.firestore.admin.v1.FirestoreAdmin.ExportDocuments].
+message ExportDocumentsRequest {
+ // Required. Database to export. Should be of the form:
+ // `projects/{project_id}/databases/{database_id}`.
+ string name = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "firestore.googleapis.com/Database"
+ }
+ ];
+
+ // Which collection ids to export. Unspecified means all collections.
+ repeated string collection_ids = 2;
+
+ // The output URI. Currently only supports Google Cloud Storage URIs of the
+ // form: `gs://BUCKET_NAME[/NAMESPACE_PATH]`, where `BUCKET_NAME` is the name
+ // of the Google Cloud Storage bucket and `NAMESPACE_PATH` is an optional
+ // Google Cloud Storage namespace path. When
+ // choosing a name, be sure to consider Google Cloud Storage naming
+ // guidelines: https://cloud.google.com/storage/docs/naming.
+ // If the URI is a bucket (without a namespace path), a prefix will be
+ // generated based on the start time.
+ string output_uri_prefix = 3;
+}
+
+// The request for [FirestoreAdmin.ImportDocuments][google.firestore.admin.v1.FirestoreAdmin.ImportDocuments].
+message ImportDocumentsRequest {
+ // Required. Database to import into. Should be of the form:
+ // `projects/{project_id}/databases/{database_id}`.
+ string name = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "firestore.googleapis.com/Database"
+ }
+ ];
+
+ // Which collection ids to import. Unspecified means all collections included
+ // in the import.
+ repeated string collection_ids = 2;
+
+ // Location of the exported files.
+ // This must match the output_uri_prefix of an ExportDocumentsResponse from
+ // an export that has completed successfully.
+ // See:
+ // [google.firestore.admin.v1.ExportDocumentsResponse.output_uri_prefix][google.firestore.admin.v1.ExportDocumentsResponse.output_uri_prefix].
+ string input_uri_prefix = 3;
+}
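+
+// Editor's note (illustrative only): an import mirrors a completed export.
+// Assuming the hypothetical export above finished with the output prefix
+// gs://my-bucket/backup-1, the matching REST call would be:
+//
+//   POST /v1/projects/my-project/databases/(default):importDocuments
+//   { "inputUriPrefix": "gs://my-bucket/backup-1" }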
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/firestore_admin_pb2.py b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/firestore_admin_pb2.py
new file mode 100644
index 000000000..d346ca8e0
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/firestore_admin_pb2.py
@@ -0,0 +1,1188 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/cloud/firestore_admin_v1/proto/firestore_admin.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
+from google.api import client_pb2 as google_dot_api_dot_client__pb2
+from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2
+from google.api import resource_pb2 as google_dot_api_dot_resource__pb2
+from google.cloud.firestore_admin_v1.proto import (
+ field_pb2 as google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_field__pb2,
+)
+from google.cloud.firestore_admin_v1.proto import (
+ index_pb2 as google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_index__pb2,
+)
+from google.longrunning import (
+ operations_pb2 as google_dot_longrunning_dot_operations__pb2,
+)
+from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
+from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name="google/cloud/firestore_admin_v1/proto/firestore_admin.proto",
+ package="google.firestore.admin.v1",
+ syntax="proto3",
+ serialized_options=b"\n\035com.google.firestore.admin.v1B\023FirestoreAdminProtoP\001Z>google.golang.org/genproto/googleapis/firestore/admin/v1;admin\242\002\004GCFS\252\002\037Google.Cloud.Firestore.Admin.V1\312\002\037Google\\Cloud\\Firestore\\Admin\\V1\352\002#Google::Cloud::Firestore::Admin::V1\352AL\n!firestore.googleapis.com/Database\022'projects/{project}/databases/{database}\352Aq\n(firestore.googleapis.com/CollectionGroup\022Eprojects/{project}/databases/{database}/collectionGroups/{collection}",
+ create_key=_descriptor._internal_create_key,
+    serialized_pb=b'\n;google/cloud/firestore_admin_v1/proto/firestore_admin.proto\x12\x19google.firestore.admin.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x31google/cloud/firestore_admin_v1/proto/field.proto\x1a\x31google/cloud/firestore_admin_v1/proto/index.proto\x1a#google/longrunning/operations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"\x8c\x01\n\x12\x43reateIndexRequest\x12@\n\x06parent\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\n(firestore.googleapis.com/CollectionGroup\x12\x34\n\x05index\x18\x02 \x01(\x0b\x32 .google.firestore.admin.v1.IndexB\x03\xe0\x41\x02"\x8d\x01\n\x12ListIndexesRequest\x12@\n\x06parent\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\n(firestore.googleapis.com/CollectionGroup\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"a\n\x13ListIndexesResponse\x12\x31\n\x07indexes\x18\x01 \x03(\x0b\x32 .google.firestore.admin.v1.Index\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"G\n\x0fGetIndexRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x66irestore.googleapis.com/Index"J\n\x12\x44\x65leteIndexRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x66irestore.googleapis.com/Index"{\n\x12UpdateFieldRequest\x12\x34\n\x05\x66ield\x18\x01 \x01(\x0b\x32 .google.firestore.admin.v1.FieldB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"G\n\x0fGetFieldRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x66irestore.googleapis.com/Field"\x8c\x01\n\x11ListFieldsRequest\x12@\n\x06parent\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\n(firestore.googleapis.com/CollectionGroup\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"_\n\x12ListFieldsResponse\x12\x30\n\x06\x66ields\x18\x01 \x03(\x0b\x32 .google.firestore.admin.v1.Field\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x84\x01\n\x16\x45xportDocumentsRequest\x12\x37\n\x04name\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!firestore.googleapis.com/Database\x12\x16\n\x0e\x63ollection_ids\x18\x02 \x03(\t\x12\x19\n\x11output_uri_prefix\x18\x03 \x01(\t"\x83\x01\n\x16ImportDocumentsRequest\x12\x37\n\x04name\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!firestore.googleapis.com/Database\x12\x16\n\x0e\x63ollection_ids\x18\x02 \x03(\t\x12\x18\n\x10input_uri_prefix\x18\x03 \x01(\t2\xf5\x0e\n\x0e\x46irestoreAdmin\x12\xdb\x01\n\x0b\x43reateIndex\x12-.google.firestore.admin.v1.CreateIndexRequest\x1a\x1d.google.longrunning.Operation"~\x82\xd3\xe4\x93\x02G">/v1/{parent=projects/*/databases/*/collectionGroups/*}/indexes:\x05index\xda\x41\x0cparent,index\xca\x41\x1f\n\x05Index\x12\x16IndexOperationMetadata\x12\xbd\x01\n\x0bListIndexes\x12-.google.firestore.admin.v1.ListIndexesRequest\x1a..google.firestore.admin.v1.ListIndexesResponse"O\x82\xd3\xe4\x93\x02@\x12>/v1/{parent=projects/*/databases/*/collectionGroups/*}/indexes\xda\x41\x06parent\x12\xa7\x01\n\x08GetIndex\x12*.google.firestore.admin.v1.GetIndexRequest\x1a .google.firestore.admin.v1.Index"M\x82\xd3\xe4\x93\x02@\x12>/v1/{name=projects/*/databases/*/collectionGroups/*/indexes/*}\xda\x41\x04name\x12\xa3\x01\n\x0b\x44\x65leteIndex\x12-.google.firestore.admin.v1.DeleteIndexRequest\x1a\x16.google.protobuf.Empty"M\x82\xd3\xe4\x93\x02@*>/v1/{name=projects/*/databases/*/collectionGroups/*/indexes/*}\xda\x41\x04name\x12\xa6\x01\n\x08GetField\x12*.google.firestore.admin.v1.GetFieldRequest\x1a .google.firestore.admin.v1.Field"L\x82\xd3\xe4\x93\x02?\x12=/v1/{name=projects/*/databases/*/collectionGroups/*/fields/*}\xda\x41\x04name\x12\xd9\x01\n\x0bUpdateField\x12-.google.firestore.admin.v1.UpdateFieldRequest\x1a\x1d.google.longrunning.Operation"|\x82\xd3\xe4\x93\x02L2C/v1/{field.name=projects/*/databases/*/collectionGroups/*/fields/*}:\x05\x66ield\xda\x41\x05\x66ield\xca\x41\x1f\n\x05\x46ield\x12\x16\x46ieldOperationMetadata\x12\xb9\x01\n\nListFields\x12,.google.firestore.admin.v1.ListFieldsRequest\x1a-.google.firestore.admin.v1.ListFieldsResponse"N\x82\xd3\xe4\x93\x02?\x12=/v1/{parent=projects/*/databases/*/collectionGroups/*}/fields\xda\x41\x06parent\x12\xdd\x01\n\x0f\x45xportDocuments\x12\x31.google.firestore.admin.v1.ExportDocumentsRequest\x1a\x1d.google.longrunning.Operation"x\x82\xd3\xe4\x93\x02\x36"1/v1/{name=projects/*/databases/*}:exportDocuments:\x01*\xda\x41\x04name\xca\x41\x32\n\x17\x45xportDocumentsResponse\x12\x17\x45xportDocumentsMetadata\x12\xdb\x01\n\x0fImportDocuments\x12\x31.google.firestore.admin.v1.ImportDocumentsRequest\x1a\x1d.google.longrunning.Operation"v\x82\xd3\xe4\x93\x02\x36"1/v1/{name=projects/*/databases/*}:importDocuments:\x01*\xda\x41\x04name\xca\x41\x30\n\x15google.protobuf.Empty\x12\x17ImportDocumentsMetadata\x1av\xca\x41\x18\x66irestore.googleapis.com\xd2\x41Xhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/datastoreB\xaa\x03\n\x1d\x63om.google.firestore.admin.v1B\x13\x46irestoreAdminProtoP\x01Z>google.golang.org/genproto/googleapis/firestore/admin/v1;admin\xa2\x02\x04GCFS\xaa\x02\x1fGoogle.Cloud.Firestore.Admin.V1\xca\x02\x1fGoogle\\Cloud\\Firestore\\Admin\\V1\xea\x02#Google::Cloud::Firestore::Admin::V1\xea\x41L\n!firestore.googleapis.com/Database\x12\'projects/{project}/databases/{database}\xea\x41q\n(firestore.googleapis.com/CollectionGroup\x12\x45projects/{project}/databases/{database}/collectionGroups/{collection}b\x06proto3',
+ dependencies=[
+ google_dot_api_dot_annotations__pb2.DESCRIPTOR,
+ google_dot_api_dot_client__pb2.DESCRIPTOR,
+ google_dot_api_dot_field__behavior__pb2.DESCRIPTOR,
+ google_dot_api_dot_resource__pb2.DESCRIPTOR,
+ google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_field__pb2.DESCRIPTOR,
+ google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_index__pb2.DESCRIPTOR,
+ google_dot_longrunning_dot_operations__pb2.DESCRIPTOR,
+ google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,
+ google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR,
+ ],
+)
+
+
+_CREATEINDEXREQUEST = _descriptor.Descriptor(
+ name="CreateIndexRequest",
+ full_name="google.firestore.admin.v1.CreateIndexRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="parent",
+ full_name="google.firestore.admin.v1.CreateIndexRequest.parent",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=b"\340A\002\372A*\n(firestore.googleapis.com/CollectionGroup",
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="index",
+ full_name="google.firestore.admin.v1.CreateIndexRequest.index",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=b"\340A\002",
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=408,
+ serialized_end=548,
+)
+
+
+_LISTINDEXESREQUEST = _descriptor.Descriptor(
+ name="ListIndexesRequest",
+ full_name="google.firestore.admin.v1.ListIndexesRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="parent",
+ full_name="google.firestore.admin.v1.ListIndexesRequest.parent",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=b"\340A\002\372A*\n(firestore.googleapis.com/CollectionGroup",
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="filter",
+ full_name="google.firestore.admin.v1.ListIndexesRequest.filter",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="page_size",
+ full_name="google.firestore.admin.v1.ListIndexesRequest.page_size",
+ index=2,
+ number=3,
+ type=5,
+ cpp_type=1,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="page_token",
+ full_name="google.firestore.admin.v1.ListIndexesRequest.page_token",
+ index=3,
+ number=4,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=551,
+ serialized_end=692,
+)
+
+
+_LISTINDEXESRESPONSE = _descriptor.Descriptor(
+ name="ListIndexesResponse",
+ full_name="google.firestore.admin.v1.ListIndexesResponse",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="indexes",
+ full_name="google.firestore.admin.v1.ListIndexesResponse.indexes",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="next_page_token",
+ full_name="google.firestore.admin.v1.ListIndexesResponse.next_page_token",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=694,
+ serialized_end=791,
+)
+
+
+_GETINDEXREQUEST = _descriptor.Descriptor(
+ name="GetIndexRequest",
+ full_name="google.firestore.admin.v1.GetIndexRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="name",
+ full_name="google.firestore.admin.v1.GetIndexRequest.name",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=b"\340A\002\372A \n\036firestore.googleapis.com/Index",
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=793,
+ serialized_end=864,
+)
+
+
+_DELETEINDEXREQUEST = _descriptor.Descriptor(
+ name="DeleteIndexRequest",
+ full_name="google.firestore.admin.v1.DeleteIndexRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="name",
+ full_name="google.firestore.admin.v1.DeleteIndexRequest.name",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=b"\340A\002\372A \n\036firestore.googleapis.com/Index",
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=866,
+ serialized_end=940,
+)
+
+
+_UPDATEFIELDREQUEST = _descriptor.Descriptor(
+ name="UpdateFieldRequest",
+ full_name="google.firestore.admin.v1.UpdateFieldRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="field",
+ full_name="google.firestore.admin.v1.UpdateFieldRequest.field",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=b"\340A\002",
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="update_mask",
+ full_name="google.firestore.admin.v1.UpdateFieldRequest.update_mask",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=942,
+ serialized_end=1065,
+)
+
+
+_GETFIELDREQUEST = _descriptor.Descriptor(
+ name="GetFieldRequest",
+ full_name="google.firestore.admin.v1.GetFieldRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="name",
+ full_name="google.firestore.admin.v1.GetFieldRequest.name",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=b"\340A\002\372A \n\036firestore.googleapis.com/Field",
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=1067,
+ serialized_end=1138,
+)
+
+
+_LISTFIELDSREQUEST = _descriptor.Descriptor(
+ name="ListFieldsRequest",
+ full_name="google.firestore.admin.v1.ListFieldsRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="parent",
+ full_name="google.firestore.admin.v1.ListFieldsRequest.parent",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=b"\340A\002\372A*\n(firestore.googleapis.com/CollectionGroup",
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="filter",
+ full_name="google.firestore.admin.v1.ListFieldsRequest.filter",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="page_size",
+ full_name="google.firestore.admin.v1.ListFieldsRequest.page_size",
+ index=2,
+ number=3,
+ type=5,
+ cpp_type=1,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="page_token",
+ full_name="google.firestore.admin.v1.ListFieldsRequest.page_token",
+ index=3,
+ number=4,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=1141,
+ serialized_end=1281,
+)
+
+
+_LISTFIELDSRESPONSE = _descriptor.Descriptor(
+ name="ListFieldsResponse",
+ full_name="google.firestore.admin.v1.ListFieldsResponse",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="fields",
+ full_name="google.firestore.admin.v1.ListFieldsResponse.fields",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="next_page_token",
+ full_name="google.firestore.admin.v1.ListFieldsResponse.next_page_token",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=1283,
+ serialized_end=1378,
+)
+
+
+_EXPORTDOCUMENTSREQUEST = _descriptor.Descriptor(
+ name="ExportDocumentsRequest",
+ full_name="google.firestore.admin.v1.ExportDocumentsRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="name",
+ full_name="google.firestore.admin.v1.ExportDocumentsRequest.name",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=b"\340A\002\372A#\n!firestore.googleapis.com/Database",
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="collection_ids",
+ full_name="google.firestore.admin.v1.ExportDocumentsRequest.collection_ids",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="output_uri_prefix",
+ full_name="google.firestore.admin.v1.ExportDocumentsRequest.output_uri_prefix",
+ index=2,
+ number=3,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=1381,
+ serialized_end=1513,
+)
+
+
+_IMPORTDOCUMENTSREQUEST = _descriptor.Descriptor(
+ name="ImportDocumentsRequest",
+ full_name="google.firestore.admin.v1.ImportDocumentsRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="name",
+ full_name="google.firestore.admin.v1.ImportDocumentsRequest.name",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=b"\340A\002\372A#\n!firestore.googleapis.com/Database",
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="collection_ids",
+ full_name="google.firestore.admin.v1.ImportDocumentsRequest.collection_ids",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="input_uri_prefix",
+ full_name="google.firestore.admin.v1.ImportDocumentsRequest.input_uri_prefix",
+ index=2,
+ number=3,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=1516,
+ serialized_end=1647,
+)
+
+_CREATEINDEXREQUEST.fields_by_name[
+ "index"
+].message_type = (
+ google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_index__pb2._INDEX
+)
+_LISTINDEXESRESPONSE.fields_by_name[
+ "indexes"
+].message_type = (
+ google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_index__pb2._INDEX
+)
+_UPDATEFIELDREQUEST.fields_by_name[
+ "field"
+].message_type = (
+ google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_field__pb2._FIELD
+)
+_UPDATEFIELDREQUEST.fields_by_name[
+ "update_mask"
+].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK
+_LISTFIELDSRESPONSE.fields_by_name[
+ "fields"
+].message_type = (
+ google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_field__pb2._FIELD
+)
+DESCRIPTOR.message_types_by_name["CreateIndexRequest"] = _CREATEINDEXREQUEST
+DESCRIPTOR.message_types_by_name["ListIndexesRequest"] = _LISTINDEXESREQUEST
+DESCRIPTOR.message_types_by_name["ListIndexesResponse"] = _LISTINDEXESRESPONSE
+DESCRIPTOR.message_types_by_name["GetIndexRequest"] = _GETINDEXREQUEST
+DESCRIPTOR.message_types_by_name["DeleteIndexRequest"] = _DELETEINDEXREQUEST
+DESCRIPTOR.message_types_by_name["UpdateFieldRequest"] = _UPDATEFIELDREQUEST
+DESCRIPTOR.message_types_by_name["GetFieldRequest"] = _GETFIELDREQUEST
+DESCRIPTOR.message_types_by_name["ListFieldsRequest"] = _LISTFIELDSREQUEST
+DESCRIPTOR.message_types_by_name["ListFieldsResponse"] = _LISTFIELDSRESPONSE
+DESCRIPTOR.message_types_by_name["ExportDocumentsRequest"] = _EXPORTDOCUMENTSREQUEST
+DESCRIPTOR.message_types_by_name["ImportDocumentsRequest"] = _IMPORTDOCUMENTSREQUEST
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+CreateIndexRequest = _reflection.GeneratedProtocolMessageType(
+ "CreateIndexRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _CREATEINDEXREQUEST,
+ "__module__": "google.cloud.firestore_admin_v1.proto.firestore_admin_pb2",
+ "__doc__": """The request for [FirestoreAdmin.CreateIndex][google.firestore.admin.v1
+ .FirestoreAdmin.CreateIndex].
+
+ Attributes:
+ parent:
+ Required. A parent name of the form ``projects/{project_id}/da
+ tabases/{database_id}/collectionGroups/{collection_id}``
+ index:
+ Required. The composite index to create.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.CreateIndexRequest)
+ },
+)
+_sym_db.RegisterMessage(CreateIndexRequest)
+
+ListIndexesRequest = _reflection.GeneratedProtocolMessageType(
+ "ListIndexesRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _LISTINDEXESREQUEST,
+ "__module__": "google.cloud.firestore_admin_v1.proto.firestore_admin_pb2",
+ "__doc__": """The request for [FirestoreAdmin.ListIndexes][google.firestore.admin.v1
+ .FirestoreAdmin.ListIndexes].
+
+ Attributes:
+ parent:
+ Required. A parent name of the form ``projects/{project_id}/da
+ tabases/{database_id}/collectionGroups/{collection_id}``
+ filter:
+ The filter to apply to list results.
+ page_size:
+ The number of results to return.
+ page_token:
+ A page token, returned from a previous call to [FirestoreAdmin
+ .ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListInd
+ exes], that may be used to get the next page of results.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.ListIndexesRequest)
+ },
+)
+_sym_db.RegisterMessage(ListIndexesRequest)
+
+ListIndexesResponse = _reflection.GeneratedProtocolMessageType(
+ "ListIndexesResponse",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _LISTINDEXESRESPONSE,
+ "__module__": "google.cloud.firestore_admin_v1.proto.firestore_admin_pb2",
+ "__doc__": """The response for [FirestoreAdmin.ListIndexes][google.firestore.admin.v
+ 1.FirestoreAdmin.ListIndexes].
+
+ Attributes:
+ indexes:
+ The requested indexes.
+ next_page_token:
+ A page token that may be used to request another page of
+ results. If blank, this is the last page.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.ListIndexesResponse)
+ },
+)
+_sym_db.RegisterMessage(ListIndexesResponse)
+
+GetIndexRequest = _reflection.GeneratedProtocolMessageType(
+ "GetIndexRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _GETINDEXREQUEST,
+ "__module__": "google.cloud.firestore_admin_v1.proto.firestore_admin_pb2",
+ "__doc__": """The request for [FirestoreAdmin.GetIndex][google.firestore.admin.v1.Fi
+ restoreAdmin.GetIndex].
+
+ Attributes:
+ name:
+ Required. A name of the form ``projects/{project_id}/databases
+ /{database_id}/collectionGroups/{collection_id}/indexes/{index
+ _id}``
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.GetIndexRequest)
+ },
+)
+_sym_db.RegisterMessage(GetIndexRequest)
+
+DeleteIndexRequest = _reflection.GeneratedProtocolMessageType(
+ "DeleteIndexRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _DELETEINDEXREQUEST,
+ "__module__": "google.cloud.firestore_admin_v1.proto.firestore_admin_pb2",
+ "__doc__": """The request for [FirestoreAdmin.DeleteIndex][google.firestore.admin.v1
+ .FirestoreAdmin.DeleteIndex].
+
+ Attributes:
+ name:
+ Required. A name of the form ``projects/{project_id}/databases
+ /{database_id}/collectionGroups/{collection_id}/indexes/{index
+ _id}``
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.DeleteIndexRequest)
+ },
+)
+_sym_db.RegisterMessage(DeleteIndexRequest)
+
+UpdateFieldRequest = _reflection.GeneratedProtocolMessageType(
+ "UpdateFieldRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _UPDATEFIELDREQUEST,
+ "__module__": "google.cloud.firestore_admin_v1.proto.firestore_admin_pb2",
+ "__doc__": """The request for [FirestoreAdmin.UpdateField][google.firestore.admin.v1
+ .FirestoreAdmin.UpdateField].
+
+ Attributes:
+ field:
+ Required. The field to be updated.
+ update_mask:
+ A mask, relative to the field. If specified, only
+ configuration specified by this field_mask will be updated in
+ the field.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.UpdateFieldRequest)
+ },
+)
+_sym_db.RegisterMessage(UpdateFieldRequest)
+
+GetFieldRequest = _reflection.GeneratedProtocolMessageType(
+ "GetFieldRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _GETFIELDREQUEST,
+ "__module__": "google.cloud.firestore_admin_v1.proto.firestore_admin_pb2",
+ "__doc__": """The request for [FirestoreAdmin.GetField][google.firestore.admin.v1.Fi
+ restoreAdmin.GetField].
+
+ Attributes:
+ name:
+ Required. A name of the form ``projects/{project_id}/databases
+ /{database_id}/collectionGroups/{collection_id}/fields/{field_
+ id}``
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.GetFieldRequest)
+ },
+)
+_sym_db.RegisterMessage(GetFieldRequest)
+
+ListFieldsRequest = _reflection.GeneratedProtocolMessageType(
+ "ListFieldsRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _LISTFIELDSREQUEST,
+ "__module__": "google.cloud.firestore_admin_v1.proto.firestore_admin_pb2",
+ "__doc__": """The request for [FirestoreAdmin.ListFields][google.firestore.admin.v1.
+ FirestoreAdmin.ListFields].
+
+ Attributes:
+ parent:
+ Required. A parent name of the form ``projects/{project_id}/da
+ tabases/{database_id}/collectionGroups/{collection_id}``
+ filter:
+ The filter to apply to list results. Currently, [FirestoreAdmi
+ n.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFie
+ lds] only supports listing fields that have been explicitly
+ overridden. To issue this query, call [FirestoreAdmin.ListFiel
+ ds][google.firestore.admin.v1.FirestoreAdmin.ListFields] with
+ the filter set to ``indexConfig.usesAncestorConfig:false``.
+ page_size:
+ The number of results to return.
+ page_token:
+ A page token, returned from a previous call to [FirestoreAdmin
+ .ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFiel
+ ds], that may be used to get the next page of results.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.ListFieldsRequest)
+ },
+)
+_sym_db.RegisterMessage(ListFieldsRequest)
+
+ListFieldsResponse = _reflection.GeneratedProtocolMessageType(
+ "ListFieldsResponse",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _LISTFIELDSRESPONSE,
+ "__module__": "google.cloud.firestore_admin_v1.proto.firestore_admin_pb2",
+ "__doc__": """The response for [FirestoreAdmin.ListFields][google.firestore.admin.v1
+ .FirestoreAdmin.ListFields].
+
+ Attributes:
+ fields:
+ The requested fields.
+ next_page_token:
+ A page token that may be used to request another page of
+ results. If blank, this is the last page.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.ListFieldsResponse)
+ },
+)
+_sym_db.RegisterMessage(ListFieldsResponse)
+
+ExportDocumentsRequest = _reflection.GeneratedProtocolMessageType(
+ "ExportDocumentsRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _EXPORTDOCUMENTSREQUEST,
+ "__module__": "google.cloud.firestore_admin_v1.proto.firestore_admin_pb2",
+ "__doc__": """The request for [FirestoreAdmin.ExportDocuments][google.firestore.admi
+ n.v1.FirestoreAdmin.ExportDocuments].
+
+ Attributes:
+ name:
+ Required. Database to export. Should be of the form:
+ ``projects/{project_id}/databases/{database_id}``.
+ collection_ids:
+ Which collection ids to export. Unspecified means all
+ collections.
+ output_uri_prefix:
+ The output URI. Currently only supports Google Cloud Storage
+ URIs of the form: ``gs://BUCKET_NAME[/NAMESPACE_PATH]``, where
+ ``BUCKET_NAME`` is the name of the Google Cloud Storage bucket
+ and ``NAMESPACE_PATH`` is an optional Google Cloud Storage
+ namespace path. When choosing a name, be sure to consider
+ Google Cloud Storage naming guidelines:
+ https://cloud.google.com/storage/docs/naming. If the URI is a
+ bucket (without a namespace path), a prefix will be generated
+ based on the start time.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.ExportDocumentsRequest)
+ },
+)
+_sym_db.RegisterMessage(ExportDocumentsRequest)
+
+ImportDocumentsRequest = _reflection.GeneratedProtocolMessageType(
+ "ImportDocumentsRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _IMPORTDOCUMENTSREQUEST,
+ "__module__": "google.cloud.firestore_admin_v1.proto.firestore_admin_pb2",
+ "__doc__": """The request for [FirestoreAdmin.ImportDocuments][google.firestore.admi
+ n.v1.FirestoreAdmin.ImportDocuments].
+
+ Attributes:
+ name:
+ Required. Database to import into. Should be of the form:
+ ``projects/{project_id}/databases/{database_id}``.
+ collection_ids:
+ Which collection ids to import. Unspecified means all
+ collections included in the import.
+ input_uri_prefix:
+ Location of the exported files. This must match the
+ output_uri_prefix of an ExportDocumentsResponse from an export
+ that has completed successfully. See: [google.firestore.admin.
+ v1.ExportDocumentsResponse.output_uri_prefix][google.firestore
+ .admin.v1.ExportDocumentsResponse.output_uri_prefix].
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.ImportDocumentsRequest)
+ },
+)
+_sym_db.RegisterMessage(ImportDocumentsRequest)
+
+
+DESCRIPTOR._options = None
+_CREATEINDEXREQUEST.fields_by_name["parent"]._options = None
+_CREATEINDEXREQUEST.fields_by_name["index"]._options = None
+_LISTINDEXESREQUEST.fields_by_name["parent"]._options = None
+_GETINDEXREQUEST.fields_by_name["name"]._options = None
+_DELETEINDEXREQUEST.fields_by_name["name"]._options = None
+_UPDATEFIELDREQUEST.fields_by_name["field"]._options = None
+_GETFIELDREQUEST.fields_by_name["name"]._options = None
+_LISTFIELDSREQUEST.fields_by_name["parent"]._options = None
+_EXPORTDOCUMENTSREQUEST.fields_by_name["name"]._options = None
+_IMPORTDOCUMENTSREQUEST.fields_by_name["name"]._options = None
+
+_FIRESTOREADMIN = _descriptor.ServiceDescriptor(
+ name="FirestoreAdmin",
+ full_name="google.firestore.admin.v1.FirestoreAdmin",
+ file=DESCRIPTOR,
+ index=0,
+ serialized_options=b"\312A\030firestore.googleapis.com\322AXhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/datastore",
+ create_key=_descriptor._internal_create_key,
+ serialized_start=1650,
+ serialized_end=3559,
+ methods=[
+ _descriptor.MethodDescriptor(
+ name="CreateIndex",
+ full_name="google.firestore.admin.v1.FirestoreAdmin.CreateIndex",
+ index=0,
+ containing_service=None,
+ input_type=_CREATEINDEXREQUEST,
+ output_type=google_dot_longrunning_dot_operations__pb2._OPERATION,
+ serialized_options=b'\202\323\344\223\002G">/v1/{parent=projects/*/databases/*/collectionGroups/*}/indexes:\005index\332A\014parent,index\312A\037\n\005Index\022\026IndexOperationMetadata',
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.MethodDescriptor(
+ name="ListIndexes",
+ full_name="google.firestore.admin.v1.FirestoreAdmin.ListIndexes",
+ index=1,
+ containing_service=None,
+ input_type=_LISTINDEXESREQUEST,
+ output_type=_LISTINDEXESRESPONSE,
+ serialized_options=b"\202\323\344\223\002@\022>/v1/{parent=projects/*/databases/*/collectionGroups/*}/indexes\332A\006parent",
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.MethodDescriptor(
+ name="GetIndex",
+ full_name="google.firestore.admin.v1.FirestoreAdmin.GetIndex",
+ index=2,
+ containing_service=None,
+ input_type=_GETINDEXREQUEST,
+ output_type=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_index__pb2._INDEX,
+ serialized_options=b"\202\323\344\223\002@\022>/v1/{name=projects/*/databases/*/collectionGroups/*/indexes/*}\332A\004name",
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.MethodDescriptor(
+ name="DeleteIndex",
+ full_name="google.firestore.admin.v1.FirestoreAdmin.DeleteIndex",
+ index=3,
+ containing_service=None,
+ input_type=_DELETEINDEXREQUEST,
+ output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
+ serialized_options=b"\202\323\344\223\002@*>/v1/{name=projects/*/databases/*/collectionGroups/*/indexes/*}\332A\004name",
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.MethodDescriptor(
+ name="GetField",
+ full_name="google.firestore.admin.v1.FirestoreAdmin.GetField",
+ index=4,
+ containing_service=None,
+ input_type=_GETFIELDREQUEST,
+ output_type=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_field__pb2._FIELD,
+ serialized_options=b"\202\323\344\223\002?\022=/v1/{name=projects/*/databases/*/collectionGroups/*/fields/*}\332A\004name",
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.MethodDescriptor(
+ name="UpdateField",
+ full_name="google.firestore.admin.v1.FirestoreAdmin.UpdateField",
+ index=5,
+ containing_service=None,
+ input_type=_UPDATEFIELDREQUEST,
+ output_type=google_dot_longrunning_dot_operations__pb2._OPERATION,
+ serialized_options=b"\202\323\344\223\002L2C/v1/{field.name=projects/*/databases/*/collectionGroups/*/fields/*}:\005field\332A\005field\312A\037\n\005Field\022\026FieldOperationMetadata",
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.MethodDescriptor(
+ name="ListFields",
+ full_name="google.firestore.admin.v1.FirestoreAdmin.ListFields",
+ index=6,
+ containing_service=None,
+ input_type=_LISTFIELDSREQUEST,
+ output_type=_LISTFIELDSRESPONSE,
+ serialized_options=b"\202\323\344\223\002?\022=/v1/{parent=projects/*/databases/*/collectionGroups/*}/fields\332A\006parent",
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.MethodDescriptor(
+ name="ExportDocuments",
+ full_name="google.firestore.admin.v1.FirestoreAdmin.ExportDocuments",
+ index=7,
+ containing_service=None,
+ input_type=_EXPORTDOCUMENTSREQUEST,
+ output_type=google_dot_longrunning_dot_operations__pb2._OPERATION,
+ serialized_options=b'\202\323\344\223\0026"1/v1/{name=projects/*/databases/*}:exportDocuments:\001*\332A\004name\312A2\n\027ExportDocumentsResponse\022\027ExportDocumentsMetadata',
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.MethodDescriptor(
+ name="ImportDocuments",
+ full_name="google.firestore.admin.v1.FirestoreAdmin.ImportDocuments",
+ index=8,
+ containing_service=None,
+ input_type=_IMPORTDOCUMENTSREQUEST,
+ output_type=google_dot_longrunning_dot_operations__pb2._OPERATION,
+ serialized_options=b'\202\323\344\223\0026"1/v1/{name=projects/*/databases/*}:importDocuments:\001*\332A\004name\312A0\n\025google.protobuf.Empty\022\027ImportDocumentsMetadata',
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+)
+_sym_db.RegisterServiceDescriptor(_FIRESTOREADMIN)
+
+DESCRIPTOR.services_by_name["FirestoreAdmin"] = _FIRESTOREADMIN
+
+# @@protoc_insertion_point(module_scope)
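+
+# Editor's note (illustrative sketch, not generated code): the classes
+# registered above are ordinary protobuf messages, so a request can be built
+# directly. The project and bucket names here are hypothetical placeholders.
+#
+#   request = ExportDocumentsRequest(
+#       name="projects/my-project/databases/(default)",
+#       collection_ids=["Users"],          # unspecified means all collections
+#       output_uri_prefix="gs://my-bucket/backup-1",
+#   )
+#   payload = request.SerializeToString()  # wire bytes, e.g. for a raw stub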
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/firestore_admin_pb2_grpc.py b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/firestore_admin_pb2_grpc.py
new file mode 100644
index 000000000..7ed771461
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/firestore_admin_pb2_grpc.py
@@ -0,0 +1,478 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
+
+from google.cloud.firestore_admin_v1.proto import (
+ field_pb2 as google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_field__pb2,
+)
+from google.cloud.firestore_admin_v1.proto import (
+ firestore_admin_pb2 as google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2,
+)
+from google.cloud.firestore_admin_v1.proto import (
+ index_pb2 as google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_index__pb2,
+)
+from google.longrunning import (
+ operations_pb2 as google_dot_longrunning_dot_operations__pb2,
+)
+from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
+
+
+class FirestoreAdminStub(object):
+ """Operations are created by service `FirestoreAdmin`, but are accessed via
+ service `google.longrunning.Operations`.
+ """
+
+ def __init__(self, channel):
+ """Constructor.
+
+ Args:
+ channel: A grpc.Channel.
+ """
+ self.CreateIndex = channel.unary_unary(
+ "/google.firestore.admin.v1.FirestoreAdmin/CreateIndex",
+ request_serializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.CreateIndexRequest.SerializeToString,
+ response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ )
+ self.ListIndexes = channel.unary_unary(
+ "/google.firestore.admin.v1.FirestoreAdmin/ListIndexes",
+ request_serializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.ListIndexesRequest.SerializeToString,
+ response_deserializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.ListIndexesResponse.FromString,
+ )
+ self.GetIndex = channel.unary_unary(
+ "/google.firestore.admin.v1.FirestoreAdmin/GetIndex",
+ request_serializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.GetIndexRequest.SerializeToString,
+ response_deserializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_index__pb2.Index.FromString,
+ )
+ self.DeleteIndex = channel.unary_unary(
+ "/google.firestore.admin.v1.FirestoreAdmin/DeleteIndex",
+ request_serializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.DeleteIndexRequest.SerializeToString,
+ response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
+ )
+ self.GetField = channel.unary_unary(
+ "/google.firestore.admin.v1.FirestoreAdmin/GetField",
+ request_serializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.GetFieldRequest.SerializeToString,
+ response_deserializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_field__pb2.Field.FromString,
+ )
+ self.UpdateField = channel.unary_unary(
+ "/google.firestore.admin.v1.FirestoreAdmin/UpdateField",
+ request_serializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.UpdateFieldRequest.SerializeToString,
+ response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ )
+ self.ListFields = channel.unary_unary(
+ "/google.firestore.admin.v1.FirestoreAdmin/ListFields",
+ request_serializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.ListFieldsRequest.SerializeToString,
+ response_deserializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.ListFieldsResponse.FromString,
+ )
+ self.ExportDocuments = channel.unary_unary(
+ "/google.firestore.admin.v1.FirestoreAdmin/ExportDocuments",
+ request_serializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.ExportDocumentsRequest.SerializeToString,
+ response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ )
+ self.ImportDocuments = channel.unary_unary(
+ "/google.firestore.admin.v1.FirestoreAdmin/ImportDocuments",
+ request_serializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.ImportDocumentsRequest.SerializeToString,
+ response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ )
+
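+# Editor's note (illustrative sketch, not generated code): a stub is built
+# from a channel. Real calls to the live service also need OAuth2 call
+# credentials, which are omitted here.
+#
+#   channel = grpc.secure_channel(
+#       "firestore.googleapis.com:443", grpc.ssl_channel_credentials()
+#   )
+#   admin_stub = FirestoreAdminStub(channel)
+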
+
+class FirestoreAdminServicer(object):
+ """Operations are created by service `FirestoreAdmin`, but are accessed via
+ service `google.longrunning.Operations`.
+ """
+
+ def CreateIndex(self, request, context):
+ """Creates a composite index. This returns a [google.longrunning.Operation][google.longrunning.Operation]
+ which may be used to track the status of the creation. The metadata for
+ the operation will be the type [IndexOperationMetadata][google.firestore.admin.v1.IndexOperationMetadata].
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def ListIndexes(self, request, context):
+ """Lists composite indexes.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def GetIndex(self, request, context):
+ """Gets a composite index.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def DeleteIndex(self, request, context):
+ """Deletes a composite index.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def GetField(self, request, context):
+ """Gets the metadata and configuration for a Field.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def UpdateField(self, request, context):
+ """Updates a field configuration. Currently, field updates apply only to
+ single field index configuration. However, calls to
+ [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField] should provide a field mask to avoid
+ changing any configuration that the caller isn't aware of. The field mask
+ should be specified as: `{ paths: "index_config" }`.
+
+ This call returns a [google.longrunning.Operation][google.longrunning.Operation] which may be used to
+ track the status of the field update. The metadata for
+ the operation will be the type [FieldOperationMetadata][google.firestore.admin.v1.FieldOperationMetadata].
+
+ To configure the default field settings for the database, use
+ the special `Field` with resource name:
+ `projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*`.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def ListFields(self, request, context):
+ """Lists the field configuration and metadata for this database.
+
+ Currently, [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] only supports listing fields
+ that have been explicitly overridden. To issue this query, call
+ [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] with the filter set to
+ `indexConfig.usesAncestorConfig:false`.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def ExportDocuments(self, request, context):
+ """Exports a copy of all or a subset of documents from Google Cloud Firestore
+ to another storage system, such as Google Cloud Storage. Recent updates to
+ documents may not be reflected in the export. The export occurs in the
+ background and its progress can be monitored and managed via the
+ Operation resource that is created. The output of an export may only be
+ used once the associated operation is done. If an export operation is
+ cancelled before completion it may leave partial data behind in Google
+ Cloud Storage.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def ImportDocuments(self, request, context):
+ """Imports documents into Google Cloud Firestore. Existing documents with the
+ same name are overwritten. The import occurs in the background and its
+ progress can be monitored and managed via the Operation resource that is
+ created. If an ImportDocuments operation is cancelled, it is possible
+ that a subset of the data has already been imported to Cloud Firestore.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+
+def add_FirestoreAdminServicer_to_server(servicer, server):
+ rpc_method_handlers = {
+ "CreateIndex": grpc.unary_unary_rpc_method_handler(
+ servicer.CreateIndex,
+ request_deserializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.CreateIndexRequest.FromString,
+ response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
+ ),
+ "ListIndexes": grpc.unary_unary_rpc_method_handler(
+ servicer.ListIndexes,
+ request_deserializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.ListIndexesRequest.FromString,
+ response_serializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.ListIndexesResponse.SerializeToString,
+ ),
+ "GetIndex": grpc.unary_unary_rpc_method_handler(
+ servicer.GetIndex,
+ request_deserializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.GetIndexRequest.FromString,
+ response_serializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_index__pb2.Index.SerializeToString,
+ ),
+ "DeleteIndex": grpc.unary_unary_rpc_method_handler(
+ servicer.DeleteIndex,
+ request_deserializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.DeleteIndexRequest.FromString,
+ response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
+ ),
+ "GetField": grpc.unary_unary_rpc_method_handler(
+ servicer.GetField,
+ request_deserializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.GetFieldRequest.FromString,
+ response_serializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_field__pb2.Field.SerializeToString,
+ ),
+ "UpdateField": grpc.unary_unary_rpc_method_handler(
+ servicer.UpdateField,
+ request_deserializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.UpdateFieldRequest.FromString,
+ response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
+ ),
+ "ListFields": grpc.unary_unary_rpc_method_handler(
+ servicer.ListFields,
+ request_deserializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.ListFieldsRequest.FromString,
+ response_serializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.ListFieldsResponse.SerializeToString,
+ ),
+ "ExportDocuments": grpc.unary_unary_rpc_method_handler(
+ servicer.ExportDocuments,
+ request_deserializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.ExportDocumentsRequest.FromString,
+ response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
+ ),
+ "ImportDocuments": grpc.unary_unary_rpc_method_handler(
+ servicer.ImportDocuments,
+ request_deserializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.ImportDocumentsRequest.FromString,
+ response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
+ ),
+ }
+ generic_handler = grpc.method_handlers_generic_handler(
+ "google.firestore.admin.v1.FirestoreAdmin", rpc_method_handlers
+ )
+ server.add_generic_rpc_handlers((generic_handler,))
+
+
+# This class is part of an EXPERIMENTAL API.
+class FirestoreAdmin(object):
+ """Operations are created by service `FirestoreAdmin`, but are accessed via
+ service `google.longrunning.Operations`.
+ """
+
+ @staticmethod
+ def CreateIndex(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.firestore.admin.v1.FirestoreAdmin/CreateIndex",
+ google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.CreateIndexRequest.SerializeToString,
+ google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def ListIndexes(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.firestore.admin.v1.FirestoreAdmin/ListIndexes",
+ google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.ListIndexesRequest.SerializeToString,
+ google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.ListIndexesResponse.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def GetIndex(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.firestore.admin.v1.FirestoreAdmin/GetIndex",
+ google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.GetIndexRequest.SerializeToString,
+ google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_index__pb2.Index.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def DeleteIndex(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.firestore.admin.v1.FirestoreAdmin/DeleteIndex",
+ google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.DeleteIndexRequest.SerializeToString,
+ google_dot_protobuf_dot_empty__pb2.Empty.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def GetField(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.firestore.admin.v1.FirestoreAdmin/GetField",
+ google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.GetFieldRequest.SerializeToString,
+ google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_field__pb2.Field.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def UpdateField(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.firestore.admin.v1.FirestoreAdmin/UpdateField",
+ google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.UpdateFieldRequest.SerializeToString,
+ google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def ListFields(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.firestore.admin.v1.FirestoreAdmin/ListFields",
+ google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.ListFieldsRequest.SerializeToString,
+ google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.ListFieldsResponse.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def ExportDocuments(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.firestore.admin.v1.FirestoreAdmin/ExportDocuments",
+ google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.ExportDocumentsRequest.SerializeToString,
+ google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def ImportDocuments(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.firestore.admin.v1.FirestoreAdmin/ImportDocuments",
+ google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.ImportDocumentsRequest.SerializeToString,
+ google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
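
The generated module above exposes two entry points: `add_FirestoreAdminServicer_to_server` for hosting the service, and the EXPERIMENTAL `FirestoreAdmin` static helpers for one-off calls without building a channel and stub by hand. A minimal sketch of both sides, assuming the vendored import paths used throughout this diff and a grpcio matching the version these stubs were generated against; the project id, port, and the stubbed ListFields body are placeholders:

    from concurrent import futures

    import grpc

    from google.cloud.firestore_admin_v1.proto import firestore_admin_pb2
    from google.cloud.firestore_admin_v1.proto import firestore_admin_pb2_grpc


    class StubAdmin(firestore_admin_pb2_grpc.FirestoreAdminServicer):
        # Override only what is needed; every other RPC keeps the generated
        # UNIMPLEMENTED behaviour shown above.
        def ListFields(self, request, context):
            return firestore_admin_pb2.ListFieldsResponse()


    server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
    firestore_admin_pb2_grpc.add_FirestoreAdminServicer_to_server(StubAdmin(), server)
    server.add_insecure_port("[::]:50051")
    server.start()

    # One-off call through the EXPERIMENTAL static helpers; the filter is the
    # one the ListFields docstring requires for explicitly overridden fields.
    request = firestore_admin_pb2.ListFieldsRequest(
        parent="projects/my-project/databases/(default)/collectionGroups/users",
        filter="indexConfig.usesAncestorConfig:false",
    )
    response = firestore_admin_pb2_grpc.FirestoreAdmin.ListFields(
        request,
        target="localhost:50051",
        channel_credentials=grpc.local_channel_credentials(),  # unencrypted local test only
    )
    print(response)
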
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/index.proto b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/index.proto
new file mode 100644
index 000000000..e27686be4
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/index.proto
@@ -0,0 +1,158 @@
+// Copyright 2019 Google LLC.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+syntax = "proto3";
+
+package google.firestore.admin.v1;
+
+import "google/api/resource.proto";
+import "google/api/annotations.proto";
+
+option csharp_namespace = "Google.Cloud.Firestore.Admin.V1";
+option go_package = "google.golang.org/genproto/googleapis/firestore/admin/v1;admin";
+option java_multiple_files = true;
+option java_outer_classname = "IndexProto";
+option java_package = "com.google.firestore.admin.v1";
+option objc_class_prefix = "GCFS";
+option php_namespace = "Google\\Cloud\\Firestore\\Admin\\V1";
+option ruby_package = "Google::Cloud::Firestore::Admin::V1";
+
+// Cloud Firestore indexes enable simple and complex queries against
+// documents in a database.
+message Index {
+ option (google.api.resource) = {
+ type: "firestore.googleapis.com/Index"
+ pattern: "projects/{project}/databases/{database}/collectionGroups/{collection}/indexes/{index}"
+ };
+
+ // A field in an index.
+ // The field_path describes which field is indexed, the value_mode describes
+ // how the field value is indexed.
+ message IndexField {
+ // The supported orderings.
+ enum Order {
+ // The ordering is unspecified. Not a valid option.
+ ORDER_UNSPECIFIED = 0;
+
+ // The field is ordered by ascending field value.
+ ASCENDING = 1;
+
+ // The field is ordered by descending field value.
+ DESCENDING = 2;
+ }
+
+ // The supported array value configurations.
+ enum ArrayConfig {
+ // The index does not support additional array queries.
+ ARRAY_CONFIG_UNSPECIFIED = 0;
+
+ // The index supports array containment queries.
+ CONTAINS = 1;
+ }
+
+ // Can be __name__.
+ // For single field indexes, this must match the name of the field or may
+ // be omitted.
+ string field_path = 1;
+
+ // How the field value is indexed.
+ oneof value_mode {
+ // Indicates that this field supports ordering by the specified order or
+ // comparing using =, <, <=, >, >=.
+ Order order = 2;
+
+ // Indicates that this field supports operations on `array_value`s.
+ ArrayConfig array_config = 3;
+ }
+ }
+
+ // Query Scope defines the scope at which a query is run. This is specified on
+ // a StructuredQuery's `from` field.
+ enum QueryScope {
+ // The query scope is unspecified. Not a valid option.
+ QUERY_SCOPE_UNSPECIFIED = 0;
+
+ // Indexes with a collection query scope specified allow queries
+ // against a collection that is the child of a specific document, specified
+ // at query time, and that has the collection id specified by the index.
+ COLLECTION = 1;
+
+ // Indexes with a collection group query scope specified allow queries
+ // against all collections that have the collection id specified by the
+ // index.
+ COLLECTION_GROUP = 2;
+ }
+
+ // The state of an index. During index creation, an index will be in the
+ // `CREATING` state. If the index is created successfully, it will transition
+ // to the `READY` state. If the index creation encounters a problem, the index
+ // will transition to the `NEEDS_REPAIR` state.
+ enum State {
+ // The state is unspecified.
+ STATE_UNSPECIFIED = 0;
+
+ // The index is being created.
+ // There is an active long-running operation for the index.
+ // The index is updated when writing a document.
+ // Some index data may exist.
+ CREATING = 1;
+
+ // The index is ready to be used.
+ // The index is updated when writing a document.
+ // The index is fully populated from all stored documents it applies to.
+ READY = 2;
+
+ // The index was being created, but something went wrong.
+ // There is no active long-running operation for the index,
+ // and the most recently finished long-running operation failed.
+ // The index is not updated when writing a document.
+ // Some index data may exist.
+ // Use the google.longrunning.Operations API to determine why the operation
+ // that last attempted to create this index failed, then re-create the
+ // index.
+ NEEDS_REPAIR = 3;
+ }
+
+ // Output only. A server defined name for this index.
+ // The form of this name for composite indexes will be:
+ // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{composite_index_id}`
+ // For single field indexes, this field will be empty.
+ string name = 1;
+
+ // Indexes with a collection query scope specified allow queries
+ // against a collection that is the child of a specific document, specified at
+ // query time, and that has the same collection id.
+ //
+ // Indexes with a collection group query scope specified allow queries against
+ // all collections descended from a specific document, specified at query
+ // time, and that have the same collection id as this index.
+ QueryScope query_scope = 2;
+
+ // The fields supported by this index.
+ //
+ // For composite indexes, this is always 2 or more fields.
+ // The last field entry is always for the field path `__name__`. If, on
+ // creation, `__name__` was not specified as the last field, it will be added
+ // automatically with the same direction as that of the last field defined. If
+ // the final field in a composite index is not directional, the `__name__`
+ // will be ordered ASCENDING (unless explicitly specified).
+ //
+ // For single field indexes, this will always be exactly one entry with a
+ // field path equal to the field path of the associated field.
+ repeated IndexField fields = 3;
+
+ // Output only. The serving state of the index.
+ State state = 4;
+}
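
For reference, a short sketch of constructing the message this proto describes, using the module generated from it (same vendored import path as elsewhere in this diff; the field names are illustrative). `name` and `state` are output only, so they are left unset. Note the `value_mode` oneof in action: assigning `array_config` clears a previously assigned `order`:

    from google.cloud.firestore_admin_v1.proto import index_pb2

    composite = index_pb2.Index(
        query_scope=index_pb2.Index.COLLECTION,
        fields=[
            index_pb2.Index.IndexField(
                field_path="city",
                order=index_pb2.Index.IndexField.ASCENDING,
            ),
            index_pb2.Index.IndexField(
                field_path="population",
                order=index_pb2.Index.IndexField.DESCENDING,
            ),
        ],
    )

    # The value_mode oneof: order and array_config are mutually exclusive.
    tags = composite.fields.add(field_path="tags")
    tags.order = index_pb2.Index.IndexField.ASCENDING
    tags.array_config = index_pb2.Index.IndexField.CONTAINS  # clears `order`
    assert tags.WhichOneof("value_mode") == "array_config"
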
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/index_pb2.py b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/index_pb2.py
new file mode 100644
index 000000000..2e7f17893
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/index_pb2.py
@@ -0,0 +1,473 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/cloud/firestore_admin_v1/proto/index.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.api import resource_pb2 as google_dot_api_dot_resource__pb2
+from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name="google/cloud/firestore_admin_v1/proto/index.proto",
+ package="google.firestore.admin.v1",
+ syntax="proto3",
+ serialized_options=b"\n\035com.google.firestore.admin.v1B\nIndexProtoP\001Z>google.golang.org/genproto/googleapis/firestore/admin/v1;admin\242\002\004GCFS\252\002\037Google.Cloud.Firestore.Admin.V1\312\002\037Google\\Cloud\\Firestore\\Admin\\V1\352\002#Google::Cloud::Firestore::Admin::V1",
+ create_key=_descriptor._internal_create_key,
+ serialized_pb=b'\n1google/cloud/firestore_admin_v1/proto/index.proto\x12\x19google.firestore.admin.v1\x1a\x19google/api/resource.proto\x1a\x1cgoogle/api/annotations.proto"\xa3\x06\n\x05Index\x12\x0c\n\x04name\x18\x01 \x01(\t\x12@\n\x0bquery_scope\x18\x02 \x01(\x0e\x32+.google.firestore.admin.v1.Index.QueryScope\x12;\n\x06\x66ields\x18\x03 \x03(\x0b\x32+.google.firestore.admin.v1.Index.IndexField\x12\x35\n\x05state\x18\x04 \x01(\x0e\x32&.google.firestore.admin.v1.Index.State\x1a\xbd\x02\n\nIndexField\x12\x12\n\nfield_path\x18\x01 \x01(\t\x12\x42\n\x05order\x18\x02 \x01(\x0e\x32\x31.google.firestore.admin.v1.Index.IndexField.OrderH\x00\x12O\n\x0c\x61rray_config\x18\x03 \x01(\x0e\x32\x37.google.firestore.admin.v1.Index.IndexField.ArrayConfigH\x00"=\n\x05Order\x12\x15\n\x11ORDER_UNSPECIFIED\x10\x00\x12\r\n\tASCENDING\x10\x01\x12\x0e\n\nDESCENDING\x10\x02"9\n\x0b\x41rrayConfig\x12\x1c\n\x18\x41RRAY_CONFIG_UNSPECIFIED\x10\x00\x12\x0c\n\x08\x43ONTAINS\x10\x01\x42\x0c\n\nvalue_mode"O\n\nQueryScope\x12\x1b\n\x17QUERY_SCOPE_UNSPECIFIED\x10\x00\x12\x0e\n\nCOLLECTION\x10\x01\x12\x14\n\x10\x43OLLECTION_GROUP\x10\x02"I\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0c\n\x08\x43REATING\x10\x01\x12\t\n\x05READY\x10\x02\x12\x10\n\x0cNEEDS_REPAIR\x10\x03:z\xea\x41w\n\x1e\x66irestore.googleapis.com/Index\x12Uprojects/{project}/databases/{database}/collectionGroups/{collection}/indexes/{index}B\xde\x01\n\x1d\x63om.google.firestore.admin.v1B\nIndexProtoP\x01Z>google.golang.org/genproto/googleapis/firestore/admin/v1;admin\xa2\x02\x04GCFS\xaa\x02\x1fGoogle.Cloud.Firestore.Admin.V1\xca\x02\x1fGoogle\\Cloud\\Firestore\\Admin\\V1\xea\x02#Google::Cloud::Firestore::Admin::V1b\x06proto3',
+ dependencies=[
+ google_dot_api_dot_resource__pb2.DESCRIPTOR,
+ google_dot_api_dot_annotations__pb2.DESCRIPTOR,
+ ],
+)
+
+
+_INDEX_INDEXFIELD_ORDER = _descriptor.EnumDescriptor(
+ name="Order",
+ full_name="google.firestore.admin.v1.Index.IndexField.Order",
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name="ORDER_UNSPECIFIED",
+ index=0,
+ number=0,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="ASCENDING",
+ index=1,
+ number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="DESCENDING",
+ index=2,
+ number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ serialized_start=527,
+ serialized_end=588,
+)
+_sym_db.RegisterEnumDescriptor(_INDEX_INDEXFIELD_ORDER)
+
+_INDEX_INDEXFIELD_ARRAYCONFIG = _descriptor.EnumDescriptor(
+ name="ArrayConfig",
+ full_name="google.firestore.admin.v1.Index.IndexField.ArrayConfig",
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name="ARRAY_CONFIG_UNSPECIFIED",
+ index=0,
+ number=0,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="CONTAINS",
+ index=1,
+ number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ serialized_start=590,
+ serialized_end=647,
+)
+_sym_db.RegisterEnumDescriptor(_INDEX_INDEXFIELD_ARRAYCONFIG)
+
+_INDEX_QUERYSCOPE = _descriptor.EnumDescriptor(
+ name="QueryScope",
+ full_name="google.firestore.admin.v1.Index.QueryScope",
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name="QUERY_SCOPE_UNSPECIFIED",
+ index=0,
+ number=0,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="COLLECTION",
+ index=1,
+ number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="COLLECTION_GROUP",
+ index=2,
+ number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ serialized_start=663,
+ serialized_end=742,
+)
+_sym_db.RegisterEnumDescriptor(_INDEX_QUERYSCOPE)
+
+_INDEX_STATE = _descriptor.EnumDescriptor(
+ name="State",
+ full_name="google.firestore.admin.v1.Index.State",
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name="STATE_UNSPECIFIED",
+ index=0,
+ number=0,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="CREATING",
+ index=1,
+ number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="READY",
+ index=2,
+ number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="NEEDS_REPAIR",
+ index=3,
+ number=3,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ serialized_start=744,
+ serialized_end=817,
+)
+_sym_db.RegisterEnumDescriptor(_INDEX_STATE)
+
+
+_INDEX_INDEXFIELD = _descriptor.Descriptor(
+ name="IndexField",
+ full_name="google.firestore.admin.v1.Index.IndexField",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="field_path",
+ full_name="google.firestore.admin.v1.Index.IndexField.field_path",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="order",
+ full_name="google.firestore.admin.v1.Index.IndexField.order",
+ index=1,
+ number=2,
+ type=14,
+ cpp_type=8,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="array_config",
+ full_name="google.firestore.admin.v1.Index.IndexField.array_config",
+ index=2,
+ number=3,
+ type=14,
+ cpp_type=8,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[_INDEX_INDEXFIELD_ORDER, _INDEX_INDEXFIELD_ARRAYCONFIG,],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name="value_mode",
+ full_name="google.firestore.admin.v1.Index.IndexField.value_mode",
+ index=0,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[],
+ ),
+ ],
+ serialized_start=344,
+ serialized_end=661,
+)
+
+_INDEX = _descriptor.Descriptor(
+ name="Index",
+ full_name="google.firestore.admin.v1.Index",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="name",
+ full_name="google.firestore.admin.v1.Index.name",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="query_scope",
+ full_name="google.firestore.admin.v1.Index.query_scope",
+ index=1,
+ number=2,
+ type=14,
+ cpp_type=8,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="fields",
+ full_name="google.firestore.admin.v1.Index.fields",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="state",
+ full_name="google.firestore.admin.v1.Index.state",
+ index=3,
+ number=4,
+ type=14,
+ cpp_type=8,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[_INDEX_INDEXFIELD,],
+ enum_types=[_INDEX_QUERYSCOPE, _INDEX_STATE,],
+ serialized_options=b"\352Aw\n\036firestore.googleapis.com/Index\022Uprojects/{project}/databases/{database}/collectionGroups/{collection}/indexes/{index}",
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=138,
+ serialized_end=941,
+)
+
+_INDEX_INDEXFIELD.fields_by_name["order"].enum_type = _INDEX_INDEXFIELD_ORDER
+_INDEX_INDEXFIELD.fields_by_name[
+ "array_config"
+].enum_type = _INDEX_INDEXFIELD_ARRAYCONFIG
+_INDEX_INDEXFIELD.containing_type = _INDEX
+_INDEX_INDEXFIELD_ORDER.containing_type = _INDEX_INDEXFIELD
+_INDEX_INDEXFIELD_ARRAYCONFIG.containing_type = _INDEX_INDEXFIELD
+_INDEX_INDEXFIELD.oneofs_by_name["value_mode"].fields.append(
+ _INDEX_INDEXFIELD.fields_by_name["order"]
+)
+_INDEX_INDEXFIELD.fields_by_name[
+ "order"
+].containing_oneof = _INDEX_INDEXFIELD.oneofs_by_name["value_mode"]
+_INDEX_INDEXFIELD.oneofs_by_name["value_mode"].fields.append(
+ _INDEX_INDEXFIELD.fields_by_name["array_config"]
+)
+_INDEX_INDEXFIELD.fields_by_name[
+ "array_config"
+].containing_oneof = _INDEX_INDEXFIELD.oneofs_by_name["value_mode"]
+_INDEX.fields_by_name["query_scope"].enum_type = _INDEX_QUERYSCOPE
+_INDEX.fields_by_name["fields"].message_type = _INDEX_INDEXFIELD
+_INDEX.fields_by_name["state"].enum_type = _INDEX_STATE
+_INDEX_QUERYSCOPE.containing_type = _INDEX
+_INDEX_STATE.containing_type = _INDEX
+DESCRIPTOR.message_types_by_name["Index"] = _INDEX
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+Index = _reflection.GeneratedProtocolMessageType(
+ "Index",
+ (_message.Message,),
+ {
+ "IndexField": _reflection.GeneratedProtocolMessageType(
+ "IndexField",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _INDEX_INDEXFIELD,
+ "__module__": "google.cloud.firestore_admin_v1.proto.index_pb2",
+ "__doc__": """A field in an index. The field_path describes which field is indexed,
+ the value_mode describes how the field value is indexed.
+
+ Attributes:
+ field_path:
+ Can be **name**. For single field indexes, this must match the
+ name of the field or may be omitted.
+ value_mode:
+ How the field value is indexed.
+ order:
+ Indicates that this field supports ordering by the specified
+ order or comparing using =, <, <=, >, >=.
+ array_config:
+ Indicates that this field supports operations on
+ ``array_value``\ s.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.Index.IndexField)
+ },
+ ),
+ "DESCRIPTOR": _INDEX,
+ "__module__": "google.cloud.firestore_admin_v1.proto.index_pb2",
+ "__doc__": """Cloud Firestore indexes enable simple and complex queries against
+ documents in a database.
+
+ Attributes:
+ name:
+ Output only. A server defined name for this index. The form of
+ this name for composite indexes will be: ``projects/{project_i
+ d}/databases/{database_id}/collectionGroups/{collection_id}/in
+ dexes/{composite_index_id}`` For single field indexes, this
+ field will be empty.
+ query_scope:
+ Indexes with a collection query scope specified allow queries
+ against a collection that is the child of a specific document,
+ specified at query time, and that has the same collection id.
+ Indexes with a collection group query scope specified allow
+ queries against all collections descended from a specific
+ document, specified at query time, and that have the same
+ collection id as this index.
+ fields:
+ The fields supported by this index. For composite indexes,
+ this is always 2 or more fields. The last field entry is
+ always for the field path ``__name__``. If, on creation,
+ ``__name__`` was not specified as the last field, it will be
+ added automatically with the same direction as that of the
+ last field defined. If the final field in a composite index is
+ not directional, the ``__name__`` will be ordered ASCENDING
+ (unless explicitly specified). For single field indexes, this
+ will always be exactly one entry with a field path equal to
+ the field path of the associated field.
+ state:
+ Output only. The serving state of the index.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.Index)
+ },
+)
+_sym_db.RegisterMessage(Index)
+_sym_db.RegisterMessage(Index.IndexField)
+
+
+DESCRIPTOR._options = None
+_INDEX._options = None
+# @@protoc_insertion_point(module_scope)
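
The module registers `Index` with the symbol database and gives it the standard protobuf API, including the `SerializeToString`/`FromString` pair the gRPC handlers earlier in this diff rely on. A quick round-trip sketch, under the same vendored import path:

    from google.cloud.firestore_admin_v1.proto import index_pb2

    original = index_pb2.Index(
        query_scope=index_pb2.Index.COLLECTION_GROUP,
        fields=[
            index_pb2.Index.IndexField(
                field_path="score",
                order=index_pb2.Index.IndexField.DESCENDING,
            )
        ],
    )

    wire = original.SerializeToString()        # bytes as sent over gRPC
    parsed = index_pb2.Index.FromString(wire)  # what the handlers call on receipt
    assert parsed == original

    # Nested enums come wrapped with name/value helpers; state defaults to 0.
    assert index_pb2.Index.State.Name(parsed.state) == "STATE_UNSPECIFIED"
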
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/index_pb2_grpc.py b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/index_pb2_grpc.py
new file mode 100644
index 000000000..8a9393943
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/index_pb2_grpc.py
@@ -0,0 +1,3 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/location.proto b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/location.proto
new file mode 100644
index 000000000..e435c6f0d
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/location.proto
@@ -0,0 +1,35 @@
+// Copyright 2019 Google LLC.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+syntax = "proto3";
+
+package google.firestore.admin.v1;
+
+import "google/type/latlng.proto";
+import "google/api/annotations.proto";
+
+option csharp_namespace = "Google.Cloud.Firestore.Admin.V1";
+option go_package = "google.golang.org/genproto/googleapis/firestore/admin/v1;admin";
+option java_multiple_files = true;
+option java_outer_classname = "LocationProto";
+option java_package = "com.google.firestore.admin.v1";
+option objc_class_prefix = "GCFS";
+option php_namespace = "Google\\Cloud\\Firestore\\Admin\\V1";
+option ruby_package = "Google::Cloud::Firestore::Admin::V1";
+
+// The metadata message for [google.cloud.location.Location.metadata][google.cloud.location.Location.metadata].
+message LocationMetadata {
+
+}
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/location_pb2.py b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/location_pb2.py
new file mode 100644
index 000000000..3bc821280
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/location_pb2.py
@@ -0,0 +1,71 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/cloud/firestore_admin_v1/proto/location.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.type import latlng_pb2 as google_dot_type_dot_latlng__pb2
+from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name="google/cloud/firestore_admin_v1/proto/location.proto",
+ package="google.firestore.admin.v1",
+ syntax="proto3",
+ serialized_options=b"\n\035com.google.firestore.admin.v1B\rLocationProtoP\001Z>google.golang.org/genproto/googleapis/firestore/admin/v1;admin\242\002\004GCFS\252\002\037Google.Cloud.Firestore.Admin.V1\312\002\037Google\\Cloud\\Firestore\\Admin\\V1\352\002#Google::Cloud::Firestore::Admin::V1",
+ create_key=_descriptor._internal_create_key,
+ serialized_pb=b'\n4google/cloud/firestore_admin_v1/proto/location.proto\x12\x19google.firestore.admin.v1\x1a\x18google/type/latlng.proto\x1a\x1cgoogle/api/annotations.proto"\x12\n\x10LocationMetadataB\xe1\x01\n\x1d\x63om.google.firestore.admin.v1B\rLocationProtoP\x01Z>google.golang.org/genproto/googleapis/firestore/admin/v1;admin\xa2\x02\x04GCFS\xaa\x02\x1fGoogle.Cloud.Firestore.Admin.V1\xca\x02\x1fGoogle\\Cloud\\Firestore\\Admin\\V1\xea\x02#Google::Cloud::Firestore::Admin::V1b\x06proto3',
+ dependencies=[
+ google_dot_type_dot_latlng__pb2.DESCRIPTOR,
+ google_dot_api_dot_annotations__pb2.DESCRIPTOR,
+ ],
+)
+
+
+_LOCATIONMETADATA = _descriptor.Descriptor(
+ name="LocationMetadata",
+ full_name="google.firestore.admin.v1.LocationMetadata",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=139,
+ serialized_end=157,
+)
+
+DESCRIPTOR.message_types_by_name["LocationMetadata"] = _LOCATIONMETADATA
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+LocationMetadata = _reflection.GeneratedProtocolMessageType(
+ "LocationMetadata",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _LOCATIONMETADATA,
+ "__module__": "google.cloud.firestore_admin_v1.proto.location_pb2",
+ "__doc__": """The metadata message for [google.cloud.location.Location.metadata][goo
+ gle.cloud.location.Location.metadata].""",
+ # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.LocationMetadata)
+ },
+)
+_sym_db.RegisterMessage(LocationMetadata)
+
+
+DESCRIPTOR._options = None
+# @@protoc_insertion_point(module_scope)
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/location_pb2_grpc.py b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/location_pb2_grpc.py
new file mode 100644
index 000000000..8a9393943
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/location_pb2_grpc.py
@@ -0,0 +1,3 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/operation.proto b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/operation.proto
new file mode 100644
index 000000000..dcdc6ee65
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/operation.proto
@@ -0,0 +1,204 @@
+// Copyright 2019 Google LLC.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+syntax = "proto3";
+
+package google.firestore.admin.v1;
+
+import "google/firestore/admin/v1/index.proto";
+import "google/protobuf/timestamp.proto";
+import "google/api/annotations.proto";
+
+option csharp_namespace = "Google.Cloud.Firestore.Admin.V1";
+option go_package = "google.golang.org/genproto/googleapis/firestore/admin/v1;admin";
+option java_multiple_files = true;
+option java_outer_classname = "OperationProto";
+option java_package = "com.google.firestore.admin.v1";
+option objc_class_prefix = "GCFS";
+option php_namespace = "Google\\Cloud\\Firestore\\Admin\\V1";
+option ruby_package = "Google::Cloud::Firestore::Admin::V1";
+
+// Metadata for [google.longrunning.Operation][google.longrunning.Operation] results from
+// [FirestoreAdmin.CreateIndex][google.firestore.admin.v1.FirestoreAdmin.CreateIndex].
+message IndexOperationMetadata {
+ // The time this operation started.
+ google.protobuf.Timestamp start_time = 1;
+
+ // The time this operation completed. Will be unset if the operation is
+ // still in progress.
+ google.protobuf.Timestamp end_time = 2;
+
+ // The index resource that this operation is acting on. For example:
+ // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}`
+ string index = 3;
+
+ // The state of the operation.
+ OperationState state = 4;
+
+ // The progress, in documents, of this operation.
+ Progress progress_documents = 5;
+
+ // The progress, in bytes, of this operation.
+ Progress progress_bytes = 6;
+}
+
+// Metadata for [google.longrunning.Operation][google.longrunning.Operation] results from
+// [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField].
+message FieldOperationMetadata {
+ // Information about an index configuration change.
+ message IndexConfigDelta {
+ // Specifies how the index is changing.
+ enum ChangeType {
+ // The type of change is not specified or known.
+ CHANGE_TYPE_UNSPECIFIED = 0;
+
+ // The single field index is being added.
+ ADD = 1;
+
+ // The single field index is being removed.
+ REMOVE = 2;
+ }
+
+ // Specifies how the index is changing.
+ ChangeType change_type = 1;
+
+ // The index being changed.
+ Index index = 2;
+ }
+
+ // The time this operation started.
+ google.protobuf.Timestamp start_time = 1;
+
+ // The time this operation completed. Will be unset if the operation is
+ // still in progress.
+ google.protobuf.Timestamp end_time = 2;
+
+ // The field resource that this operation is acting on. For example:
+ // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_path}`
+ string field = 3;
+
+ // A list of [IndexConfigDelta][google.firestore.admin.v1.FieldOperationMetadata.IndexConfigDelta], which describe the intent of this
+ // operation.
+ repeated IndexConfigDelta index_config_deltas = 4;
+
+ // The state of the operation.
+ OperationState state = 5;
+
+ // The progress, in documents, of this operation.
+ Progress progress_documents = 6;
+
+ // The progress, in bytes, of this operation.
+ Progress progress_bytes = 7;
+}
+
+// Metadata for [google.longrunning.Operation][google.longrunning.Operation] results from
+// [FirestoreAdmin.ExportDocuments][google.firestore.admin.v1.FirestoreAdmin.ExportDocuments].
+message ExportDocumentsMetadata {
+ // The time this operation started.
+ google.protobuf.Timestamp start_time = 1;
+
+ // The time this operation completed. Will be unset if the operation is
+ // still in progress.
+ google.protobuf.Timestamp end_time = 2;
+
+ // The state of the export operation.
+ OperationState operation_state = 3;
+
+ // The progress, in documents, of this operation.
+ Progress progress_documents = 4;
+
+ // The progress, in bytes, of this operation.
+ Progress progress_bytes = 5;
+
+ // Which collection ids are being exported.
+ repeated string collection_ids = 6;
+
+ // Where the entities are being exported to.
+ string output_uri_prefix = 7;
+}
+
+// Metadata for [google.longrunning.Operation][google.longrunning.Operation] results from
+// [FirestoreAdmin.ImportDocuments][google.firestore.admin.v1.FirestoreAdmin.ImportDocuments].
+message ImportDocumentsMetadata {
+ // The time this operation started.
+ google.protobuf.Timestamp start_time = 1;
+
+ // The time this operation completed. Will be unset if the operation is
+ // still in progress.
+ google.protobuf.Timestamp end_time = 2;
+
+ // The state of the import operation.
+ OperationState operation_state = 3;
+
+ // The progress, in documents, of this operation.
+ Progress progress_documents = 4;
+
+ // The progress, in bytes, of this operation.
+ Progress progress_bytes = 5;
+
+ // Which collection ids are being imported.
+ repeated string collection_ids = 6;
+
+ // The location of the documents being imported.
+ string input_uri_prefix = 7;
+}
+
+// Returned in the [google.longrunning.Operation][google.longrunning.Operation] response field.
+message ExportDocumentsResponse {
+ // Location of the output files. This can be used to begin an import
+ // into Cloud Firestore (this project or another project) after the operation
+ // completes successfully.
+ string output_uri_prefix = 1;
+}
+
+// Describes the progress of the operation.
+// Unit of work is generic and must be interpreted based on where [Progress][google.firestore.admin.v1.Progress]
+// is used.
+message Progress {
+ // The amount of work estimated.
+ int64 estimated_work = 1;
+
+ // The amount of work completed.
+ int64 completed_work = 2;
+}
+
+// Describes the state of the operation.
+enum OperationState {
+ // Unspecified.
+ OPERATION_STATE_UNSPECIFIED = 0;
+
+ // Request is being prepared for processing.
+ INITIALIZING = 1;
+
+ // Request is actively being processed.
+ PROCESSING = 2;
+
+ // Request is in the process of being cancelled after the user called
+ // google.longrunning.Operations.CancelOperation on the operation.
+ CANCELLING = 3;
+
+ // Request has been processed and is in its finalization stage.
+ FINALIZING = 4;
+
+ // Request has completed successfully.
+ SUCCESSFUL = 5;
+
+ // Request has finished being processed, but encountered an error.
+ FAILED = 6;
+
+ // Request has finished being cancelled after the user called
+ // google.longrunning.Operations.CancelOperation.
+ CANCELLED = 7;
+}
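
Every metadata message above pairs an `OperationState` with `Progress` counters, so a caller polling a long-running operation can report progress generically. A minimal sketch using the module generated from this file (the `summarize` helper is ours, not part of the API; in real use the metadata arrives packed in the Operation's `metadata` Any field and must be unpacked first):

    from google.cloud.firestore_admin_v1.proto import operation_pb2

    def summarize(meta: operation_pb2.FieldOperationMetadata) -> str:
        """Render a one-line status for an UpdateField operation's metadata."""
        state = operation_pb2.OperationState.Name(meta.state)
        docs = meta.progress_documents
        pct = 100.0 * docs.completed_work / docs.estimated_work if docs.estimated_work else 0.0
        return f"{meta.field}: {state} ({pct:.0f}% of ~{docs.estimated_work} documents)"

    meta = operation_pb2.FieldOperationMetadata(
        field="projects/p/databases/(default)/collectionGroups/users/fields/last_login",
        state=operation_pb2.PROCESSING,  # top-level enum values are module constants
    )
    meta.progress_documents.estimated_work = 2000
    meta.progress_documents.completed_work = 500
    print(summarize(meta))  # ...fields/last_login: PROCESSING (25% of ~2000 documents)
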
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/operation_pb2.py b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/operation_pb2.py
new file mode 100644
index 000000000..acf0996ff
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/operation_pb2.py
@@ -0,0 +1,1185 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/cloud/firestore_admin_v1/proto/operation.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.cloud.firestore_admin_v1.proto import (
+ index_pb2 as google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_index__pb2,
+)
+from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
+from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name="google/cloud/firestore_admin_v1/proto/operation.proto",
+ package="google.firestore.admin.v1",
+ syntax="proto3",
+ serialized_options=b"\n\035com.google.firestore.admin.v1B\016OperationProtoP\001Z>google.golang.org/genproto/googleapis/firestore/admin/v1;admin\242\002\004GCFS\252\002\037Google.Cloud.Firestore.Admin.V1\312\002\037Google\\Cloud\\Firestore\\Admin\\V1\352\002#Google::Cloud::Firestore::Admin::V1",
+ create_key=_descriptor._internal_create_key,
+ serialized_pb=b'\n5google/cloud/firestore_admin_v1/proto/operation.proto\x12\x19google.firestore.admin.v1\x1a\x31google/cloud/firestore_admin_v1/proto/index.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/api/annotations.proto"\xbd\x02\n\x16IndexOperationMetadata\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\r\n\x05index\x18\x03 \x01(\t\x12\x38\n\x05state\x18\x04 \x01(\x0e\x32).google.firestore.admin.v1.OperationState\x12?\n\x12progress_documents\x18\x05 \x01(\x0b\x32#.google.firestore.admin.v1.Progress\x12;\n\x0eprogress_bytes\x18\x06 \x01(\x0b\x32#.google.firestore.admin.v1.Progress"\x88\x05\n\x16\x46ieldOperationMetadata\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\r\n\x05\x66ield\x18\x03 \x01(\t\x12_\n\x13index_config_deltas\x18\x04 \x03(\x0b\x32\x42.google.firestore.admin.v1.FieldOperationMetadata.IndexConfigDelta\x12\x38\n\x05state\x18\x05 \x01(\x0e\x32).google.firestore.admin.v1.OperationState\x12?\n\x12progress_documents\x18\x06 \x01(\x0b\x32#.google.firestore.admin.v1.Progress\x12;\n\x0eprogress_bytes\x18\x07 \x01(\x0b\x32#.google.firestore.admin.v1.Progress\x1a\xe7\x01\n\x10IndexConfigDelta\x12\x62\n\x0b\x63hange_type\x18\x01 \x01(\x0e\x32M.google.firestore.admin.v1.FieldOperationMetadata.IndexConfigDelta.ChangeType\x12/\n\x05index\x18\x02 \x01(\x0b\x32 .google.firestore.admin.v1.Index">\n\nChangeType\x12\x1b\n\x17\x43HANGE_TYPE_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41\x44\x44\x10\x01\x12\n\n\x06REMOVE\x10\x02"\xec\x02\n\x17\x45xportDocumentsMetadata\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x42\n\x0foperation_state\x18\x03 \x01(\x0e\x32).google.firestore.admin.v1.OperationState\x12?\n\x12progress_documents\x18\x04 \x01(\x0b\x32#.google.firestore.admin.v1.Progress\x12;\n\x0eprogress_bytes\x18\x05 \x01(\x0b\x32#.google.firestore.admin.v1.Progress\x12\x16\n\x0e\x63ollection_ids\x18\x06 \x03(\t\x12\x19\n\x11output_uri_prefix\x18\x07 \x01(\t"\xeb\x02\n\x17ImportDocumentsMetadata\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x42\n\x0foperation_state\x18\x03 \x01(\x0e\x32).google.firestore.admin.v1.OperationState\x12?\n\x12progress_documents\x18\x04 \x01(\x0b\x32#.google.firestore.admin.v1.Progress\x12;\n\x0eprogress_bytes\x18\x05 \x01(\x0b\x32#.google.firestore.admin.v1.Progress\x12\x16\n\x0e\x63ollection_ids\x18\x06 \x03(\t\x12\x18\n\x10input_uri_prefix\x18\x07 \x01(\t"4\n\x17\x45xportDocumentsResponse\x12\x19\n\x11output_uri_prefix\x18\x01 \x01(\t":\n\x08Progress\x12\x16\n\x0e\x65stimated_work\x18\x01 \x01(\x03\x12\x16\n\x0e\x63ompleted_work\x18\x02 \x01(\x03*\x9e\x01\n\x0eOperationState\x12\x1f\n\x1bOPERATION_STATE_UNSPECIFIED\x10\x00\x12\x10\n\x0cINITIALIZING\x10\x01\x12\x0e\n\nPROCESSING\x10\x02\x12\x0e\n\nCANCELLING\x10\x03\x12\x0e\n\nFINALIZING\x10\x04\x12\x0e\n\nSUCCESSFUL\x10\x05\x12\n\n\x06\x46\x41ILED\x10\x06\x12\r\n\tCANCELLED\x10\x07\x42\xe2\x01\n\x1d\x63om.google.firestore.admin.v1B\x0eOperationProtoP\x01Z>google.golang.org/genproto/googleapis/firestore/admin/v1;admin\xa2\x02\x04GCFS\xaa\x02\x1fGoogle.Cloud.Firestore.Admin.V1\xca\x02\x1fGoogle\\Cloud\\Firestore\\Admin\\V1\xea\x02#Google::Cloud::Firestore::Admin::V1b\x06proto3',
+ dependencies=[
+ google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_index__pb2.DESCRIPTOR,
+ google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,
+ google_dot_api_dot_annotations__pb2.DESCRIPTOR,
+ ],
+)
+
+_OPERATIONSTATE = _descriptor.EnumDescriptor(
+ name="OperationState",
+ full_name="google.firestore.admin.v1.OperationState",
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name="OPERATION_STATE_UNSPECIFIED",
+ index=0,
+ number=0,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="INITIALIZING",
+ index=1,
+ number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="PROCESSING",
+ index=2,
+ number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="CANCELLING",
+ index=3,
+ number=3,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="FINALIZING",
+ index=4,
+ number=4,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="SUCCESSFUL",
+ index=5,
+ number=5,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="FAILED",
+ index=6,
+ number=6,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="CANCELLED",
+ index=7,
+ number=7,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ serialized_start=2017,
+ serialized_end=2175,
+)
+_sym_db.RegisterEnumDescriptor(_OPERATIONSTATE)
+
+OperationState = enum_type_wrapper.EnumTypeWrapper(_OPERATIONSTATE)
+OPERATION_STATE_UNSPECIFIED = 0
+INITIALIZING = 1
+PROCESSING = 2
+CANCELLING = 3
+FINALIZING = 4
+SUCCESSFUL = 5
+FAILED = 6
+CANCELLED = 7
+
+
+_FIELDOPERATIONMETADATA_INDEXCONFIGDELTA_CHANGETYPE = _descriptor.EnumDescriptor(
+ name="ChangeType",
+ full_name="google.firestore.admin.v1.FieldOperationMetadata.IndexConfigDelta.ChangeType",
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name="CHANGE_TYPE_UNSPECIFIED",
+ index=0,
+ number=0,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="ADD",
+ index=1,
+ number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="REMOVE",
+ index=2,
+ number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ serialized_start=1105,
+ serialized_end=1167,
+)
+_sym_db.RegisterEnumDescriptor(_FIELDOPERATIONMETADATA_INDEXCONFIGDELTA_CHANGETYPE)
+
+
+_INDEXOPERATIONMETADATA = _descriptor.Descriptor(
+ name="IndexOperationMetadata",
+ full_name="google.firestore.admin.v1.IndexOperationMetadata",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="start_time",
+ full_name="google.firestore.admin.v1.IndexOperationMetadata.start_time",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="end_time",
+ full_name="google.firestore.admin.v1.IndexOperationMetadata.end_time",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="index",
+ full_name="google.firestore.admin.v1.IndexOperationMetadata.index",
+ index=2,
+ number=3,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="state",
+ full_name="google.firestore.admin.v1.IndexOperationMetadata.state",
+ index=3,
+ number=4,
+ type=14,
+ cpp_type=8,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="progress_documents",
+ full_name="google.firestore.admin.v1.IndexOperationMetadata.progress_documents",
+ index=4,
+ number=5,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="progress_bytes",
+ full_name="google.firestore.admin.v1.IndexOperationMetadata.progress_bytes",
+ index=5,
+ number=6,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=199,
+ serialized_end=516,
+)
+
+
+_FIELDOPERATIONMETADATA_INDEXCONFIGDELTA = _descriptor.Descriptor(
+ name="IndexConfigDelta",
+ full_name="google.firestore.admin.v1.FieldOperationMetadata.IndexConfigDelta",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="change_type",
+ full_name="google.firestore.admin.v1.FieldOperationMetadata.IndexConfigDelta.change_type",
+ index=0,
+ number=1,
+ type=14,
+ cpp_type=8,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="index",
+ full_name="google.firestore.admin.v1.FieldOperationMetadata.IndexConfigDelta.index",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[_FIELDOPERATIONMETADATA_INDEXCONFIGDELTA_CHANGETYPE,],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=936,
+ serialized_end=1167,
+)
+
+_FIELDOPERATIONMETADATA = _descriptor.Descriptor(
+ name="FieldOperationMetadata",
+ full_name="google.firestore.admin.v1.FieldOperationMetadata",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="start_time",
+ full_name="google.firestore.admin.v1.FieldOperationMetadata.start_time",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="end_time",
+ full_name="google.firestore.admin.v1.FieldOperationMetadata.end_time",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="field",
+ full_name="google.firestore.admin.v1.FieldOperationMetadata.field",
+ index=2,
+ number=3,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="index_config_deltas",
+ full_name="google.firestore.admin.v1.FieldOperationMetadata.index_config_deltas",
+ index=3,
+ number=4,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="state",
+ full_name="google.firestore.admin.v1.FieldOperationMetadata.state",
+ index=4,
+ number=5,
+ type=14,
+ cpp_type=8,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="progress_documents",
+ full_name="google.firestore.admin.v1.FieldOperationMetadata.progress_documents",
+ index=5,
+ number=6,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="progress_bytes",
+ full_name="google.firestore.admin.v1.FieldOperationMetadata.progress_bytes",
+ index=6,
+ number=7,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[_FIELDOPERATIONMETADATA_INDEXCONFIGDELTA,],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=519,
+ serialized_end=1167,
+)
+
+
+_EXPORTDOCUMENTSMETADATA = _descriptor.Descriptor(
+ name="ExportDocumentsMetadata",
+ full_name="google.firestore.admin.v1.ExportDocumentsMetadata",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="start_time",
+ full_name="google.firestore.admin.v1.ExportDocumentsMetadata.start_time",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="end_time",
+ full_name="google.firestore.admin.v1.ExportDocumentsMetadata.end_time",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="operation_state",
+ full_name="google.firestore.admin.v1.ExportDocumentsMetadata.operation_state",
+ index=2,
+ number=3,
+ type=14,
+ cpp_type=8,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="progress_documents",
+ full_name="google.firestore.admin.v1.ExportDocumentsMetadata.progress_documents",
+ index=3,
+ number=4,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="progress_bytes",
+ full_name="google.firestore.admin.v1.ExportDocumentsMetadata.progress_bytes",
+ index=4,
+ number=5,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="collection_ids",
+ full_name="google.firestore.admin.v1.ExportDocumentsMetadata.collection_ids",
+ index=5,
+ number=6,
+ type=9,
+ cpp_type=9,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="output_uri_prefix",
+ full_name="google.firestore.admin.v1.ExportDocumentsMetadata.output_uri_prefix",
+ index=6,
+ number=7,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=1170,
+ serialized_end=1534,
+)
+
+
+_IMPORTDOCUMENTSMETADATA = _descriptor.Descriptor(
+ name="ImportDocumentsMetadata",
+ full_name="google.firestore.admin.v1.ImportDocumentsMetadata",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="start_time",
+ full_name="google.firestore.admin.v1.ImportDocumentsMetadata.start_time",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="end_time",
+ full_name="google.firestore.admin.v1.ImportDocumentsMetadata.end_time",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="operation_state",
+ full_name="google.firestore.admin.v1.ImportDocumentsMetadata.operation_state",
+ index=2,
+ number=3,
+ type=14,
+ cpp_type=8,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="progress_documents",
+ full_name="google.firestore.admin.v1.ImportDocumentsMetadata.progress_documents",
+ index=3,
+ number=4,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="progress_bytes",
+ full_name="google.firestore.admin.v1.ImportDocumentsMetadata.progress_bytes",
+ index=4,
+ number=5,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="collection_ids",
+ full_name="google.firestore.admin.v1.ImportDocumentsMetadata.collection_ids",
+ index=5,
+ number=6,
+ type=9,
+ cpp_type=9,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="input_uri_prefix",
+ full_name="google.firestore.admin.v1.ImportDocumentsMetadata.input_uri_prefix",
+ index=6,
+ number=7,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=1537,
+ serialized_end=1900,
+)
+
+
+_EXPORTDOCUMENTSRESPONSE = _descriptor.Descriptor(
+ name="ExportDocumentsResponse",
+ full_name="google.firestore.admin.v1.ExportDocumentsResponse",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="output_uri_prefix",
+ full_name="google.firestore.admin.v1.ExportDocumentsResponse.output_uri_prefix",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=1902,
+ serialized_end=1954,
+)
+
+
+_PROGRESS = _descriptor.Descriptor(
+ name="Progress",
+ full_name="google.firestore.admin.v1.Progress",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="estimated_work",
+ full_name="google.firestore.admin.v1.Progress.estimated_work",
+ index=0,
+ number=1,
+ type=3,
+ cpp_type=2,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="completed_work",
+ full_name="google.firestore.admin.v1.Progress.completed_work",
+ index=1,
+ number=2,
+ type=3,
+ cpp_type=2,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=1956,
+ serialized_end=2014,
+)
+
+_INDEXOPERATIONMETADATA.fields_by_name[
+ "start_time"
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_INDEXOPERATIONMETADATA.fields_by_name[
+ "end_time"
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_INDEXOPERATIONMETADATA.fields_by_name["state"].enum_type = _OPERATIONSTATE
+_INDEXOPERATIONMETADATA.fields_by_name["progress_documents"].message_type = _PROGRESS
+_INDEXOPERATIONMETADATA.fields_by_name["progress_bytes"].message_type = _PROGRESS
+_FIELDOPERATIONMETADATA_INDEXCONFIGDELTA.fields_by_name[
+ "change_type"
+].enum_type = _FIELDOPERATIONMETADATA_INDEXCONFIGDELTA_CHANGETYPE
+_FIELDOPERATIONMETADATA_INDEXCONFIGDELTA.fields_by_name[
+ "index"
+].message_type = (
+ google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_index__pb2._INDEX
+)
+_FIELDOPERATIONMETADATA_INDEXCONFIGDELTA.containing_type = _FIELDOPERATIONMETADATA
+_FIELDOPERATIONMETADATA_INDEXCONFIGDELTA_CHANGETYPE.containing_type = (
+ _FIELDOPERATIONMETADATA_INDEXCONFIGDELTA
+)
+_FIELDOPERATIONMETADATA.fields_by_name[
+ "start_time"
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_FIELDOPERATIONMETADATA.fields_by_name[
+ "end_time"
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_FIELDOPERATIONMETADATA.fields_by_name[
+ "index_config_deltas"
+].message_type = _FIELDOPERATIONMETADATA_INDEXCONFIGDELTA
+_FIELDOPERATIONMETADATA.fields_by_name["state"].enum_type = _OPERATIONSTATE
+_FIELDOPERATIONMETADATA.fields_by_name["progress_documents"].message_type = _PROGRESS
+_FIELDOPERATIONMETADATA.fields_by_name["progress_bytes"].message_type = _PROGRESS
+_EXPORTDOCUMENTSMETADATA.fields_by_name[
+ "start_time"
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_EXPORTDOCUMENTSMETADATA.fields_by_name[
+ "end_time"
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_EXPORTDOCUMENTSMETADATA.fields_by_name["operation_state"].enum_type = _OPERATIONSTATE
+_EXPORTDOCUMENTSMETADATA.fields_by_name["progress_documents"].message_type = _PROGRESS
+_EXPORTDOCUMENTSMETADATA.fields_by_name["progress_bytes"].message_type = _PROGRESS
+_IMPORTDOCUMENTSMETADATA.fields_by_name[
+ "start_time"
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_IMPORTDOCUMENTSMETADATA.fields_by_name[
+ "end_time"
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_IMPORTDOCUMENTSMETADATA.fields_by_name["operation_state"].enum_type = _OPERATIONSTATE
+_IMPORTDOCUMENTSMETADATA.fields_by_name["progress_documents"].message_type = _PROGRESS
+_IMPORTDOCUMENTSMETADATA.fields_by_name["progress_bytes"].message_type = _PROGRESS
+DESCRIPTOR.message_types_by_name["IndexOperationMetadata"] = _INDEXOPERATIONMETADATA
+DESCRIPTOR.message_types_by_name["FieldOperationMetadata"] = _FIELDOPERATIONMETADATA
+DESCRIPTOR.message_types_by_name["ExportDocumentsMetadata"] = _EXPORTDOCUMENTSMETADATA
+DESCRIPTOR.message_types_by_name["ImportDocumentsMetadata"] = _IMPORTDOCUMENTSMETADATA
+DESCRIPTOR.message_types_by_name["ExportDocumentsResponse"] = _EXPORTDOCUMENTSRESPONSE
+DESCRIPTOR.message_types_by_name["Progress"] = _PROGRESS
+DESCRIPTOR.enum_types_by_name["OperationState"] = _OPERATIONSTATE
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+IndexOperationMetadata = _reflection.GeneratedProtocolMessageType(
+ "IndexOperationMetadata",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _INDEXOPERATIONMETADATA,
+ "__module__": "google.cloud.firestore_admin_v1.proto.operation_pb2",
+ "__doc__": """Metadata for
+ [google.longrunning.Operation][google.longrunning.Operation] results
+    from [FirestoreAdmin.CreateIndex][google.firestore.admin.v1.FirestoreAdmin.CreateIndex].
+
+ Attributes:
+ start_time:
+ The time this operation started.
+ end_time:
+        The time this operation completed. Will be unset if the
+        operation is still in progress.
+ index:
+ The index resource that this operation is acting on. For
+        example: ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}``
+ state:
+ The state of the operation.
+ progress_documents:
+ The progress, in documents, of this operation.
+ progress_bytes:
+ The progress, in bytes, of this operation.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.IndexOperationMetadata)
+ },
+)
+_sym_db.RegisterMessage(IndexOperationMetadata)
+
+FieldOperationMetadata = _reflection.GeneratedProtocolMessageType(
+ "FieldOperationMetadata",
+ (_message.Message,),
+ {
+ "IndexConfigDelta": _reflection.GeneratedProtocolMessageType(
+ "IndexConfigDelta",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _FIELDOPERATIONMETADATA_INDEXCONFIGDELTA,
+ "__module__": "google.cloud.firestore_admin_v1.proto.operation_pb2",
+ "__doc__": """Information about an index configuration change.
+
+ Attributes:
+ change_type:
+ Specifies how the index is changing.
+ index:
+ The index being changed.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.FieldOperationMetadata.IndexConfigDelta)
+ },
+ ),
+ "DESCRIPTOR": _FIELDOPERATIONMETADATA,
+ "__module__": "google.cloud.firestore_admin_v1.proto.operation_pb2",
+ "__doc__": """Metadata for
+ [google.longrunning.Operation][google.longrunning.Operation] results
+    from [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField].
+
+ Attributes:
+ start_time:
+ The time this operation started.
+ end_time:
+        The time this operation completed. Will be unset if the
+        operation is still in progress.
+ field:
+ The field resource that this operation is acting on. For
+        example: ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_path}``
+ index_config_deltas:
+        A list of [IndexConfigDelta][google.firestore.admin.v1.FieldOperationMetadata.IndexConfigDelta],
+        which describe the intent of this operation.
+ state:
+ The state of the operation.
+ progress_documents:
+ The progress, in documents, of this operation.
+ progress_bytes:
+ The progress, in bytes, of this operation.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.FieldOperationMetadata)
+ },
+)
+_sym_db.RegisterMessage(FieldOperationMetadata)
+_sym_db.RegisterMessage(FieldOperationMetadata.IndexConfigDelta)
+
+ExportDocumentsMetadata = _reflection.GeneratedProtocolMessageType(
+ "ExportDocumentsMetadata",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _EXPORTDOCUMENTSMETADATA,
+ "__module__": "google.cloud.firestore_admin_v1.proto.operation_pb2",
+ "__doc__": """Metadata for
+ [google.longrunning.Operation][google.longrunning.Operation] results
+    from [FirestoreAdmin.ExportDocuments][google.firestore.admin.v1.FirestoreAdmin.ExportDocuments].
+
+ Attributes:
+ start_time:
+ The time this operation started.
+ end_time:
+        The time this operation completed. Will be unset if the
+        operation is still in progress.
+ operation_state:
+ The state of the export operation.
+ progress_documents:
+ The progress, in documents, of this operation.
+ progress_bytes:
+ The progress, in bytes, of this operation.
+ collection_ids:
+ Which collection ids are being exported.
+ output_uri_prefix:
+ Where the entities are being exported to.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.ExportDocumentsMetadata)
+ },
+)
+_sym_db.RegisterMessage(ExportDocumentsMetadata)
+
+ImportDocumentsMetadata = _reflection.GeneratedProtocolMessageType(
+ "ImportDocumentsMetadata",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _IMPORTDOCUMENTSMETADATA,
+ "__module__": "google.cloud.firestore_admin_v1.proto.operation_pb2",
+ "__doc__": """Metadata for
+ [google.longrunning.Operation][google.longrunning.Operation] results
+    from [FirestoreAdmin.ImportDocuments][google.firestore.admin.v1.FirestoreAdmin.ImportDocuments].
+
+ Attributes:
+ start_time:
+ The time this operation started.
+ end_time:
+        The time this operation completed. Will be unset if the
+        operation is still in progress.
+ operation_state:
+ The state of the import operation.
+ progress_documents:
+ The progress, in documents, of this operation.
+ progress_bytes:
+ The progress, in bytes, of this operation.
+ collection_ids:
+ Which collection ids are being imported.
+ input_uri_prefix:
+ The location of the documents being imported.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.ImportDocumentsMetadata)
+ },
+)
+_sym_db.RegisterMessage(ImportDocumentsMetadata)
+
+ExportDocumentsResponse = _reflection.GeneratedProtocolMessageType(
+ "ExportDocumentsResponse",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _EXPORTDOCUMENTSRESPONSE,
+ "__module__": "google.cloud.firestore_admin_v1.proto.operation_pb2",
+ "__doc__": """Returned in the
+ [google.longrunning.Operation][google.longrunning.Operation] response
+ field.
+
+ Attributes:
+ output_uri_prefix:
+ Location of the output files. This can be used to begin an
+ import into Cloud Firestore (this project or another project)
+ after the operation completes successfully.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.ExportDocumentsResponse)
+ },
+)
+_sym_db.RegisterMessage(ExportDocumentsResponse)
+
+Progress = _reflection.GeneratedProtocolMessageType(
+ "Progress",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _PROGRESS,
+ "__module__": "google.cloud.firestore_admin_v1.proto.operation_pb2",
+ "__doc__": """Describes the progress of the operation. Unit of work is generic and
+ must be interpreted based on where
+ [Progress][google.firestore.admin.v1.Progress] is used.
+
+ Attributes:
+ estimated_work:
+ The amount of work estimated.
+ completed_work:
+ The amount of work completed.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.Progress)
+ },
+)
+_sym_db.RegisterMessage(Progress)
+
+
+DESCRIPTOR._options = None
+# @@protoc_insertion_point(module_scope)
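+
+# Example (illustrative, not generated output): the message classes
+# registered above are consumed via this module, e.g.
+#
+#   from google.cloud.firestore_admin_v1.proto import operation_pb2
+#   meta = operation_pb2.ExportDocumentsMetadata()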
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/operation_pb2_grpc.py b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/operation_pb2_grpc.py
new file mode 100644
index 000000000..8a9393943
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/proto/operation_pb2_grpc.py
@@ -0,0 +1,3 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
diff --git a/venv/Lib/site-packages/google/cloud/firestore_admin_v1/types.py b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/types.py
new file mode 100644
index 000000000..ca5f24164
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_admin_v1/types.py
@@ -0,0 +1,66 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from __future__ import absolute_import
+import sys
+
+from google.api_core.protobuf_helpers import get_messages
+
+from google.cloud.firestore_admin_v1.proto import field_pb2
+from google.cloud.firestore_admin_v1.proto import firestore_admin_pb2
+from google.cloud.firestore_admin_v1.proto import index_pb2
+from google.cloud.firestore_admin_v1.proto import location_pb2
+from google.cloud.firestore_admin_v1.proto import operation_pb2
+from google.longrunning import operations_pb2
+from google.protobuf import any_pb2
+from google.protobuf import empty_pb2
+from google.protobuf import field_mask_pb2
+from google.protobuf import timestamp_pb2
+from google.rpc import status_pb2
+
+
+_shared_modules = [
+ operations_pb2,
+ any_pb2,
+ empty_pb2,
+ field_mask_pb2,
+ timestamp_pb2,
+ status_pb2,
+]
+
+_local_modules = [
+ field_pb2,
+ firestore_admin_pb2,
+ index_pb2,
+ location_pb2,
+ operation_pb2,
+]
+
+names = []
+
+for module in _shared_modules: # pragma: NO COVER
+ for name, message in get_messages(module).items():
+ setattr(sys.modules[__name__], name, message)
+ names.append(name)
+for module in _local_modules:
+ for name, message in get_messages(module).items():
+ message.__module__ = "google.cloud.firestore_admin_v1.types"
+ setattr(sys.modules[__name__], name, message)
+ names.append(name)
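+
+# Illustrative effect of the loops above (a sketch, not upstream code):
+# every protobuf message class found by ``get_messages`` is re-exported
+# from this module, so callers can write, e.g.:
+#
+#   from google.cloud.firestore_admin_v1 import types
+#   index = types.Index()  # ``Index`` is provided by index_pb2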
+
+
+__all__ = tuple(sorted(names))
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/__init__.py b/venv/Lib/site-packages/google/cloud/firestore_v1/__init__.py
new file mode 100644
index 000000000..e4af45218
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1/__init__.py
@@ -0,0 +1,71 @@
+# Copyright 2019 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Python idiomatic client for Google Cloud Firestore."""
+
+from pkg_resources import get_distribution
+
+__version__ = get_distribution("google-cloud-firestore").version
+
+from google.cloud.firestore_v1 import types
+from google.cloud.firestore_v1._helpers import GeoPoint
+from google.cloud.firestore_v1._helpers import ExistsOption
+from google.cloud.firestore_v1._helpers import LastUpdateOption
+from google.cloud.firestore_v1._helpers import ReadAfterWriteError
+from google.cloud.firestore_v1._helpers import WriteOption
+from google.cloud.firestore_v1.batch import WriteBatch
+from google.cloud.firestore_v1.client import Client
+from google.cloud.firestore_v1.collection import CollectionReference
+from google.cloud.firestore_v1.transforms import ArrayRemove
+from google.cloud.firestore_v1.transforms import ArrayUnion
+from google.cloud.firestore_v1.transforms import DELETE_FIELD
+from google.cloud.firestore_v1.transforms import Increment
+from google.cloud.firestore_v1.transforms import Maximum
+from google.cloud.firestore_v1.transforms import Minimum
+from google.cloud.firestore_v1.transforms import SERVER_TIMESTAMP
+from google.cloud.firestore_v1.document import DocumentReference
+from google.cloud.firestore_v1.document import DocumentSnapshot
+from google.cloud.firestore_v1.gapic import enums
+from google.cloud.firestore_v1.query import Query
+from google.cloud.firestore_v1.transaction import Transaction
+from google.cloud.firestore_v1.transaction import transactional
+from google.cloud.firestore_v1.watch import Watch
+
+
+__all__ = [
+ "__version__",
+ "ArrayRemove",
+ "ArrayUnion",
+ "Client",
+ "CollectionReference",
+ "DELETE_FIELD",
+ "DocumentReference",
+ "DocumentSnapshot",
+ "enums",
+ "ExistsOption",
+ "GeoPoint",
+ "Increment",
+ "LastUpdateOption",
+ "Maximum",
+ "Minimum",
+ "Query",
+ "ReadAfterWriteError",
+ "SERVER_TIMESTAMP",
+ "Transaction",
+ "transactional",
+ "types",
+ "Watch",
+ "WriteBatch",
+ "WriteOption",
+]
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..afb4975cf
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/_helpers.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/_helpers.cpython-36.pyc
new file mode 100644
index 000000000..f9d556cf3
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/_helpers.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/batch.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/batch.cpython-36.pyc
new file mode 100644
index 000000000..ac3cdb811
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/batch.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/client.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/client.cpython-36.pyc
new file mode 100644
index 000000000..c06ff4fb3
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/client.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/collection.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/collection.cpython-36.pyc
new file mode 100644
index 000000000..25f9492db
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/collection.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/document.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/document.cpython-36.pyc
new file mode 100644
index 000000000..acba2558c
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/document.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/field_path.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/field_path.cpython-36.pyc
new file mode 100644
index 000000000..20ab63b06
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/field_path.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/order.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/order.cpython-36.pyc
new file mode 100644
index 000000000..9b45c64c1
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/order.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/query.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/query.cpython-36.pyc
new file mode 100644
index 000000000..762213431
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/query.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/transaction.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/transaction.cpython-36.pyc
new file mode 100644
index 000000000..ae5a191da
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/transaction.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/transforms.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/transforms.cpython-36.pyc
new file mode 100644
index 000000000..8d1ef5c9a
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/transforms.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/types.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/types.cpython-36.pyc
new file mode 100644
index 000000000..5b0a48d66
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/types.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/watch.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/watch.cpython-36.pyc
new file mode 100644
index 000000000..48ba68e0c
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1/__pycache__/watch.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/_helpers.py b/venv/Lib/site-packages/google/cloud/firestore_v1/_helpers.py
new file mode 100644
index 000000000..34e7c5bbf
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1/_helpers.py
@@ -0,0 +1,1049 @@
+# Copyright 2017 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Common helpers shared across Google Cloud Firestore modules."""
+
+import datetime
+
+from google.protobuf import struct_pb2
+from google.type import latlng_pb2
+import grpc
+import six
+
+from google.cloud import exceptions
+from google.cloud._helpers import _datetime_to_pb_timestamp
+from google.api_core.datetime_helpers import DatetimeWithNanoseconds
+from google.cloud.firestore_v1 import transforms
+from google.cloud.firestore_v1 import types
+from google.cloud.firestore_v1.field_path import FieldPath
+from google.cloud.firestore_v1.field_path import parse_field_path
+from google.cloud.firestore_v1.gapic import enums
+from google.cloud.firestore_v1.proto import common_pb2
+from google.cloud.firestore_v1.proto import document_pb2
+from google.cloud.firestore_v1.proto import write_pb2
+
+
+BAD_PATH_TEMPLATE = "A path element must be a string. Received {}, which is a {}."
+DOCUMENT_PATH_DELIMITER = "/"
+INACTIVE_TXN = "Transaction not in progress, cannot be used in API requests."
+READ_AFTER_WRITE_ERROR = "Attempted read after write in a transaction."
+BAD_REFERENCE_ERROR = (
+ "Reference value {!r} in unexpected format, expected to be of the form "
+ "``projects/{{project}}/databases/{{database}}/"
+ "documents/{{document_path}}``."
+)
+WRONG_APP_REFERENCE = (
+ "Document {!r} does not correspond to the same database " "({!r}) as the client."
+)
+REQUEST_TIME_ENUM = enums.DocumentTransform.FieldTransform.ServerValue.REQUEST_TIME
+_GRPC_ERROR_MAPPING = {
+ grpc.StatusCode.ALREADY_EXISTS: exceptions.Conflict,
+ grpc.StatusCode.NOT_FOUND: exceptions.NotFound,
+}
+
+
+class GeoPoint(object):
+ """Simple container for a geo point value.
+
+ Args:
+ latitude (float): Latitude of a point.
+ longitude (float): Longitude of a point.
+ """
+
+ def __init__(self, latitude, longitude):
+ self.latitude = latitude
+ self.longitude = longitude
+
+ def to_protobuf(self):
+ """Convert the current object to protobuf.
+
+ Returns:
+ google.type.latlng_pb2.LatLng: The current point as a protobuf.
+ """
+ return latlng_pb2.LatLng(latitude=self.latitude, longitude=self.longitude)
+
+ def __eq__(self, other):
+ """Compare two geo points for equality.
+
+ Returns:
+ Union[bool, NotImplemented]: :data:`True` if the points compare
+ equal, else :data:`False`. (Or :data:`NotImplemented` if
+ ``other`` is not a geo point.)
+ """
+ if not isinstance(other, GeoPoint):
+ return NotImplemented
+
+ return self.latitude == other.latitude and self.longitude == other.longitude
+
+ def __ne__(self, other):
+ """Compare two geo points for inequality.
+
+ Returns:
+ Union[bool, NotImplemented]: :data:`False` if the points compare
+ equal, else :data:`True`. (Or :data:`NotImplemented` if
+ ``other`` is not a geo point.)
+ """
+ equality_val = self.__eq__(other)
+ if equality_val is NotImplemented:
+ return NotImplemented
+ else:
+ return not equality_val
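+
+# Example (an illustrative sketch, not part of the upstream source):
+#
+#   point = GeoPoint(40.7128, -74.0060)
+#   latlng_pb = point.to_protobuf()       # -> google.type.latlng_pb2.LatLng
+#   point == GeoPoint(40.7128, -74.0060)  # True; equality is field-wise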
+
+
+def verify_path(path, is_collection):
+ """Verifies that a ``path`` has the correct form.
+
+ Checks that all of the elements in ``path`` are strings.
+
+ Args:
+ path (Tuple[str, ...]): The components in a collection or
+ document path.
+ is_collection (bool): Indicates if the ``path`` represents
+ a document or a collection.
+
+ Raises:
+ ValueError: if
+
+ * the ``path`` is empty
+ * ``is_collection=True`` and there are an even number of elements
+ * ``is_collection=False`` and there are an odd number of elements
+ * an element is not a string
+ """
+ num_elements = len(path)
+ if num_elements == 0:
+ raise ValueError("Document or collection path cannot be empty")
+
+ if is_collection:
+ if num_elements % 2 == 0:
+ raise ValueError("A collection must have an odd number of path elements")
+ else:
+ if num_elements % 2 == 1:
+ raise ValueError("A document must have an even number of path elements")
+
+ for element in path:
+ if not isinstance(element, six.string_types):
+ msg = BAD_PATH_TEMPLATE.format(element, type(element))
+ raise ValueError(msg)
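+
+# Example (illustrative): collection paths have an odd number of
+# elements, document paths an even number:
+#
+#   verify_path(("users",), is_collection=True)           # OK
+#   verify_path(("users", "alice"), is_collection=False)  # OK
+#   verify_path(("users", "alice"), is_collection=True)   # raises ValueError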
+
+
+def encode_value(value):
+ """Converts a native Python value into a Firestore protobuf ``Value``.
+
+ Args:
+ value (Union[NoneType, bool, int, float, datetime.datetime, \
+ str, bytes, dict, ~google.cloud.Firestore.GeoPoint]): A native
+ Python value to convert to a protobuf field.
+
+ Returns:
+ ~google.cloud.firestore_v1.types.Value: A
+ value encoded as a Firestore protobuf.
+
+ Raises:
+ TypeError: If the ``value`` is not one of the accepted types.
+ """
+ if value is None:
+ return document_pb2.Value(null_value=struct_pb2.NULL_VALUE)
+
+ # Must come before six.integer_types since ``bool`` is an integer subtype.
+ if isinstance(value, bool):
+ return document_pb2.Value(boolean_value=value)
+
+ if isinstance(value, six.integer_types):
+ return document_pb2.Value(integer_value=value)
+
+ if isinstance(value, float):
+ return document_pb2.Value(double_value=value)
+
+ if isinstance(value, DatetimeWithNanoseconds):
+ return document_pb2.Value(timestamp_value=value.timestamp_pb())
+
+ if isinstance(value, datetime.datetime):
+ return document_pb2.Value(timestamp_value=_datetime_to_pb_timestamp(value))
+
+ if isinstance(value, six.text_type):
+ return document_pb2.Value(string_value=value)
+
+ if isinstance(value, six.binary_type):
+ return document_pb2.Value(bytes_value=value)
+
+ # NOTE: We avoid doing an isinstance() check for a Document
+ # here to avoid import cycles.
+ document_path = getattr(value, "_document_path", None)
+ if document_path is not None:
+ return document_pb2.Value(reference_value=document_path)
+
+ if isinstance(value, GeoPoint):
+ return document_pb2.Value(geo_point_value=value.to_protobuf())
+
+ if isinstance(value, (list, tuple, set, frozenset)):
+ value_list = tuple(encode_value(element) for element in value)
+ value_pb = document_pb2.ArrayValue(values=value_list)
+ return document_pb2.Value(array_value=value_pb)
+
+ if isinstance(value, dict):
+ value_dict = encode_dict(value)
+ value_pb = document_pb2.MapValue(fields=value_dict)
+ return document_pb2.Value(map_value=value_pb)
+
+ raise TypeError(
+ "Cannot convert to a Firestore Value", value, "Invalid type", type(value)
+ )
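+
+# Example (illustrative): each supported Python type maps onto one
+# oneof branch of the protobuf ``Value``:
+#
+#   encode_value(True)      # Value(boolean_value=True); bool checked before int
+#   encode_value(3)         # Value(integer_value=3)
+#   encode_value({"a": 1})  # Value(map_value=MapValue(fields={"a": ...}))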
+
+
+def encode_dict(values_dict):
+ """Encode a dictionary into protobuf ``Value``-s.
+
+ Args:
+ values_dict (dict): The dictionary to encode as protobuf fields.
+
+ Returns:
+ Dict[str, ~google.cloud.firestore_v1.types.Value]: A
+ dictionary of string keys and ``Value`` protobufs as dictionary
+ values.
+ """
+ return {key: encode_value(value) for key, value in six.iteritems(values_dict)}
+
+
+def reference_value_to_document(reference_value, client):
+ """Convert a reference value string to a document.
+
+ Args:
+ reference_value (str): A document reference value.
+ client (:class:`~google.cloud.firestore_v1.client.Client`):
+ A client that has a document factory.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.document.DocumentReference`:
+ The document corresponding to ``reference_value``.
+
+ Raises:
+ ValueError: If the ``reference_value`` is not of the expected
+ format: ``projects/{project}/databases/{database}/documents/...``.
+ ValueError: If the ``reference_value`` does not come from the same
+ project / database combination as the ``client``.
+ """
+ # The first 5 parts are
+ # projects, {project}, databases, {database}, documents
+ parts = reference_value.split(DOCUMENT_PATH_DELIMITER, 5)
+ if len(parts) != 6:
+ msg = BAD_REFERENCE_ERROR.format(reference_value)
+ raise ValueError(msg)
+
+ # The sixth part is `a/b/c/d` (i.e. the document path)
+ document = client.document(parts[-1])
+ if document._document_path != reference_value:
+ msg = WRONG_APP_REFERENCE.format(reference_value, client._database_string)
+ raise ValueError(msg)
+
+ return document
+
+
+def decode_value(value, client):
+ """Converts a Firestore protobuf ``Value`` to a native Python value.
+
+ Args:
+ value (google.cloud.firestore_v1.types.Value): A
+ Firestore protobuf to be decoded / parsed / converted.
+ client (:class:`~google.cloud.firestore_v1.client.Client`):
+ A client that has a document factory.
+
+ Returns:
+ Union[NoneType, bool, int, float, datetime.datetime, \
+ str, bytes, dict, ~google.cloud.Firestore.GeoPoint]: A native
+ Python value converted from the ``value``.
+
+ Raises:
+ NotImplementedError: If the ``value_type`` is ``reference_value``.
+ ValueError: If the ``value_type`` is unknown.
+ """
+ value_type = value.WhichOneof("value_type")
+
+ if value_type == "null_value":
+ return None
+ elif value_type == "boolean_value":
+ return value.boolean_value
+ elif value_type == "integer_value":
+ return value.integer_value
+ elif value_type == "double_value":
+ return value.double_value
+ elif value_type == "timestamp_value":
+ return DatetimeWithNanoseconds.from_timestamp_pb(value.timestamp_value)
+ elif value_type == "string_value":
+ return value.string_value
+ elif value_type == "bytes_value":
+ return value.bytes_value
+ elif value_type == "reference_value":
+ return reference_value_to_document(value.reference_value, client)
+ elif value_type == "geo_point_value":
+ return GeoPoint(value.geo_point_value.latitude, value.geo_point_value.longitude)
+ elif value_type == "array_value":
+ return [decode_value(element, client) for element in value.array_value.values]
+ elif value_type == "map_value":
+ return decode_dict(value.map_value.fields, client)
+ else:
+ raise ValueError("Unknown ``value_type``", value_type)
+
+
+def decode_dict(value_fields, client):
+ """Converts a protobuf map of Firestore ``Value``-s.
+
+ Args:
+ value_fields (google.protobuf.pyext._message.MessageMapContainer): A
+ protobuf map of Firestore ``Value``-s.
+ client (:class:`~google.cloud.firestore_v1.client.Client`):
+ A client that has a document factory.
+
+ Returns:
+ Dict[str, Union[NoneType, bool, int, float, datetime.datetime, \
+ str, bytes, dict, ~google.cloud.Firestore.GeoPoint]]: A dictionary
+ of native Python values converted from the ``value_fields``.
+ """
+ return {
+ key: decode_value(value, client) for key, value in six.iteritems(value_fields)
+ }
+
+
+def get_doc_id(document_pb, expected_prefix):
+ """Parse a document ID from a document protobuf.
+
+ Args:
+ document_pb (google.cloud.proto.firestore.v1.\
+ document_pb2.Document): A protobuf for a document that
+ was created in a ``CreateDocument`` RPC.
+ expected_prefix (str): The expected collection prefix for the
+ fully-qualified document name.
+
+ Returns:
+ str: The document ID from the protobuf.
+
+ Raises:
+ ValueError: If the name does not begin with the prefix.
+ """
+ prefix, document_id = document_pb.name.rsplit(DOCUMENT_PATH_DELIMITER, 1)
+ if prefix != expected_prefix:
+ raise ValueError(
+ "Unexpected document name",
+ document_pb.name,
+ "Expected to begin with",
+ expected_prefix,
+ )
+
+ return document_id
+
+
+_EmptyDict = transforms.Sentinel("Marker for an empty dict value")
+
+
+def extract_fields(document_data, prefix_path, expand_dots=False):
+ """Do depth-first walk of tree, yielding field_path, value"""
+ if not document_data:
+ yield prefix_path, _EmptyDict
+ else:
+ for key, value in sorted(six.iteritems(document_data)):
+
+ if expand_dots:
+ sub_key = FieldPath.from_string(key)
+ else:
+ sub_key = FieldPath(key)
+
+ field_path = FieldPath(*(prefix_path.parts + sub_key.parts))
+
+ if isinstance(value, dict):
+ for s_path, s_value in extract_fields(value, field_path):
+ yield s_path, s_value
+ else:
+ yield field_path, value
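+
+# Example (illustrative): the walk yields leaf values keyed by FieldPath:
+#
+#   dict(extract_fields({"a": {"b": 1}}, FieldPath()))
+#   # -> {FieldPath("a", "b"): 1}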
+
+
+def set_field_value(document_data, field_path, value):
+ """Set a value into a document for a field_path"""
+ current = document_data
+ for element in field_path.parts[:-1]:
+ current = current.setdefault(element, {})
+ if value is _EmptyDict:
+ value = {}
+ current[field_path.parts[-1]] = value
+
+
+def get_field_value(document_data, field_path):
+ if not field_path.parts:
+ raise ValueError("Empty path")
+
+ current = document_data
+ for element in field_path.parts[:-1]:
+ current = current[element]
+ return current[field_path.parts[-1]]
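+
+# Example (illustrative): the two helpers round-trip a nested value:
+#
+#   data = {}
+#   set_field_value(data, FieldPath("a", "b"), 1)  # data == {"a": {"b": 1}}
+#   get_field_value(data, FieldPath("a", "b"))     # -> 1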
+
+
+class DocumentExtractor(object):
+ """ Break document data up into actual data and transforms.
+
+ Handle special values such as ``DELETE_FIELD``, ``SERVER_TIMESTAMP``.
+
+ Args:
+ document_data (dict):
+ Property names and values to use for sending a change to
+ a document.
+ """
+
+ def __init__(self, document_data):
+ self.document_data = document_data
+ self.field_paths = []
+ self.deleted_fields = []
+ self.server_timestamps = []
+ self.array_removes = {}
+ self.array_unions = {}
+ self.increments = {}
+ self.minimums = {}
+ self.maximums = {}
+ self.set_fields = {}
+ self.empty_document = False
+
+ prefix_path = FieldPath()
+ iterator = self._get_document_iterator(prefix_path)
+
+ for field_path, value in iterator:
+
+ if field_path == prefix_path and value is _EmptyDict:
+ self.empty_document = True
+
+ elif value is transforms.DELETE_FIELD:
+ self.deleted_fields.append(field_path)
+
+ elif value is transforms.SERVER_TIMESTAMP:
+ self.server_timestamps.append(field_path)
+
+ elif isinstance(value, transforms.ArrayRemove):
+ self.array_removes[field_path] = value.values
+
+ elif isinstance(value, transforms.ArrayUnion):
+ self.array_unions[field_path] = value.values
+
+ elif isinstance(value, transforms.Increment):
+ self.increments[field_path] = value.value
+
+ elif isinstance(value, transforms.Maximum):
+ self.maximums[field_path] = value.value
+
+ elif isinstance(value, transforms.Minimum):
+ self.minimums[field_path] = value.value
+
+ else:
+ self.field_paths.append(field_path)
+ set_field_value(self.set_fields, field_path, value)
+
+ def _get_document_iterator(self, prefix_path):
+ return extract_fields(self.document_data, prefix_path)
+
+ @property
+ def has_transforms(self):
+ return bool(
+ self.server_timestamps
+ or self.array_removes
+ or self.array_unions
+ or self.increments
+ or self.maximums
+ or self.minimums
+ )
+
+ @property
+ def transform_paths(self):
+ return sorted(
+ self.server_timestamps
+ + list(self.array_removes)
+ + list(self.array_unions)
+ + list(self.increments)
+ + list(self.maximums)
+ + list(self.minimums)
+ )
+
+ def _get_update_mask(self, allow_empty_mask=False):
+ return None
+
+ def get_update_pb(self, document_path, exists=None, allow_empty_mask=False):
+
+ if exists is not None:
+ current_document = common_pb2.Precondition(exists=exists)
+ else:
+ current_document = None
+
+ update_pb = write_pb2.Write(
+ update=document_pb2.Document(
+ name=document_path, fields=encode_dict(self.set_fields)
+ ),
+ update_mask=self._get_update_mask(allow_empty_mask),
+ current_document=current_document,
+ )
+
+ return update_pb
+
+ def get_transform_pb(self, document_path, exists=None):
+ def make_array_value(values):
+ value_list = [encode_value(element) for element in values]
+ return document_pb2.ArrayValue(values=value_list)
+
+ path_field_transforms = (
+ [
+ (
+ path,
+ write_pb2.DocumentTransform.FieldTransform(
+ field_path=path.to_api_repr(),
+ set_to_server_value=REQUEST_TIME_ENUM,
+ ),
+ )
+ for path in self.server_timestamps
+ ]
+ + [
+ (
+ path,
+ write_pb2.DocumentTransform.FieldTransform(
+ field_path=path.to_api_repr(),
+ remove_all_from_array=make_array_value(values),
+ ),
+ )
+ for path, values in self.array_removes.items()
+ ]
+ + [
+ (
+ path,
+ write_pb2.DocumentTransform.FieldTransform(
+ field_path=path.to_api_repr(),
+ append_missing_elements=make_array_value(values),
+ ),
+ )
+ for path, values in self.array_unions.items()
+ ]
+ + [
+ (
+ path,
+ write_pb2.DocumentTransform.FieldTransform(
+ field_path=path.to_api_repr(), increment=encode_value(value)
+ ),
+ )
+ for path, value in self.increments.items()
+ ]
+ + [
+ (
+ path,
+ write_pb2.DocumentTransform.FieldTransform(
+ field_path=path.to_api_repr(), maximum=encode_value(value)
+ ),
+ )
+ for path, value in self.maximums.items()
+ ]
+ + [
+ (
+ path,
+ write_pb2.DocumentTransform.FieldTransform(
+ field_path=path.to_api_repr(), minimum=encode_value(value)
+ ),
+ )
+ for path, value in self.minimums.items()
+ ]
+ )
+ field_transforms = [
+ transform for path, transform in sorted(path_field_transforms)
+ ]
+ transform_pb = write_pb2.Write(
+ transform=write_pb2.DocumentTransform(
+ document=document_path, field_transforms=field_transforms
+ )
+ )
+ if exists is not None:
+ transform_pb.current_document.CopyFrom(
+ common_pb2.Precondition(exists=exists)
+ )
+
+ return transform_pb
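+
+# Example (illustrative): the extractor separates plain data from
+# transform sentinels:
+#
+#   ex = DocumentExtractor({"a": 1, "ts": transforms.SERVER_TIMESTAMP})
+#   ex.set_fields          # {"a": 1}
+#   ex.server_timestamps   # [FieldPath("ts")]
+#   ex.has_transforms      # True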
+
+
+def pbs_for_create(document_path, document_data):
+ """Make ``Write`` protobufs for ``create()`` methods.
+
+ Args:
+ document_path (str): A fully-qualified document path.
+ document_data (dict): Property names and values to use for
+ creating a document.
+
+ Returns:
+ List[google.cloud.firestore_v1.types.Write]: One or two
+ ``Write`` protobuf instances for ``create()``.
+ """
+ extractor = DocumentExtractor(document_data)
+
+ if extractor.deleted_fields:
+ raise ValueError("Cannot apply DELETE_FIELD in a create request.")
+
+ write_pbs = []
+
+ # Conformance tests require skipping the 'update_pb' if the document
+ # contains only transforms.
+ if extractor.empty_document or extractor.set_fields:
+ write_pbs.append(extractor.get_update_pb(document_path, exists=False))
+
+ if extractor.has_transforms:
+ exists = None if write_pbs else False
+ transform_pb = extractor.get_transform_pb(document_path, exists)
+ write_pbs.append(transform_pb)
+
+ return write_pbs
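+
+# Example (illustrative, assuming ``path`` is a fully-qualified document
+# path): plain data yields a single update ``Write``; a server-timestamp
+# field appends a second, transform-only ``Write``:
+#
+#   pbs_for_create(path, {"a": 1})                                  # one pb
+#   pbs_for_create(path, {"a": 1, "ts": transforms.SERVER_TIMESTAMP})  # two pbs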
+
+
+def pbs_for_set_no_merge(document_path, document_data):
+ """Make ``Write`` protobufs for ``set()`` methods.
+
+ Args:
+ document_path (str): A fully-qualified document path.
+ document_data (dict): Property names and values to use for
+ replacing a document.
+
+ Returns:
+ List[google.cloud.firestore_v1.types.Write]: One
+ or two ``Write`` protobuf instances for ``set()``.
+ """
+ extractor = DocumentExtractor(document_data)
+
+ if extractor.deleted_fields:
+ raise ValueError(
+ "Cannot apply DELETE_FIELD in a set request without "
+ "specifying 'merge=True' or 'merge=[field_paths]'."
+ )
+
+    # Conformance tests require sending the 'update_pb' even if the document
+ # contains only transforms.
+ write_pbs = [extractor.get_update_pb(document_path)]
+
+ if extractor.has_transforms:
+ transform_pb = extractor.get_transform_pb(document_path)
+ write_pbs.append(transform_pb)
+
+ return write_pbs
+
+
+class DocumentExtractorForMerge(DocumentExtractor):
+ """ Break document data up into actual data and transforms.
+ """
+
+ def __init__(self, document_data):
+ super(DocumentExtractorForMerge, self).__init__(document_data)
+ self.data_merge = []
+ self.transform_merge = []
+ self.merge = []
+
+ @property
+ def has_updates(self):
+        # The conformance tests expect the parent of nested transform
+        # paths to appear in the update mask
+        # (see set-st-merge-nonleaf-alone.textproto).
+ update_paths = set(self.data_merge)
+
+ for transform_path in self.transform_paths:
+ if len(transform_path.parts) > 1:
+ parent_fp = FieldPath(*transform_path.parts[:-1])
+ update_paths.add(parent_fp)
+
+ return bool(update_paths)
+
+ def _apply_merge_all(self):
+ self.data_merge = sorted(self.field_paths + self.deleted_fields)
+ # TODO: other transforms
+ self.transform_merge = self.transform_paths
+ self.merge = sorted(self.data_merge + self.transform_paths)
+
+ def _construct_merge_paths(self, merge):
+ for merge_field in merge:
+ if isinstance(merge_field, FieldPath):
+ yield merge_field
+ else:
+ yield FieldPath(*parse_field_path(merge_field))
+
+ def _normalize_merge_paths(self, merge):
+ merge_paths = sorted(self._construct_merge_paths(merge))
+
+ # Raise if any merge path is a parent of another. Leverage sorting
+ # to avoid quadratic behavior.
+ for index in range(len(merge_paths) - 1):
+ lhs, rhs = merge_paths[index], merge_paths[index + 1]
+ if lhs.eq_or_parent(rhs):
+ raise ValueError("Merge paths overlap: {}, {}".format(lhs, rhs))
+
+ for merge_path in merge_paths:
+ if merge_path in self.deleted_fields:
+ continue
+ try:
+ get_field_value(self.document_data, merge_path)
+ except KeyError:
+ raise ValueError("Invalid merge path: {}".format(merge_path))
+
+ return merge_paths
+
+ def _apply_merge_paths(self, merge):
+
+ if self.empty_document:
+ raise ValueError("Cannot merge specific fields with empty document.")
+
+ merge_paths = self._normalize_merge_paths(merge)
+
+ del self.data_merge[:]
+ del self.transform_merge[:]
+ self.merge = merge_paths
+
+ for merge_path in merge_paths:
+
+ if merge_path in self.transform_paths:
+ self.transform_merge.append(merge_path)
+
+ for field_path in self.field_paths:
+ if merge_path.eq_or_parent(field_path):
+ self.data_merge.append(field_path)
+
+ # Clear out data for fields not merged.
+ merged_set_fields = {}
+ for field_path in self.data_merge:
+ value = get_field_value(self.document_data, field_path)
+ set_field_value(merged_set_fields, field_path, value)
+ self.set_fields = merged_set_fields
+
+ unmerged_deleted_fields = [
+ field_path
+ for field_path in self.deleted_fields
+ if field_path not in self.merge
+ ]
+ if unmerged_deleted_fields:
+ raise ValueError(
+ "Cannot delete unmerged fields: {}".format(unmerged_deleted_fields)
+ )
+ self.data_merge = sorted(self.data_merge + self.deleted_fields)
+
+ # Keep only transforms which are within merge.
+ merged_transform_paths = set()
+ for merge_path in self.merge:
+            transform_merge_paths = [
+                transform_path
+                for transform_path in self.transform_paths
+                if merge_path.eq_or_parent(transform_path)
+            ]
+            merged_transform_paths.update(transform_merge_paths)
+
+ self.server_timestamps = [
+ path for path in self.server_timestamps if path in merged_transform_paths
+ ]
+
+ self.array_removes = {
+ path: values
+ for path, values in self.array_removes.items()
+ if path in merged_transform_paths
+ }
+
+ self.array_unions = {
+ path: values
+ for path, values in self.array_unions.items()
+ if path in merged_transform_paths
+ }
+
+ def apply_merge(self, merge):
+ if merge is True: # merge all fields
+ self._apply_merge_all()
+ else:
+ self._apply_merge_paths(merge)
+
+ def _get_update_mask(self, allow_empty_mask=False):
+ # Mask uses dotted / quoted paths.
+ mask_paths = [
+ field_path.to_api_repr()
+ for field_path in self.merge
+ if field_path not in self.transform_merge
+ ]
+
+ if mask_paths or allow_empty_mask:
+ return common_pb2.DocumentMask(field_paths=mask_paths)
+
+
+def pbs_for_set_with_merge(document_path, document_data, merge):
+ """Make ``Write`` protobufs for ``set()`` methods.
+
+ Args:
+ document_path (str): A fully-qualified document path.
+ document_data (dict): Property names and values to use for
+ replacing a document.
+ merge (Optional[bool] or Optional[List]):
+ If True, merge all fields; else, merge only the named fields.
+
+ Returns:
+ List[google.cloud.firestore_v1.types.Write]: One
+ or two ``Write`` protobuf instances for ``set()``.
+ """
+ extractor = DocumentExtractorForMerge(document_data)
+ extractor.apply_merge(merge)
+
+ merge_empty = not document_data
+
+ write_pbs = []
+
+ if extractor.has_updates or merge_empty:
+ write_pbs.append(
+ extractor.get_update_pb(document_path, allow_empty_mask=merge_empty)
+ )
+
+ if extractor.transform_paths:
+ transform_pb = extractor.get_transform_pb(document_path)
+ write_pbs.append(transform_pb)
+
+ return write_pbs
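+
+# Example (illustrative): with ``merge=["a"]`` only field ``a`` is
+# written and named in the update mask; ``b`` stays untouched on the
+# server:
+#
+#   pbs_for_set_with_merge(path, {"a": 1, "b": 2}, merge=["a"])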
+
+
+class DocumentExtractorForUpdate(DocumentExtractor):
+ """ Break document data up into actual data and transforms.
+ """
+
+ def __init__(self, document_data):
+ super(DocumentExtractorForUpdate, self).__init__(document_data)
+ self.top_level_paths = sorted(
+ [FieldPath.from_string(key) for key in document_data]
+ )
+ tops = set(self.top_level_paths)
+ for top_level_path in self.top_level_paths:
+ for ancestor in top_level_path.lineage():
+ if ancestor in tops:
+ raise ValueError(
+ "Conflicting field path: {}, {}".format(
+ top_level_path, ancestor
+ )
+ )
+
+ for field_path in self.deleted_fields:
+ if field_path not in tops:
+ raise ValueError(
+ "Cannot update with nest delete: {}".format(field_path)
+ )
+
+ def _get_document_iterator(self, prefix_path):
+ return extract_fields(self.document_data, prefix_path, expand_dots=True)
+
+ def _get_update_mask(self, allow_empty_mask=False):
+ mask_paths = []
+ for field_path in self.top_level_paths:
+ if field_path not in self.transform_paths:
+ mask_paths.append(field_path.to_api_repr())
+
+ return common_pb2.DocumentMask(field_paths=mask_paths)
+
+
+def pbs_for_update(document_path, field_updates, option):
+ """Make ``Write`` protobufs for ``update()`` methods.
+
+ Args:
+ document_path (str): A fully-qualified document path.
+ field_updates (dict): Field names or paths to update and values
+ to update with.
+ option (Optional[:class:`~google.cloud.firestore_v1.client.WriteOption`]):
+ A write option to make assertions / preconditions on the server
+ state of the document before applying changes.
+
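+ For example, a sketch using a dotted field path to update a nested
+ field (the names are illustrative):
+
+ .. code-block:: python
+
+ >>> write_pbs = pbs_for_update(document_path, {'stats.points': 25}, None)
+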
+ Returns:
+ List[google.cloud.firestore_v1.types.Write]: One
+ or two ``Write`` protobuf instances for ``update()``.
+ """
+ extractor = DocumentExtractorForUpdate(field_updates)
+
+ if extractor.empty_document:
+ raise ValueError("Cannot update with an empty document.")
+
+ if option is None: # Default is to use ``exists=True``.
+ option = ExistsOption(exists=True)
+
+ write_pbs = []
+
+ if extractor.field_paths or extractor.deleted_fields:
+ update_pb = extractor.get_update_pb(document_path)
+ option.modify_write(update_pb)
+ write_pbs.append(update_pb)
+
+ if extractor.has_transforms:
+ transform_pb = extractor.get_transform_pb(document_path)
+ if not write_pbs:
+ # NOTE: set the write option on the ``transform_pb`` only if there
+ # is no ``update_pb``
+ option.modify_write(transform_pb)
+ write_pbs.append(transform_pb)
+
+ return write_pbs
+
+
+def pb_for_delete(document_path, option):
+ """Make a ``Write`` protobuf for ``delete()`` methods.
+
+ Args:
+ document_path (str): A fully-qualified document path.
+ option (Optional[:class:`~google.cloud.firestore_v1.client.WriteOption`]):
+ A write option to make assertions / preconditions on the server
+ state of the document before applying changes.
+
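+ For example, a sketch requiring that the document already exists
+ (the path is illustrative):
+
+ .. code-block:: python
+
+ >>> write_pb = pb_for_delete(document_path, ExistsOption(exists=True))
+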
+ Returns:
+ google.cloud.firestore_v1.types.Write: A
+ ``Write`` protobuf instance for the ``delete()``.
+ """
+ write_pb = write_pb2.Write(delete=document_path)
+ if option is not None:
+ option.modify_write(write_pb)
+
+ return write_pb
+
+
+class ReadAfterWriteError(Exception):
+ """Raised when a read is attempted after a write.
+
+ Raised by "read" methods that use transactions.
+ """
+
+
+def get_transaction_id(transaction, read_operation=True):
+ """Get the transaction ID from a ``Transaction`` object.
+
+ Args:
+ transaction (Optional[:class:`~google.cloud.firestore_v1.transaction.\
+ Transaction`]):
+ An existing transaction that this query will run in.
+ read_operation (Optional[bool]): Indicates if the transaction ID
+ will be used in a read operation. Defaults to :data:`True`.
+
+ Returns:
+ Optional[bytes]: The ID of the transaction, or :data:`None` if the
+ ``transaction`` is :data:`None`.
+
+ Raises:
+ ValueError: If the ``transaction`` is not in progress (only if
+ ``transaction`` is not :data:`None`).
+ ReadAfterWriteError: If the ``transaction`` has writes stored on
+ it and ``read_operation`` is :data:`True`.
+ """
+ if transaction is None:
+ return None
+ else:
+ if not transaction.in_progress:
+ raise ValueError(INACTIVE_TXN)
+ if read_operation and len(transaction._write_pbs) > 0:
+ raise ReadAfterWriteError(READ_AFTER_WRITE_ERROR)
+ return transaction.id
+
+
+def metadata_with_prefix(prefix, **kw):
+ """Create RPC metadata containing a prefix.
+
+ Args:
+ prefix (str): appropriate resource path.
+
+ Returns:
+ List[Tuple[str, str]]: RPC metadata with supplied prefix
+ """
+ return [("google-cloud-resource-prefix", prefix)]
+
+
+class WriteOption(object):
+ """Option used to assert a condition on a write operation."""
+
+ def modify_write(self, write_pb, no_create_msg=None):
+ """Modify a ``Write`` protobuf based on the state of this write option.
+
+ This is a virtual method intended to be implemented by subclasses.
+
+ Args:
+ write_pb (google.cloud.firestore_v1.types.Write): A
+ ``Write`` protobuf instance to be modified with a precondition
+ determined by the state of this option.
+ no_create_msg (Optional[str]): A message to use to indicate that
+ a create operation is not allowed.
+
+ Raises:
+ NotImplementedError: Always, this method is virtual.
+ """
+ raise NotImplementedError
+
+
+class LastUpdateOption(WriteOption):
+ """Option used to assert a "last update" condition on a write operation.
+
+ This will typically be created by
+ :meth:`~google.cloud.firestore_v1.client.Client.write_option`.
+
+ Args:
+ last_update_time (google.protobuf.timestamp_pb2.Timestamp): A
+ timestamp. When set, the target document must exist and have
+ been last updated at that time. Protobuf ``update_time`` timestamps
+ are typically returned from methods that perform write operations
+ as part of a "write result" protobuf or directly.
+ """
+
+ def __init__(self, last_update_time):
+ self._last_update_time = last_update_time
+
+ def __eq__(self, other):
+ if not isinstance(other, self.__class__):
+ return NotImplemented
+ return self._last_update_time == other._last_update_time
+
+ def modify_write(self, write_pb, **unused_kwargs):
+ """Modify a ``Write`` protobuf based on the state of this write option.
+
+ The ``last_update_time`` is added to ``write_pb`` as an "update time"
+ precondition. When set, the target document must exist and have been
+ last updated at that time.
+
+ Args:
+ write_pb (google.cloud.firestore_v1.types.Write): A
+ ``Write`` protobuf instance to be modified with a precondition
+ determined by the state of this option.
+ unused_kwargs (Dict[str, Any]): Keyword arguments accepted by
+ other subclasses that are unused here.
+ """
+ current_doc = types.Precondition(update_time=self._last_update_time)
+ write_pb.current_document.CopyFrom(current_doc)
+
+
+class ExistsOption(WriteOption):
+ """Option used to assert existence on a write operation.
+
+ This will typically be created by
+ :meth:`~google.cloud.firestore_v1.client.Client.write_option`.
+
+ Args:
+ exists (bool): Indicates if the document being modified
+ should already exist.
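+
+ For example, a sketch (``write_pb`` is any ``Write`` protobuf):
+
+ .. code-block:: python
+
+ >>> ExistsOption(exists=False).modify_write(write_pb) # must not exist yet
+ >>> ExistsOption(exists=True).modify_write(write_pb) # must already exist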
+ """
+
+ def __init__(self, exists):
+ self._exists = exists
+
+ def __eq__(self, other):
+ if not isinstance(other, self.__class__):
+ return NotImplemented
+ return self._exists == other._exists
+
+ def modify_write(self, write_pb, **unused_kwargs):
+ """Modify a ``Write`` protobuf based on the state of this write option.
+
+ If:
+
+ * ``exists=True``, adds a precondition that requires existence
+ * ``exists=False``, adds a precondition that requires non-existence
+
+ Args:
+ write_pb (google.cloud.firestore_v1.types.Write): A
+ ``Write`` protobuf instance to be modified with a precondition
+ determined by the state of this option.
+ unused_kwargs (Dict[str, Any]): Keyword arguments accepted by
+ other subclasses that are unused here.
+ """
+ current_doc = types.Precondition(exists=self._exists)
+ write_pb.current_document.CopyFrom(current_doc)
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/batch.py b/venv/Lib/site-packages/google/cloud/firestore_v1/batch.py
new file mode 100644
index 000000000..56483af10
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1/batch.py
@@ -0,0 +1,160 @@
+# Copyright 2017 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for batch requests to the Google Cloud Firestore API."""
+
+
+from google.cloud.firestore_v1 import _helpers
+
+
+class WriteBatch(object):
+ """Accumulate write operations to be sent in a batch.
+
+ This has the same set of methods for write operations that
+ :class:`~google.cloud.firestore_v1.document.DocumentReference` does,
+ e.g. :meth:`~google.cloud.firestore_v1.document.DocumentReference.create`.
+
+ Args:
+ client (:class:`~google.cloud.firestore_v1.client.Client`):
+ The client that created this batch.
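+
+ Example:
+ A sketch of typical usage (document paths are illustrative); the
+ batch commits explicitly via :meth:`commit`, or implicitly when
+ used as a context manager:
+
+ .. code-block:: python
+
+ >>> batch = client.batch()
+ >>> batch.set(client.document('users/alovelace'), {'born': 1815})
+ >>> batch.delete(client.document('users/retired'))
+ >>> write_results = batch.commit()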
+ """
+
+ def __init__(self, client):
+ self._client = client
+ self._write_pbs = []
+ self.write_results = None
+ self.commit_time = None
+
+ def _add_write_pbs(self, write_pbs):
+ """Add `Write`` protobufs to this transaction.
+
+ This method intended to be over-ridden by subclasses.
+
+ Args:
+ write_pbs (List[google.cloud.proto.firestore.v1.\
+ write_pb2.Write]): A list of write protobufs to be added.
+ """
+ self._write_pbs.extend(write_pbs)
+
+ def create(self, reference, document_data):
+ """Add a "change" to this batch to create a document.
+
+ If the document given by ``reference`` already exists, then this
+ batch will fail when :meth:`commit`-ed.
+
+ Args:
+ reference (:class:`~google.cloud.firestore_v1.document.DocumentReference`):
+ A document reference to be created in this batch.
+ document_data (dict): Property names and values to use for
+ creating a document.
+ """
+ write_pbs = _helpers.pbs_for_create(reference._document_path, document_data)
+ self._add_write_pbs(write_pbs)
+
+ def set(self, reference, document_data, merge=False):
+ """Add a "change" to replace a document.
+
+ See
+ :meth:`google.cloud.firestore_v1.document.DocumentReference.set` for
+ more information on how ``option`` determines how the change is
+ applied.
+
+ Args:
+ reference (:class:`~google.cloud.firestore_v1.document.DocumentReference`):
+ A document reference that will have values set in this batch.
+ document_data (dict):
+ Property names and values to use for replacing a document.
+ merge (Optional[bool] or Optional[List]):
+ If True, apply merging instead of overwriting the state
+ of the document.
+ """
+ if merge is not False:
+ write_pbs = _helpers.pbs_for_set_with_merge(
+ reference._document_path, document_data, merge
+ )
+ else:
+ write_pbs = _helpers.pbs_for_set_no_merge(
+ reference._document_path, document_data
+ )
+
+ self._add_write_pbs(write_pbs)
+
+ def update(self, reference, field_updates, option=None):
+ """Add a "change" to update a document.
+
+ See
+ :meth:`google.cloud.firestore_v1.document.DocumentReference.update`
+ for more information on ``field_updates`` and ``option``.
+
+ Args:
+ reference (:class:`~google.cloud.firestore_v1.document.DocumentReference`):
+ A document reference that will be updated in this batch.
+ field_updates (dict):
+ Field names or paths to update and values to update with.
+ option (Optional[:class:`~google.cloud.firestore_v1.client.WriteOption`]):
+ A write option to make assertions / preconditions on the server
+ state of the document before applying changes.
+ """
+ if option.__class__.__name__ == "ExistsOption":
+ raise ValueError("you must not pass an explicit write option to " "update.")
+ write_pbs = _helpers.pbs_for_update(
+ reference._document_path, field_updates, option
+ )
+ self._add_write_pbs(write_pbs)
+
+ def delete(self, reference, option=None):
+ """Add a "change" to delete a document.
+
+ See
+ :meth:`google.cloud.firestore_v1.document.DocumentReference.delete`
+ for more information on how ``option`` determines how the change is
+ applied.
+
+ Args:
+ reference (:class:`~google.cloud.firestore_v1.document.DocumentReference`):
+ A document reference that will be deleted in this batch.
+ option (Optional[:class:`~google.cloud.firestore_v1.client.WriteOption`]):
+ A write option to make assertions / preconditions on the server
+ state of the document before applying changes.
+ """
+ write_pb = _helpers.pb_for_delete(reference._document_path, option)
+ self._add_write_pbs([write_pb])
+
+ def commit(self):
+ """Commit the changes accumulated in this batch.
+
+ Returns:
+ List[:class:`google.cloud.proto.firestore.v1.write_pb2.WriteResult`, ...]:
+ The write results corresponding to the changes committed, returned
+ in the same order as the changes were applied to this batch. A
+ write result contains an ``update_time`` field.
+ """
+ commit_response = self._client._firestore_api.commit(
+ self._client._database_string,
+ self._write_pbs,
+ transaction=None,
+ metadata=self._client._rpc_metadata,
+ )
+
+ self._write_pbs = []
+ self.write_results = results = list(commit_response.write_results)
+ self.commit_time = commit_response.commit_time
+ return results
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ if exc_type is None:
+ self.commit()
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/client.py b/venv/Lib/site-packages/google/cloud/firestore_v1/client.py
new file mode 100644
index 000000000..6d0bea49c
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1/client.py
@@ -0,0 +1,622 @@
+# Copyright 2017 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Client for interacting with the Google Cloud Firestore API.
+
+This is the base from which all interactions with the API occur.
+
+In the hierarchy of API concepts
+
+* a :class:`~google.cloud.firestore_v1.client.Client` owns a
+ :class:`~google.cloud.firestore_v1.collection.CollectionReference`
+* a :class:`~google.cloud.firestore_v1.client.Client` owns a
+ :class:`~google.cloud.firestore_v1.document.DocumentReference`
+"""
+import os
+
+import google.api_core.client_options
+from google.api_core.gapic_v1 import client_info
+from google.cloud.client import ClientWithProject
+
+from google.cloud.firestore_v1 import _helpers
+from google.cloud.firestore_v1 import __version__
+from google.cloud.firestore_v1 import query
+from google.cloud.firestore_v1 import types
+from google.cloud.firestore_v1.batch import WriteBatch
+from google.cloud.firestore_v1.collection import CollectionReference
+from google.cloud.firestore_v1.document import DocumentReference
+from google.cloud.firestore_v1.document import DocumentSnapshot
+from google.cloud.firestore_v1.field_path import render_field_path
+from google.cloud.firestore_v1.gapic import firestore_client
+from google.cloud.firestore_v1.gapic.transports import firestore_grpc_transport
+from google.cloud.firestore_v1.transaction import Transaction
+
+
+DEFAULT_DATABASE = "(default)"
+"""str: The default database used in a :class:`~google.cloud.firestore_v1.client.Client`."""
+_BAD_OPTION_ERR = (
+ "Exactly one of ``last_update_time`` or ``exists`` " "must be provided."
+)
+_BAD_DOC_TEMPLATE = (
+ "Document {!r} appeared in response but was not present among references"
+)
+_ACTIVE_TXN = "There is already an active transaction."
+_INACTIVE_TXN = "There is no active transaction."
+_CLIENT_INFO = client_info.ClientInfo(client_library_version=__version__)
+_FIRESTORE_EMULATOR_HOST = "FIRESTORE_EMULATOR_HOST"
+
+
+class Client(ClientWithProject):
+ """Client for interacting with Google Cloud Firestore API.
+
+ .. note::
+
+ Since the Cloud Firestore API requires the gRPC transport, no
+ ``_http`` argument is accepted by this class.
+
+ Args:
+ project (Optional[str]): The project which the client acts on behalf
+ of. If not passed, falls back to the default inferred
+ from the environment.
+ credentials (Optional[~google.auth.credentials.Credentials]): The
+ OAuth2 Credentials to use for this client. If not passed, falls
+ back to the default inferred from the environment.
+ database (Optional[str]): The database name that the client targets.
+ For now, :attr:`DEFAULT_DATABASE` (the default value) is the
+ only valid database.
+ client_info (Optional[google.api_core.gapic_v1.client_info.ClientInfo]):
+ The client info used to send a user-agent string along with API
+ requests. If ``None``, then default info will be used. Generally,
+ you only need to set this if you're developing your own library
+ or partner tool.
+ client_options (Union[dict, google.api_core.client_options.ClientOptions]):
+ Client options used to set user options on the client. API Endpoint
+ should be set through client_options.
+ """
+
+ SCOPE = (
+ "https://www.googleapis.com/auth/cloud-platform",
+ "https://www.googleapis.com/auth/datastore",
+ )
+ """The scopes required for authenticating with the Firestore service."""
+
+ _firestore_api_internal = None
+ _database_string_internal = None
+ _rpc_metadata_internal = None
+
+ def __init__(
+ self,
+ project=None,
+ credentials=None,
+ database=DEFAULT_DATABASE,
+ client_info=_CLIENT_INFO,
+ client_options=None,
+ ):
+ # NOTE: This API has no use for the _http argument, but sending it
+ # will have no impact since the _http() @property only lazily
+ # creates a working HTTP object.
+ super(Client, self).__init__(
+ project=project,
+ credentials=credentials,
+ client_options=client_options,
+ _http=None,
+ )
+ self._client_info = client_info
+ if client_options:
+ if isinstance(client_options, dict):
+ client_options = google.api_core.client_options.from_dict(
+ client_options
+ )
+ self._client_options = client_options
+
+ self._database = database
+ self._emulator_host = os.getenv(_FIRESTORE_EMULATOR_HOST)
+
+ @property
+ def _firestore_api(self):
+ """Lazy-loading getter GAPIC Firestore API.
+
+ Returns:
+ :class:`~google.cloud.gapic.firestore.v1`.firestore_client.FirestoreClient:
+ The GAPIC client with the credentials of the current client.
+ """
+ if self._firestore_api_internal is None:
+ self._firestore_api_internal = firestore_client.FirestoreClient(
+ credentials=self._credentials, client_info=self._client_info
+ )
+
+ return self._firestore_api_internal
+
+ @property
+ def _database_string(self):
+ """The database string corresponding to this client's project.
+
+ This value is lazy-loaded and cached, and is of the form
+ ``projects/{project_id}/databases/{database_id}``.
+
+ Returns:
+ str: The fully-qualified database string for the current project.
+ """
+ if self._database_string_internal is None:
+ self._database_string_internal = firestore_client.FirestoreClient.database_root_path(
+ self.project, self._database
+ )
+
+ return self._database_string_internal
+
+ @property
+ def _rpc_metadata(self):
+ """The RPC metadata for this client's associated database.
+
+ Returns:
+ Sequence[Tuple(str, str)]: RPC metadata with resource prefix
+ for the database associated with this client.
+ """
+ if self._rpc_metadata_internal is None:
+ self._rpc_metadata_internal = _helpers.metadata_with_prefix(
+ self._database_string
+ )
+
+ return self._rpc_metadata_internal
+
+ def collection(self, *collection_path):
+ """Get a reference to a collection.
+
+ For a top-level collection:
+
+ .. code-block:: python
+
+ >>> client.collection('top')
+
+ For a sub-collection:
+
+ .. code-block:: python
+
+ >>> client.collection('mydocs/doc/subcol')
+ >>> # is the same as
+ >>> client.collection('mydocs', 'doc', 'subcol')
+
+ Sub-collections can be nested deeper in a similar fashion.
+
+ Args:
+ collection_path (Tuple[str, ...]): Can either be
+
+ * A single ``/``-delimited path to a collection
+ * A tuple of collection path segments
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.collection.CollectionReference`:
+ A reference to a collection in the Firestore database.
+ """
+ if len(collection_path) == 1:
+ path = collection_path[0].split(_helpers.DOCUMENT_PATH_DELIMITER)
+ else:
+ path = collection_path
+
+ return CollectionReference(*path, client=self)
+
+ def collection_group(self, collection_id):
+ """
+ Creates and returns a new Query that includes all documents in the
+ database that are contained in a collection or subcollection with the
+ given collection_id.
+
+ .. code-block:: python
+
+ >>> query = client.collection_group('mygroup')
+
+ Args:
+ collection_id (str): Identifies the collections to query over.
+ Every collection or subcollection with this ID as the last
+ segment of its path will be included. Cannot contain a slash.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`: The created Query.
+ """
+ if "/" in collection_id:
+ raise ValueError(
+ "Invalid collection_id "
+ + collection_id
+ + ". Collection IDs must not contain '/'."
+ )
+
+ collection = self.collection(collection_id)
+ return query.Query(collection, all_descendants=True)
+
+ def document(self, *document_path):
+ """Get a reference to a document in a collection.
+
+ For a top-level document:
+
+ .. code-block:: python
+
+ >>> client.document('collek/shun')
+ >>> # is the same as
+ >>> client.document('collek', 'shun')
+
+ For a document in a sub-collection:
+
+ .. code-block:: python
+
+ >>> client.document('mydocs/doc/subcol/child')
+ >>> # is the same as
+ >>> client.document('mydocs', 'doc', 'subcol', 'child')
+
+ Documents in sub-collections can be nested deeper in a similar fashion.
+
+ Args:
+ document_path (Tuple[str, ...]): Can either be
+
+ * A single ``/``-delimited path to a document
+ * A tuple of document path segments
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.document.DocumentReference`:
+ A reference to a document in a collection.
+ """
+ if len(document_path) == 1:
+ path = document_path[0].split(_helpers.DOCUMENT_PATH_DELIMITER)
+ else:
+ path = document_path
+
+ # DocumentReference takes a relative path. Strip the database string if present.
+ base_path = self._database_string + "/documents/"
+ joined_path = _helpers.DOCUMENT_PATH_DELIMITER.join(path)
+ if joined_path.startswith(base_path):
+ joined_path = joined_path[len(base_path) :]
+ path = joined_path.split(_helpers.DOCUMENT_PATH_DELIMITER)
+
+ return DocumentReference(*path, client=self)
+
+ @staticmethod
+ def field_path(*field_names):
+ """Create a **field path** from a list of nested field names.
+
+ A **field path** is a ``.``-delimited concatenation of the field
+ names. It is used to represent a nested field. For example,
+ in the data
+
+ .. code-block:: python
+
+ data = {
+ 'aa': {
+ 'bb': {
+ 'cc': 10,
+ },
+ },
+ }
+
+ the field path ``'aa.bb.cc'`` represents the data stored in
+ ``data['aa']['bb']['cc']``.
+
+ Args:
+ field_names (Tuple[str, ...]): The list of field names.
+
+ Returns:
+ str: The ``.``-delimited field path.
+ """
+ return render_field_path(field_names)
+
+ @staticmethod
+ def write_option(**kwargs):
+ """Create a write option for write operations.
+
+ Write operations include :meth:`~google.cloud.DocumentReference.set`,
+ :meth:`~google.cloud.DocumentReference.update` and
+ :meth:`~google.cloud.DocumentReference.delete`.
+
+ One of the following keyword arguments must be provided:
+
+ * ``last_update_time`` (:class:`google.protobuf.timestamp_pb2.\
+ Timestamp`): A timestamp. When set, the target document must
+ exist and have been last updated at that time. Protobuf
+ ``update_time`` timestamps are typically returned from methods
+ that perform write operations as part of a "write result"
+ protobuf or directly.
+ * ``exists`` (:class:`bool`): Indicates if the document being modified
+ should already exist.
+
+ Providing no argument would make the option have no effect (so
+ it is not allowed). Providing multiple would be an apparent
+ contradiction, since ``last_update_time`` assumes that the
+ document **was** updated (it can't have been updated if it
+ doesn't exist) and ``exists`` indicates that it is unknown if the
+ document exists or not.
+
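+ For example, a sketch (``timestamp`` is assumed to come from an
+ earlier write result):
+
+ .. code-block:: python
+
+ >>> option = client.write_option(last_update_time=timestamp)
+ >>> option = client.write_option(exists=True)
+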
+ Args:
+ kwargs (Dict[str, Any]): The keyword arguments described above.
+
+ Raises:
+ TypeError: If anything other than exactly one argument is
+ provided by the caller.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.client.WriteOption`:
+ The option to be used to configure a write message.
+ """
+ if len(kwargs) != 1:
+ raise TypeError(_BAD_OPTION_ERR)
+
+ name, value = kwargs.popitem()
+ if name == "last_update_time":
+ return _helpers.LastUpdateOption(value)
+ elif name == "exists":
+ return _helpers.ExistsOption(value)
+ else:
+ extra = "{!r} was provided".format(name)
+ raise TypeError(_BAD_OPTION_ERR, extra)
+
+ def get_all(self, references, field_paths=None, transaction=None):
+ """Retrieve a batch of documents.
+
+ .. note::
+
+ Documents returned by this method are not guaranteed to be
+ returned in the same order that they are given in ``references``.
+
+ .. note::
+
+ If multiple ``references`` refer to the same document, the server
+ will only return one result.
+
+ See :meth:`~google.cloud.firestore_v1.client.Client.field_path` for
+ more information on **field paths**.
+
+ If a ``transaction`` is used and it already has write operations
+ added, this method cannot be used (i.e. read-after-write is not
+ allowed).
+
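+ For example, a sketch fetching two documents in one RPC (the paths
+ are illustrative):
+
+ .. code-block:: python
+
+ >>> refs = [client.document('users/ada'), client.document('users/alan')]
+ >>> for snapshot in client.get_all(refs):
+ ... print(snapshot.id, snapshot.exists)
+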
+ Args:
+ references (List[.DocumentReference, ...]): Iterable of document
+ references to be retrieved.
+ field_paths (Optional[Iterable[str, ...]]): An iterable of field
+ paths (``.``-delimited list of field names) to use as a
+ projection of document fields in the returned results. If
+ no value is provided, all fields will be returned.
+ transaction (Optional[:class:`~google.cloud.firestore_v1.transaction.Transaction`]):
+ An existing transaction that these ``references`` will be
+ retrieved in.
+
+ Yields:
+ .DocumentSnapshot: The next document snapshot that fulfills the
+ query, or :data:`None` if the document does not exist.
+ """
+ document_paths, reference_map = _reference_info(references)
+ mask = _get_doc_mask(field_paths)
+ response_iterator = self._firestore_api.batch_get_documents(
+ self._database_string,
+ document_paths,
+ mask,
+ transaction=_helpers.get_transaction_id(transaction),
+ metadata=self._rpc_metadata,
+ )
+
+ for get_doc_response in response_iterator:
+ yield _parse_batch_get(get_doc_response, reference_map, self)
+
+ def collections(self):
+ """List top-level collections of the client's database.
+
+ Returns:
+ Sequence[:class:`~google.cloud.firestore_v1.collection.CollectionReference`]:
+ iterator of subcollections of the current document.
+ """
+ iterator = self._firestore_api.list_collection_ids(
+ "{}/documents".format(self._database_string), metadata=self._rpc_metadata
+ )
+ iterator.client = self
+ iterator.item_to_value = _item_to_collection_ref
+ return iterator
+
+ def batch(self):
+ """Get a batch instance from this client.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.batch.WriteBatch`:
+ A "write" batch to be used for accumulating document changes and
+ sending the changes all at once.
+ """
+ return WriteBatch(self)
+
+ def transaction(self, **kwargs):
+ """Get a transaction that uses this client.
+
+ See :class:`~google.cloud.firestore_v1.transaction.Transaction` for
+ more information on transactions and the constructor arguments.
+
+ Args:
+ kwargs (Dict[str, Any]): The keyword arguments (other than
+ ``client``) to pass along to the
+ :class:`~google.cloud.firestore_v1.transaction.Transaction`
+ constructor.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.transaction.Transaction`:
+ A transaction attached to this client.
+ """
+ return Transaction(self, **kwargs)
+
+
+def _reference_info(references):
+ """Get information about document references.
+
+ Helper for :meth:`~google.cloud.firestore_v1.client.Client.get_all`.
+
+ Args:
+ references (List[.DocumentReference, ...]): Iterable of document
+ references.
+
+ Returns:
+ Tuple[List[str, ...], Dict[str, .DocumentReference]]: A two-tuple of
+
+ * fully-qualified document paths for each reference in ``references``
+ * a mapping from the paths to the original reference. (If
+ ``references`` contains multiple references to the same document,
+ that key will be overwritten in the result.)
+ """
+ document_paths = []
+ reference_map = {}
+ for reference in references:
+ doc_path = reference._document_path
+ document_paths.append(doc_path)
+ reference_map[doc_path] = reference
+
+ return document_paths, reference_map
+
+
+def _get_reference(document_path, reference_map):
+ """Get a document reference from a dictionary.
+
+ This just wraps a simple dictionary look-up with a helpful error that is
+ specific to :meth:`~google.cloud.firestore.client.Client.get_all`, the
+ **public** caller of this function.
+
+ Args:
+ document_path (str): A fully-qualified document path.
+ reference_map (Dict[str, .DocumentReference]): A mapping (produced
+ by :func:`_reference_info`) of fully-qualified document paths to
+ document references.
+
+ Returns:
+ .DocumentReference: The matching reference.
+
+ Raises:
+ ValueError: If ``document_path`` has not been encountered.
+ """
+ try:
+ return reference_map[document_path]
+ except KeyError:
+ msg = _BAD_DOC_TEMPLATE.format(document_path)
+ raise ValueError(msg)
+
+
+def _parse_batch_get(get_doc_response, reference_map, client):
+ """Parse a `BatchGetDocumentsResponse` protobuf.
+
+ Args:
+ get_doc_response (~google.cloud.proto.firestore.v1.\
+ firestore_pb2.BatchGetDocumentsResponse): A single response (from
+ a stream) containing the "get" response for a document.
+ reference_map (Dict[str, .DocumentReference]): A mapping (produced
+ by :func:`_reference_info`) of fully-qualified document paths to
+ document references.
+ client (:class:`~google.cloud.firestore_v1.client.Client`):
+ A client that has a document factory.
+
+ Returns:
+ .DocumentSnapshot: The retrieved snapshot.
+
+ Raises:
+ ValueError: If the response has a ``result`` field (a oneof) other
+ than ``found`` or ``missing``.
+ """
+ result_type = get_doc_response.WhichOneof("result")
+ if result_type == "found":
+ reference = _get_reference(get_doc_response.found.name, reference_map)
+ data = _helpers.decode_dict(get_doc_response.found.fields, client)
+ snapshot = DocumentSnapshot(
+ reference,
+ data,
+ exists=True,
+ read_time=get_doc_response.read_time,
+ create_time=get_doc_response.found.create_time,
+ update_time=get_doc_response.found.update_time,
+ )
+ elif result_type == "missing":
+ reference = _get_reference(get_doc_response.missing, reference_map)
+ snapshot = DocumentSnapshot(
+ reference,
+ None,
+ exists=False,
+ read_time=get_doc_response.read_time,
+ create_time=None,
+ update_time=None,
+ )
+ else:
+ raise ValueError(
+ "`BatchGetDocumentsResponse.result` (a oneof) had a field other "
+ "than `found` or `missing` set, or was unset"
+ )
+ return snapshot
+
+
+def _get_doc_mask(field_paths):
+ """Get a document mask if field paths are provided.
+
+ Args:
+ field_paths (Optional[Iterable[str, ...]]): An iterable of field
+ paths (``.``-delimited list of field names) to use as a
+ projection of document fields in the returned results.
+
+ Returns:
+ Optional[google.cloud.firestore_v1.types.DocumentMask]: A mask
+ to project documents to a restricted set of field paths.
+ """
+ if field_paths is None:
+ return None
+ else:
+ return types.DocumentMask(field_paths=field_paths)
+
+
+def _item_to_collection_ref(iterator, item):
+ """Convert collection ID to collection ref.
+
+ Args:
+ iterator (google.api_core.page_iterator.GRPCIterator):
+ iterator response
+ item (str): ID of the collection
+ """
+ return iterator.client.collection(item)
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/collection.py b/venv/Lib/site-packages/google/cloud/firestore_v1/collection.py
new file mode 100644
index 000000000..c78e44929
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1/collection.py
@@ -0,0 +1,509 @@
+# Copyright 2017 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Classes for representing collections for the Google Cloud Firestore API."""
+import random
+
+import six
+
+from google.cloud.firestore_v1 import _helpers
+from google.cloud.firestore_v1 import query as query_mod
+from google.cloud.firestore_v1.watch import Watch
+from google.cloud.firestore_v1 import document
+
+_AUTO_ID_CHARS = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"
+
+
+class CollectionReference(object):
+ """A reference to a collection in a Firestore database.
+
+ The collection may already exist or this class can facilitate creation
+ of documents within the collection.
+
+ Args:
+ path (Tuple[str, ...]): The components in the collection path.
+ This is a series of strings representing each collection and
+ sub-collection ID, as well as the document IDs for any documents
+ that contain a sub-collection.
+ kwargs (dict): The keyword arguments for the constructor. The only
+ supported keyword is ``client`` and it must be a
+ :class:`~google.cloud.firestore_v1.client.Client` if provided. It
+ represents the client that created this collection reference.
+
+ Raises:
+ ValueError: if
+
+ * the ``path`` is empty
+ * there are an even number of elements
+ * a collection ID in ``path`` is not a string
+ * a document ID in ``path`` is not a string
+ TypeError: If a keyword other than ``client`` is used.
+ """
+
+ def __init__(self, *path, **kwargs):
+ _helpers.verify_path(path, is_collection=True)
+ self._path = path
+ self._client = kwargs.pop("client", None)
+ if kwargs:
+ raise TypeError(
+ "Received unexpected arguments", kwargs, "Only `client` is supported"
+ )
+
+ def __eq__(self, other):
+ if not isinstance(other, self.__class__):
+ return NotImplemented
+ return self._path == other._path and self._client == other._client
+
+ @property
+ def id(self):
+ """The collection identifier.
+
+ Returns:
+ str: The last component of the path.
+ """
+ return self._path[-1]
+
+ @property
+ def parent(self):
+ """Document that owns the current collection.
+
+ Returns:
+ Optional[:class:`~google.cloud.firestore_v1.document.DocumentReference`]:
+ The parent document, if the current collection is not a
+ top-level collection.
+ """
+ if len(self._path) == 1:
+ return None
+ else:
+ parent_path = self._path[:-1]
+ return self._client.document(*parent_path)
+
+ def document(self, document_id=None):
+ """Create a sub-document underneath the current collection.
+
+ Args:
+ document_id (Optional[str]): The document identifier
+ within the current collection. If not provided, will default
+ to a random 20 character string composed of digits,
+ uppercase and lowercase letters.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.document.DocumentReference`:
+ The child document.
+ """
+ if document_id is None:
+ document_id = _auto_id()
+
+ child_path = self._path + (document_id,)
+ return self._client.document(*child_path)
+
+ def _parent_info(self):
+ """Get fully-qualified parent path and prefix for this collection.
+
+ Returns:
+ Tuple[str, str]: Pair of
+
+ * the fully-qualified (with database and project) path to the
+ parent of this collection (will either be the database path
+ or a document path).
+ * the prefix to a document in this collection.
+ """
+ parent_doc = self.parent
+ if parent_doc is None:
+ parent_path = _helpers.DOCUMENT_PATH_DELIMITER.join(
+ (self._client._database_string, "documents")
+ )
+ else:
+ parent_path = parent_doc._document_path
+
+ expected_prefix = _helpers.DOCUMENT_PATH_DELIMITER.join((parent_path, self.id))
+ return parent_path, expected_prefix
+
+ def add(self, document_data, document_id=None):
+ """Create a document in the Firestore database with the provided data.
+
+ Args:
+ document_data (dict): Property names and values to use for
+ creating the document.
+ document_id (Optional[str]): The document identifier within the
+ current collection. If not provided, an ID will be
+ automatically assigned by the server (the assigned ID will be
+ a random 20 character string composed of digits,
+ uppercase and lowercase letters).
+
+ Returns:
+ Tuple[:class:`google.protobuf.timestamp_pb2.Timestamp`, \
+ :class:`~google.cloud.firestore_v1.document.DocumentReference`]:
+ Pair of
+
+ * The ``update_time`` when the document was created/overwritten.
+ * A document reference for the created document.
+
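+ For example, a sketch with an auto-assigned ID (the data is
+ illustrative):
+
+ .. code-block:: python
+
+ >>> update_time, doc_ref = client.collection('users').add({'name': 'Ada'})
+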
+ Raises:
+ ~google.cloud.exceptions.Conflict: If ``document_id`` is provided
+ and the document already exists.
+ """
+ if document_id is None:
+ document_id = _auto_id()
+
+ document_ref = self.document(document_id)
+ write_result = document_ref.create(document_data)
+ return write_result.update_time, document_ref
+
+ def list_documents(self, page_size=None):
+ """List all subdocuments of the current collection.
+
+ Args:
+ page_size (Optional[int]): The maximum number of documents
+ in each page of results from this request. Non-positive values
+ are ignored. Defaults to a sensible value set by the API.
+
+ Returns:
+ Sequence[:class:`~google.cloud.firestore_v1.document.DocumentReference`]:
+ iterator of subdocuments of the current collection. If the
+ collection does not exist at the time of `snapshot`, the
+ iterator will be empty.
+ """
+ parent, _ = self._parent_info()
+
+ iterator = self._client._firestore_api.list_documents(
+ parent,
+ self.id,
+ page_size=page_size,
+ show_missing=True,
+ metadata=self._client._rpc_metadata,
+ )
+ iterator.collection = self
+ iterator.item_to_value = _item_to_document_ref
+ return iterator
+
+ def select(self, field_paths):
+ """Create a "select" query with this collection as parent.
+
+ See
+ :meth:`~google.cloud.firestore_v1.query.Query.select` for
+ more information on this method.
+
+ Args:
+ field_paths (Iterable[str, ...]): An iterable of field paths
+ (``.``-delimited list of field names) to use as a projection
+ of document fields in the query results.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ A "projected" query.
+ """
+ query = query_mod.Query(self)
+ return query.select(field_paths)
+
+ def where(self, field_path, op_string, value):
+ """Create a "where" query with this collection as parent.
+
+ See
+ :meth:`~google.cloud.firestore_v1.query.Query.where` for
+ more information on this method.
+
+ Args:
+ field_path (str): A field path (``.``-delimited list of
+ field names) for the field to filter on.
+ op_string (str): A comparison operation in the form of a string.
+ Acceptable values are ``<``, ``<=``, ``==``, ``>=``
+ and ``>``.
+ value (Any): The value to compare the field against in the filter.
+ If ``value`` is :data:`None` or a NaN, then ``==`` is the only
+ allowed operation.
+
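+ For example, a sketch of a chained query (field and values are
+ illustrative):
+
+ .. code-block:: python
+
+ >>> query = collection.where('born', '>=', 1900).limit(10)
+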
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ A filtered query.
+ """
+ query = query_mod.Query(self)
+ return query.where(field_path, op_string, value)
+
+ def order_by(self, field_path, **kwargs):
+ """Create an "order by" query with this collection as parent.
+
+ See
+ :meth:`~google.cloud.firestore_v1.query.Query.order_by` for
+ more information on this method.
+
+ Args:
+ field_path (str): A field path (``.``-delimited list of
+ field names) on which to order the query results.
+ kwargs (Dict[str, Any]): The keyword arguments to pass along
+ to the query. The only supported keyword is ``direction``,
+ see :meth:`~google.cloud.firestore_v1.query.Query.order_by`
+ for more information.
+
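+ For example, a sketch (the field name is illustrative):
+
+ .. code-block:: python
+
+ >>> query = collection.order_by('born', direction='DESCENDING')
+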
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ An "order by" query.
+ """
+ query = query_mod.Query(self)
+ return query.order_by(field_path, **kwargs)
+
+ def limit(self, count):
+ """Create a limited query with this collection as parent.
+
+ .. note::
+
+ `limit` and `limit_to_last` are mutually exclusive.
+ Setting `limit` will drop previously set `limit_to_last`.
+
+ See
+ :meth:`~google.cloud.firestore_v1.query.Query.limit` for
+ more information on this method.
+
+ Args:
+ count (int): Maximum number of documents to return that match
+ the query.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ A limited query.
+ """
+ query = query_mod.Query(self)
+ return query.limit(count)
+
+ def limit_to_last(self, count):
+ """Create a limited to last query with this collection as parent.
+
+ .. note::
+
+ `limit` and `limit_to_last` are mutually exclusive.
+ Setting `limit_to_last` will drop previously set `limit`.
+
+ See
+ :meth:`~google.cloud.firestore_v1.query.Query.limit_to_last`
+ for more information on this method.
+
+ Args:
+ count (int): Maximum number of documents to return that
+ match the query.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ A limited to last query.
+ """
+ query = query_mod.Query(self)
+ return query.limit_to_last(count)
+
+ def offset(self, num_to_skip):
+ """Skip to an offset in a query with this collection as parent.
+
+ See
+ :meth:`~google.cloud.firestore_v1.query.Query.offset` for
+ more information on this method.
+
+ Args:
+ num_to_skip (int): The number of results to skip at the beginning
+ of query results. (Must be non-negative.)
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ An offset query.
+ """
+ query = query_mod.Query(self)
+ return query.offset(num_to_skip)
+
+ def start_at(self, document_fields):
+ """Start query at a cursor with this collection as parent.
+
+ See
+ :meth:`~google.cloud.firestore_v1.query.Query.start_at` for
+ more information on this method.
+
+ Args:
+ document_fields (Union[:class:`~google.cloud.firestore_v1.\
+ document.DocumentSnapshot`, dict, list, tuple]):
+ A document snapshot or a dictionary/list/tuple of fields
+ representing a query results cursor. A cursor is a collection
+ of values that represent a position in a query result set.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ A query with cursor.
+ """
+ query = query_mod.Query(self)
+ return query.start_at(document_fields)
+
+ def start_after(self, document_fields):
+ """Start query after a cursor with this collection as parent.
+
+ See
+ :meth:`~google.cloud.firestore_v1.query.Query.start_after` for
+ more information on this method.
+
+ Args:
+ document_fields (Union[:class:`~google.cloud.firestore_v1.\
+ document.DocumentSnapshot`, dict, list, tuple]):
+ A document snapshot or a dictionary/list/tuple of fields
+ representing a query results cursor. A cursor is a collection
+ of values that represent a position in a query result set.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ A query with cursor.
+ """
+ query = query_mod.Query(self)
+ return query.start_after(document_fields)
+
+ def end_before(self, document_fields):
+ """End query before a cursor with this collection as parent.
+
+ See
+ :meth:`~google.cloud.firestore_v1.query.Query.end_before` for
+ more information on this method.
+
+ Args:
+ document_fields (Union[:class:`~google.cloud.firestore_v1.\
+ document.DocumentSnapshot`, dict, list, tuple]):
+ A document snapshot or a dictionary/list/tuple of fields
+ representing a query results cursor. A cursor is a collection
+ of values that represent a position in a query result set.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ A query with cursor.
+ """
+ query = query_mod.Query(self)
+ return query.end_before(document_fields)
+
+ def end_at(self, document_fields):
+ """End query at a cursor with this collection as parent.
+
+ See
+ :meth:`~google.cloud.firestore_v1.query.Query.end_at` for
+ more information on this method.
+
+ Args:
+ document_fields (Union[:class:`~google.cloud.firestore_v1.\
+ document.DocumentSnapshot`, dict, list, tuple]):
+ A document snapshot or a dictionary/list/tuple of fields
+ representing a query results cursor. A cursor is a collection
+ of values that represent a position in a query result set.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ A query with cursor.
+ """
+ query = query_mod.Query(self)
+ return query.end_at(document_fields)
+
+ def get(self, transaction=None):
+ """Read the documents in this collection.
+
+ This sends a ``RunQuery`` RPC and returns a list of documents
+ returned in the stream of ``RunQueryResponse`` messages.
+
+ Args:
+ transaction
+ (Optional[:class:`~google.cloud.firestore_v1.transaction.Transaction`]):
+ An existing transaction that this query will run in.
+
+ If a ``transaction`` is used and it already has write operations
+ added, this method cannot be used (i.e. read-after-write is not
+ allowed).
+
+ Returns:
+ list: The documents in this collection that match the query.
+ """
+ query = query_mod.Query(self)
+ return query.get(transaction=transaction)
+
+ def stream(self, transaction=None):
+ """Read the documents in this collection.
+
+ This sends a ``RunQuery`` RPC and then returns an iterator which
+ consumes each document returned in the stream of ``RunQueryResponse``
+ messages.
+
+ .. note::
+
+ The underlying stream of responses will time out after
+ the ``max_rpc_timeout_millis`` value set in the GAPIC
+ client configuration for the ``RunQuery`` API. Snapshots
+ not consumed from the iterator before that point will be lost.
+
+ If a ``transaction`` is used and it already has write operations
+ added, this method cannot be used (i.e. read-after-write is not
+ allowed).
+
+ Args:
+ transaction (Optional[:class:`~google.cloud.firestore_v1.transaction.\
+ Transaction`]):
+ An existing transaction that the query will run in.
+
+ Yields:
+ :class:`~google.cloud.firestore_v1.document.DocumentSnapshot`:
+ The next document that fulfills the query.
+ """
+ query = query_mod.Query(self)
+ return query.stream(transaction=transaction)
+
+ def on_snapshot(self, callback):
+ """Monitor the documents in this collection.
+
+ This starts a watch on this collection using a background thread. The
+ provided callback is run on the snapshot of the documents.
+
+ Args:
+ callback (Callable[List[:class:`~google.cloud.firestore_v1.document.DocumentSnapshot`], \
+ List[:class:`~google.cloud.firestore_v1.watch.DocumentChange`], datetime.datetime], NoneType):
+ a callback to run when a change occurs.
+
+ Example:
+ from google.cloud import firestore_v1
+
+ db = firestore_v1.Client()
+ collection_ref = db.collection(u'users')
+
+ def on_snapshot(docs, changes, read_time):
+ for doc in docs:
+ print(u'{} => {}'.format(doc.id, doc.to_dict()))
+
+ # Watch this collection
+ collection_watch = collection_ref.on_snapshot(on_snapshot)
+
+ # Terminate this watch
+ collection_watch.unsubscribe()
+ """
+ return Watch.for_query(
+ query_mod.Query(self),
+ callback,
+ document.DocumentSnapshot,
+ document.DocumentReference,
+ )
+
+
+def _auto_id():
+ """Generate a "random" automatically generated ID.
+
+ Returns:
+ str: A 20 character string composed of digits, uppercase and
+ lowercase letters.
+ """
+ return "".join(random.choice(_AUTO_ID_CHARS) for _ in six.moves.xrange(20))
+
+
+def _item_to_document_ref(iterator, item):
+ """Convert Document resource to document ref.
+
+ Args:
+ iterator (google.api_core.page_iterator.GRPCIterator):
+ iterator response
+ item (dict): document resource
+ """
+ document_id = item.name.split(_helpers.DOCUMENT_PATH_DELIMITER)[-1]
+ return iterator.collection.document(document_id)
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/document.py b/venv/Lib/site-packages/google/cloud/firestore_v1/document.py
new file mode 100644
index 000000000..b431e26da
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1/document.py
@@ -0,0 +1,787 @@
+# Copyright 2017 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Classes for representing documents for the Google Cloud Firestore API."""
+
+import copy
+
+import six
+
+from google.api_core import exceptions
+from google.cloud.firestore_v1 import _helpers
+from google.cloud.firestore_v1 import field_path as field_path_module
+from google.cloud.firestore_v1.proto import common_pb2
+from google.cloud.firestore_v1.watch import Watch
+
+
+class DocumentReference(object):
+ """A reference to a document in a Firestore database.
+
+ The document may already exist or can be created by this class.
+
+ Args:
+ path (Tuple[str, ...]): The components in the document path.
+ This is a series of strings representing each collection and
+ sub-collection ID, as well as the document IDs for any documents
+ that contain a sub-collection (as well as the base document).
+ kwargs (dict): The keyword arguments for the constructor. The only
+ supported keyword is ``client`` and it must be a
+ :class:`~google.cloud.firestore_v1.client.Client`. It represents
+ the client that created this document reference.
+
+ Raises:
+ ValueError: if
+
+ * the ``path`` is empty
+ * there are an even number of elements
+ * a collection ID in ``path`` is not a string
+ * a document ID in ``path`` is not a string
+ TypeError: If a keyword other than ``client`` is used.
+ """
+
+ _document_path_internal = None
+
+ def __init__(self, *path, **kwargs):
+ _helpers.verify_path(path, is_collection=False)
+ self._path = path
+ self._client = kwargs.pop("client", None)
+ if kwargs:
+ raise TypeError(
+ "Received unexpected arguments", kwargs, "Only `client` is supported"
+ )
+
+ def __copy__(self):
+ """Shallow copy the instance.
+
+ We leave the client "as-is" but tuple-unpack the path.
+
+ Returns:
+ .DocumentReference: A copy of the current document.
+ """
+ result = self.__class__(*self._path, client=self._client)
+ result._document_path_internal = self._document_path_internal
+ return result
+
+ def __deepcopy__(self, unused_memo):
+ """Deep copy the instance.
+
+ This isn't a true deep copy; we leave the client "as-is" but
+ tuple-unpack the path.
+
+ Returns:
+ .DocumentReference: A copy of the current document.
+ """
+ return self.__copy__()
+
+ def __eq__(self, other):
+ """Equality check against another instance.
+
+ Args:
+ other (Any): A value to compare against.
+
+ Returns:
+ Union[bool, NotImplementedType]: Indicating if the values are
+ equal.
+ """
+ if isinstance(other, DocumentReference):
+ return self._client == other._client and self._path == other._path
+ else:
+ return NotImplemented
+
+ def __hash__(self):
+ return hash(self._path) + hash(self._client)
+
+ def __ne__(self, other):
+ """Inequality check against another instance.
+
+ Args:
+ other (Any): A value to compare against.
+
+ Returns:
+ Union[bool, NotImplementedType]: Indicating if the values are
+ not equal.
+ """
+ if isinstance(other, DocumentReference):
+ return self._client != other._client or self._path != other._path
+ else:
+ return NotImplemented
+
+ @property
+ def path(self):
+ """Database-relative for this document.
+
+ Returns:
+ str: The document's relative path.
+ """
+ return "/".join(self._path)
+
+ @property
+ def _document_path(self):
+ """Create and cache the full path for this document.
+
+ Of the form:
+
+ ``projects/{project_id}/databases/{database_id}/...
+ documents/{document_path}``
+
+ Returns:
+ str: The full document path.
+
+ Raises:
+ ValueError: If the current document reference has no ``client``.
+ """
+ if self._document_path_internal is None:
+ if self._client is None:
+ raise ValueError("A document reference requires a `client`.")
+ self._document_path_internal = _get_document_path(self._client, self._path)
+
+ return self._document_path_internal
+
+ @property
+ def id(self):
+ """The document identifier (within its collection).
+
+ Returns:
+ str: The last component of the path.
+ """
+ return self._path[-1]
+
+ @property
+ def parent(self):
+ """Collection that owns the current document.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.collection.CollectionReference`:
+ The parent collection.
+ """
+ parent_path = self._path[:-1]
+ return self._client.collection(*parent_path)
+
+ def collection(self, collection_id):
+ """Create a sub-collection underneath the current document.
+
+ Args:
+ collection_id (str): The sub-collection identifier (sometimes
+ referred to as the "kind").
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.collection.CollectionReference`:
+ The child collection.
+ """
+ child_path = self._path + (collection_id,)
+ return self._client.collection(*child_path)
+
+ def create(self, document_data):
+ """Create the current document in the Firestore database.
+
+ Args:
+ document_data (dict): Property names and values to use for
+ creating a document.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.types.WriteResult`:
+ The write result corresponding to the committed document.
+ A write result contains an ``update_time`` field.
+
+ Raises:
+ :class:`~google.cloud.exceptions.Conflict`:
+ If the document already exists.
+ """
+ batch = self._client.batch()
+ batch.create(self, document_data)
+ write_results = batch.commit()
+ return _first_write_result(write_results)
+
+ def set(self, document_data, merge=False):
+ """Replace the current document in the Firestore database.
+
+ A write ``option`` can be specified to indicate preconditions of
+ the "set" operation. If no ``option`` is specified and this document
+ doesn't exist yet, this method will create it.
+
+ Overwrites all content for the document with the fields in
+ ``document_data``. This method performs almost the same functionality
+ as :meth:`create`. The only difference is that this method doesn't
+ make any requirements on the existence of the document (unless
+ ``option`` is used), whereas as :meth:`create` will fail if the
+ document already exists.
+
+ Args:
+ document_data (dict): Property names and values to use for
+ replacing a document.
+ merge (Optional[bool] or Optional[List]):
+ If True, apply merging instead of overwriting the state
+ of the document.
+
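+ For example, a sketch (field names are illustrative):
+
+ .. code-block:: python
+
+ >>> document.set({'a': 1, 'b': 2}) # overwrites the whole document
+ >>> document.set({'b': 3}, merge=True) # keeps 'a', updates only 'b'
+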
+ Returns:
+ :class:`~google.cloud.firestore_v1.types.WriteResult`:
+ The write result corresponding to the committed document. A write
+ result contains an ``update_time`` field.
+ """
+ batch = self._client.batch()
+ batch.set(self, document_data, merge=merge)
+ write_results = batch.commit()
+ return _first_write_result(write_results)
+
+ def update(self, field_updates, option=None):
+ """Update an existing document in the Firestore database.
+
+ By default, this method verifies that the document exists on the
+ server before making updates. A write ``option`` can be specified to
+ override these preconditions.
+
+ Each key in ``field_updates`` can either be a field name or a
+ **field path** (For more information on **field paths**, see
+ :meth:`~google.cloud.firestore_v1.client.Client.field_path`.) To
+ illustrate this, consider a document with
+
+ .. code-block:: python
+
+ >>> snapshot = document.get()
+ >>> snapshot.to_dict()
+ {
+ 'foo': {
+ 'bar': 'baz',
+ },
+ 'other': True,
+ }
+
+ stored on the server. If the field name is used in the update:
+
+ .. code-block:: python
+
+ >>> field_updates = {
+ ... 'foo': {
+ ... 'quux': 800,
+ ... },
+ ... }
+ >>> document.update(field_updates)
+
+ then all of ``foo`` will be overwritten on the server and the new
+ value will be
+
+ .. code-block:: python
+
+ >>> snapshot = document.get()
+ >>> snapshot.to_dict()
+ {
+ 'foo': {
+ 'quux': 800,
+ },
+ 'other': True,
+ }
+
+ On the other hand, if a ``.``-delimited **field path** is used in the
+ update:
+
+ .. code-block:: python
+
+ >>> field_updates = {
+ ... 'foo.quux': 800,
+ ... }
+ >>> document.update(field_updates)
+
+ then only ``foo.quux`` will be updated on the server and the
+ field ``foo.bar`` will remain intact:
+
+ .. code-block:: python
+
+ >>> snapshot = document.get()
+ >>> snapshot.to_dict()
+ {
+ 'foo': {
+ 'bar': 'baz',
+ 'quux': 800,
+ },
+ 'other': True,
+ }
+
+ .. warning::
+
+ A **field path** can only be used as a top-level key in
+ ``field_updates``.
+
+ To delete / remove a field from an existing document, use the
+ :attr:`~google.cloud.firestore_v1.transforms.DELETE_FIELD` sentinel.
+ So with the example above, sending
+
+ .. code-block:: python
+
+ >>> field_updates = {
+ ... 'other': firestore.DELETE_FIELD,
+ ... }
+ >>> document.update(field_updates)
+
+ would update the value on the server to:
+
+ .. code-block:: python
+
+ >>> snapshot = document.get()
+ >>> snapshot.to_dict()
+ {
+ 'foo': {
+ 'bar': 'baz',
+ },
+ }
+
+ To set a field to the current time on the server when the
+ update is received, use the
+ :attr:`~google.cloud.firestore_v1.transforms.SERVER_TIMESTAMP`
+ sentinel.
+ Sending
+
+ .. code-block:: python
+
+ >>> field_updates = {
+ ... 'foo.now': firestore.SERVER_TIMESTAMP,
+ ... }
+ >>> document.update(field_updates)
+
+ would update the value on the server to:
+
+ .. code-block:: python
+
+ >>> snapshot = document.get()
+ >>> snapshot.to_dict()
+ {
+ 'foo': {
+ 'bar': 'baz',
+ 'now': datetime.datetime(2012, ...),
+ },
+ 'other': True,
+ }
+
+ Args:
+ field_updates (dict): Field names or paths to update and values
+ to update with.
+ option (Optional[:class:`~google.cloud.firestore_v1.client.WriteOption`]):
+ A write option to make assertions / preconditions on the server
+ state of the document before applying changes.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.types.WriteResult`:
+ The write result corresponding to the updated document. A write
+ result contains an ``update_time`` field.
+
+ Raises:
+ ~google.cloud.exceptions.NotFound: If the document does not exist.
+ """
+ batch = self._client.batch()
+ batch.update(self, field_updates, option=option)
+ write_results = batch.commit()
+ return _first_write_result(write_results)
+
+ def delete(self, option=None):
+ """Delete the current document in the Firestore database.
+
+ Args:
+ option (Optional[:class:`~google.cloud.firestore_v1.client.WriteOption`]):
+ A write option to make assertions / preconditions on the server
+ state of the document before applying changes.
+
+ Returns:
+ :class:`google.protobuf.timestamp_pb2.Timestamp`:
+ The time that the delete request was received by the server.
+ If the document did not exist when the delete was sent (i.e.
+ nothing was deleted), this method will still succeed and will
+ still return the time that the request was received by the server.
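+
+ Example:
+ A minimal sketch, assuming ``db`` is an already-initialized
+ :class:`~google.cloud.firestore_v1.client.Client`:
+
+ .. code-block:: python
+
+ >>> doc_ref = db.collection(u'users').document(u'alovelace')
+ >>> delete_time = doc_ref.delete()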
+ """
+ write_pb = _helpers.pb_for_delete(self._document_path, option)
+ commit_response = self._client._firestore_api.commit(
+ self._client._database_string,
+ [write_pb],
+ transaction=None,
+ metadata=self._client._rpc_metadata,
+ )
+
+ return commit_response.commit_time
+
+ def get(self, field_paths=None, transaction=None):
+ """Retrieve a snapshot of the current document.
+
+ See :meth:`~google.cloud.firestore_v1.client.Client.field_path` for
+ more information on **field paths**.
+
+ If a ``transaction`` is used and it already has write operations
+ added, this method cannot be used (i.e. read-after-write is not
+ allowed).
+
+ Args:
+ field_paths (Optional[Iterable[str, ...]]): An iterable of field
+ paths (``.``-delimited list of field names) to use as a
+ projection of document fields in the returned results. If
+ no value is provided, all fields will be returned.
+ transaction (Optional[:class:`~google.cloud.firestore_v1.transaction.Transaction`]):
+ An existing transaction that this reference
+ will be retrieved in.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.document.DocumentSnapshot`:
+ A snapshot of the current document. If the document does not
+ exist at the time the snapshot is taken, the snapshot's
+ :attr:`reference`, :attr:`data`, :attr:`update_time`, and
+ :attr:`create_time` attributes will all be ``None`` and
+ its :attr:`exists` attribute will be ``False``.
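+
+ Example:
+ A minimal sketch, assuming ``db`` is an already-initialized
+ :class:`~google.cloud.firestore_v1.client.Client` and the
+ document exists with the data shown:
+
+ .. code-block:: python
+
+ >>> doc_ref = db.collection(u'users').document(u'alovelace')
+ >>> snapshot = doc_ref.get(field_paths=[u'foo.bar'])
+ >>> snapshot.exists
+ True
+ >>> snapshot.to_dict()
+ {'foo': {'bar': 'baz'}}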
+ """
+ if isinstance(field_paths, six.string_types):
+ raise ValueError("'field_paths' must be a sequence of paths, not a string.")
+
+ if field_paths is not None:
+ mask = common_pb2.DocumentMask(field_paths=sorted(field_paths))
+ else:
+ mask = None
+
+ firestore_api = self._client._firestore_api
+ try:
+ document_pb = firestore_api.get_document(
+ self._document_path,
+ mask=mask,
+ transaction=_helpers.get_transaction_id(transaction),
+ metadata=self._client._rpc_metadata,
+ )
+ except exceptions.NotFound:
+ data = None
+ exists = False
+ create_time = None
+ update_time = None
+ else:
+ data = _helpers.decode_dict(document_pb.fields, self._client)
+ exists = True
+ create_time = document_pb.create_time
+ update_time = document_pb.update_time
+
+ return DocumentSnapshot(
+ reference=self,
+ data=data,
+ exists=exists,
+ read_time=None, # No server read_time available
+ create_time=create_time,
+ update_time=update_time,
+ )
+
+ def collections(self, page_size=None):
+ """List subcollections of the current document.
+
+ Args:
+ page_size (Optional[int]): The maximum number of collections
+ in each page of results from this request. Non-positive values
+ are ignored. Defaults to a sensible value set by the API.
+
+ Returns:
+ Sequence[:class:`~google.cloud.firestore_v1.collection.CollectionReference`]:
+ An iterator of the subcollections of the current document. If
+ the document does not exist at the time of the request, the
+ iterator will be empty.
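+
+ Example:
+ A minimal sketch, assuming ``doc_ref`` is an existing
+ :class:`~google.cloud.firestore_v1.document.DocumentReference`:
+
+ .. code-block:: python
+
+ >>> for collection_ref in doc_ref.collections():
+ ... print(collection_ref.id)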
+ """
+ iterator = self._client._firestore_api.list_collection_ids(
+ self._document_path,
+ page_size=page_size,
+ metadata=self._client._rpc_metadata,
+ )
+ iterator.document = self
+ iterator.item_to_value = _item_to_collection_ref
+ return iterator
+
+ def on_snapshot(self, callback):
+ """Watch this document.
+
+ This starts a watch on this document using a background thread. The
+ provided callback is run on the snapshot.
+
+ Args:
+ callback (Callable[[List[:class:`~google.cloud.firestore_v1.document.DocumentSnapshot`], \
+ List[:class:`~google.cloud.firestore_v1.watch.DocumentChange`], datetime.datetime], NoneType]):
+ a callback to run when a change occurs
+
+ Example:
+
+ .. code-block:: python
+
+ from google.cloud import firestore_v1
+
+ db = firestore_v1.Client()
+ collection_ref = db.collection(u'users')
+
+ def on_snapshot(docs, changes, read_time):
+ for doc in docs:
+ print(u'{} => {}'.format(doc.id, doc.to_dict()))
+
+ doc_ref = db.collection(u'users').document(u'alovelace')
+
+ # Watch this document
+ doc_watch = doc_ref.on_snapshot(on_snapshot)
+
+ # Terminate this watch
+ doc_watch.unsubscribe()
+ """
+ return Watch.for_document(self, callback, DocumentSnapshot, DocumentReference)
+
+
+class DocumentSnapshot(object):
+ """A snapshot of document data in a Firestore database.
+
+ This represents data retrieved at a specific time and may not contain
+ all fields stored for the document (i.e. a hand-picked selection of
+ fields may have been retrieved).
+
+ Instances of this class are not intended to be constructed by hand,
+ rather they'll be returned as responses to various methods, such as
+ :meth:`~google.cloud.firestore_v1.document.DocumentReference.get`.
+
+ Args:
+ reference (:class:`~google.cloud.firestore_v1.document.DocumentReference`):
+ A document reference corresponding to the document that contains
+ the data in this snapshot.
+ data (Dict[str, Any]):
+ The data retrieved in the snapshot.
+ exists (bool):
+ Indicates if the document existed at the time the snapshot was
+ retrieved.
+ read_time (:class:`google.protobuf.timestamp_pb2.Timestamp`):
+ The time that this snapshot was read from the server.
+ create_time (:class:`google.protobuf.timestamp_pb2.Timestamp`):
+ The time that this document was created.
+ update_time (:class:`google.protobuf.timestamp_pb2.Timestamp`):
+ The time that this document was last updated.
+ """
+
+ def __init__(self, reference, data, exists, read_time, create_time, update_time):
+ self._reference = reference
+ # We want immutable data, so callers can't modify this value
+ # out from under us.
+ self._data = copy.deepcopy(data)
+ self._exists = exists
+ self.read_time = read_time
+ self.create_time = create_time
+ self.update_time = update_time
+
+ def __eq__(self, other):
+ if not isinstance(other, self.__class__):
+ return NotImplemented
+ return self._reference == other._reference and self._data == other._data
+
+ def __hash__(self):
+ seconds = self.update_time.seconds
+ nanos = self.update_time.nanos
+ return hash(self._reference) + hash(seconds) + hash(nanos)
+
+ @property
+ def _client(self):
+ """The client that owns the document reference for this snapshot.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.client.Client`:
+ The client that owns this document.
+ """
+ return self._reference._client
+
+ @property
+ def exists(self):
+ """Existence flag.
+
+ Indicates if the document existed at the time this snapshot
+ was retrieved.
+
+ Returns:
+ bool: The existence flag.
+ """
+ return self._exists
+
+ @property
+ def id(self):
+ """The document identifier (within its collection).
+
+ Returns:
+ str: The last component of the path of the document.
+ """
+ return self._reference.id
+
+ @property
+ def reference(self):
+ """Document reference corresponding to document that owns this data.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.document.DocumentReference`:
+ A document reference corresponding to this document.
+ """
+ return self._reference
+
+ def get(self, field_path):
+ """Get a value from the snapshot data.
+
+ If the data is nested, for example:
+
+ .. code-block:: python
+
+ >>> snapshot.to_dict()
+ {
+ 'top1': {
+ 'middle2': {
+ 'bottom3': 20,
+ 'bottom4': 22,
+ },
+ 'middle5': True,
+ },
+ 'top6': b'\x00\x01 foo',
+ }
+
+ a **field path** can be used to access the nested data. For
+ example:
+
+ .. code-block:: python
+
+ >>> snapshot.get('top1')
+ {
+ 'middle2': {
+ 'bottom3': 20,
+ 'bottom4': 22,
+ },
+ 'middle5': True,
+ }
+ >>> snapshot.get('top1.middle2')
+ {
+ 'bottom3': 20,
+ 'bottom4': 22,
+ }
+ >>> snapshot.get('top1.middle2.bottom3')
+ 20
+
+ See :meth:`~google.cloud.firestore_v1.client.Client.field_path` for
+ more information on **field paths**.
+
+ A copy is returned since the data may contain mutable values,
+ but the data stored in the snapshot must remain immutable.
+
+ Args:
+ field_path (str): A field path (``.``-delimited list of
+ field names).
+
+ Returns:
+ Any or None:
+ (A copy of) the value stored for the ``field_path``, or
+ None if the snapshot's document does not exist.
+
+ Raises:
+ KeyError: If the ``field_path`` does not match nested data
+ in the snapshot.
+ """
+ if not self._exists:
+ return None
+ nested_data = field_path_module.get_nested_value(field_path, self._data)
+ return copy.deepcopy(nested_data)
+
+ def to_dict(self):
+ """Retrieve the data contained in this snapshot.
+
+ A copy is returned since the data may contain mutable values,
+ but the data stored in the snapshot must remain immutable.
+
+ Returns:
+ Dict[str, Any] or None:
+ The data in the snapshot. Returns None if the referenced
+ document does not exist.
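+
+ Example:
+ A minimal sketch, assuming ``snapshot`` was returned by a
+ ``get()`` call; each call returns a fresh copy, so mutating
+ the result never alters the snapshot itself:
+
+ .. code-block:: python
+
+ >>> data = snapshot.to_dict()
+ >>> data[u'tampered'] = True
+ >>> snapshot.to_dict() == data
+ False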
+ """
+ if not self._exists:
+ return None
+ return copy.deepcopy(self._data)
+
+
+def _get_document_path(client, path):
+ """Convert a path tuple into a full path string.
+
+ Of the form:
+
+ ``projects/{project_id}/databases/{database_id}/...
+ documents/{document_path}``
+
+ Args:
+ client (:class:`~google.cloud.firestore_v1.client.Client`):
+ The client that holds configuration details and a GAPIC client
+ object.
+ path (Tuple[str, ...]): The components in a document path.
+
+ Returns:
+ str: The fully-qualified document path.
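+
+ For example, assuming a client whose ``_database_string`` is
+ ``'projects/my-project/databases/(default)'``:
+
+ .. code-block:: python
+
+ >>> _get_document_path(client, ('users', 'alovelace'))
+ 'projects/my-project/databases/(default)/documents/users/alovelace'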
+ """
+ parts = (client._database_string, "documents") + path
+ return _helpers.DOCUMENT_PATH_DELIMITER.join(parts)
+
+
+def _consume_single_get(response_iterator):
+ """Consume a gRPC stream that should contain a single response.
+
+ The stream will correspond to a ``BatchGetDocuments`` request made
+ for a single document.
+
+ Args:
+ response_iterator (~google.cloud.exceptions.GrpcRendezvous): A
+ streaming iterator returned from a ``BatchGetDocuments``
+ request.
+
+ Returns:
+ ~google.cloud.proto.firestore.v1.\
+ firestore_pb2.BatchGetDocumentsResponse: The single "get"
+ response in the batch.
+
+ Raises:
+ ValueError: If anything other than exactly one response is returned.
+ """
+ # Calling ``list()`` consumes the entire iterator.
+ all_responses = list(response_iterator)
+ if len(all_responses) != 1:
+ raise ValueError(
+ "Unexpected response from `BatchGetDocumentsResponse`",
+ all_responses,
+ "Expected only one result",
+ )
+
+ return all_responses[0]
+
+
+def _first_write_result(write_results):
+ """Get first write result from list.
+
+ For cases where ``len(write_results) > 1``, this assumes the writes
+ occurred at the same time (e.g. an update and a transform sent
+ in a single commit).
+
+ Args:
+ write_results (List[google.cloud.proto.firestore.v1.\
+ write_pb2.WriteResult, ...]): The write results from a
+ ``CommitResponse``.
+
+ Returns:
+ google.cloud.firestore_v1.types.WriteResult: The
+ lone write result from ``write_results``.
+
+ Raises:
+ ValueError: If there are zero write results. This is likely to
+ **never** occur, since the backend should be stable.
+ """
+ if not write_results:
+ raise ValueError("Expected at least one write result")
+
+ return write_results[0]
+
+
+def _item_to_collection_ref(iterator, item):
+ """Convert collection ID to collection ref.
+
+ Args:
+ iterator (google.api_core.page_iterator.GRPCIterator):
+ The iterator holding the paged response.
+ item (str): ID of the collection.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.collection.CollectionReference`:
+ A reference to the subcollection with the given ID.
+ """
+ return iterator.document.collection(item)
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/field_path.py b/venv/Lib/site-packages/google/cloud/firestore_v1/field_path.py
new file mode 100644
index 000000000..58b4f3b9a
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1/field_path.py
@@ -0,0 +1,395 @@
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Utilities for managing / converting field paths to / from strings."""
+
+try:
+ from collections import abc as collections_abc
+except ImportError: # Python 2.7
+ import collections as collections_abc
+
+import re
+
+import six
+
+
+_FIELD_PATH_MISSING_TOP = "{!r} is not contained in the data"
+_FIELD_PATH_MISSING_KEY = "{!r} is not contained in the data for the key {!r}"
+_FIELD_PATH_WRONG_TYPE = (
+ "The data at {!r} is not a dictionary, so it cannot contain the key {!r}"
+)
+
+_FIELD_PATH_DELIMITER = "."
+_BACKSLASH = "\\"
+_ESCAPED_BACKSLASH = _BACKSLASH * 2
+_BACKTICK = "`"
+_ESCAPED_BACKTICK = _BACKSLASH + _BACKTICK
+
+_SIMPLE_FIELD_NAME = re.compile("^[_a-zA-Z][_a-zA-Z0-9]*$")
+_LEADING_ALPHA_INVALID = re.compile("^[_a-zA-Z][_a-zA-Z0-9]*[^_a-zA-Z0-9]")
+PATH_ELEMENT_TOKENS = [
+ ("SIMPLE", r"[_a-zA-Z][_a-zA-Z0-9]*"), # unquoted elements
+ ("QUOTED", r"`(?:\\`|[^`])*?`"), # quoted elements, unquoted
+ ("DOT", r"\."), # separator
+]
+TOKENS_PATTERN = "|".join("(?P<{}>{})".format(*pair) for pair in PATH_ELEMENT_TOKENS)
+TOKENS_REGEX = re.compile(TOKENS_PATTERN)
+
+
+def _tokenize_field_path(path):
+ """Lex a field path into tokens (including dots).
+
+ Args:
+ path (str): field path to be lexed.
+ Yields:
+ str: the tokens (field-name elements and ``.`` separators).
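+
+ For example:
+
+ .. code-block:: python
+
+ >>> list(_tokenize_field_path('data.`my key`'))
+ ['data', '.', '`my key`']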
+ """
+ pos = 0
+ get_token = TOKENS_REGEX.match
+ match = get_token(path)
+ while match is not None:
+ type_ = match.lastgroup
+ value = match.group(type_)
+ yield value
+ pos = match.end()
+ match = get_token(path, pos)
+ if pos != len(path):
+ raise ValueError("Path {} not consumed, residue: {}".format(path, path[pos:]))
+
+
+def split_field_path(path):
+ """Split a field path into valid elements (without dots).
+
+ Args:
+ path (str): field path to be lexed.
+ Returns:
+ List(str): the path elements (with backtick quoting preserved).
+ Raises:
+ ValueError: if the path does not match the elements-interspersed-
+ with-dots pattern.
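+
+ For example:
+
+ .. code-block:: python
+
+ >>> split_field_path('data.`my key`')
+ ['data', '`my key`']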
+ """
+ if not path:
+ return []
+
+ elements = []
+ want_dot = False
+
+ for element in _tokenize_field_path(path):
+ if want_dot:
+ if element != ".":
+ raise ValueError("Invalid path: {}".format(path))
+ else:
+ want_dot = False
+ else:
+ if element == ".":
+ raise ValueError("Invalid path: {}".format(path))
+ elements.append(element)
+ want_dot = True
+
+ if not want_dot or not elements:
+ raise ValueError("Invalid path: {}".format(path))
+
+ return elements
+
+
+def parse_field_path(api_repr):
+ """Parse a **field path** from into a list of nested field names.
+
+ See :func:`field_path` for more on **field paths**.
+
+ Args:
+ api_repr (str):
+ The unique Firestore API representation which consists of
+ either simple or UTF-8 field names. It cannot exceed
+ 1500 bytes, and cannot be empty. Simple field names match
+ ``'^[_a-zA-Z][_a-zA-Z0-9]*$'``. All other field names are
+ escaped by surrounding them with backticks.
+
+ Returns:
+ List[str, ...]: The list of field names in the field path.
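+
+ For example:
+
+ .. code-block:: python
+
+ >>> parse_field_path('aa.`bb.cc`')
+ ['aa', 'bb.cc']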
+ """
+ # code dredged back up from
+ # https://github.com/googleapis/google-cloud-python/pull/5109/files
+ field_names = []
+ for field_name in split_field_path(api_repr):
+ # non-simple field name
+ if field_name[0] == "`" and field_name[-1] == "`":
+ field_name = field_name[1:-1]
+ field_name = field_name.replace(_ESCAPED_BACKTICK, _BACKTICK)
+ field_name = field_name.replace(_ESCAPED_BACKSLASH, _BACKSLASH)
+ field_names.append(field_name)
+ return field_names
+
+
+def render_field_path(field_names):
+ """Create a **field path** from a list of nested field names.
+
+ A **field path** is a ``.``-delimited concatenation of the field
+ names. It is used to represent a nested field. For example,
+ in the data
+
+ .. code-block:: python
+
+ data = {
+ 'aa': {
+ 'bb': {
+ 'cc': 10,
+ },
+ },
+ }
+
+ the field path ``'aa.bb.cc'`` represents that data stored in
+ ``data['aa']['bb']['cc']``.
+
+ Args:
+ field_names (Iterable[str, ...]): The list of field names.
+
+ Returns:
+ str: The ``.``-delimited field path.
+ """
+ result = []
+
+ for field_name in field_names:
+ match = _SIMPLE_FIELD_NAME.match(field_name)
+ if match and match.group(0) == field_name:
+ result.append(field_name)
+ else:
+ replaced = field_name.replace(_BACKSLASH, _ESCAPED_BACKSLASH).replace(
+ _BACKTICK, _ESCAPED_BACKTICK
+ )
+ result.append(_BACKTICK + replaced + _BACKTICK)
+
+ return _FIELD_PATH_DELIMITER.join(result)
+
+
+get_field_path = render_field_path # backward-compatibility
+
+
+def get_nested_value(field_path, data):
+ """Get a (potentially nested) value from a dictionary.
+
+ If the data is nested, for example:
+
+ .. code-block:: python
+
+ >>> data
+ {
+ 'top1': {
+ 'middle2': {
+ 'bottom3': 20,
+ 'bottom4': 22,
+ },
+ 'middle5': True,
+ },
+ 'top6': b'\x00\x01 foo',
+ }
+
+ a **field path** can be used to access the nested data. For
+ example:
+
+ .. code-block:: python
+
+ >>> get_nested_value('top1', data)
+ {
+ 'middle2': {
+ 'bottom3': 20,
+ 'bottom4': 22,
+ },
+ 'middle5': True,
+ }
+ >>> get_nested_value('top1.middle2', data)
+ {
+ 'bottom3': 20,
+ 'bottom4': 22,
+ }
+ >>> get_nested_value('top1.middle2.bottom3', data)
+ 20
+
+ See :meth:`~google.cloud.firestore_v1.client.Client.field_path` for
+ more information on **field paths**.
+
+ Args:
+ field_path (str): A field path (``.``-delimited list of
+ field names).
+ data (Dict[str, Any]): The (possibly nested) data.
+
+ Returns:
+ Any: (A copy of) the value stored for the ``field_path``.
+
+ Raises:
+ KeyError: If the ``field_path`` does not match nested data.
+ """
+ field_names = parse_field_path(field_path)
+
+ nested_data = data
+ for index, field_name in enumerate(field_names):
+ if isinstance(nested_data, collections_abc.Mapping):
+ if field_name in nested_data:
+ nested_data = nested_data[field_name]
+ else:
+ if index == 0:
+ msg = _FIELD_PATH_MISSING_TOP.format(field_name)
+ raise KeyError(msg)
+ else:
+ partial = render_field_path(field_names[:index])
+ msg = _FIELD_PATH_MISSING_KEY.format(field_name, partial)
+ raise KeyError(msg)
+ else:
+ partial = render_field_path(field_names[:index])
+ msg = _FIELD_PATH_WRONG_TYPE.format(partial, field_name)
+ raise KeyError(msg)
+
+ return nested_data
+
+
+class FieldPath(object):
+ """Field Path object for client use.
+
+ A field path is a sequence of element keys, separated by periods.
+ Each element key can be either a simple identifier, or a full unicode
+ string.
+
+ In the string representation of a field path, non-identifier elements
+ must be quoted using backticks, with internal backticks and backslashes
+ escaped with a backslash.
+
+ Args:
+ parts: (one or more strings)
+ Indicating path of the key to be used.
+ """
+
+ def __init__(self, *parts):
+ for part in parts:
+ if not isinstance(part, six.string_types) or not part:
+ error = "One or more components is not a string or is empty."
+ raise ValueError(error)
+ self.parts = tuple(parts)
+
+ @classmethod
+ def from_api_repr(cls, api_repr):
+ """Factory: create a FieldPath from the string formatted per the API.
+
+ Args:
+ api_repr (str): a string path, with non-identifier elements quoted.
+ It cannot exceed 1500 characters, and cannot be empty.
+ Returns:
+ (:class:`FieldPath`) An instance parsed from ``api_repr``.
+ Raises:
+ ValueError: If the parsing fails.
+ """
+ api_repr = api_repr.strip()
+ if not api_repr:
+ raise ValueError("Field path API representation cannot be empty.")
+ return cls(*parse_field_path(api_repr))
+
+ @classmethod
+ def from_string(cls, path_string):
+ """Factory: create a FieldPath from a unicode string representation.
+
+ This method splits on the character `.` and disallows the
+ characters `~*/[]`. To create a FieldPath whose components have
+ those characters, call the constructor.
+
+ Args:
+ path_string (str): A unicode string which cannot contain
+ `~*/[]` characters, cannot exceed 1500 bytes, and cannot be empty.
+
+ Returns:
+ (:class:`FieldPath`) An instance parsed from ``path_string``.
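+
+ For example:
+
+ .. code-block:: python
+
+ >>> FieldPath.from_string('a.b.c')
+ FieldPath('a','b','c')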
+ """
+ try:
+ return cls.from_api_repr(path_string)
+ except ValueError:
+ elements = path_string.split(".")
+ for element in elements:
+ if not element:
+ raise ValueError("Empty element")
+ if _LEADING_ALPHA_INVALID.match(element):
+ raise ValueError(
+ "Non-alphanum char in element with leading alpha: {}".format(
+ element
+ )
+ )
+ return FieldPath(*elements)
+
+ def __repr__(self):
+ paths = ""
+ for part in self.parts:
+ paths += "'" + part + "',"
+ paths = paths[:-1]
+ return "FieldPath({})".format(paths)
+
+ def __hash__(self):
+ return hash(self.to_api_repr())
+
+ def __eq__(self, other):
+ if isinstance(other, FieldPath):
+ return self.parts == other.parts
+ return NotImplemented
+
+ def __lt__(self, other):
+ if isinstance(other, FieldPath):
+ return self.parts < other.parts
+ return NotImplemented
+
+ def __add__(self, other):
+ """Adds `other` field path to end of this field path.
+
+ Args:
+ other (~google.cloud.firestore_v1._helpers.FieldPath, str):
+ The field path to add to the end of this `FieldPath`.
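+
+ For example:
+
+ .. code-block:: python
+
+ >>> FieldPath('a') + FieldPath('b') + 'c.d'
+ FieldPath('a','b','c','d')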
+ """
+ if isinstance(other, FieldPath):
+ parts = self.parts + other.parts
+ return FieldPath(*parts)
+ elif isinstance(other, six.string_types):
+ parts = self.parts + FieldPath.from_string(other).parts
+ return FieldPath(*parts)
+ else:
+ return NotImplemented
+
+ def to_api_repr(self):
+ """Render a quoted string representation of the FieldPath
+
+ Returns:
+ (str) Quoted string representation of the path stored
+ within this FieldPath.
+ """
+ return render_field_path(self.parts)
+
+ def eq_or_parent(self, other):
+ """Check whether ``other`` is an ancestor.
+
+ Returns:
+ (bool) True IFF ``other`` is an ancestor or equal to ``self``,
+ else False.
+ """
+ return self.parts[: len(other.parts)] == other.parts[: len(self.parts)]
+
+ def lineage(self):
+ """Return field paths for all parents.
+
+ Returns: Set[:class:`FieldPath`]
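+
+ For example:
+
+ .. code-block:: python
+
+ >>> FieldPath('a', 'b', 'c').lineage() == {
+ ... FieldPath('a'), FieldPath('a', 'b')}
+ True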
+ """
+ indexes = six.moves.range(1, len(self.parts))
+ return {FieldPath(*self.parts[:index]) for index in indexes}
+
+ @staticmethod
+ def document_id():
+ """A special FieldPath value to refer to the ID of a document. It can be used
+ in queries to sort or filter by the document ID.
+
+ Returns: A special sentinel value to refer to the ID of a document.
+ """
+ return "__name__"
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/gapic/__init__.py b/venv/Lib/site-packages/google/cloud/firestore_v1/gapic/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/gapic/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1/gapic/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..82add8954
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1/gapic/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/gapic/__pycache__/enums.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1/gapic/__pycache__/enums.cpython-36.pyc
new file mode 100644
index 000000000..20e95c065
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1/gapic/__pycache__/enums.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/gapic/__pycache__/firestore_client.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1/gapic/__pycache__/firestore_client.cpython-36.pyc
new file mode 100644
index 000000000..7dbbd2b11
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1/gapic/__pycache__/firestore_client.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/gapic/__pycache__/firestore_client_config.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1/gapic/__pycache__/firestore_client_config.cpython-36.pyc
new file mode 100644
index 000000000..f741175d9
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1/gapic/__pycache__/firestore_client_config.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/gapic/enums.py b/venv/Lib/site-packages/google/cloud/firestore_v1/gapic/enums.py
new file mode 100644
index 000000000..2d71222a5
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1/gapic/enums.py
@@ -0,0 +1,180 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Wrappers for protocol buffer enum types."""
+
+import enum
+
+
+class NullValue(enum.IntEnum):
+ """
+ ``NullValue`` is a singleton enumeration to represent the null value
+ for the ``Value`` type union.
+
+ The JSON representation for ``NullValue`` is JSON ``null``.
+
+ Attributes:
+ NULL_VALUE (int): Null value.
+ """
+
+ NULL_VALUE = 0
+
+
+class DocumentTransform(object):
+ class FieldTransform(object):
+ class ServerValue(enum.IntEnum):
+ """
+ A value that is calculated by the server.
+
+ Attributes:
+ SERVER_VALUE_UNSPECIFIED (int): Unspecified. This value must not be used.
+ REQUEST_TIME (int): The time at which the server processed the request, with millisecond
+ precision.
+ """
+
+ SERVER_VALUE_UNSPECIFIED = 0
+ REQUEST_TIME = 1
+
+
+class StructuredQuery(object):
+ class Direction(enum.IntEnum):
+ """
+ A sort direction.
+
+ Attributes:
+ DIRECTION_UNSPECIFIED (int): Unspecified.
+ ASCENDING (int): Ascending.
+ DESCENDING (int): Descending.
+ """
+
+ DIRECTION_UNSPECIFIED = 0
+ ASCENDING = 1
+ DESCENDING = 2
+
+ class CompositeFilter(object):
+ class Operator(enum.IntEnum):
+ """
+ A composite filter operator.
+
+ Attributes:
+ OPERATOR_UNSPECIFIED (int): Unspecified. This value must not be used.
+ AND (int): The results are required to satisfy each of the combined filters.
+ """
+
+ OPERATOR_UNSPECIFIED = 0
+ AND = 1
+
+ class FieldFilter(object):
+ class Operator(enum.IntEnum):
+ """
+ A field filter operator.
+
+ Attributes:
+ OPERATOR_UNSPECIFIED (int): Unspecified. This value must not be used.
+ LESS_THAN (int): The given ``field`` is less than the given ``value``.
+
+ Requires:
+
+ - That ``field`` come first in ``order_by``.
+ LESS_THAN_OR_EQUAL (int): The given ``field`` is less than or equal to the given ``value``.
+
+ Requires:
+
+ - That ``field`` come first in ``order_by``.
+ GREATER_THAN (int): The given ``field`` is greater than the given ``value``.
+
+ Requires:
+
+ - That ``field`` come first in ``order_by``.
+ GREATER_THAN_OR_EQUAL (int): The given ``field`` is greater than or equal to the given ``value``.
+
+ Requires:
+
+ - That ``field`` come first in ``order_by``.
+ EQUAL (int): The given ``field`` is equal to the given ``value``.
+ ARRAY_CONTAINS (int): The given ``field`` is an array that contains the given ``value``.
+ IN (int): The given ``field`` is equal to at least one value in the given
+ array.
+
+ Requires:
+
+ - That ``value`` is a non-empty ``ArrayValue`` with at most 10 values.
+ - No other ``IN``, ``ARRAY_CONTAINS_ANY``, or ``NOT_IN``.
+ ARRAY_CONTAINS_ANY (int): The given ``field`` is an array that contains any of the values in
+ the given array.
+
+ Requires:
+
+ - That ``value`` is a non-empty ``ArrayValue`` with at most 10 values.
+ - No other ``IN``, ``ARRAY_CONTAINS_ANY``, or ``NOT_IN``.
+ """
+
+ OPERATOR_UNSPECIFIED = 0
+ LESS_THAN = 1
+ LESS_THAN_OR_EQUAL = 2
+ GREATER_THAN = 3
+ GREATER_THAN_OR_EQUAL = 4
+ EQUAL = 5
+ ARRAY_CONTAINS = 7
+ IN = 8
+ ARRAY_CONTAINS_ANY = 9
+
+ class UnaryFilter(object):
+ class Operator(enum.IntEnum):
+ """
+ A unary operator.
+
+ Attributes:
+ OPERATOR_UNSPECIFIED (int): Unspecified. This value must not be used.
+ IS_NAN (int): The given ``field`` is equal to ``NaN``.
+ IS_NULL (int): The given ``field`` is equal to ``NULL``.
+ """
+
+ OPERATOR_UNSPECIFIED = 0
+ IS_NAN = 2
+ IS_NULL = 3
+
+
+class TargetChange(object):
+ class TargetChangeType(enum.IntEnum):
+ """
+ The type of change.
+
+ Attributes:
+ NO_CHANGE (int): No change has occurred. Used only to send an updated
+ ``resume_token``.
+ ADD (int): The targets have been added.
+ REMOVE (int): The targets have been removed.
+ CURRENT (int): The targets reflect all changes committed before the targets were
+ added to the stream.
+
+ This will be sent after or with a ``read_time`` that is greater than or
+ equal to the time at which the targets were added.
+
+ Listeners can wait for this change if read-after-write semantics are
+ desired.
+ RESET (int): The targets have been reset, and a new initial state for the targets
+ will be returned in subsequent changes.
+
+ After the initial state is complete, ``CURRENT`` will be returned even
+ if the target was previously indicated to be ``CURRENT``.
+ """
+
+ NO_CHANGE = 0
+ ADD = 1
+ REMOVE = 2
+ CURRENT = 3
+ RESET = 4
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/gapic/firestore_client.py b/venv/Lib/site-packages/google/cloud/firestore_v1/gapic/firestore_client.py
new file mode 100644
index 000000000..3b6ee28af
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1/gapic/firestore_client.py
@@ -0,0 +1,1673 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Accesses the google.firestore.v1 Firestore API."""
+
+import functools
+import pkg_resources
+import warnings
+
+from google.oauth2 import service_account
+import google.api_core.client_options
+import google.api_core.gapic_v1.client_info
+import google.api_core.gapic_v1.config
+import google.api_core.gapic_v1.method
+import google.api_core.gapic_v1.routing_header
+import google.api_core.grpc_helpers
+import google.api_core.page_iterator
+import google.api_core.path_template
+import google.api_core.protobuf_helpers
+import grpc
+
+from google.cloud.firestore_v1.gapic import enums
+from google.cloud.firestore_v1.gapic import firestore_client_config
+from google.cloud.firestore_v1.gapic.transports import firestore_grpc_transport
+from google.cloud.firestore_v1.proto import common_pb2
+from google.cloud.firestore_v1.proto import document_pb2
+from google.cloud.firestore_v1.proto import firestore_pb2
+from google.cloud.firestore_v1.proto import firestore_pb2_grpc
+from google.cloud.firestore_v1.proto import query_pb2
+from google.cloud.firestore_v1.proto import write_pb2
+from google.protobuf import empty_pb2
+from google.protobuf import timestamp_pb2
+
+
+_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(
+ "google-cloud-firestore",
+).version
+
+
+class FirestoreClient(object):
+ """
+ The Cloud Firestore service.
+
+ Cloud Firestore is a fast, fully managed, serverless, cloud-native NoSQL
+ document database that simplifies storing, syncing, and querying data for
+ your mobile, web, and IoT apps at global scale. Its client libraries provide
+ live synchronization and offline support, while its security features and
+ integrations with Firebase and Google Cloud Platform (GCP) accelerate
+ building truly serverless apps.
+ """
+
+ SERVICE_ADDRESS = "firestore.googleapis.com:443"
+ """The default address of the service."""
+
+ # The name of the interface for this client. This is the key used to
+ # find the method configuration in the client_config dictionary.
+ _INTERFACE_NAME = "google.firestore.v1.Firestore"
+
+ @classmethod
+ def from_service_account_file(cls, filename, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ FirestoreClient: The constructed client.
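+
+ Example:
+ A minimal sketch; the filename below is a placeholder for a
+ real service account key file:
+
+ >>> from google.cloud import firestore_v1
+ >>>
+ >>> client = firestore_v1.FirestoreClient.from_service_account_file(
+ ... 'service-account.json')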
+ """
+ credentials = service_account.Credentials.from_service_account_file(filename)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ from_service_account_json = from_service_account_file
+
+ @classmethod
+ def any_path_path(cls, project, database, document, any_path):
+ """Return a fully-qualified any_path string."""
+ return google.api_core.path_template.expand(
+ "projects/{project}/databases/{database}/documents/{document}/{any_path=**}",
+ project=project,
+ database=database,
+ document=document,
+ any_path=any_path,
+ )
+
+ @classmethod
+ def database_root_path(cls, project, database):
+ """Return a fully-qualified database_root string."""
+ return google.api_core.path_template.expand(
+ "projects/{project}/databases/{database}",
+ project=project,
+ database=database,
+ )
+
+ @classmethod
+ def document_path_path(cls, project, database, document_path):
+ """Return a fully-qualified document_path string."""
+ return google.api_core.path_template.expand(
+ "projects/{project}/databases/{database}/documents/{document_path=**}",
+ project=project,
+ database=database,
+ document_path=document_path,
+ )
+
+ @classmethod
+ def document_root_path(cls, project, database):
+ """Return a fully-qualified document_root string."""
+ return google.api_core.path_template.expand(
+ "projects/{project}/databases/{database}/documents",
+ project=project,
+ database=database,
+ )
+
+ def __init__(
+ self,
+ transport=None,
+ channel=None,
+ credentials=None,
+ client_config=None,
+ client_info=None,
+ client_options=None,
+ ):
+ """Constructor.
+
+ Args:
+ transport (Union[~.FirestoreGrpcTransport,
+ Callable[[~.Credentials, type], ~.FirestoreGrpcTransport]): A transport
+ instance, responsible for actually making the API calls.
+ The default transport uses the gRPC protocol.
+ This argument may also be a callable which returns a
+ transport instance. Callables will be sent the credentials
+ as the first argument and the default transport class as
+ the second argument.
+ channel (grpc.Channel): DEPRECATED. A ``Channel`` instance
+ through which to make calls. This argument is mutually exclusive
+ with ``credentials``; providing both will raise an exception.
+ credentials (google.auth.credentials.Credentials): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is mutually exclusive with providing a
+ transport instance to ``transport``; doing so will raise
+ an exception.
+ client_config (dict): DEPRECATED. A dictionary of call options for
+ each method. If not specified, the default configuration is used.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+ client_options (Union[dict, google.api_core.client_options.ClientOptions]):
+ Client options used to set user options on the client. API Endpoint
+ should be set through client_options.
+ """
+ # Raise deprecation warnings for things we want to go away.
+ if client_config is not None:
+ warnings.warn(
+ "The `client_config` argument is deprecated.",
+ PendingDeprecationWarning,
+ stacklevel=2,
+ )
+ else:
+ client_config = firestore_client_config.config
+
+ if channel:
+ warnings.warn(
+ "The `channel` argument is deprecated; use " "`transport` instead.",
+ PendingDeprecationWarning,
+ stacklevel=2,
+ )
+
+ api_endpoint = self.SERVICE_ADDRESS
+ if client_options:
+ if type(client_options) == dict:
+ client_options = google.api_core.client_options.from_dict(
+ client_options
+ )
+ if client_options.api_endpoint:
+ api_endpoint = client_options.api_endpoint
+
+ # Instantiate the transport.
+ # The transport is responsible for handling serialization and
+ # deserialization and actually sending data to the service.
+ if transport:
+ if callable(transport):
+ self.transport = transport(
+ credentials=credentials,
+ default_class=firestore_grpc_transport.FirestoreGrpcTransport,
+ address=api_endpoint,
+ )
+ else:
+ if credentials:
+ raise ValueError(
+ "Received both a transport instance and "
+ "credentials; these are mutually exclusive."
+ )
+ self.transport = transport
+ else:
+ self.transport = firestore_grpc_transport.FirestoreGrpcTransport(
+ address=api_endpoint, channel=channel, credentials=credentials,
+ )
+
+ if client_info is None:
+ client_info = google.api_core.gapic_v1.client_info.ClientInfo(
+ gapic_version=_GAPIC_LIBRARY_VERSION,
+ )
+ else:
+ client_info.gapic_version = _GAPIC_LIBRARY_VERSION
+ self._client_info = client_info
+
+ # Parse out the default settings for retry and timeout for each RPC
+ # from the client configuration.
+ # (Ordinarily, these are the defaults specified in the `*_config.py`
+ # file next to this one.)
+ self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
+ client_config["interfaces"][self._INTERFACE_NAME],
+ )
+
+ # Save a dictionary of cached API call functions.
+ # These are the actual callables which invoke the proper
+ # transport methods, wrapped with `wrap_method` to add retry,
+ # timeout, and the like.
+ self._inner_api_calls = {}
+
+ # Service calls
+ def get_document(
+ self,
+ name,
+ mask=None,
+ transaction=None,
+ read_time=None,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Gets a single document.
+
+ Example:
+ >>> from google.cloud import firestore_v1
+ >>>
+ >>> client = firestore_v1.FirestoreClient()
+ >>>
+ >>> # TODO: Initialize `name`:
+ >>> name = ''
+ >>>
+ >>> response = client.get_document(name)
+
+ Args:
+ name (str): Required. The resource name of the Document to get. In the format:
+ ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
+ mask (Union[dict, ~google.cloud.firestore_v1.types.DocumentMask]): The fields to return. If not set, returns all fields.
+
+ If the document has a field that is not present in this mask, that field
+ will not be returned in the response.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1.types.DocumentMask`
+ transaction (bytes): Reads the document in a transaction.
+ read_time (Union[dict, ~google.cloud.firestore_v1.types.Timestamp]): Reads the version of the document at the given time.
+ This may not be older than 270 seconds.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1.types.Timestamp`
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ A :class:`~google.cloud.firestore_v1.types.Document` instance.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "get_document" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "get_document"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.get_document,
+ default_retry=self._method_configs["GetDocument"].retry,
+ default_timeout=self._method_configs["GetDocument"].timeout,
+ client_info=self._client_info,
+ )
+
+ # Sanity check: We have some fields which are mutually exclusive;
+ # raise ValueError if more than one is sent.
+ google.api_core.protobuf_helpers.check_oneof(
+ transaction=transaction, read_time=read_time,
+ )
+
+ request = firestore_pb2.GetDocumentRequest(
+ name=name, mask=mask, transaction=transaction, read_time=read_time,
+ )
+ if metadata is None:
+ metadata = []
+ metadata = list(metadata)
+ try:
+ routing_header = [("name", name)]
+ except AttributeError:
+ pass
+ else:
+ routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
+ routing_header
+ )
+ metadata.append(routing_metadata)
+
+ return self._inner_api_calls["get_document"](
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
+
+ def list_documents(
+ self,
+ parent,
+ collection_id,
+ page_size=None,
+ order_by=None,
+ mask=None,
+ transaction=None,
+ read_time=None,
+ show_missing=None,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Lists documents.
+
+ Example:
+ >>> from google.cloud import firestore_v1
+ >>>
+ >>> client = firestore_v1.FirestoreClient()
+ >>>
+ >>> # TODO: Initialize `parent`:
+ >>> parent = ''
+ >>>
+ >>> # TODO: Initialize `collection_id`:
+ >>> collection_id = ''
+ >>>
+ >>> # Iterate over all results
+ >>> for element in client.list_documents(parent, collection_id):
+ ... # process element
+ ... pass
+ >>>
+ >>>
+ >>> # Alternatively:
+ >>>
+ >>> # Iterate over results one page at a time
+ >>> for page in client.list_documents(parent, collection_id).pages:
+ ... for element in page:
+ ... # process element
+ ... pass
+
+ Args:
+ parent (str): Required. The parent resource name. In the format:
+ ``projects/{project_id}/databases/{database_id}/documents`` or
+ ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
+ For example: ``projects/my-project/databases/my-database/documents`` or
+ ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom``
+ collection_id (str): Required. The collection ID, relative to ``parent``, to list. For
+ example: ``chatrooms`` or ``messages``.
+ page_size (int): The maximum number of resources contained in the
+ underlying API response. If page streaming is performed per-
+ resource, this parameter does not affect the return value. If page
+ streaming is performed per-page, this determines the maximum number
+ of resources in a page.
+ order_by (str): The order to sort results by. For example: ``priority desc, name``.
+ mask (Union[dict, ~google.cloud.firestore_v1.types.DocumentMask]): The fields to return. If not set, returns all fields.
+
+ If a document has a field that is not present in this mask, that field
+ will not be returned in the response.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1.types.DocumentMask`
+ transaction (bytes): Reads documents in a transaction.
+ read_time (Union[dict, ~google.cloud.firestore_v1.types.Timestamp]): Reads documents as they were at the given time.
+ This may not be older than 270 seconds.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1.types.Timestamp`
+ show_missing (bool): If the list should show missing documents. A missing document is a
+ document that does not exist but has sub-documents. These documents will
+ be returned with a key but will not have fields,
+ ``Document.create_time``, or ``Document.update_time`` set.
+
+ Requests with ``show_missing`` may not specify ``where`` or
+ ``order_by``.
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ A :class:`~google.api_core.page_iterator.PageIterator` instance.
+ An iterable of :class:`~google.cloud.firestore_v1.types.Document` instances.
+ You can also iterate over the pages of the response
+ using its `pages` property.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "list_documents" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "list_documents"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.list_documents,
+ default_retry=self._method_configs["ListDocuments"].retry,
+ default_timeout=self._method_configs["ListDocuments"].timeout,
+ client_info=self._client_info,
+ )
+
+ # Sanity check: We have some fields which are mutually exclusive;
+ # raise ValueError if more than one is sent.
+ google.api_core.protobuf_helpers.check_oneof(
+ transaction=transaction, read_time=read_time,
+ )
+
+ request = firestore_pb2.ListDocumentsRequest(
+ parent=parent,
+ collection_id=collection_id,
+ page_size=page_size,
+ order_by=order_by,
+ mask=mask,
+ transaction=transaction,
+ read_time=read_time,
+ show_missing=show_missing,
+ )
+ iterator = google.api_core.page_iterator.GRPCIterator(
+ client=None,
+ method=functools.partial(
+ self._inner_api_calls["list_documents"],
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ ),
+ request=request,
+ items_field="documents",
+ request_token_field="page_token",
+ response_token_field="next_page_token",
+ )
+ return iterator
+
+ def create_document(
+ self,
+ parent,
+ collection_id,
+ document_id,
+ document,
+ mask=None,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Creates a new document.
+
+ Example:
+ >>> from google.cloud import firestore_v1
+ >>>
+ >>> client = firestore_v1.FirestoreClient()
+ >>>
+ >>> # TODO: Initialize `parent`:
+ >>> parent = ''
+ >>>
+ >>> # TODO: Initialize `collection_id`:
+ >>> collection_id = ''
+ >>>
+ >>> # TODO: Initialize `document`:
+ >>> document = {}
+ >>>
+ >>> response = client.create_document(parent, collection_id, document)
+
+ Args:
+ parent (str): Required. The parent resource. For example:
+ ``projects/{project_id}/databases/{database_id}/documents`` or
+ ``projects/{project_id}/databases/{database_id}/documents/chatrooms/{chatroom_id}``
+ collection_id (str): Required. The collection ID, relative to ``parent``, to list. For
+ example: ``chatrooms``.
+ document (Union[dict, ~google.cloud.firestore_v1.types.Document]): Required. The document to create. ``name`` must not be set.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1.types.Document`
+ document_id (str): The client-assigned document ID to use for this document.
+
+ Optional. If not specified, an ID will be assigned by the service.
+ mask (Union[dict, ~google.cloud.firestore_v1.types.DocumentMask]): The fields to return. If not set, returns all fields.
+
+ If the document has a field that is not present in this mask, that field
+ will not be returned in the response.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1.types.DocumentMask`
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ A :class:`~google.cloud.firestore_v1.types.Document` instance.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "create_document" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "create_document"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.create_document,
+ default_retry=self._method_configs["CreateDocument"].retry,
+ default_timeout=self._method_configs["CreateDocument"].timeout,
+ client_info=self._client_info,
+ )
+
+ request = firestore_pb2.CreateDocumentRequest(
+ parent=parent,
+ collection_id=collection_id,
+ document=document,
+ document_id=document_id,
+ mask=mask,
+ )
+ return self._inner_api_calls["create_document"](
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
+
+ def update_document(
+ self,
+ document,
+ update_mask=None,
+ mask=None,
+ current_document=None,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Updates or inserts a document.
+
+ Example:
+ >>> from google.cloud import firestore_v1
+ >>>
+ >>> client = firestore_v1.FirestoreClient()
+ >>>
+ >>> # TODO: Initialize `document`:
+ >>> document = {}
+ >>>
+ >>> response = client.update_document(document)
+
+ Args:
+ document (Union[dict, ~google.cloud.firestore_v1.types.Document]): Required. The updated document.
+ Creates the document if it does not already exist.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1.types.Document`
+ update_mask (Union[dict, ~google.cloud.firestore_v1.types.DocumentMask]): The fields to update.
+ None of the field paths in the mask may contain a reserved name.
+
+ If the document exists on the server and has fields not referenced in the
+ mask, they are left unchanged.
+ Fields referenced in the mask, but not present in the input document, are
+ deleted from the document on the server.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1.types.DocumentMask`
+ mask (Union[dict, ~google.cloud.firestore_v1.types.DocumentMask]): The fields to return. If not set, returns all fields.
+
+ If the document has a field that is not present in this mask, that field
+ will not be returned in the response.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1.types.DocumentMask`
+ current_document (Union[dict, ~google.cloud.firestore_v1.types.Precondition]): An optional precondition on the document.
+ The request will fail if this is set and not met by the target document.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1.types.Precondition`
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ A :class:`~google.cloud.firestore_v1.types.Document` instance.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "update_document" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "update_document"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.update_document,
+ default_retry=self._method_configs["UpdateDocument"].retry,
+ default_timeout=self._method_configs["UpdateDocument"].timeout,
+ client_info=self._client_info,
+ )
+
+ request = firestore_pb2.UpdateDocumentRequest(
+ document=document,
+ update_mask=update_mask,
+ mask=mask,
+ current_document=current_document,
+ )
+ if metadata is None:
+ metadata = []
+ metadata = list(metadata)
+ try:
+ routing_header = [("document.name", document.name)]
+ except AttributeError:
+ pass
+ else:
+ routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
+ routing_header
+ )
+ metadata.append(routing_metadata)
+
+ return self._inner_api_calls["update_document"](
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
+
+ def delete_document(
+ self,
+ name,
+ current_document=None,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Deletes a document.
+
+ Example:
+ >>> from google.cloud import firestore_v1
+ >>>
+ >>> client = firestore_v1.FirestoreClient()
+ >>>
+ >>> # TODO: Initialize `name`:
+ >>> name = ''
+ >>>
+ >>> client.delete_document(name)
+
+ Args:
+ name (str): Required. The resource name of the Document to delete. In the
+ format:
+ ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
+ current_document (Union[dict, ~google.cloud.firestore_v1.types.Precondition]): An optional precondition on the document.
+ The request will fail if this is set and not met by the target document.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1.types.Precondition`
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "delete_document" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "delete_document"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.delete_document,
+ default_retry=self._method_configs["DeleteDocument"].retry,
+ default_timeout=self._method_configs["DeleteDocument"].timeout,
+ client_info=self._client_info,
+ )
+
+ request = firestore_pb2.DeleteDocumentRequest(
+ name=name, current_document=current_document,
+ )
+ if metadata is None:
+ metadata = []
+ metadata = list(metadata)
+ try:
+ routing_header = [("name", name)]
+ except AttributeError:
+ pass
+ else:
+ routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
+ routing_header
+ )
+ metadata.append(routing_metadata)
+
+ self._inner_api_calls["delete_document"](
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
+
+ def batch_get_documents(
+ self,
+ database,
+ documents=None,
+ mask=None,
+ transaction=None,
+ new_transaction=None,
+ read_time=None,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Gets multiple documents.
+
+ Documents returned by this method are not guaranteed to be returned in the
+ same order that they were requested.
+
+ Example:
+ >>> from google.cloud import firestore_v1
+ >>>
+ >>> client = firestore_v1.FirestoreClient()
+ >>>
+ >>> # TODO: Initialize `database`:
+ >>> database = ''
+ >>>
+ >>> for element in client.batch_get_documents(database):
+ ... # process element
+ ... pass
+
+ Args:
+ database (str): Required. The database name. In the format:
+ ``projects/{project_id}/databases/{database_id}``.
+ documents (list[str]): The names of the documents to retrieve. In the format:
+ ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
+ The request will fail if any of the documents is not a child resource of
+ the given ``database``. Duplicate names will be elided.
+ mask (Union[dict, ~google.cloud.firestore_v1.types.DocumentMask]): The fields to return. If not set, returns all fields.
+
+ If a document has a field that is not present in this mask, that field will
+ not be returned in the response.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1.types.DocumentMask`
+ transaction (bytes): Reads documents in a transaction.
+ new_transaction (Union[dict, ~google.cloud.firestore_v1.types.TransactionOptions]): Starts a new transaction and reads the documents.
+ Defaults to a read-only transaction.
+ The new transaction ID will be returned as the first response in the
+ stream.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1.types.TransactionOptions`
+ read_time (Union[dict, ~google.cloud.firestore_v1.types.Timestamp]): Reads documents as they were at the given time.
+ This may not be older than 270 seconds.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1.types.Timestamp`
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ Iterable[~google.cloud.firestore_v1.types.BatchGetDocumentsResponse].
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "batch_get_documents" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "batch_get_documents"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.batch_get_documents,
+ default_retry=self._method_configs["BatchGetDocuments"].retry,
+ default_timeout=self._method_configs["BatchGetDocuments"].timeout,
+ client_info=self._client_info,
+ )
+
+ # Sanity check: We have some fields which are mutually exclusive;
+ # raise ValueError if more than one is sent.
+ google.api_core.protobuf_helpers.check_oneof(
+ transaction=transaction,
+ new_transaction=new_transaction,
+ read_time=read_time,
+ )
+
+ request = firestore_pb2.BatchGetDocumentsRequest(
+ database=database,
+ documents=documents,
+ mask=mask,
+ transaction=transaction,
+ new_transaction=new_transaction,
+ read_time=read_time,
+ )
+ if metadata is None:
+ metadata = []
+ metadata = list(metadata)
+ try:
+ routing_header = [("database", database)]
+ except AttributeError:
+ pass
+ else:
+ routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
+ routing_header
+ )
+ metadata.append(routing_metadata)
+
+ return self._inner_api_calls["batch_get_documents"](
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
+
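+ # Sketch: reading several documents at one consistent snapshot by letting
+ # the server open a read-only transaction (``doc1``/``doc2`` are assumed
+ # document resource names):
+ #
+ #   for resp in client.batch_get_documents(
+ #           database, documents=[doc1, doc2], new_transaction={'read_only': {}}):
+ #       if resp.HasField('found'):
+ #           print(resp.found.name)
+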
+ def batch_write(
+ self,
+ database,
+ writes=None,
+ labels=None,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Applies a batch of write operations.
+
+ The BatchWrite method does not apply the write operations atomically and
+ can apply them out of order. The method does not allow more than one write
+ per document. Each write succeeds or fails independently. See the
+ ``BatchWriteResponse`` for the success status of each write.
+
+ If you require an atomically applied set of writes, use ``Commit``
+ instead.
+
+ Example:
+ >>> from google.cloud import firestore_v1
+ >>>
+ >>> client = firestore_v1.FirestoreClient()
+ >>>
+ >>> # TODO: Initialize `database`:
+ >>> database = ''
+ >>>
+ >>> response = client.batch_write(database)
+
+ Args:
+ database (str): Required. The database name. In the format:
+ ``projects/{project_id}/databases/{database_id}``.
+ writes (list[Union[dict, ~google.cloud.firestore_v1.types.Write]]): The writes to apply.
+
+ The method does not apply writes atomically and does not guarantee ordering.
+ Each write succeeds or fails independently. You cannot write to the same
+ document more than once per request.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1.types.Write`
+ labels (dict[str -> str]): Labels associated with this batch write.
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ A :class:`~google.cloud.firestore_v1.types.BatchWriteResponse` instance.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "batch_write" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "batch_write"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.batch_write,
+ default_retry=self._method_configs["BatchWrite"].retry,
+ default_timeout=self._method_configs["BatchWrite"].timeout,
+ client_info=self._client_info,
+ )
+
+ request = firestore_pb2.BatchWriteRequest(
+ database=database, writes=writes, labels=labels,
+ )
+ if metadata is None:
+ metadata = []
+ metadata = list(metadata)
+ try:
+ routing_header = [("database", database)]
+ except AttributeError:
+ pass
+ else:
+ routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
+ routing_header
+ )
+ metadata.append(routing_metadata)
+
+ return self._inner_api_calls["batch_write"](
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
+
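+ # Sketch: because each write in a BatchWrite succeeds or fails independently,
+ # the per-write status in the response should be inspected (``writes`` is an
+ # assumed list of Write dicts/messages):
+ #
+ #   response = client.batch_write(database, writes=writes)
+ #   for status in response.status:
+ #       if status.code != 0:  # 0 == google.rpc.Code.OK
+ #           print('write failed:', status.message)
+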
+ def begin_transaction(
+ self,
+ database,
+ options_=None,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Starts a new transaction.
+
+ Example:
+ >>> from google.cloud import firestore_v1
+ >>>
+ >>> client = firestore_v1.FirestoreClient()
+ >>>
+ >>> # TODO: Initialize `database`:
+ >>> database = ''
+ >>>
+ >>> response = client.begin_transaction(database)
+
+ Args:
+ database (str): Required. The database name. In the format:
+ ``projects/{project_id}/databases/{database_id}``.
+ options_ (Union[dict, ~google.cloud.firestore_v1.types.TransactionOptions]): The options for the transaction.
+ Defaults to a read-write transaction.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1.types.TransactionOptions`
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ A :class:`~google.cloud.firestore_v1.types.BeginTransactionResponse` instance.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "begin_transaction" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "begin_transaction"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.begin_transaction,
+ default_retry=self._method_configs["BeginTransaction"].retry,
+ default_timeout=self._method_configs["BeginTransaction"].timeout,
+ client_info=self._client_info,
+ )
+
+ request = firestore_pb2.BeginTransactionRequest(
+ database=database, options=options_,
+ )
+ if metadata is None:
+ metadata = []
+ metadata = list(metadata)
+ try:
+ routing_header = [("database", database)]
+ except AttributeError:
+ pass
+ else:
+ routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
+ routing_header
+ )
+ metadata.append(routing_metadata)
+
+ return self._inner_api_calls["begin_transaction"](
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
+
+ def commit(
+ self,
+ database,
+ writes=None,
+ transaction=None,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Commits a transaction, while optionally updating documents.
+
+ Example:
+ >>> from google.cloud import firestore_v1
+ >>>
+ >>> client = firestore_v1.FirestoreClient()
+ >>>
+ >>> # TODO: Initialize `database`:
+ >>> database = ''
+ >>>
+ >>> response = client.commit(database)
+
+ Args:
+ database (str): Required. The database name. In the format:
+ ``projects/{project_id}/databases/{database_id}``.
+ writes (list[Union[dict, ~google.cloud.firestore_v1.types.Write]]): The writes to apply.
+
+ Always executed atomically and in order.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1.types.Write`
+ transaction (bytes): If set, applies all writes in this transaction, and commits it.
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ A :class:`~google.cloud.firestore_v1.types.CommitResponse` instance.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "commit" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "commit"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.commit,
+ default_retry=self._method_configs["Commit"].retry,
+ default_timeout=self._method_configs["Commit"].timeout,
+ client_info=self._client_info,
+ )
+
+ request = firestore_pb2.CommitRequest(
+ database=database, writes=writes, transaction=transaction,
+ )
+ if metadata is None:
+ metadata = []
+ metadata = list(metadata)
+ try:
+ routing_header = [("database", database)]
+ except AttributeError:
+ pass
+ else:
+ routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
+ routing_header
+ )
+ metadata.append(routing_metadata)
+
+ return self._inner_api_calls["commit"](
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
+
+ def rollback(
+ self,
+ database,
+ transaction,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Rolls back a transaction.
+
+ Example:
+ >>> from google.cloud import firestore_v1
+ >>>
+ >>> client = firestore_v1.FirestoreClient()
+ >>>
+ >>> # TODO: Initialize `database`:
+ >>> database = ''
+ >>>
+ >>> # TODO: Initialize `transaction`:
+ >>> transaction = b''
+ >>>
+ >>> client.rollback(database, transaction)
+
+ Args:
+ database (str): Required. The database name. In the format:
+ ``projects/{project_id}/databases/{database_id}``.
+ transaction (bytes): Required. The transaction to roll back.
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "rollback" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "rollback"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.rollback,
+ default_retry=self._method_configs["Rollback"].retry,
+ default_timeout=self._method_configs["Rollback"].timeout,
+ client_info=self._client_info,
+ )
+
+ request = firestore_pb2.RollbackRequest(
+ database=database, transaction=transaction,
+ )
+ if metadata is None:
+ metadata = []
+ metadata = list(metadata)
+ try:
+ routing_header = [("database", database)]
+ except AttributeError:
+ pass
+ else:
+ routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
+ routing_header
+ )
+ metadata.append(routing_metadata)
+
+ self._inner_api_calls["rollback"](
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
+
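+ # Sketch of the full transaction lifecycle built from the calls above
+ # (``database`` and ``writes`` assumed):
+ #
+ #   txn = client.begin_transaction(database).transaction
+ #   try:
+ #       client.commit(database, writes=writes, transaction=txn)
+ #   except Exception:
+ #       client.rollback(database, txn)
+ #       raise
+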
+ def run_query(
+ self,
+ parent,
+ structured_query=None,
+ transaction=None,
+ new_transaction=None,
+ read_time=None,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Runs a query.
+
+ Example:
+ >>> from google.cloud import firestore_v1
+ >>>
+ >>> client = firestore_v1.FirestoreClient()
+ >>>
+ >>> # TODO: Initialize `parent`:
+ >>> parent = ''
+ >>>
+ >>> for element in client.run_query(parent):
+ ... # process element
+ ... pass
+
+ Args:
+ parent (str): Required. The parent resource name. In the format:
+ ``projects/{project_id}/databases/{database_id}/documents`` or
+ ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
+ For example: ``projects/my-project/databases/my-database/documents`` or
+ ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom``
+ structured_query (Union[dict, ~google.cloud.firestore_v1.types.StructuredQuery]): A structured query.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1.types.StructuredQuery`
+ transaction (bytes): Reads documents in a transaction.
+ new_transaction (Union[dict, ~google.cloud.firestore_v1.types.TransactionOptions]): Starts a new transaction and reads the documents.
+ Defaults to a read-only transaction.
+ The new transaction ID will be returned as the first response in the
+ stream.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1.types.TransactionOptions`
+ read_time (Union[dict, ~google.cloud.firestore_v1.types.Timestamp]): Reads documents as they were at the given time.
+ This may not be older than 270 seconds.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1.types.Timestamp`
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ Iterable[~google.cloud.firestore_v1.types.RunQueryResponse].
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "run_query" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "run_query"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.run_query,
+ default_retry=self._method_configs["RunQuery"].retry,
+ default_timeout=self._method_configs["RunQuery"].timeout,
+ client_info=self._client_info,
+ )
+
+ # Sanity check: We have some fields which are mutually exclusive;
+ # raise ValueError if more than one is sent.
+ google.api_core.protobuf_helpers.check_oneof(structured_query=structured_query,)
+
+ # Sanity check: We have some fields which are mutually exclusive;
+ # raise ValueError if more than one is sent.
+ google.api_core.protobuf_helpers.check_oneof(
+ transaction=transaction,
+ new_transaction=new_transaction,
+ read_time=read_time,
+ )
+
+ request = firestore_pb2.RunQueryRequest(
+ parent=parent,
+ structured_query=structured_query,
+ transaction=transaction,
+ new_transaction=new_transaction,
+ read_time=read_time,
+ )
+ if metadata is None:
+ metadata = []
+ metadata = list(metadata)
+ try:
+ routing_header = [("parent", parent)]
+ except AttributeError:
+ pass
+ else:
+ routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
+ routing_header
+ )
+ metadata.append(routing_metadata)
+
+ return self._inner_api_calls["run_query"](
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
+
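+ # Sketch: a structured query passed in dict form (hypothetical collection id
+ # 'users'); responses interleave documents with progress markers, so check
+ # the ``document`` field before use:
+ #
+ #   query = {'from': [{'collection_id': 'users'}]}
+ #   for resp in client.run_query(parent, structured_query=query):
+ #       if resp.HasField('document'):
+ #           print(resp.document.name)
+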
+ def write(
+ self,
+ requests,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Streams batches of document updates and deletes, in order.
+
+ Example:
+ >>> from google.cloud import firestore_v1
+ >>>
+ >>> client = firestore_v1.FirestoreClient()
+ >>>
+ >>> # TODO: Initialize `database`:
+ >>> database = ''
+ >>> request = {'database': database}
+ >>>
+ >>> requests = [request]
+ >>> for element in client.write(requests):
+ ... # process element
+ ... pass
+
+ Args:
+ requests (iterator[dict|google.cloud.firestore_v1.proto.firestore_pb2.WriteRequest]): The input objects. If a dict is provided, it must be of the
+ same form as the protobuf message :class:`~google.cloud.firestore_v1.types.WriteRequest`
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ Iterable[~google.cloud.firestore_v1.types.WriteResponse].
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "write" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "write"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.write,
+ default_retry=self._method_configs["Write"].retry,
+ default_timeout=self._method_configs["Write"].timeout,
+ client_info=self._client_info,
+ )
+
+ return self._inner_api_calls["write"](
+ requests, retry=retry, timeout=timeout, metadata=metadata
+ )
+
+ def listen(
+ self,
+ requests,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Listens to changes.
+
+ Example:
+ >>> from google.cloud import firestore_v1
+ >>>
+ >>> client = firestore_v1.FirestoreClient()
+ >>>
+ >>> # TODO: Initialize `database`:
+ >>> database = ''
+ >>> request = {'database': database}
+ >>>
+ >>> requests = [request]
+ >>> for element in client.listen(requests):
+ ... # process element
+ ... pass
+
+ Args:
+ requests (iterator[dict|google.cloud.firestore_v1.proto.firestore_pb2.ListenRequest]): The input objects. If a dict is provided, it must be of the
+ same form as the protobuf message :class:`~google.cloud.firestore_v1.types.ListenRequest`
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ Iterable[~google.cloud.firestore_v1.types.ListenResponse].
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "listen" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "listen"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.listen,
+ default_retry=self._method_configs["Listen"].retry,
+ default_timeout=self._method_configs["Listen"].timeout,
+ client_info=self._client_info,
+ )
+
+ return self._inner_api_calls["listen"](
+ requests, retry=retry, timeout=timeout, metadata=metadata
+ )
+
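+ # NOTE: ``write`` and ``listen`` above are bidirectional streams: ``requests``
+ # may be any iterator (a generator works well for long-lived streams), and
+ # responses are yielded as the server produces them.
+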
+ def list_collection_ids(
+ self,
+ parent,
+ page_size=None,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Lists all the collection IDs underneath a document.
+
+ Example:
+ >>> from google.cloud import firestore_v1
+ >>>
+ >>> client = firestore_v1.FirestoreClient()
+ >>>
+ >>> # TODO: Initialize `parent`:
+ >>> parent = ''
+ >>>
+ >>> # Iterate over all results
+ >>> for element in client.list_collection_ids(parent):
+ ... # process element
+ ... pass
+ >>>
+ >>>
+ >>> # Alternatively:
+ >>>
+ >>> # Iterate over results one page at a time
+ >>> for page in client.list_collection_ids(parent).pages:
+ ... for element in page:
+ ... # process element
+ ... pass
+
+ Args:
+ parent (str): Required. The parent document. In the format:
+ ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
+ For example:
+ ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom``
+ page_size (int): The maximum number of resources contained in the
+ underlying API response. If page streaming is performed per-
+ resource, this parameter does not affect the return value. If page
+ streaming is performed per-page, this determines the maximum number
+ of resources in a page.
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ A :class:`~google.api_core.page_iterator.PageIterator` instance.
+ An iterable of :class:`str` instances.
+ You can also iterate over the pages of the response
+ using its `pages` property.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "list_collection_ids" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "list_collection_ids"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.list_collection_ids,
+ default_retry=self._method_configs["ListCollectionIds"].retry,
+ default_timeout=self._method_configs["ListCollectionIds"].timeout,
+ client_info=self._client_info,
+ )
+
+ request = firestore_pb2.ListCollectionIdsRequest(
+ parent=parent, page_size=page_size,
+ )
+ if metadata is None:
+ metadata = []
+ metadata = list(metadata)
+ try:
+ routing_header = [("parent", parent)]
+ except AttributeError:
+ pass
+ else:
+ routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
+ routing_header
+ )
+ metadata.append(routing_metadata)
+
+ iterator = google.api_core.page_iterator.GRPCIterator(
+ client=None,
+ method=functools.partial(
+ self._inner_api_calls["list_collection_ids"],
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ ),
+ request=request,
+ items_field="collection_ids",
+ request_token_field="page_token",
+ response_token_field="next_page_token",
+ )
+ return iterator
+
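+ # NOTE: the GRPCIterator above pages lazily: each page fetch issues another
+ # ListCollectionIdsRequest, threading ``next_page_token`` from one response
+ # into ``page_token`` on the next request, per the field names configured
+ # above.
+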
+ def partition_query(
+ self,
+ parent,
+ structured_query=None,
+ partition_count=None,
+ page_size=None,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Partitions a query by returning partition cursors that can be used to run
+ the query in parallel. The returned partition cursors are split points that
+ can be used by RunQuery as start and end points for the query results.
+
+ Example:
+ >>> from google.cloud import firestore_v1
+ >>>
+ >>> client = firestore_v1.FirestoreClient()
+ >>>
+ >>> # TODO: Initialize `parent`:
+ >>> parent = ''
+ >>>
+ >>> # Iterate over all results
+ >>> for element in client.partition_query(parent):
+ ... # process element
+ ... pass
+ >>>
+ >>>
+ >>> # Alternatively:
+ >>>
+ >>> # Iterate over results one page at a time
+ >>> for page in client.partition_query(parent).pages:
+ ... for element in page:
+ ... # process element
+ ... pass
+
+ Args:
+ parent (str): Required. The parent resource name. In the format:
+ ``projects/{project_id}/databases/{database_id}/documents``. Document
+ resource names are not supported; only database resource names can be
+ specified.
+ structured_query (Union[dict, ~google.cloud.firestore_v1.types.StructuredQuery]): A structured query.
+ Filters, order bys, limits, offsets, and start/end cursors are not
+ supported.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1.types.StructuredQuery`
+ partition_count (long): The desired maximum number of partition points.
+ The partitions may be returned across multiple pages of results.
+ The number must be strictly positive. The actual number of partitions
+ returned may be fewer.
+
+ For example, this may be set to one fewer than the number of parallel
+ queries to be run, or in running a data pipeline job, one fewer than the
+ number of workers or compute instances available.
+ page_size (int): The maximum number of resources contained in the
+ underlying API response. If page streaming is performed per-
+ resource, this parameter does not affect the return value. If page
+ streaming is performed per-page, this determines the maximum number
+ of resources in a page.
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ A :class:`~google.api_core.page_iterator.PageIterator` instance.
+ An iterable of :class:`~google.cloud.firestore_v1.types.Cursor` instances.
+ You can also iterate over the pages of the response
+ using its `pages` property.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "partition_query" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "partition_query"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.partition_query,
+ default_retry=self._method_configs["PartitionQuery"].retry,
+ default_timeout=self._method_configs["PartitionQuery"].timeout,
+ client_info=self._client_info,
+ )
+
+ # Sanity check: We have some fields which are mutually exclusive;
+ # raise ValueError if more than one is sent.
+ google.api_core.protobuf_helpers.check_oneof(structured_query=structured_query,)
+
+ request = firestore_pb2.PartitionQueryRequest(
+ parent=parent,
+ structured_query=structured_query,
+ partition_count=partition_count,
+ page_size=page_size,
+ )
+ if metadata is None:
+ metadata = []
+ metadata = list(metadata)
+ try:
+ routing_header = [("parent", parent)]
+ except AttributeError:
+ pass
+ else:
+ routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
+ routing_header
+ )
+ metadata.append(routing_metadata)
+
+ iterator = google.api_core.page_iterator.GRPCIterator(
+ client=None,
+ method=functools.partial(
+ self._inner_api_calls["partition_query"],
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ ),
+ request=request,
+ items_field="partitions",
+ request_token_field="page_token",
+ response_token_field="next_page_token",
+ )
+ return iterator
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/gapic/firestore_client_config.py b/venv/Lib/site-packages/google/cloud/firestore_v1/gapic/firestore_client_config.py
new file mode 100644
index 000000000..95cdc8cd3
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1/gapic/firestore_client_config.py
@@ -0,0 +1,109 @@
+config = {
+ "interfaces": {
+ "google.firestore.v1.Firestore": {
+ "retry_codes": {
+ "idempotent": ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"],
+ "aborted_unavailable": ["ABORTED", "UNAVAILABLE"],
+ "non_idempotent": [],
+ "idempotent2": ["DEADLINE_EXCEEDED", "UNAVAILABLE"],
+ },
+ "retry_params": {
+ "default": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 60000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 60000,
+ "total_timeout_millis": 600000,
+ },
+ "streaming": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 60000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 60000,
+ "total_timeout_millis": 600000,
+ },
+ },
+ "methods": {
+ "GetDocument": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "idempotent2",
+ "retry_params_name": "default",
+ },
+ "ListDocuments": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "idempotent2",
+ "retry_params_name": "default",
+ },
+ "CreateDocument": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "non_idempotent",
+ "retry_params_name": "default",
+ },
+ "UpdateDocument": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "non_idempotent",
+ "retry_params_name": "default",
+ },
+ "DeleteDocument": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "default",
+ },
+ "BatchGetDocuments": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "streaming",
+ },
+ "BatchWrite": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "aborted_unavailable",
+ "retry_params_name": "default",
+ },
+ "BeginTransaction": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "default",
+ },
+ "Commit": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "non_idempotent",
+ "retry_params_name": "default",
+ },
+ "Rollback": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "default",
+ },
+ "RunQuery": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "streaming",
+ },
+ "Write": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "non_idempotent",
+ "retry_params_name": "streaming",
+ },
+ "Listen": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "streaming",
+ },
+ "ListCollectionIds": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "default",
+ },
+ "PartitionQuery": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "non_idempotent",
+ "retry_params_name": "default",
+ },
+ },
+ }
+ }
+}
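+# Reading the config above: each method maps to a set of retryable codes and a
+# retry-params profile. A rough sketch of the resulting schedule, assuming
+# google.api_core's standard jittered exponential backoff:
+#
+#   delay_ms = 100                       # initial_retry_delay_millis
+#   while total_elapsed_ms < 600_000:    # total_timeout_millis
+#       sleep(jitter(delay_ms))          # retried only on the listed codes
+#       delay_ms = min(delay_ms * 1.3, 60_000)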
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/gapic/transports/__init__.py b/venv/Lib/site-packages/google/cloud/firestore_v1/gapic/transports/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/gapic/transports/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1/gapic/transports/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..6f9581c06
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1/gapic/transports/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/gapic/transports/__pycache__/firestore_grpc_transport.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1/gapic/transports/__pycache__/firestore_grpc_transport.cpython-36.pyc
new file mode 100644
index 000000000..d3fd0a2e7
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1/gapic/transports/__pycache__/firestore_grpc_transport.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/gapic/transports/firestore_grpc_transport.py b/venv/Lib/site-packages/google/cloud/firestore_v1/gapic/transports/firestore_grpc_transport.py
new file mode 100644
index 000000000..13cfc84e8
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1/gapic/transports/firestore_grpc_transport.py
@@ -0,0 +1,319 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import google.api_core.grpc_helpers
+
+from google.cloud.firestore_v1.proto import firestore_pb2_grpc
+
+
+class FirestoreGrpcTransport(object):
+ """gRPC transport class providing stubs for
+ google.firestore.v1 Firestore API.
+
+ The transport provides access to the raw gRPC stubs,
+ which can be used to take advantage of advanced
+ features of gRPC.
+ """
+
+ # The scopes needed to make gRPC calls to all of the methods defined
+ # in this service.
+ _OAUTH_SCOPES = (
+ "https://www.googleapis.com/auth/cloud-platform",
+ "https://www.googleapis.com/auth/datastore",
+ )
+
+ def __init__(
+ self, channel=None, credentials=None, address="firestore.googleapis.com:443"
+ ):
+ """Instantiate the transport class.
+
+ Args:
+ channel (grpc.Channel): A ``Channel`` instance through
+ which to make calls. This argument is mutually exclusive
+ with ``credentials``; providing both will raise an exception.
+ credentials (google.auth.credentials.Credentials): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ address (str): The address where the service is hosted.
+ """
+ # If both `channel` and `credentials` are specified, raise an
+ # exception (channels come with credentials baked in already).
+ if channel is not None and credentials is not None:
+ raise ValueError(
+ "The `channel` and `credentials` arguments are mutually " "exclusive.",
+ )
+
+ # Create the channel.
+ if channel is None:
+ channel = self.create_channel(
+ address=address,
+ credentials=credentials,
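+ # -1 lifts gRPC's default message-size caps (the receive
+ # limit defaults to 4 MiB); Firestore batches and query
+ # results can exceed them.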
+ options={
+ "grpc.max_send_message_length": -1,
+ "grpc.max_receive_message_length": -1,
+ }.items(),
+ )
+
+ self._channel = channel
+
+ # gRPC uses objects called "stubs" that are bound to the
+ # channel and provide a basic method for each RPC.
+ self._stubs = {
+ "firestore_stub": firestore_pb2_grpc.FirestoreStub(channel),
+ }
+
+ @classmethod
+ def create_channel(
+ cls, address="firestore.googleapis.com:443", credentials=None, **kwargs
+ ):
+ """Create and return a gRPC channel object.
+
+ Args:
+ address (str): The host for the channel to use.
+ credentials (~.Credentials): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ kwargs (dict): Keyword arguments, which are passed to the
+ channel creation.
+
+ Returns:
+ grpc.Channel: A gRPC channel object.
+ """
+ return google.api_core.grpc_helpers.create_channel(
+ address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs
+ )
+
+ @property
+ def channel(self):
+ """The gRPC channel used by the transport.
+
+ Returns:
+ grpc.Channel: A gRPC channel object.
+ """
+ return self._channel
+
+ @property
+ def get_document(self):
+ """Return the gRPC stub for :meth:`FirestoreClient.get_document`.
+
+ Gets a single document.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["firestore_stub"].GetDocument
+
+ @property
+ def list_documents(self):
+ """Return the gRPC stub for :meth:`FirestoreClient.list_documents`.
+
+ Lists documents.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["firestore_stub"].ListDocuments
+
+ @property
+ def create_document(self):
+ """Return the gRPC stub for :meth:`FirestoreClient.create_document`.
+
+ Creates a new document.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["firestore_stub"].CreateDocument
+
+ @property
+ def update_document(self):
+ """Return the gRPC stub for :meth:`FirestoreClient.update_document`.
+
+ Updates or inserts a document.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["firestore_stub"].UpdateDocument
+
+ @property
+ def delete_document(self):
+ """Return the gRPC stub for :meth:`FirestoreClient.delete_document`.
+
+ Deletes a document.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["firestore_stub"].DeleteDocument
+
+ @property
+ def batch_get_documents(self):
+ """Return the gRPC stub for :meth:`FirestoreClient.batch_get_documents`.
+
+ Gets multiple documents.
+
+ Documents returned by this method are not guaranteed to be returned in the
+ same order that they were requested.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["firestore_stub"].BatchGetDocuments
+
+ @property
+ def batch_write(self):
+ """Return the gRPC stub for :meth:`FirestoreClient.batch_write`.
+
+ Applies a batch of write operations.
+
+ The BatchWrite method does not apply the write operations atomically and
+ can apply them out of order. The method does not allow more than one write
+ per document. Each write succeeds or fails independently. See the
+ ``BatchWriteResponse`` for the success status of each write.
+
+ If you require an atomically applied set of writes, use ``Commit``
+ instead.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["firestore_stub"].BatchWrite
+
+ @property
+ def begin_transaction(self):
+ """Return the gRPC stub for :meth:`FirestoreClient.begin_transaction`.
+
+ Starts a new transaction.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["firestore_stub"].BeginTransaction
+
+ @property
+ def commit(self):
+ """Return the gRPC stub for :meth:`FirestoreClient.commit`.
+
+ Commits a transaction, while optionally updating documents.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["firestore_stub"].Commit
+
+ @property
+ def rollback(self):
+ """Return the gRPC stub for :meth:`FirestoreClient.rollback`.
+
+ Rolls back a transaction.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["firestore_stub"].Rollback
+
+ @property
+ def run_query(self):
+ """Return the gRPC stub for :meth:`FirestoreClient.run_query`.
+
+ Runs a query.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["firestore_stub"].RunQuery
+
+ @property
+ def write(self):
+ """Return the gRPC stub for :meth:`FirestoreClient.write`.
+
+ Streams batches of document updates and deletes, in order.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["firestore_stub"].Write
+
+ @property
+ def listen(self):
+ """Return the gRPC stub for :meth:`FirestoreClient.listen`.
+
+ Listens to changes.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["firestore_stub"].Listen
+
+ @property
+ def list_collection_ids(self):
+ """Return the gRPC stub for :meth:`FirestoreClient.list_collection_ids`.
+
+ Lists all the collection IDs underneath a document.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["firestore_stub"].ListCollectionIds
+
+ @property
+ def partition_query(self):
+ """Return the gRPC stub for :meth:`FirestoreClient.partition_query`.
+
+ Partitions a query by returning partition cursors that can be used to run
+ the query in parallel. The returned partition cursors are split points that
+ can be used by RunQuery as start and end points for the query results.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["firestore_stub"].PartitionQuery
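+
+# Sketch: supplying a pre-built channel, e.g. to target a local Firestore
+# emulator (hypothetical address); ``channel`` and ``credentials`` are
+# mutually exclusive, as enforced in __init__ above:
+#
+#   import grpc
+#   channel = grpc.insecure_channel("localhost:8080")
+#   transport = FirestoreGrpcTransport(channel=channel)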
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/order.py b/venv/Lib/site-packages/google/cloud/firestore_v1/order.py
new file mode 100644
index 000000000..d70293a36
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1/order.py
@@ -0,0 +1,207 @@
+# Copyright 2017 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from enum import Enum
+from google.cloud.firestore_v1._helpers import decode_value
+import math
+
+
+class TypeOrder(Enum):
+ # NOTE: This order is defined by the backend and cannot be changed.
+ NULL = 0
+ BOOLEAN = 1
+ NUMBER = 2
+ TIMESTAMP = 3
+ STRING = 4
+ BLOB = 5
+ REF = 6
+ GEO_POINT = 7
+ ARRAY = 8
+ OBJECT = 9
+
+ @staticmethod
+ def from_value(value):
+ v = value.WhichOneof("value_type")
+
+ lut = {
+ "null_value": TypeOrder.NULL,
+ "boolean_value": TypeOrder.BOOLEAN,
+ "integer_value": TypeOrder.NUMBER,
+ "double_value": TypeOrder.NUMBER,
+ "timestamp_value": TypeOrder.TIMESTAMP,
+ "string_value": TypeOrder.STRING,
+ "bytes_value": TypeOrder.BLOB,
+ "reference_value": TypeOrder.REF,
+ "geo_point_value": TypeOrder.GEO_POINT,
+ "array_value": TypeOrder.ARRAY,
+ "map_value": TypeOrder.OBJECT,
+ }
+
+ if v not in lut:
+ raise ValueError("Could not detect value type for " + v)
+ return lut[v]
+
+
+class Order(object):
+ """
+ Order implements the ordering semantics of the backend.
+ """
+
+ @classmethod
+ def compare(cls, left, right):
+ """
+ Main comparison function for all Firestore types.
+ @return -1 is left < right, 0 if left == right, otherwise 1
+ """
+ # First compare the types.
+ leftType = TypeOrder.from_value(left).value
+ rightType = TypeOrder.from_value(right).value
+
+ if leftType != rightType:
+ if leftType < rightType:
+ return -1
+ return 1
+
+ value_type = left.WhichOneof("value_type")
+
+ if value_type == "null_value":
+ return 0 # nulls are all equal
+ elif value_type == "boolean_value":
+ return cls._compare_to(left.boolean_value, right.boolean_value)
+ elif value_type == "integer_value":
+ return cls.compare_numbers(left, right)
+ elif value_type == "double_value":
+ return cls.compare_numbers(left, right)
+ elif value_type == "timestamp_value":
+ return cls.compare_timestamps(left, right)
+ elif value_type == "string_value":
+ return cls._compare_to(left.string_value, right.string_value)
+ elif value_type == "bytes_value":
+ return cls.compare_blobs(left, right)
+ elif value_type == "reference_value":
+ return cls.compare_resource_paths(left, right)
+ elif value_type == "geo_point_value":
+ return cls.compare_geo_points(left, right)
+ elif value_type == "array_value":
+ return cls.compare_arrays(left, right)
+ elif value_type == "map_value":
+ return cls.compare_objects(left, right)
+ else:
+ raise ValueError("Unknown ``value_type``", str(value_type))
+
+ @staticmethod
+ def compare_blobs(left, right):
+ left_bytes = left.bytes_value
+ right_bytes = right.bytes_value
+
+ return Order._compare_to(left_bytes, right_bytes)
+
+ @staticmethod
+ def compare_timestamps(left, right):
+ left = left.timestamp_value
+ right = right.timestamp_value
+
+ seconds = Order._compare_to(left.seconds or 0, right.seconds or 0)
+ if seconds != 0:
+ return seconds
+
+ return Order._compare_to(left.nanos or 0, right.nanos or 0)
+
+ @staticmethod
+ def compare_geo_points(left, right):
+ left_value = decode_value(left, None)
+ right_value = decode_value(right, None)
+ cmp = (left_value.latitude > right_value.latitude) - (
+ left_value.latitude < right_value.latitude
+ )
+
+ if cmp != 0:
+ return cmp
+ return (left_value.longitude > right_value.longitude) - (
+ left_value.longitude < right_value.longitude
+ )
+
+ @staticmethod
+ def compare_resource_paths(left, right):
+ left = left.reference_value
+ right = right.reference_value
+
+ left_segments = left.split("/")
+ right_segments = right.split("/")
+ shorter = min(len(left_segments), len(right_segments))
+ # compare segments
+ for i in range(shorter):
+ if left_segments[i] < right_segments[i]:
+ return -1
+ if left_segments[i] > right_segments[i]:
+ return 1
+
+ left_length = len(left)
+ right_length = len(right)
+ return (left_length > right_length) - (left_length < right_length)
+
+ @staticmethod
+ def compare_arrays(left, right):
+ l_values = left.array_value.values
+ r_values = right.array_value.values
+
+ length = min(len(l_values), len(r_values))
+ for i in range(length):
+ cmp = Order.compare(l_values[i], r_values[i])
+ if cmp != 0:
+ return cmp
+
+ return Order._compare_to(len(l_values), len(r_values))
+
+ @staticmethod
+ def compare_objects(left, right):
+ left_fields = left.map_value.fields
+ right_fields = right.map_value.fields
+
+ for left_key, right_key in zip(sorted(left_fields), sorted(right_fields)):
+ keyCompare = Order._compare_to(left_key, right_key)
+ if keyCompare != 0:
+ return keyCompare
+
+ value_compare = Order.compare(
+ left_fields[left_key], right_fields[right_key]
+ )
+ if value_compare != 0:
+ return value_compare
+
+ return Order._compare_to(len(left_fields), len(right_fields))
+
+ @staticmethod
+ def compare_numbers(left, right):
+ left_value = decode_value(left, None)
+ right_value = decode_value(right, None)
+ return Order.compare_doubles(left_value, right_value)
+
+ @staticmethod
+ def compare_doubles(left, right):
+ if math.isnan(left):
+ if math.isnan(right):
+ return 0
+ return -1
+ if math.isnan(right):
+ return 1
+
+ return Order._compare_to(left, right)
+
+ @staticmethod
+ def _compare_to(left, right):
+ # We can't just use cmp(left, right) because cmp doesn't exist
+ # in Python 3, so this is an equivalent suggested by
+ # https://docs.python.org/3.0/whatsnew/3.0.html#ordering-comparisons
+ return (left > right) - (left < right)
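+
+# Sketch: integers and doubles share the NUMBER type order, so mixed numeric
+# comparisons work directly (Value is the pb2 message vendored alongside this
+# module):
+#
+#   from google.cloud.firestore_v1.proto.document_pb2 import Value
+#   Order.compare(Value(integer_value=1), Value(double_value=1.5))   # -1
+#   Order.compare(Value(string_value="a"), Value(integer_value=1))   # 1, STRING > NUMBER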
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__init__.py b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..590c5a56b
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/common_pb2.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/common_pb2.cpython-36.pyc
new file mode 100644
index 000000000..5a8ab7835
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/common_pb2.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/common_pb2_grpc.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/common_pb2_grpc.cpython-36.pyc
new file mode 100644
index 000000000..75c59c6e7
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/common_pb2_grpc.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/document_pb2.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/document_pb2.cpython-36.pyc
new file mode 100644
index 000000000..dfbdebaf2
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/document_pb2.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/document_pb2_grpc.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/document_pb2_grpc.cpython-36.pyc
new file mode 100644
index 000000000..253a1bb07
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/document_pb2_grpc.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/firestore_pb2.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/firestore_pb2.cpython-36.pyc
new file mode 100644
index 000000000..7702c8fac
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/firestore_pb2.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/firestore_pb2_grpc.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/firestore_pb2_grpc.cpython-36.pyc
new file mode 100644
index 000000000..9013a075f
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/firestore_pb2_grpc.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/query_pb2.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/query_pb2.cpython-36.pyc
new file mode 100644
index 000000000..bc9fbe024
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/query_pb2.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/query_pb2_grpc.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/query_pb2_grpc.cpython-36.pyc
new file mode 100644
index 000000000..4096ffbfa
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/query_pb2_grpc.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/test_v1_pb2.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/test_v1_pb2.cpython-36.pyc
new file mode 100644
index 000000000..50320e307
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/test_v1_pb2.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/tests_pb2.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/tests_pb2.cpython-36.pyc
new file mode 100644
index 000000000..df8f5d746
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/tests_pb2.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/write_pb2.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/write_pb2.cpython-36.pyc
new file mode 100644
index 000000000..5e9bc4d26
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/write_pb2.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/write_pb2_grpc.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/write_pb2_grpc.cpython-36.pyc
new file mode 100644
index 000000000..032cf0f69
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/__pycache__/write_pb2_grpc.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/proto/common.proto b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/common.proto
new file mode 100644
index 000000000..4367f168d
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/common.proto
@@ -0,0 +1,83 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.firestore.v1;
+
+import "google/protobuf/timestamp.proto";
+import "google/api/annotations.proto";
+
+option csharp_namespace = "Google.Cloud.Firestore.V1";
+option go_package = "google.golang.org/genproto/googleapis/firestore/v1;firestore";
+option java_multiple_files = true;
+option java_outer_classname = "CommonProto";
+option java_package = "com.google.firestore.v1";
+option objc_class_prefix = "GCFS";
+option php_namespace = "Google\\Cloud\\Firestore\\V1";
+option ruby_package = "Google::Cloud::Firestore::V1";
+
+// A set of field paths on a document.
+// Used to restrict a get or update operation on a document to a subset of its
+// fields.
+// This is different from standard field masks, as this is always scoped to a
+// [Document][google.firestore.v1.Document], and takes into account the dynamic nature of [Value][google.firestore.v1.Value].
+message DocumentMask {
+ // The list of field paths in the mask. See [Document.fields][google.firestore.v1.Document.fields] for a field
+ // path syntax reference.
+ repeated string field_paths = 1;
+}
+
+// A precondition on a document, used for conditional operations.
+message Precondition {
+ // The type of precondition.
+ oneof condition_type {
+ // When set to `true`, the target document must exist.
+ // When set to `false`, the target document must not exist.
+ bool exists = 1;
+
+ // When set, the target document must exist and have been last updated at
+ // that time.
+ google.protobuf.Timestamp update_time = 2;
+ }
+}
+
+// Options for creating a new transaction.
+message TransactionOptions {
+ // Options for a transaction that can be used to read and write documents.
+ message ReadWrite {
+ // An optional transaction to retry.
+ bytes retry_transaction = 1;
+ }
+
+ // Options for a transaction that can only be used to read documents.
+ message ReadOnly {
+ // The consistency mode for this transaction. If not set, defaults to strong
+ // consistency.
+ oneof consistency_selector {
+ // Reads documents at the given time.
+ // This may not be older than 60 seconds.
+ google.protobuf.Timestamp read_time = 2;
+ }
+ }
+
+ // The mode of the transaction.
+ oneof mode {
+ // The transaction can only be used for read operations.
+ ReadOnly read_only = 2;
+
+ // The transaction can be used for both read and write operations.
+ ReadWrite read_write = 3;
+ }
+}
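The messages above map one-to-one onto Python classes in the generated common_pb2 module that this diff adds below. A minimal sketch of how they compose; the field values are illustrative, not taken from the project:

    from google.cloud.firestore_v1.proto import common_pb2
    from google.protobuf import timestamp_pb2

    # Restrict a get or update to a subset of a document's fields.
    mask = common_pb2.DocumentMask(field_paths=["first_name", "phone"])

    # Conditional write: the target document must already exist...
    must_exist = common_pb2.Precondition(exists=True)

    # ...or must not have changed since a known update time.
    ts = timestamp_pb2.Timestamp()
    ts.GetCurrentTime()
    unchanged = common_pb2.Precondition(update_time=ts)

    # A read-only transaction pinned to a recent snapshot (at most 60s old).
    opts = common_pb2.TransactionOptions(
        read_only=common_pb2.TransactionOptions.ReadOnly(read_time=ts)
    )

Because condition_type is a oneof, setting update_time on a Precondition clears a previously set exists flag, and vice versa.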
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/proto/common_pb2.py b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/common_pb2.py
new file mode 100644
index 000000000..7aea997da
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/common_pb2.py
@@ -0,0 +1,456 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/cloud/firestore_v1/proto/common.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
+from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name="google/cloud/firestore_v1/proto/common.proto",
+ package="google.firestore.v1",
+ syntax="proto3",
+ serialized_options=b"\n\027com.google.firestore.v1B\013CommonProtoP\001Z fields = 2;
+
+ // Output only. The time at which the document was created.
+ //
+ // This value increases monotonically when a document is deleted then
+ // recreated. It can also be compared to values from other documents and
+ // the `read_time` of a query.
+ google.protobuf.Timestamp create_time = 3;
+
+ // Output only. The time at which the document was last changed.
+ //
+ // This value is initially set to the `create_time` then increases
+ // monotonically with each change to the document. It can also be
+ // compared to values from other documents and the `read_time` of a query.
+ google.protobuf.Timestamp update_time = 4;
+}
+
+// A message that can hold any of the supported value types.
+message Value {
+ // Must have a value set.
+ oneof value_type {
+ // A null value.
+ google.protobuf.NullValue null_value = 11;
+
+ // A boolean value.
+ bool boolean_value = 1;
+
+ // An integer value.
+ int64 integer_value = 2;
+
+ // A double value.
+ double double_value = 3;
+
+ // A timestamp value.
+ //
+ // Precise only to microseconds. When stored, any additional precision is
+ // rounded down.
+ google.protobuf.Timestamp timestamp_value = 10;
+
+ // A string value.
+ //
+ // The string, represented as UTF-8, must not exceed 1 MiB - 89 bytes.
+ // Only the first 1,500 bytes of the UTF-8 representation are considered by
+ // queries.
+ string string_value = 17;
+
+ // A bytes value.
+ //
+ // Must not exceed 1 MiB - 89 bytes.
+ // Only the first 1,500 bytes are considered by queries.
+ bytes bytes_value = 18;
+
+ // A reference to a document. For example:
+ // `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
+ string reference_value = 5;
+
+ // A geo point value representing a point on the surface of Earth.
+ google.type.LatLng geo_point_value = 8;
+
+ // An array value.
+ //
+ // Cannot directly contain another array value, though it can contain a
+ // map which contains another array.
+ ArrayValue array_value = 9;
+
+ // A map value.
+ MapValue map_value = 6;
+ }
+}
+
+// An array value.
+message ArrayValue {
+ // Values in the array.
+ repeated Value values = 1;
+}
+
+// A map value.
+message MapValue {
+ // The map's fields.
+ //
+ // The map keys represent field names. Field names matching the regular
+ // expression `__.*__` are reserved. Reserved field names are forbidden except
+ // in certain documented contexts. The map keys, represented as UTF-8, must
+ // not exceed 1,500 bytes and cannot be empty.
+ map<string, Value> fields = 1;
+}
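Value is a oneof, so exactly one variant is set at a time, and the no-directly-nested-arrays rule is worked around by routing through a map. A minimal sketch against the generated document_pb2 module added below; the data is illustrative:

    from google.cloud.firestore_v1.proto import document_pb2

    plate = document_pb2.Value(string_value="34-ABC-123")
    count = document_pb2.Value(integer_value=3)

    # An array cannot directly contain an array, but it can contain a
    # map whose field holds one.
    inner = document_pb2.Value(array_value=document_pb2.ArrayValue(values=[count]))
    nested = document_pb2.Value(
        map_value=document_pb2.MapValue(fields={"inner": inner})
    )
    outer = document_pb2.ArrayValue(values=[plate, nested])

    assert nested.WhichOneof("value_type") == "map_value"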
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/proto/document_pb2.py b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/document_pb2.py
new file mode 100644
index 000000000..d51e1a636
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/document_pb2.py
@@ -0,0 +1,815 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/cloud/firestore_v1/proto/document.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2
+from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
+from google.type import latlng_pb2 as google_dot_type_dot_latlng__pb2
+from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name="google/cloud/firestore_v1/proto/document.proto",
+ package="google.firestore.v1",
+ syntax="proto3",
+ serialized_options=b"\n\027com.google.firestore.v1B\rDocumentProtoP\001Z labels = 5;
+}
+
+// The response for [Firestore.Write][google.firestore.v1.Firestore.Write].
+message WriteResponse {
+ // The ID of the stream.
+ // Only set on the first message, when a new stream was created.
+ string stream_id = 1;
+
+ // A token that represents the position of this response in the stream.
+ // This can be used by a client to resume the stream at this point.
+ //
+ // This field is always set.
+ bytes stream_token = 2;
+
+ // The result of applying the writes.
+ //
+ // The i-th write result corresponds to the i-th write in the
+ // request.
+ repeated WriteResult write_results = 3;
+
+ // The time at which the commit occurred. Any read with an equal or greater
+ // `read_time` is guaranteed to see the effects of the write.
+ google.protobuf.Timestamp commit_time = 4;
+}
+
+// A request for [Firestore.Listen][google.firestore.v1.Firestore.Listen]
+message ListenRequest {
+ // Required. The database name. In the format:
+ // `projects/{project_id}/databases/{database_id}`.
+ string database = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // The supported target changes.
+ oneof target_change {
+ // A target to add to this stream.
+ Target add_target = 2;
+
+ // The ID of a target to remove from this stream.
+ int32 remove_target = 3;
+ }
+
+ // Labels associated with this target change.
+ map<string, string> labels = 4;
+}
+
+// The response for [Firestore.Listen][google.firestore.v1.Firestore.Listen].
+message ListenResponse {
+ // The supported responses.
+ oneof response_type {
+ // Targets have changed.
+ TargetChange target_change = 2;
+
+ // A [Document][google.firestore.v1.Document] has changed.
+ DocumentChange document_change = 3;
+
+ // A [Document][google.firestore.v1.Document] has been deleted.
+ DocumentDelete document_delete = 4;
+
+ // A [Document][google.firestore.v1.Document] has been removed from a target (because it is no longer
+ // relevant to that target).
+ DocumentRemove document_remove = 6;
+
+ // A filter to apply to the set of documents previously returned for the
+ // given target.
+ //
+ // Returned when documents may have been removed from the given target, but
+ // the exact documents are unknown.
+ ExistenceFilter filter = 5;
+ }
+}
+
+// A specification of a set of documents to listen to.
+message Target {
+ // A target specified by a set of documents names.
+ message DocumentsTarget {
+ // The names of the documents to retrieve. In the format:
+ // `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
+ // The request will fail if any of the documents is not a child resource of
+ // the given `database`. Duplicate names will be elided.
+ repeated string documents = 2;
+ }
+
+ // A target specified by a query.
+ message QueryTarget {
+ // The parent resource name. In the format:
+ // `projects/{project_id}/databases/{database_id}/documents` or
+ // `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
+ // For example:
+ // `projects/my-project/databases/my-database/documents` or
+ // `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`
+ string parent = 1;
+
+ // The query to run.
+ oneof query_type {
+ // A structured query.
+ StructuredQuery structured_query = 2;
+ }
+ }
+
+ // The type of target to listen to.
+ oneof target_type {
+ // A target specified by a query.
+ QueryTarget query = 2;
+
+ // A target specified by a set of document names.
+ DocumentsTarget documents = 3;
+ }
+
+ // When to start listening.
+ //
+ // If not specified, all matching Documents are returned before any
+ // subsequent changes.
+ oneof resume_type {
+ // A resume token from a prior [TargetChange][google.firestore.v1.TargetChange] for an identical target.
+ //
+ // Using a resume token with a different target is unsupported and may fail.
+ bytes resume_token = 4;
+
+ // Start listening after a specific `read_time`.
+ //
+ // The client must know the state of matching documents at this time.
+ google.protobuf.Timestamp read_time = 11;
+ }
+
+ // The target ID that identifies the target on the stream. Must be a positive
+ // number and non-zero.
+ int32 target_id = 5;
+
+ // If the target should be removed once it is current and consistent.
+ bool once = 6;
+}
+
+// Targets being watched have changed.
+message TargetChange {
+ // The type of change.
+ enum TargetChangeType {
+ // No change has occurred. Used only to send an updated `resume_token`.
+ NO_CHANGE = 0;
+
+ // The targets have been added.
+ ADD = 1;
+
+ // The targets have been removed.
+ REMOVE = 2;
+
+ // The targets reflect all changes committed before the targets were added
+ // to the stream.
+ //
+ // This will be sent after or with a `read_time` that is greater than or
+ // equal to the time at which the targets were added.
+ //
+ // Listeners can wait for this change if read-after-write semantics
+ // are desired.
+ CURRENT = 3;
+
+ // The targets have been reset, and a new initial state for the targets
+ // will be returned in subsequent changes.
+ //
+ // After the initial state is complete, `CURRENT` will be returned even
+ // if the target was previously indicated to be `CURRENT`.
+ RESET = 4;
+ }
+
+ // The type of change that occurred.
+ TargetChangeType target_change_type = 1;
+
+ // The target IDs of targets that have changed.
+ //
+ // If empty, the change applies to all targets.
+ //
+ // The order of the target IDs is not defined.
+ repeated int32 target_ids = 2;
+
+ // The error that resulted in this change, if applicable.
+ google.rpc.Status cause = 3;
+
+ // A token that can be used to resume the stream for the given `target_ids`,
+ // or all targets if `target_ids` is empty.
+ //
+ // Not set on every target change.
+ bytes resume_token = 4;
+
+ // The consistent `read_time` for the given `target_ids` (omitted when the
+ // target_ids are not at a consistent snapshot).
+ //
+ // The stream is guaranteed to send a `read_time` with `target_ids` empty
+ // whenever the entire stream reaches a new consistent snapshot. ADD,
+ // CURRENT, and RESET messages are guaranteed to (eventually) result in a
+ // new consistent snapshot (while NO_CHANGE and REMOVE messages are not).
+ //
+ // For a given stream, `read_time` is guaranteed to be monotonically
+ // increasing.
+ google.protobuf.Timestamp read_time = 6;
+}
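The Target / resume_token handshake described above is easiest to see in request form. A minimal sketch with the generated firestore_pb2 module; the project id and document path are placeholders, not values from this repository:

    from google.cloud.firestore_v1.proto import firestore_pb2

    DB = "projects/<project-id>/databases/(default)"
    target = firestore_pb2.Target(
        documents=firestore_pb2.Target.DocumentsTarget(
            documents=[DB + "/documents/Users/someUserID"]
        ),
        target_id=1,  # must be positive and non-zero
    )
    first = firestore_pb2.ListenRequest(database=DB, add_target=target)

    # After a disconnect, resume from the last TargetChange.resume_token
    # instead of re-reading the whole result set.
    target.resume_token = b""  # stand-in; use the bytes from a prior TargetChange
    resumed = firestore_pb2.ListenRequest(database=DB, add_target=target)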
+
+// The request for [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds].
+message ListCollectionIdsRequest {
+ // Required. The parent document. In the format:
+ // `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
+ // For example:
+ // `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`
+ string parent = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // The maximum number of results to return.
+ int32 page_size = 2;
+
+ // A page token. Must be a value from
+ // [ListCollectionIdsResponse][google.firestore.v1.ListCollectionIdsResponse].
+ string page_token = 3;
+}
+
+// The response from [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds].
+message ListCollectionIdsResponse {
+ // The collection ids.
+ repeated string collection_ids = 1;
+
+ // A page token that may be used to continue the list.
+ string next_page_token = 2;
+}
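The page_token / next_page_token pair above is the usual list-pagination loop. A minimal sketch, again with placeholder names:

    from google.cloud.firestore_v1.proto import firestore_pb2

    req = firestore_pb2.ListCollectionIdsRequest(
        parent="projects/<project-id>/databases/(default)/documents/Users/someUserID",
        page_size=100,
    )
    # For each response, copy next_page_token into the next request and stop
    # once it comes back empty:
    #     req.page_token = resp.next_page_token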
+
+// The request for [Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite].
+message BatchWriteRequest {
+ // Required. The database name. In the format:
+ // `projects/{project_id}/databases/{database_id}`.
+ string database = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // The writes to apply.
+ //
+ // This method does not apply writes atomically and does not guarantee ordering.
+ // Each write succeeds or fails independently. You cannot write to the same
+ // document more than once per request.
+ repeated Write writes = 2;
+
+ // Labels associated with this batch write.
+ map<string, string> labels = 3;
+}
+
+// The response from [Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite].
+message BatchWriteResponse {
+ // The result of applying the writes.
+ //
+ // The i-th write result corresponds to the i-th write in the
+ // request.
+ repeated WriteResult write_results = 1;
+
+ // The status of applying the writes.
+ //
+ // The i-th write status corresponds to the i-th write in the
+ // request.
+ repeated google.rpc.Status status = 2;
+}
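Unlike Commit, BatchWrite is explicitly non-atomic: the response carries one WriteResult and one google.rpc.Status per write, in request order, and a failed write does not roll back the others. A minimal request sketch (placeholder project id and data):

    from google.cloud.firestore_v1.proto import document_pb2, firestore_pb2, write_pb2

    DB = "projects/<project-id>/databases/(default)"
    req = firestore_pb2.BatchWriteRequest(
        database=DB,
        writes=[
            write_pb2.Write(
                update=document_pb2.Document(
                    name=DB + "/documents/Users/someUserID",
                    fields={"phone": document_pb2.Value(string_value="+90 555 000 0000")},
                )
            )
        ],
        labels={"origin": "batch-import"},
    )

Remember the restriction noted above: a given document may appear in the writes list at most once per request.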
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/proto/firestore_pb2.py b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/firestore_pb2.py
new file mode 100644
index 000000000..c4e854268
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/firestore_pb2.py
@@ -0,0 +1,4500 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/cloud/firestore_v1/proto/firestore.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
+from google.api import client_pb2 as google_dot_api_dot_client__pb2
+from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2
+from google.cloud.firestore_v1.proto import (
+ common_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_common__pb2,
+)
+from google.cloud.firestore_v1.proto import (
+ document_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_document__pb2,
+)
+from google.cloud.firestore_v1.proto import (
+ query_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_query__pb2,
+)
+from google.cloud.firestore_v1.proto import (
+ write_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_write__pb2,
+)
+from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
+from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
+from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name="google/cloud/firestore_v1/proto/firestore.proto",
+ package="google.firestore.v1",
+ syntax="proto3",
+ serialized_options=b"\n\027com.google.firestore.v1B\016FirestoreProtoP\001Z\n\x06labels\x18\x04 \x03(\x0b\x32..google.firestore.v1.ListenRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x0f\n\rtarget_change"\xd5\x02\n\x0eListenResponse\x12:\n\rtarget_change\x18\x02 \x01(\x0b\x32!.google.firestore.v1.TargetChangeH\x00\x12>\n\x0f\x64ocument_change\x18\x03 \x01(\x0b\x32#.google.firestore.v1.DocumentChangeH\x00\x12>\n\x0f\x64ocument_delete\x18\x04 \x01(\x0b\x32#.google.firestore.v1.DocumentDeleteH\x00\x12>\n\x0f\x64ocument_remove\x18\x06 \x01(\x0b\x32#.google.firestore.v1.DocumentRemoveH\x00\x12\x36\n\x06\x66ilter\x18\x05 \x01(\x0b\x32$.google.firestore.v1.ExistenceFilterH\x00\x42\x0f\n\rresponse_type"\xa1\x03\n\x06Target\x12\x38\n\x05query\x18\x02 \x01(\x0b\x32\'.google.firestore.v1.Target.QueryTargetH\x00\x12@\n\tdocuments\x18\x03 \x01(\x0b\x32+.google.firestore.v1.Target.DocumentsTargetH\x00\x12\x16\n\x0cresume_token\x18\x04 \x01(\x0cH\x01\x12/\n\tread_time\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x01\x12\x11\n\ttarget_id\x18\x05 \x01(\x05\x12\x0c\n\x04once\x18\x06 \x01(\x08\x1a$\n\x0f\x44ocumentsTarget\x12\x11\n\tdocuments\x18\x02 \x03(\t\x1am\n\x0bQueryTarget\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12@\n\x10structured_query\x18\x02 \x01(\x0b\x32$.google.firestore.v1.StructuredQueryH\x00\x42\x0c\n\nquery_typeB\r\n\x0btarget_typeB\r\n\x0bresume_type"\xaa\x02\n\x0cTargetChange\x12N\n\x12target_change_type\x18\x01 \x01(\x0e\x32\x32.google.firestore.v1.TargetChange.TargetChangeType\x12\x12\n\ntarget_ids\x18\x02 \x03(\x05\x12!\n\x05\x63\x61use\x18\x03 \x01(\x0b\x32\x12.google.rpc.Status\x12\x14\n\x0cresume_token\x18\x04 \x01(\x0c\x12-\n\tread_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"N\n\x10TargetChangeType\x12\r\n\tNO_CHANGE\x10\x00\x12\x07\n\x03\x41\x44\x44\x10\x01\x12\n\n\x06REMOVE\x10\x02\x12\x0b\n\x07\x43URRENT\x10\x03\x12\t\n\x05RESET\x10\x04"V\n\x18ListCollectionIdsRequest\x12\x13\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"L\n\x19ListCollectionIdsResponse\x12\x16\n\x0e\x63ollection_ids\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\xc9\x01\n\x11\x42\x61tchWriteRequest\x12\x15\n\x08\x64\x61tabase\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12*\n\x06writes\x18\x02 \x03(\x0b\x32\x1a.google.firestore.v1.Write\x12\x42\n\x06labels\x18\x03 \x03(\x0b\x32\x32.google.firestore.v1.BatchWriteRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"q\n\x12\x42\x61tchWriteResponse\x12\x37\n\rwrite_results\x18\x01 \x03(\x0b\x32 .google.firestore.v1.WriteResult\x12"\n\x06status\x18\x02 
\x03(\x0b\x32\x12.google.rpc.Status2\xfd\x16\n\tFirestore\x12\x8f\x01\n\x0bGetDocument\x12\'.google.firestore.v1.GetDocumentRequest\x1a\x1d.google.firestore.v1.Document"8\x82\xd3\xe4\x93\x02\x32\x12\x30/v1/{name=projects/*/databases/*/documents/*/**}\x12\xb2\x01\n\rListDocuments\x12).google.firestore.v1.ListDocumentsRequest\x1a*.google.firestore.v1.ListDocumentsResponse"J\x82\xd3\xe4\x93\x02\x44\x12\x42/v1/{parent=projects/*/databases/*/documents/*/**}/{collection_id}\x12\xbf\x01\n\x0eUpdateDocument\x12*.google.firestore.v1.UpdateDocumentRequest\x1a\x1d.google.firestore.v1.Document"b\x82\xd3\xe4\x93\x02\x45\x32\x39/v1/{document.name=projects/*/databases/*/documents/*/**}:\x08\x64ocument\xda\x41\x14\x64ocument,update_mask\x12\x95\x01\n\x0e\x44\x65leteDocument\x12*.google.firestore.v1.DeleteDocumentRequest\x1a\x16.google.protobuf.Empty"?\x82\xd3\xe4\x93\x02\x32*0/v1/{name=projects/*/databases/*/documents/*/**}\xda\x41\x04name\x12\xb9\x01\n\x11\x42\x61tchGetDocuments\x12-.google.firestore.v1.BatchGetDocumentsRequest\x1a..google.firestore.v1.BatchGetDocumentsResponse"C\x82\xd3\xe4\x93\x02="8/v1/{database=projects/*/databases/*}/documents:batchGet:\x01*0\x01\x12\xc7\x01\n\x10\x42\x65ginTransaction\x12,.google.firestore.v1.BeginTransactionRequest\x1a-.google.firestore.v1.BeginTransactionResponse"V\x82\xd3\xe4\x93\x02\x45"@/v1/{database=projects/*/databases/*}/documents:beginTransaction:\x01*\xda\x41\x08\x64\x61tabase\x12\xa6\x01\n\x06\x43ommit\x12".google.firestore.v1.CommitRequest\x1a#.google.firestore.v1.CommitResponse"S\x82\xd3\xe4\x93\x02;"6/v1/{database=projects/*/databases/*}/documents:commit:\x01*\xda\x41\x0f\x64\x61tabase,writes\x12\xa4\x01\n\x08Rollback\x12$.google.firestore.v1.RollbackRequest\x1a\x16.google.protobuf.Empty"Z\x82\xd3\xe4\x93\x02="8/v1/{database=projects/*/databases/*}/documents:rollback:\x01*\xda\x41\x14\x64\x61tabase,transaction\x12\xdf\x01\n\x08RunQuery\x12$.google.firestore.v1.RunQueryRequest\x1a%.google.firestore.v1.RunQueryResponse"\x83\x01\x82\xd3\xe4\x93\x02}"6/v1/{parent=projects/*/databases/*/documents}:runQuery:\x01*Z@";/v1/{parent=projects/*/databases/*/documents/*/**}:runQuery:\x01*0\x01\x12\xfc\x01\n\x0ePartitionQuery\x12*.google.firestore.v1.PartitionQueryRequest\x1a+.google.firestore.v1.PartitionQueryResponse"\x90\x01\x82\xd3\xe4\x93\x02\x89\x01" 1` becomes
+ //  * `SELECT * FROM Foo WHERE A > 1` becomes
+ //    `SELECT * FROM Foo WHERE A > 1 ORDER BY A, __name__`
+ repeated Order order_by = 4;
+
+ // A starting point for the query results.
+ Cursor start_at = 7;
+
+ // An end point for the query results.
+ Cursor end_at = 8;
+
+ // The number of results to skip.
+ //
+ // Applies before limit, but after all other constraints. Must be >= 0 if
+ // specified.
+ int32 offset = 6;
+
+ // The maximum number of results to return.
+ //
+ // Applies after all other constraints.
+ // Must be >= 0 if specified.
+ google.protobuf.Int32Value limit = 5;
+}
+
+// A position in a query result set.
+message Cursor {
+ // The values that represent a position, in the order they appear in
+ // the order by clause of a query.
+ //
+ // Can contain fewer values than specified in the order by clause.
+ repeated Value values = 1;
+
+ // If the position is just before or just after the given values, relative
+ // to the sort order defined by the query.
+ bool before = 2;
+}
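Cursor values pair positionally with the query's order-by clause, so a cursor is only meaningful relative to that ordering. A minimal sketch with the generated query_pb2 module added below; the field path and values are illustrative:

    from google.cloud.firestore_v1.proto import document_pb2, query_pb2
    from google.protobuf import wrappers_pb2

    q = query_pb2.StructuredQuery(
        order_by=[
            query_pb2.StructuredQuery.Order(
                field=query_pb2.StructuredQuery.FieldReference(field_path="A"),
                direction=query_pb2.StructuredQuery.ASCENDING,
            )
        ],
        # Resume strictly after A == 1; before=False places the position
        # just after the given values.
        start_at=query_pb2.Cursor(
            values=[document_pb2.Value(integer_value=1)], before=False
        ),
        limit=wrappers_pb2.Int32Value(value=50),
    )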
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/proto/query_pb2.py b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/query_pb2.py
new file mode 100644
index 000000000..37d568225
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/query_pb2.py
@@ -0,0 +1,1280 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/cloud/firestore_v1/proto/query.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.cloud.firestore_v1.proto import (
+ document_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_document__pb2,
+)
+from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2
+from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name="google/cloud/firestore_v1/proto/query.proto",
+ package="google.firestore.v1",
+ syntax="proto3",
+ serialized_options=b"\n\027com.google.firestore.v1B\nQueryProtoP\001Z 1``
+ becomes ``SELECT * FROM Foo WHERE A > 1 ORDER BY A,
+ __name__``
+ start_at:
+ A starting point for the query results.
+ end_at:
+ An end point for the query results.
+ offset:
+ The number of results to skip. Applies before limit, but
+ after all other constraints. Must be >= 0 if specified.
+ limit:
+ The maximum number of results to return. Applies after all
+ other constraints. Must be >= 0 if specified.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1.StructuredQuery)
+ },
+)
+_sym_db.RegisterMessage(StructuredQuery)
+_sym_db.RegisterMessage(StructuredQuery.CollectionSelector)
+_sym_db.RegisterMessage(StructuredQuery.Filter)
+_sym_db.RegisterMessage(StructuredQuery.CompositeFilter)
+_sym_db.RegisterMessage(StructuredQuery.FieldFilter)
+_sym_db.RegisterMessage(StructuredQuery.UnaryFilter)
+_sym_db.RegisterMessage(StructuredQuery.FieldReference)
+_sym_db.RegisterMessage(StructuredQuery.Projection)
+_sym_db.RegisterMessage(StructuredQuery.Order)
+
+Cursor = _reflection.GeneratedProtocolMessageType(
+ "Cursor",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _CURSOR,
+ "__module__": "google.cloud.firestore_v1.proto.query_pb2",
+ "__doc__": """A position in a query result set.
+
+ Attributes:
+ values:
+ The values that represent a position, in the order they appear
+ in the order by clause of a query. Can contain fewer values
+ than specified in the order by clause.
+ before:
+ If the position is just before or just after the given values,
+ relative to the sort order defined by the query.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1.Cursor)
+ },
+)
+_sym_db.RegisterMessage(Cursor)
+
+
+DESCRIPTOR._options = None
+# @@protoc_insertion_point(module_scope)
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/proto/query_pb2_grpc.py b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/query_pb2_grpc.py
new file mode 100644
index 000000000..8a9393943
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/query_pb2_grpc.py
@@ -0,0 +1,3 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/proto/test_v1_pb2.py b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/test_v1_pb2.py
new file mode 100644
index 000000000..336bab948
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/test_v1_pb2.py
@@ -0,0 +1,2190 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: test_v1.proto
+
+import sys
+
+_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.cloud.firestore_v1.proto import (
+ common_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_common__pb2,
+)
+from google.cloud.firestore_v1.proto import (
+ document_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_document__pb2,
+)
+from google.cloud.firestore_v1.proto import (
+ firestore_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2,
+)
+from google.cloud.firestore_v1.proto import (
+ query_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_query__pb2,
+)
+from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name="test_v1.proto",
+ package="tests.v1",
+ syntax="proto3",
+ serialized_pb=_b(
+ '\n\rtest_v1.proto\x12\x08tests.v1\x1a,google/cloud/firestore_v1/proto/common.proto\x1a.google/cloud/firestore_v1/proto/document.proto\x1a/google/cloud/firestore_v1/proto/firestore.proto\x1a+google/cloud/firestore_v1/proto/query.proto\x1a\x1fgoogle/protobuf/timestamp.proto"*\n\tTestSuite\x12\x1d\n\x05tests\x18\x01 \x03(\x0b\x32\x0e.tests.v1.Test"\xe0\x02\n\x04Test\x12\x13\n\x0b\x64\x65scription\x18\x01 \x01(\t\x12 \n\x03get\x18\x02 \x01(\x0b\x32\x11.tests.v1.GetTestH\x00\x12&\n\x06\x63reate\x18\x03 \x01(\x0b\x32\x14.tests.v1.CreateTestH\x00\x12 \n\x03set\x18\x04 \x01(\x0b\x32\x11.tests.v1.SetTestH\x00\x12&\n\x06update\x18\x05 \x01(\x0b\x32\x14.tests.v1.UpdateTestH\x00\x12\x31\n\x0cupdate_paths\x18\x06 \x01(\x0b\x32\x19.tests.v1.UpdatePathsTestH\x00\x12&\n\x06\x64\x65lete\x18\x07 \x01(\x0b\x32\x14.tests.v1.DeleteTestH\x00\x12$\n\x05query\x18\x08 \x01(\x0b\x32\x13.tests.v1.QueryTestH\x00\x12&\n\x06listen\x18\t \x01(\x0b\x32\x14.tests.v1.ListenTestH\x00\x42\x06\n\x04test"Y\n\x07GetTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12\x38\n\x07request\x18\x02 \x01(\x0b\x32\'.google.firestore.v1.GetDocumentRequest"|\n\nCreateTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12\x11\n\tjson_data\x18\x02 \x01(\t\x12\x33\n\x07request\x18\x03 \x01(\x0b\x32".google.firestore.v1.CommitRequest\x12\x10\n\x08is_error\x18\x04 \x01(\x08"\x9e\x01\n\x07SetTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12#\n\x06option\x18\x02 \x01(\x0b\x32\x13.tests.v1.SetOption\x12\x11\n\tjson_data\x18\x03 \x01(\t\x12\x33\n\x07request\x18\x04 \x01(\x0b\x32".google.firestore.v1.CommitRequest\x12\x10\n\x08is_error\x18\x05 \x01(\x08"\xb5\x01\n\nUpdateTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12\x37\n\x0cprecondition\x18\x02 \x01(\x0b\x32!.google.firestore.v1.Precondition\x12\x11\n\tjson_data\x18\x03 \x01(\t\x12\x33\n\x07request\x18\x04 \x01(\x0b\x32".google.firestore.v1.CommitRequest\x12\x10\n\x08is_error\x18\x05 \x01(\x08"\xe6\x01\n\x0fUpdatePathsTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12\x37\n\x0cprecondition\x18\x02 \x01(\x0b\x32!.google.firestore.v1.Precondition\x12(\n\x0b\x66ield_paths\x18\x03 \x03(\x0b\x32\x13.tests.v1.FieldPath\x12\x13\n\x0bjson_values\x18\x04 \x03(\t\x12\x33\n\x07request\x18\x05 \x01(\x0b\x32".google.firestore.v1.CommitRequest\x12\x10\n\x08is_error\x18\x06 \x01(\x08"\xa2\x01\n\nDeleteTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12\x37\n\x0cprecondition\x18\x02 \x01(\x0b\x32!.google.firestore.v1.Precondition\x12\x33\n\x07request\x18\x03 \x01(\x0b\x32".google.firestore.v1.CommitRequest\x12\x10\n\x08is_error\x18\x04 \x01(\x08"=\n\tSetOption\x12\x0b\n\x03\x61ll\x18\x01 \x01(\x08\x12#\n\x06\x66ields\x18\x02 \x03(\x0b\x32\x13.tests.v1.FieldPath"\x88\x01\n\tQueryTest\x12\x11\n\tcoll_path\x18\x01 \x01(\t\x12!\n\x07\x63lauses\x18\x02 \x03(\x0b\x32\x10.tests.v1.Clause\x12\x33\n\x05query\x18\x03 \x01(\x0b\x32$.google.firestore.v1.StructuredQuery\x12\x10\n\x08is_error\x18\x04 \x01(\x08"\xbd\x02\n\x06\x43lause\x12"\n\x06select\x18\x01 \x01(\x0b\x32\x10.tests.v1.SelectH\x00\x12 \n\x05where\x18\x02 \x01(\x0b\x32\x0f.tests.v1.WhereH\x00\x12%\n\x08order_by\x18\x03 \x01(\x0b\x32\x11.tests.v1.OrderByH\x00\x12\x10\n\x06offset\x18\x04 \x01(\x05H\x00\x12\x0f\n\x05limit\x18\x05 \x01(\x05H\x00\x12$\n\x08start_at\x18\x06 \x01(\x0b\x32\x10.tests.v1.CursorH\x00\x12\'\n\x0bstart_after\x18\x07 \x01(\x0b\x32\x10.tests.v1.CursorH\x00\x12"\n\x06\x65nd_at\x18\x08 \x01(\x0b\x32\x10.tests.v1.CursorH\x00\x12&\n\nend_before\x18\t 
\x01(\x0b\x32\x10.tests.v1.CursorH\x00\x42\x08\n\x06\x63lause"-\n\x06Select\x12#\n\x06\x66ields\x18\x01 \x03(\x0b\x32\x13.tests.v1.FieldPath"J\n\x05Where\x12!\n\x04path\x18\x01 \x01(\x0b\x32\x13.tests.v1.FieldPath\x12\n\n\x02op\x18\x02 \x01(\t\x12\x12\n\njson_value\x18\x03 \x01(\t"?\n\x07OrderBy\x12!\n\x04path\x18\x01 \x01(\x0b\x32\x13.tests.v1.FieldPath\x12\x11\n\tdirection\x18\x02 \x01(\t"J\n\x06\x43ursor\x12+\n\x0c\x64oc_snapshot\x18\x01 \x01(\x0b\x32\x15.tests.v1.DocSnapshot\x12\x13\n\x0bjson_values\x18\x02 \x03(\t".\n\x0b\x44ocSnapshot\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x11\n\tjson_data\x18\x02 \x01(\t"\x1a\n\tFieldPath\x12\r\n\x05\x66ield\x18\x01 \x03(\t"}\n\nListenTest\x12\x36\n\tresponses\x18\x01 \x03(\x0b\x32#.google.firestore.v1.ListenResponse\x12%\n\tsnapshots\x18\x02 \x03(\x0b\x32\x12.tests.v1.Snapshot\x12\x10\n\x08is_error\x18\x03 \x01(\x08"\x8c\x01\n\x08Snapshot\x12+\n\x04\x64ocs\x18\x01 \x03(\x0b\x32\x1d.google.firestore.v1.Document\x12$\n\x07\x63hanges\x18\x02 \x03(\x0b\x32\x13.tests.v1.DocChange\x12-\n\tread_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\xc9\x01\n\tDocChange\x12&\n\x04kind\x18\x01 \x01(\x0e\x32\x18.tests.v1.DocChange.Kind\x12*\n\x03\x64oc\x18\x02 \x01(\x0b\x32\x1d.google.firestore.v1.Document\x12\x11\n\told_index\x18\x03 \x01(\x05\x12\x11\n\tnew_index\x18\x04 \x01(\x05"B\n\x04Kind\x12\x14\n\x10KIND_UNSPECIFIED\x10\x00\x12\t\n\x05\x41\x44\x44\x45\x44\x10\x01\x12\x0b\n\x07REMOVED\x10\x02\x12\x0c\n\x08MODIFIED\x10\x03\x42x\n&com.google.cloud.firestore.conformance\xaa\x02"Google.Cloud.Firestore.Tests.Proto\xca\x02(Google\\Cloud\\Firestore\\Tests\\Conformanceb\x06proto3'
+ ),
+ dependencies=[
+ google_dot_cloud_dot_firestore__v1_dot_proto_dot_common__pb2.DESCRIPTOR,
+ google_dot_cloud_dot_firestore__v1_dot_proto_dot_document__pb2.DESCRIPTOR,
+ google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2.DESCRIPTOR,
+ google_dot_cloud_dot_firestore__v1_dot_proto_dot_query__pb2.DESCRIPTOR,
+ google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,
+ ],
+)
+
+
+_DOCCHANGE_KIND = _descriptor.EnumDescriptor(
+ name="Kind",
+ full_name="tests.v1.DocChange.Kind",
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name="KIND_UNSPECIFIED", index=0, number=0, options=None, type=None
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="ADDED", index=1, number=1, options=None, type=None
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="REMOVED", index=2, number=2, options=None, type=None
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="MODIFIED", index=3, number=3, options=None, type=None
+ ),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=2875,
+ serialized_end=2941,
+)
+_sym_db.RegisterEnumDescriptor(_DOCCHANGE_KIND)
+
+
+_TESTSUITE = _descriptor.Descriptor(
+ name="TestSuite",
+ full_name="tests.v1.TestSuite",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="tests",
+ full_name="tests.v1.TestSuite.tests",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ )
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=248,
+ serialized_end=290,
+)
+
+
+_TEST = _descriptor.Descriptor(
+ name="Test",
+ full_name="tests.v1.Test",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="description",
+ full_name="tests.v1.Test.description",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="get",
+ full_name="tests.v1.Test.get",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="create",
+ full_name="tests.v1.Test.create",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="set",
+ full_name="tests.v1.Test.set",
+ index=3,
+ number=4,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="update",
+ full_name="tests.v1.Test.update",
+ index=4,
+ number=5,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="update_paths",
+ full_name="tests.v1.Test.update_paths",
+ index=5,
+ number=6,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="delete",
+ full_name="tests.v1.Test.delete",
+ index=6,
+ number=7,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="query",
+ full_name="tests.v1.Test.query",
+ index=7,
+ number=8,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="listen",
+ full_name="tests.v1.Test.listen",
+ index=8,
+ number=9,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name="test",
+ full_name="tests.v1.Test.test",
+ index=0,
+ containing_type=None,
+ fields=[],
+ )
+ ],
+ serialized_start=293,
+ serialized_end=645,
+)
+
+
+_GETTEST = _descriptor.Descriptor(
+ name="GetTest",
+ full_name="tests.v1.GetTest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="doc_ref_path",
+ full_name="tests.v1.GetTest.doc_ref_path",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="request",
+ full_name="tests.v1.GetTest.request",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=647,
+ serialized_end=736,
+)
+
+
+_CREATETEST = _descriptor.Descriptor(
+ name="CreateTest",
+ full_name="tests.v1.CreateTest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="doc_ref_path",
+ full_name="tests.v1.CreateTest.doc_ref_path",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="json_data",
+ full_name="tests.v1.CreateTest.json_data",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="request",
+ full_name="tests.v1.CreateTest.request",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="is_error",
+ full_name="tests.v1.CreateTest.is_error",
+ index=3,
+ number=4,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=738,
+ serialized_end=862,
+)
+
+
+_SETTEST = _descriptor.Descriptor(
+ name="SetTest",
+ full_name="tests.v1.SetTest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="doc_ref_path",
+ full_name="tests.v1.SetTest.doc_ref_path",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="option",
+ full_name="tests.v1.SetTest.option",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="json_data",
+ full_name="tests.v1.SetTest.json_data",
+ index=2,
+ number=3,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="request",
+ full_name="tests.v1.SetTest.request",
+ index=3,
+ number=4,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="is_error",
+ full_name="tests.v1.SetTest.is_error",
+ index=4,
+ number=5,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=865,
+ serialized_end=1023,
+)
+
+
+_UPDATETEST = _descriptor.Descriptor(
+ name="UpdateTest",
+ full_name="tests.v1.UpdateTest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="doc_ref_path",
+ full_name="tests.v1.UpdateTest.doc_ref_path",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="precondition",
+ full_name="tests.v1.UpdateTest.precondition",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="json_data",
+ full_name="tests.v1.UpdateTest.json_data",
+ index=2,
+ number=3,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="request",
+ full_name="tests.v1.UpdateTest.request",
+ index=3,
+ number=4,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="is_error",
+ full_name="tests.v1.UpdateTest.is_error",
+ index=4,
+ number=5,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=1026,
+ serialized_end=1207,
+)
+
+
+_UPDATEPATHSTEST = _descriptor.Descriptor(
+ name="UpdatePathsTest",
+ full_name="tests.v1.UpdatePathsTest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="doc_ref_path",
+ full_name="tests.v1.UpdatePathsTest.doc_ref_path",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="precondition",
+ full_name="tests.v1.UpdatePathsTest.precondition",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="field_paths",
+ full_name="tests.v1.UpdatePathsTest.field_paths",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="json_values",
+ full_name="tests.v1.UpdatePathsTest.json_values",
+ index=3,
+ number=4,
+ type=9,
+ cpp_type=9,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="request",
+ full_name="tests.v1.UpdatePathsTest.request",
+ index=4,
+ number=5,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="is_error",
+ full_name="tests.v1.UpdatePathsTest.is_error",
+ index=5,
+ number=6,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=1210,
+ serialized_end=1440,
+)
+
+
+_DELETETEST = _descriptor.Descriptor(
+ name="DeleteTest",
+ full_name="tests.v1.DeleteTest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="doc_ref_path",
+ full_name="tests.v1.DeleteTest.doc_ref_path",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="precondition",
+ full_name="tests.v1.DeleteTest.precondition",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="request",
+ full_name="tests.v1.DeleteTest.request",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="is_error",
+ full_name="tests.v1.DeleteTest.is_error",
+ index=3,
+ number=4,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=1443,
+ serialized_end=1605,
+)
+
+
+_SETOPTION = _descriptor.Descriptor(
+ name="SetOption",
+ full_name="tests.v1.SetOption",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="all",
+ full_name="tests.v1.SetOption.all",
+ index=0,
+ number=1,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="fields",
+ full_name="tests.v1.SetOption.fields",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=1607,
+ serialized_end=1668,
+)
+
+
+_QUERYTEST = _descriptor.Descriptor(
+ name="QueryTest",
+ full_name="tests.v1.QueryTest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="coll_path",
+ full_name="tests.v1.QueryTest.coll_path",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="clauses",
+ full_name="tests.v1.QueryTest.clauses",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="query",
+ full_name="tests.v1.QueryTest.query",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="is_error",
+ full_name="tests.v1.QueryTest.is_error",
+ index=3,
+ number=4,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=1671,
+ serialized_end=1807,
+)
+
+
+_CLAUSE = _descriptor.Descriptor(
+ name="Clause",
+ full_name="tests.v1.Clause",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="select",
+ full_name="tests.v1.Clause.select",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="where",
+ full_name="tests.v1.Clause.where",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="order_by",
+ full_name="tests.v1.Clause.order_by",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="offset",
+ full_name="tests.v1.Clause.offset",
+ index=3,
+ number=4,
+ type=5,
+ cpp_type=1,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="limit",
+ full_name="tests.v1.Clause.limit",
+ index=4,
+ number=5,
+ type=5,
+ cpp_type=1,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="start_at",
+ full_name="tests.v1.Clause.start_at",
+ index=5,
+ number=6,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="start_after",
+ full_name="tests.v1.Clause.start_after",
+ index=6,
+ number=7,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="end_at",
+ full_name="tests.v1.Clause.end_at",
+ index=7,
+ number=8,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="end_before",
+ full_name="tests.v1.Clause.end_before",
+ index=8,
+ number=9,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name="clause",
+ full_name="tests.v1.Clause.clause",
+ index=0,
+ containing_type=None,
+ fields=[],
+ )
+ ],
+ serialized_start=1810,
+ serialized_end=2127,
+)
+
+
+_SELECT = _descriptor.Descriptor(
+ name="Select",
+ full_name="tests.v1.Select",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="fields",
+ full_name="tests.v1.Select.fields",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ )
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=2129,
+ serialized_end=2174,
+)
+
+
+_WHERE = _descriptor.Descriptor(
+ name="Where",
+ full_name="tests.v1.Where",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="path",
+ full_name="tests.v1.Where.path",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="op",
+ full_name="tests.v1.Where.op",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="json_value",
+ full_name="tests.v1.Where.json_value",
+ index=2,
+ number=3,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=2176,
+ serialized_end=2250,
+)
+
+
+_ORDERBY = _descriptor.Descriptor(
+ name="OrderBy",
+ full_name="tests.v1.OrderBy",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="path",
+ full_name="tests.v1.OrderBy.path",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="direction",
+ full_name="tests.v1.OrderBy.direction",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=2252,
+ serialized_end=2315,
+)
+
+
+_CURSOR = _descriptor.Descriptor(
+ name="Cursor",
+ full_name="tests.v1.Cursor",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="doc_snapshot",
+ full_name="tests.v1.Cursor.doc_snapshot",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="json_values",
+ full_name="tests.v1.Cursor.json_values",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=2317,
+ serialized_end=2391,
+)
+
+
+_DOCSNAPSHOT = _descriptor.Descriptor(
+ name="DocSnapshot",
+ full_name="tests.v1.DocSnapshot",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="path",
+ full_name="tests.v1.DocSnapshot.path",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="json_data",
+ full_name="tests.v1.DocSnapshot.json_data",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=2393,
+ serialized_end=2439,
+)
+
+
+_FIELDPATH = _descriptor.Descriptor(
+ name="FieldPath",
+ full_name="tests.v1.FieldPath",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="field",
+ full_name="tests.v1.FieldPath.field",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ )
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=2441,
+ serialized_end=2467,
+)
+
+
+_LISTENTEST = _descriptor.Descriptor(
+ name="ListenTest",
+ full_name="tests.v1.ListenTest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="responses",
+ full_name="tests.v1.ListenTest.responses",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="snapshots",
+ full_name="tests.v1.ListenTest.snapshots",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="is_error",
+ full_name="tests.v1.ListenTest.is_error",
+ index=2,
+ number=3,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=2469,
+ serialized_end=2594,
+)
+
+
+_SNAPSHOT = _descriptor.Descriptor(
+ name="Snapshot",
+ full_name="tests.v1.Snapshot",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="docs",
+ full_name="tests.v1.Snapshot.docs",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="changes",
+ full_name="tests.v1.Snapshot.changes",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="read_time",
+ full_name="tests.v1.Snapshot.read_time",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=2597,
+ serialized_end=2737,
+)
+
+
+_DOCCHANGE = _descriptor.Descriptor(
+ name="DocChange",
+ full_name="tests.v1.DocChange",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="kind",
+ full_name="tests.v1.DocChange.kind",
+ index=0,
+ number=1,
+ type=14,
+ cpp_type=8,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="doc",
+ full_name="tests.v1.DocChange.doc",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="old_index",
+ full_name="tests.v1.DocChange.old_index",
+ index=2,
+ number=3,
+ type=5,
+ cpp_type=1,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="new_index",
+ full_name="tests.v1.DocChange.new_index",
+ index=3,
+ number=4,
+ type=5,
+ cpp_type=1,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[_DOCCHANGE_KIND],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=2740,
+ serialized_end=2941,
+)
+
+_TESTSUITE.fields_by_name["tests"].message_type = _TEST
+_TEST.fields_by_name["get"].message_type = _GETTEST
+_TEST.fields_by_name["create"].message_type = _CREATETEST
+_TEST.fields_by_name["set"].message_type = _SETTEST
+_TEST.fields_by_name["update"].message_type = _UPDATETEST
+_TEST.fields_by_name["update_paths"].message_type = _UPDATEPATHSTEST
+_TEST.fields_by_name["delete"].message_type = _DELETETEST
+_TEST.fields_by_name["query"].message_type = _QUERYTEST
+_TEST.fields_by_name["listen"].message_type = _LISTENTEST
+_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["get"])
+_TEST.fields_by_name["get"].containing_oneof = _TEST.oneofs_by_name["test"]
+_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["create"])
+_TEST.fields_by_name["create"].containing_oneof = _TEST.oneofs_by_name["test"]
+_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["set"])
+_TEST.fields_by_name["set"].containing_oneof = _TEST.oneofs_by_name["test"]
+_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["update"])
+_TEST.fields_by_name["update"].containing_oneof = _TEST.oneofs_by_name["test"]
+_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["update_paths"])
+_TEST.fields_by_name["update_paths"].containing_oneof = _TEST.oneofs_by_name["test"]
+_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["delete"])
+_TEST.fields_by_name["delete"].containing_oneof = _TEST.oneofs_by_name["test"]
+_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["query"])
+_TEST.fields_by_name["query"].containing_oneof = _TEST.oneofs_by_name["test"]
+_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["listen"])
+_TEST.fields_by_name["listen"].containing_oneof = _TEST.oneofs_by_name["test"]
+_GETTEST.fields_by_name[
+ "request"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._GETDOCUMENTREQUEST
+)
+_CREATETEST.fields_by_name[
+ "request"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._COMMITREQUEST
+)
+_SETTEST.fields_by_name["option"].message_type = _SETOPTION
+_SETTEST.fields_by_name[
+ "request"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._COMMITREQUEST
+)
+_UPDATETEST.fields_by_name[
+ "precondition"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1_dot_proto_dot_common__pb2._PRECONDITION
+)
+_UPDATETEST.fields_by_name[
+ "request"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._COMMITREQUEST
+)
+_UPDATEPATHSTEST.fields_by_name[
+ "precondition"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1_dot_proto_dot_common__pb2._PRECONDITION
+)
+_UPDATEPATHSTEST.fields_by_name["field_paths"].message_type = _FIELDPATH
+_UPDATEPATHSTEST.fields_by_name[
+ "request"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._COMMITREQUEST
+)
+_DELETETEST.fields_by_name[
+ "precondition"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1_dot_proto_dot_common__pb2._PRECONDITION
+)
+_DELETETEST.fields_by_name[
+ "request"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._COMMITREQUEST
+)
+_SETOPTION.fields_by_name["fields"].message_type = _FIELDPATH
+_QUERYTEST.fields_by_name["clauses"].message_type = _CLAUSE
+_QUERYTEST.fields_by_name[
+ "query"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1_dot_proto_dot_query__pb2._STRUCTUREDQUERY
+)
+_CLAUSE.fields_by_name["select"].message_type = _SELECT
+_CLAUSE.fields_by_name["where"].message_type = _WHERE
+_CLAUSE.fields_by_name["order_by"].message_type = _ORDERBY
+_CLAUSE.fields_by_name["start_at"].message_type = _CURSOR
+_CLAUSE.fields_by_name["start_after"].message_type = _CURSOR
+_CLAUSE.fields_by_name["end_at"].message_type = _CURSOR
+_CLAUSE.fields_by_name["end_before"].message_type = _CURSOR
+_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["select"])
+_CLAUSE.fields_by_name["select"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
+_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["where"])
+_CLAUSE.fields_by_name["where"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
+_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["order_by"])
+_CLAUSE.fields_by_name["order_by"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
+_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["offset"])
+_CLAUSE.fields_by_name["offset"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
+_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["limit"])
+_CLAUSE.fields_by_name["limit"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
+_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["start_at"])
+_CLAUSE.fields_by_name["start_at"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
+_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["start_after"])
+_CLAUSE.fields_by_name["start_after"].containing_oneof = _CLAUSE.oneofs_by_name[
+ "clause"
+]
+_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["end_at"])
+_CLAUSE.fields_by_name["end_at"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
+_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["end_before"])
+_CLAUSE.fields_by_name["end_before"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
+_SELECT.fields_by_name["fields"].message_type = _FIELDPATH
+_WHERE.fields_by_name["path"].message_type = _FIELDPATH
+_ORDERBY.fields_by_name["path"].message_type = _FIELDPATH
+_CURSOR.fields_by_name["doc_snapshot"].message_type = _DOCSNAPSHOT
+_LISTENTEST.fields_by_name[
+ "responses"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._LISTENRESPONSE
+)
+_LISTENTEST.fields_by_name["snapshots"].message_type = _SNAPSHOT
+_SNAPSHOT.fields_by_name[
+ "docs"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1_dot_proto_dot_document__pb2._DOCUMENT
+)
+_SNAPSHOT.fields_by_name["changes"].message_type = _DOCCHANGE
+_SNAPSHOT.fields_by_name[
+ "read_time"
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_DOCCHANGE.fields_by_name["kind"].enum_type = _DOCCHANGE_KIND
+_DOCCHANGE.fields_by_name[
+ "doc"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1_dot_proto_dot_document__pb2._DOCUMENT
+)
+_DOCCHANGE_KIND.containing_type = _DOCCHANGE
+DESCRIPTOR.message_types_by_name["TestSuite"] = _TESTSUITE
+DESCRIPTOR.message_types_by_name["Test"] = _TEST
+DESCRIPTOR.message_types_by_name["GetTest"] = _GETTEST
+DESCRIPTOR.message_types_by_name["CreateTest"] = _CREATETEST
+DESCRIPTOR.message_types_by_name["SetTest"] = _SETTEST
+DESCRIPTOR.message_types_by_name["UpdateTest"] = _UPDATETEST
+DESCRIPTOR.message_types_by_name["UpdatePathsTest"] = _UPDATEPATHSTEST
+DESCRIPTOR.message_types_by_name["DeleteTest"] = _DELETETEST
+DESCRIPTOR.message_types_by_name["SetOption"] = _SETOPTION
+DESCRIPTOR.message_types_by_name["QueryTest"] = _QUERYTEST
+DESCRIPTOR.message_types_by_name["Clause"] = _CLAUSE
+DESCRIPTOR.message_types_by_name["Select"] = _SELECT
+DESCRIPTOR.message_types_by_name["Where"] = _WHERE
+DESCRIPTOR.message_types_by_name["OrderBy"] = _ORDERBY
+DESCRIPTOR.message_types_by_name["Cursor"] = _CURSOR
+DESCRIPTOR.message_types_by_name["DocSnapshot"] = _DOCSNAPSHOT
+DESCRIPTOR.message_types_by_name["FieldPath"] = _FIELDPATH
+DESCRIPTOR.message_types_by_name["ListenTest"] = _LISTENTEST
+DESCRIPTOR.message_types_by_name["Snapshot"] = _SNAPSHOT
+DESCRIPTOR.message_types_by_name["DocChange"] = _DOCCHANGE
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+TestSuite = _reflection.GeneratedProtocolMessageType(
+ "TestSuite",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_TESTSUITE,
+ __module__="test_v1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1.TestSuite)
+ ),
+)
+_sym_db.RegisterMessage(TestSuite)
+
+Test = _reflection.GeneratedProtocolMessageType(
+ "Test",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_TEST,
+ __module__="test_v1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1.Test)
+ ),
+)
+_sym_db.RegisterMessage(Test)
+
+GetTest = _reflection.GeneratedProtocolMessageType(
+ "GetTest",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_GETTEST,
+ __module__="test_v1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1.GetTest)
+ ),
+)
+_sym_db.RegisterMessage(GetTest)
+
+CreateTest = _reflection.GeneratedProtocolMessageType(
+ "CreateTest",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_CREATETEST,
+ __module__="test_v1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1.CreateTest)
+ ),
+)
+_sym_db.RegisterMessage(CreateTest)
+
+SetTest = _reflection.GeneratedProtocolMessageType(
+ "SetTest",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_SETTEST,
+ __module__="test_v1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1.SetTest)
+ ),
+)
+_sym_db.RegisterMessage(SetTest)
+
+UpdateTest = _reflection.GeneratedProtocolMessageType(
+ "UpdateTest",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_UPDATETEST,
+ __module__="test_v1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1.UpdateTest)
+ ),
+)
+_sym_db.RegisterMessage(UpdateTest)
+
+UpdatePathsTest = _reflection.GeneratedProtocolMessageType(
+ "UpdatePathsTest",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_UPDATEPATHSTEST,
+ __module__="test_v1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1.UpdatePathsTest)
+ ),
+)
+_sym_db.RegisterMessage(UpdatePathsTest)
+
+DeleteTest = _reflection.GeneratedProtocolMessageType(
+ "DeleteTest",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_DELETETEST,
+ __module__="test_v1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1.DeleteTest)
+ ),
+)
+_sym_db.RegisterMessage(DeleteTest)
+
+SetOption = _reflection.GeneratedProtocolMessageType(
+ "SetOption",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_SETOPTION,
+ __module__="test_v1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1.SetOption)
+ ),
+)
+_sym_db.RegisterMessage(SetOption)
+
+QueryTest = _reflection.GeneratedProtocolMessageType(
+ "QueryTest",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_QUERYTEST,
+ __module__="test_v1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1.QueryTest)
+ ),
+)
+_sym_db.RegisterMessage(QueryTest)
+
+Clause = _reflection.GeneratedProtocolMessageType(
+ "Clause",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_CLAUSE,
+ __module__="test_v1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1.Clause)
+ ),
+)
+_sym_db.RegisterMessage(Clause)
+
+Select = _reflection.GeneratedProtocolMessageType(
+ "Select",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_SELECT,
+ __module__="test_v1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1.Select)
+ ),
+)
+_sym_db.RegisterMessage(Select)
+
+Where = _reflection.GeneratedProtocolMessageType(
+ "Where",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_WHERE,
+ __module__="test_v1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1.Where)
+ ),
+)
+_sym_db.RegisterMessage(Where)
+
+OrderBy = _reflection.GeneratedProtocolMessageType(
+ "OrderBy",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_ORDERBY,
+ __module__="test_v1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1.OrderBy)
+ ),
+)
+_sym_db.RegisterMessage(OrderBy)
+
+Cursor = _reflection.GeneratedProtocolMessageType(
+ "Cursor",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_CURSOR,
+ __module__="test_v1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1.Cursor)
+ ),
+)
+_sym_db.RegisterMessage(Cursor)
+
+DocSnapshot = _reflection.GeneratedProtocolMessageType(
+ "DocSnapshot",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_DOCSNAPSHOT,
+ __module__="test_v1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1.DocSnapshot)
+ ),
+)
+_sym_db.RegisterMessage(DocSnapshot)
+
+FieldPath = _reflection.GeneratedProtocolMessageType(
+ "FieldPath",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_FIELDPATH,
+ __module__="test_v1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1.FieldPath)
+ ),
+)
+_sym_db.RegisterMessage(FieldPath)
+
+ListenTest = _reflection.GeneratedProtocolMessageType(
+ "ListenTest",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_LISTENTEST,
+ __module__="test_v1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1.ListenTest)
+ ),
+)
+_sym_db.RegisterMessage(ListenTest)
+
+Snapshot = _reflection.GeneratedProtocolMessageType(
+ "Snapshot",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_SNAPSHOT,
+ __module__="test_v1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1.Snapshot)
+ ),
+)
+_sym_db.RegisterMessage(Snapshot)
+
+DocChange = _reflection.GeneratedProtocolMessageType(
+ "DocChange",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_DOCCHANGE,
+ __module__="test_v1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1.DocChange)
+ ),
+)
+_sym_db.RegisterMessage(DocChange)
+
+
+DESCRIPTOR.has_options = True
+DESCRIPTOR._options = _descriptor._ParseOptions(
+ descriptor_pb2.FileOptions(),
+ _b(
+ '\n&com.google.cloud.firestore.conformance\252\002"Google.Cloud.Firestore.Tests.Proto\312\002(Google\\Cloud\\Firestore\\Tests\\Conformance'
+ ),
+)
+# @@protoc_insertion_point(module_scope)
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/proto/tests_pb2.py b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/tests_pb2.py
new file mode 100644
index 000000000..126887881
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/tests_pb2.py
@@ -0,0 +1,2208 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/cloud/firestore_v1/proto/tests.proto
+
+import sys
+
+_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.cloud.firestore_v1.proto import (
+ common_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_common__pb2,
+)
+from google.cloud.firestore_v1.proto import (
+ document_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_document__pb2,
+)
+from google.cloud.firestore_v1.proto import (
+ firestore_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2,
+)
+from google.cloud.firestore_v1.proto import (
+ query_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_query__pb2,
+)
+from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name="google/cloud/firestore_v1/proto/tests.proto",
+ package="google.cloud.firestore_v1.proto",
+ syntax="proto3",
+ serialized_pb=_b(
+        '\n+google/cloud/firestore_v1/proto/tests.proto\x12\x1fgoogle.cloud.firestore_v1.proto\x1a,google/cloud/firestore_v1/proto/common.proto\x1a.google/cloud/firestore_v1/proto/document.proto\x1a/google/cloud/firestore_v1/proto/firestore.proto\x1a+google/cloud/firestore_v1/proto/query.proto\x1a\x1fgoogle/protobuf/timestamp.proto"@\n\x08TestFile\x12\x34\n\x05tests\x18\x01 \x03(\x0b\x32%.google.cloud.firestore_v1.proto.Test"\xa9\x04\n\x04Test\x12\x13\n\x0b\x64\x65scription\x18\x01 \x01(\t\x12\x0f\n\x07\x63omment\x18\n \x01(\t\x12\x37\n\x03get\x18\x02 \x01(\x0b\x32(.google.cloud.firestore_v1.proto.GetTestH\x00\x12=\n\x06\x63reate\x18\x03 \x01(\x0b\x32+.google.cloud.firestore_v1.proto.CreateTestH\x00\x12\x37\n\x03set\x18\x04 \x01(\x0b\x32(.google.cloud.firestore_v1.proto.SetTestH\x00\x12=\n\x06update\x18\x05 \x01(\x0b\x32+.google.cloud.firestore_v1.proto.UpdateTestH\x00\x12H\n\x0cupdate_paths\x18\x06 \x01(\x0b\x32\x30.google.cloud.firestore_v1.proto.UpdatePathsTestH\x00\x12=\n\x06\x64\x65lete\x18\x07 \x01(\x0b\x32+.google.cloud.firestore_v1.proto.DeleteTestH\x00\x12;\n\x05query\x18\x08 \x01(\x0b\x32*.google.cloud.firestore_v1.proto.QueryTestH\x00\x12=\n\x06listen\x18\t \x01(\x0b\x32+.google.cloud.firestore_v1.proto.ListenTestH\x00\x42\x06\n\x04test"Y\n\x07GetTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12\x38\n\x07request\x18\x02 \x01(\x0b\x32\'.google.firestore.v1.GetDocumentRequest"|\n\nCreateTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12\x11\n\tjson_data\x18\x02 \x01(\t\x12\x33\n\x07request\x18\x03 \x01(\x0b\x32".google.firestore.v1.CommitRequest\x12\x10\n\x08is_error\x18\x04 \x01(\x08"\xb5\x01\n\x07SetTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12:\n\x06option\x18\x02 \x01(\x0b\x32*.google.cloud.firestore_v1.proto.SetOption\x12\x11\n\tjson_data\x18\x03 \x01(\t\x12\x33\n\x07request\x18\x04 \x01(\x0b\x32".google.firestore.v1.CommitRequest\x12\x10\n\x08is_error\x18\x05 \x01(\x08"\xb5\x01\n\nUpdateTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12\x37\n\x0cprecondition\x18\x02 \x01(\x0b\x32!.google.firestore.v1.Precondition\x12\x11\n\tjson_data\x18\x03 \x01(\t\x12\x33\n\x07request\x18\x04 \x01(\x0b\x32".google.firestore.v1.CommitRequest\x12\x10\n\x08is_error\x18\x05 \x01(\x08"\xfd\x01\n\x0fUpdatePathsTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12\x37\n\x0cprecondition\x18\x02 \x01(\x0b\x32!.google.firestore.v1.Precondition\x12?\n\x0b\x66ield_paths\x18\x03 \x03(\x0b\x32*.google.cloud.firestore_v1.proto.FieldPath\x12\x13\n\x0bjson_values\x18\x04 \x03(\t\x12\x33\n\x07request\x18\x05 \x01(\x0b\x32".google.firestore.v1.CommitRequest\x12\x10\n\x08is_error\x18\x06 \x01(\x08"\xa2\x01\n\nDeleteTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12\x37\n\x0cprecondition\x18\x02 \x01(\x0b\x32!.google.firestore.v1.Precondition\x12\x33\n\x07request\x18\x03 \x01(\x0b\x32".google.firestore.v1.CommitRequest\x12\x10\n\x08is_error\x18\x04 \x01(\x08"T\n\tSetOption\x12\x0b\n\x03\x61ll\x18\x01 \x01(\x08\x12:\n\x06\x66ields\x18\x02 \x03(\x0b\x32*.google.cloud.firestore_v1.proto.FieldPath"\x9f\x01\n\tQueryTest\x12\x11\n\tcoll_path\x18\x01 \x01(\t\x12\x38\n\x07\x63lauses\x18\x02 \x03(\x0b\x32\'.google.cloud.firestore_v1.proto.Clause\x12\x33\n\x05query\x18\x03 \x01(\x0b\x32$.google.firestore.v1.StructuredQuery\x12\x10\n\x08is_error\x18\x04 \x01(\x08"\xde\x03\n\x06\x43lause\x12\x39\n\x06select\x18\x01 \x01(\x0b\x32\'.google.cloud.firestore_v1.proto.SelectH\x00\x12\x37\n\x05where\x18\x02 \x01(\x0b\x32&.google.cloud.firestore_v1.proto.WhereH\x00\x12<\n\x08order_by\x18\x03 \x01(\x0b\x32(.google.cloud.firestore_v1.proto.OrderByH\x00\x12\x10\n\x06offset\x18\x04 \x01(\x05H\x00\x12\x0f\n\x05limit\x18\x05 \x01(\x05H\x00\x12;\n\x08start_at\x18\x06 \x01(\x0b\x32\'.google.cloud.firestore_v1.proto.CursorH\x00\x12>\n\x0bstart_after\x18\x07 \x01(\x0b\x32\'.google.cloud.firestore_v1.proto.CursorH\x00\x12\x39\n\x06\x65nd_at\x18\x08 \x01(\x0b\x32\'.google.cloud.firestore_v1.proto.CursorH\x00\x12=\n\nend_before\x18\t \x01(\x0b\x32\'.google.cloud.firestore_v1.proto.CursorH\x00\x42\x08\n\x06\x63lause"D\n\x06Select\x12:\n\x06\x66ields\x18\x01 \x03(\x0b\x32*.google.cloud.firestore_v1.proto.FieldPath"a\n\x05Where\x12\x38\n\x04path\x18\x01 \x01(\x0b\x32*.google.cloud.firestore_v1.proto.FieldPath\x12\n\n\x02op\x18\x02 \x01(\t\x12\x12\n\njson_value\x18\x03 \x01(\t"V\n\x07OrderBy\x12\x38\n\x04path\x18\x01 \x01(\x0b\x32*.google.cloud.firestore_v1.proto.FieldPath\x12\x11\n\tdirection\x18\x02 \x01(\t"a\n\x06\x43ursor\x12\x42\n\x0c\x64oc_snapshot\x18\x01 \x01(\x0b\x32,.google.cloud.firestore_v1.proto.DocSnapshot\x12\x13\n\x0bjson_values\x18\x02 \x03(\t".\n\x0b\x44ocSnapshot\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x11\n\tjson_data\x18\x02 \x01(\t"\x1a\n\tFieldPath\x12\r\n\x05\x66ield\x18\x01 \x03(\t"\x94\x01\n\nListenTest\x12\x36\n\tresponses\x18\x01 \x03(\x0b\x32#.google.firestore.v1.ListenResponse\x12<\n\tsnapshots\x18\x02 \x03(\x0b\x32).google.cloud.firestore_v1.proto.Snapshot\x12\x10\n\x08is_error\x18\x03 \x01(\x08"\xa3\x01\n\x08Snapshot\x12+\n\x04\x64ocs\x18\x01 \x03(\x0b\x32\x1d.google.firestore.v1.Document\x12;\n\x07\x63hanges\x18\x02 \x03(\x0b\x32*.google.cloud.firestore_v1.proto.DocChange\x12-\n\tread_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\xe0\x01\n\tDocChange\x12=\n\x04kind\x18\x01 \x01(\x0e\x32/.google.cloud.firestore_v1.proto.DocChange.Kind\x12*\n\x03\x64oc\x18\x02 \x01(\x0b\x32\x1d.google.firestore.v1.Document\x12\x11\n\told_index\x18\x03 \x01(\x05\x12\x11\n\tnew_index\x18\x04 \x01(\x05"B\n\x04Kind\x12\x14\n\x10KIND_UNSPECIFIED\x10\x00\x12\t\n\x05\x41\x44\x44\x45\x44\x10\x01\x12\x0b\n\x07REMOVED\x10\x02\x12\x0c\n\x08MODIFIED\x10\x03\x42\x8b\x01\n)com.google.cloud.conformance.firestore.v1B\x0eTestDefinition\xaa\x02"Google.Cloud.Firestore.Tests.Proto\xca\x02(Google\\Cloud\\Firestore\\Tests\\Conformanceb\x06proto3'
+ ),
+ dependencies=[
+ google_dot_cloud_dot_firestore__v1_dot_proto_dot_common__pb2.DESCRIPTOR,
+ google_dot_cloud_dot_firestore__v1_dot_proto_dot_document__pb2.DESCRIPTOR,
+ google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2.DESCRIPTOR,
+ google_dot_cloud_dot_firestore__v1_dot_proto_dot_query__pb2.DESCRIPTOR,
+ google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,
+ ],
+)
+
+
+_DOCCHANGE_KIND = _descriptor.EnumDescriptor(
+ name="Kind",
+ full_name="google.cloud.firestore_v1.proto.DocChange.Kind",
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name="KIND_UNSPECIFIED", index=0, number=0, options=None, type=None
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="ADDED", index=1, number=1, options=None, type=None
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="REMOVED", index=2, number=2, options=None, type=None
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="MODIFIED", index=3, number=3, options=None, type=None
+ ),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=3566,
+ serialized_end=3632,
+)
+_sym_db.RegisterEnumDescriptor(_DOCCHANGE_KIND)
+
+
+_TESTFILE = _descriptor.Descriptor(
+ name="TestFile",
+ full_name="google.cloud.firestore_v1.proto.TestFile",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="tests",
+ full_name="google.cloud.firestore_v1.proto.TestFile.tests",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ )
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=301,
+ serialized_end=365,
+)
+
+
+_TEST = _descriptor.Descriptor(
+ name="Test",
+ full_name="google.cloud.firestore_v1.proto.Test",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="description",
+ full_name="google.cloud.firestore_v1.proto.Test.description",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="comment",
+ full_name="google.cloud.firestore_v1.proto.Test.comment",
+ index=1,
+ number=10,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="get",
+ full_name="google.cloud.firestore_v1.proto.Test.get",
+ index=2,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="create",
+ full_name="google.cloud.firestore_v1.proto.Test.create",
+ index=3,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="set",
+ full_name="google.cloud.firestore_v1.proto.Test.set",
+ index=4,
+ number=4,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="update",
+ full_name="google.cloud.firestore_v1.proto.Test.update",
+ index=5,
+ number=5,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="update_paths",
+ full_name="google.cloud.firestore_v1.proto.Test.update_paths",
+ index=6,
+ number=6,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="delete",
+ full_name="google.cloud.firestore_v1.proto.Test.delete",
+ index=7,
+ number=7,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="query",
+ full_name="google.cloud.firestore_v1.proto.Test.query",
+ index=8,
+ number=8,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="listen",
+ full_name="google.cloud.firestore_v1.proto.Test.listen",
+ index=9,
+ number=9,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name="test",
+ full_name="google.cloud.firestore_v1.proto.Test.test",
+ index=0,
+ containing_type=None,
+ fields=[],
+ )
+ ],
+ serialized_start=368,
+ serialized_end=921,
+)
+
+
+_GETTEST = _descriptor.Descriptor(
+ name="GetTest",
+ full_name="google.cloud.firestore_v1.proto.GetTest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="doc_ref_path",
+ full_name="google.cloud.firestore_v1.proto.GetTest.doc_ref_path",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="request",
+ full_name="google.cloud.firestore_v1.proto.GetTest.request",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=923,
+ serialized_end=1012,
+)
+
+
+_CREATETEST = _descriptor.Descriptor(
+ name="CreateTest",
+ full_name="google.cloud.firestore_v1.proto.CreateTest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="doc_ref_path",
+ full_name="google.cloud.firestore_v1.proto.CreateTest.doc_ref_path",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="json_data",
+ full_name="google.cloud.firestore_v1.proto.CreateTest.json_data",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="request",
+ full_name="google.cloud.firestore_v1.proto.CreateTest.request",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="is_error",
+ full_name="google.cloud.firestore_v1.proto.CreateTest.is_error",
+ index=3,
+ number=4,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=1014,
+ serialized_end=1138,
+)
+
+
+_SETTEST = _descriptor.Descriptor(
+ name="SetTest",
+ full_name="google.cloud.firestore_v1.proto.SetTest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="doc_ref_path",
+ full_name="google.cloud.firestore_v1.proto.SetTest.doc_ref_path",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="option",
+ full_name="google.cloud.firestore_v1.proto.SetTest.option",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="json_data",
+ full_name="google.cloud.firestore_v1.proto.SetTest.json_data",
+ index=2,
+ number=3,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="request",
+ full_name="google.cloud.firestore_v1.proto.SetTest.request",
+ index=3,
+ number=4,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="is_error",
+ full_name="google.cloud.firestore_v1.proto.SetTest.is_error",
+ index=4,
+ number=5,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=1141,
+ serialized_end=1322,
+)
+
+
+_UPDATETEST = _descriptor.Descriptor(
+ name="UpdateTest",
+ full_name="google.cloud.firestore_v1.proto.UpdateTest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="doc_ref_path",
+ full_name="google.cloud.firestore_v1.proto.UpdateTest.doc_ref_path",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="precondition",
+ full_name="google.cloud.firestore_v1.proto.UpdateTest.precondition",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="json_data",
+ full_name="google.cloud.firestore_v1.proto.UpdateTest.json_data",
+ index=2,
+ number=3,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="request",
+ full_name="google.cloud.firestore_v1.proto.UpdateTest.request",
+ index=3,
+ number=4,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="is_error",
+ full_name="google.cloud.firestore_v1.proto.UpdateTest.is_error",
+ index=4,
+ number=5,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=1325,
+ serialized_end=1506,
+)
+
+
+_UPDATEPATHSTEST = _descriptor.Descriptor(
+ name="UpdatePathsTest",
+ full_name="google.cloud.firestore_v1.proto.UpdatePathsTest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="doc_ref_path",
+ full_name="google.cloud.firestore_v1.proto.UpdatePathsTest.doc_ref_path",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="precondition",
+ full_name="google.cloud.firestore_v1.proto.UpdatePathsTest.precondition",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="field_paths",
+ full_name="google.cloud.firestore_v1.proto.UpdatePathsTest.field_paths",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="json_values",
+ full_name="google.cloud.firestore_v1.proto.UpdatePathsTest.json_values",
+ index=3,
+ number=4,
+ type=9,
+ cpp_type=9,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="request",
+ full_name="google.cloud.firestore_v1.proto.UpdatePathsTest.request",
+ index=4,
+ number=5,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="is_error",
+ full_name="google.cloud.firestore_v1.proto.UpdatePathsTest.is_error",
+ index=5,
+ number=6,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=1509,
+ serialized_end=1762,
+)
+
+
+_DELETETEST = _descriptor.Descriptor(
+ name="DeleteTest",
+ full_name="google.cloud.firestore_v1.proto.DeleteTest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="doc_ref_path",
+ full_name="google.cloud.firestore_v1.proto.DeleteTest.doc_ref_path",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="precondition",
+ full_name="google.cloud.firestore_v1.proto.DeleteTest.precondition",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="request",
+ full_name="google.cloud.firestore_v1.proto.DeleteTest.request",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="is_error",
+ full_name="google.cloud.firestore_v1.proto.DeleteTest.is_error",
+ index=3,
+ number=4,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=1765,
+ serialized_end=1927,
+)
+
+
+_SETOPTION = _descriptor.Descriptor(
+ name="SetOption",
+ full_name="google.cloud.firestore_v1.proto.SetOption",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="all",
+ full_name="google.cloud.firestore_v1.proto.SetOption.all",
+ index=0,
+ number=1,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="fields",
+ full_name="google.cloud.firestore_v1.proto.SetOption.fields",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=1929,
+ serialized_end=2013,
+)
+
+
+_QUERYTEST = _descriptor.Descriptor(
+ name="QueryTest",
+ full_name="google.cloud.firestore_v1.proto.QueryTest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="coll_path",
+ full_name="google.cloud.firestore_v1.proto.QueryTest.coll_path",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="clauses",
+ full_name="google.cloud.firestore_v1.proto.QueryTest.clauses",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="query",
+ full_name="google.cloud.firestore_v1.proto.QueryTest.query",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="is_error",
+ full_name="google.cloud.firestore_v1.proto.QueryTest.is_error",
+ index=3,
+ number=4,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=2016,
+ serialized_end=2175,
+)
+
+
+_CLAUSE = _descriptor.Descriptor(
+ name="Clause",
+ full_name="google.cloud.firestore_v1.proto.Clause",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="select",
+ full_name="google.cloud.firestore_v1.proto.Clause.select",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="where",
+ full_name="google.cloud.firestore_v1.proto.Clause.where",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="order_by",
+ full_name="google.cloud.firestore_v1.proto.Clause.order_by",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="offset",
+ full_name="google.cloud.firestore_v1.proto.Clause.offset",
+ index=3,
+ number=4,
+ type=5,
+ cpp_type=1,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="limit",
+ full_name="google.cloud.firestore_v1.proto.Clause.limit",
+ index=4,
+ number=5,
+ type=5,
+ cpp_type=1,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="start_at",
+ full_name="google.cloud.firestore_v1.proto.Clause.start_at",
+ index=5,
+ number=6,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="start_after",
+ full_name="google.cloud.firestore_v1.proto.Clause.start_after",
+ index=6,
+ number=7,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="end_at",
+ full_name="google.cloud.firestore_v1.proto.Clause.end_at",
+ index=7,
+ number=8,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="end_before",
+ full_name="google.cloud.firestore_v1.proto.Clause.end_before",
+ index=8,
+ number=9,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name="clause",
+ full_name="google.cloud.firestore_v1.proto.Clause.clause",
+ index=0,
+ containing_type=None,
+ fields=[],
+ )
+ ],
+ serialized_start=2178,
+ serialized_end=2656,
+)
+
+
+_SELECT = _descriptor.Descriptor(
+ name="Select",
+ full_name="google.cloud.firestore_v1.proto.Select",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="fields",
+ full_name="google.cloud.firestore_v1.proto.Select.fields",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ )
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=2658,
+ serialized_end=2726,
+)
+
+
+_WHERE = _descriptor.Descriptor(
+ name="Where",
+ full_name="google.cloud.firestore_v1.proto.Where",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="path",
+ full_name="google.cloud.firestore_v1.proto.Where.path",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="op",
+ full_name="google.cloud.firestore_v1.proto.Where.op",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="json_value",
+ full_name="google.cloud.firestore_v1.proto.Where.json_value",
+ index=2,
+ number=3,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=2728,
+ serialized_end=2825,
+)
+
+
+_ORDERBY = _descriptor.Descriptor(
+ name="OrderBy",
+ full_name="google.cloud.firestore_v1.proto.OrderBy",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="path",
+ full_name="google.cloud.firestore_v1.proto.OrderBy.path",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="direction",
+ full_name="google.cloud.firestore_v1.proto.OrderBy.direction",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=2827,
+ serialized_end=2913,
+)
+
+
+_CURSOR = _descriptor.Descriptor(
+ name="Cursor",
+ full_name="google.cloud.firestore_v1.proto.Cursor",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="doc_snapshot",
+ full_name="google.cloud.firestore_v1.proto.Cursor.doc_snapshot",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="json_values",
+ full_name="google.cloud.firestore_v1.proto.Cursor.json_values",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=2915,
+ serialized_end=3012,
+)
+
+
+_DOCSNAPSHOT = _descriptor.Descriptor(
+ name="DocSnapshot",
+ full_name="google.cloud.firestore_v1.proto.DocSnapshot",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="path",
+ full_name="google.cloud.firestore_v1.proto.DocSnapshot.path",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="json_data",
+ full_name="google.cloud.firestore_v1.proto.DocSnapshot.json_data",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=3014,
+ serialized_end=3060,
+)
+
+
+_FIELDPATH = _descriptor.Descriptor(
+ name="FieldPath",
+ full_name="google.cloud.firestore_v1.proto.FieldPath",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="field",
+ full_name="google.cloud.firestore_v1.proto.FieldPath.field",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ )
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=3062,
+ serialized_end=3088,
+)
+
+
+_LISTENTEST = _descriptor.Descriptor(
+ name="ListenTest",
+ full_name="google.cloud.firestore_v1.proto.ListenTest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="responses",
+ full_name="google.cloud.firestore_v1.proto.ListenTest.responses",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="snapshots",
+ full_name="google.cloud.firestore_v1.proto.ListenTest.snapshots",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="is_error",
+ full_name="google.cloud.firestore_v1.proto.ListenTest.is_error",
+ index=2,
+ number=3,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=3091,
+ serialized_end=3239,
+)
+
+
+_SNAPSHOT = _descriptor.Descriptor(
+ name="Snapshot",
+ full_name="google.cloud.firestore_v1.proto.Snapshot",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="docs",
+ full_name="google.cloud.firestore_v1.proto.Snapshot.docs",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="changes",
+ full_name="google.cloud.firestore_v1.proto.Snapshot.changes",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="read_time",
+ full_name="google.cloud.firestore_v1.proto.Snapshot.read_time",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=3242,
+ serialized_end=3405,
+)
+
+
+_DOCCHANGE = _descriptor.Descriptor(
+ name="DocChange",
+ full_name="google.cloud.firestore_v1.proto.DocChange",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="kind",
+ full_name="google.cloud.firestore_v1.proto.DocChange.kind",
+ index=0,
+ number=1,
+ type=14,
+ cpp_type=8,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="doc",
+ full_name="google.cloud.firestore_v1.proto.DocChange.doc",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="old_index",
+ full_name="google.cloud.firestore_v1.proto.DocChange.old_index",
+ index=2,
+ number=3,
+ type=5,
+ cpp_type=1,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="new_index",
+ full_name="google.cloud.firestore_v1.proto.DocChange.new_index",
+ index=3,
+ number=4,
+ type=5,
+ cpp_type=1,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[_DOCCHANGE_KIND],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=3408,
+ serialized_end=3632,
+)
+
+_TESTFILE.fields_by_name["tests"].message_type = _TEST
+_TEST.fields_by_name["get"].message_type = _GETTEST
+_TEST.fields_by_name["create"].message_type = _CREATETEST
+_TEST.fields_by_name["set"].message_type = _SETTEST
+_TEST.fields_by_name["update"].message_type = _UPDATETEST
+_TEST.fields_by_name["update_paths"].message_type = _UPDATEPATHSTEST
+_TEST.fields_by_name["delete"].message_type = _DELETETEST
+_TEST.fields_by_name["query"].message_type = _QUERYTEST
+_TEST.fields_by_name["listen"].message_type = _LISTENTEST
+_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["get"])
+_TEST.fields_by_name["get"].containing_oneof = _TEST.oneofs_by_name["test"]
+_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["create"])
+_TEST.fields_by_name["create"].containing_oneof = _TEST.oneofs_by_name["test"]
+_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["set"])
+_TEST.fields_by_name["set"].containing_oneof = _TEST.oneofs_by_name["test"]
+_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["update"])
+_TEST.fields_by_name["update"].containing_oneof = _TEST.oneofs_by_name["test"]
+_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["update_paths"])
+_TEST.fields_by_name["update_paths"].containing_oneof = _TEST.oneofs_by_name["test"]
+_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["delete"])
+_TEST.fields_by_name["delete"].containing_oneof = _TEST.oneofs_by_name["test"]
+_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["query"])
+_TEST.fields_by_name["query"].containing_oneof = _TEST.oneofs_by_name["test"]
+_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["listen"])
+_TEST.fields_by_name["listen"].containing_oneof = _TEST.oneofs_by_name["test"]
+_GETTEST.fields_by_name[
+ "request"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._GETDOCUMENTREQUEST
+)
+_CREATETEST.fields_by_name[
+ "request"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._COMMITREQUEST
+)
+_SETTEST.fields_by_name["option"].message_type = _SETOPTION
+_SETTEST.fields_by_name[
+ "request"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._COMMITREQUEST
+)
+_UPDATETEST.fields_by_name[
+ "precondition"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1_dot_proto_dot_common__pb2._PRECONDITION
+)
+_UPDATETEST.fields_by_name[
+ "request"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._COMMITREQUEST
+)
+_UPDATEPATHSTEST.fields_by_name[
+ "precondition"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1_dot_proto_dot_common__pb2._PRECONDITION
+)
+_UPDATEPATHSTEST.fields_by_name["field_paths"].message_type = _FIELDPATH
+_UPDATEPATHSTEST.fields_by_name[
+ "request"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._COMMITREQUEST
+)
+_DELETETEST.fields_by_name[
+ "precondition"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1_dot_proto_dot_common__pb2._PRECONDITION
+)
+_DELETETEST.fields_by_name[
+ "request"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._COMMITREQUEST
+)
+_SETOPTION.fields_by_name["fields"].message_type = _FIELDPATH
+_QUERYTEST.fields_by_name["clauses"].message_type = _CLAUSE
+_QUERYTEST.fields_by_name[
+ "query"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1_dot_proto_dot_query__pb2._STRUCTUREDQUERY
+)
+_CLAUSE.fields_by_name["select"].message_type = _SELECT
+_CLAUSE.fields_by_name["where"].message_type = _WHERE
+_CLAUSE.fields_by_name["order_by"].message_type = _ORDERBY
+_CLAUSE.fields_by_name["start_at"].message_type = _CURSOR
+_CLAUSE.fields_by_name["start_after"].message_type = _CURSOR
+_CLAUSE.fields_by_name["end_at"].message_type = _CURSOR
+_CLAUSE.fields_by_name["end_before"].message_type = _CURSOR
+_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["select"])
+_CLAUSE.fields_by_name["select"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
+_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["where"])
+_CLAUSE.fields_by_name["where"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
+_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["order_by"])
+_CLAUSE.fields_by_name["order_by"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
+_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["offset"])
+_CLAUSE.fields_by_name["offset"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
+_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["limit"])
+_CLAUSE.fields_by_name["limit"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
+_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["start_at"])
+_CLAUSE.fields_by_name["start_at"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
+_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["start_after"])
+_CLAUSE.fields_by_name["start_after"].containing_oneof = _CLAUSE.oneofs_by_name[
+ "clause"
+]
+_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["end_at"])
+_CLAUSE.fields_by_name["end_at"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
+_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["end_before"])
+_CLAUSE.fields_by_name["end_before"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
+_SELECT.fields_by_name["fields"].message_type = _FIELDPATH
+_WHERE.fields_by_name["path"].message_type = _FIELDPATH
+_ORDERBY.fields_by_name["path"].message_type = _FIELDPATH
+_CURSOR.fields_by_name["doc_snapshot"].message_type = _DOCSNAPSHOT
+_LISTENTEST.fields_by_name[
+ "responses"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._LISTENRESPONSE
+)
+_LISTENTEST.fields_by_name["snapshots"].message_type = _SNAPSHOT
+_SNAPSHOT.fields_by_name[
+ "docs"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1_dot_proto_dot_document__pb2._DOCUMENT
+)
+_SNAPSHOT.fields_by_name["changes"].message_type = _DOCCHANGE
+_SNAPSHOT.fields_by_name[
+ "read_time"
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_DOCCHANGE.fields_by_name["kind"].enum_type = _DOCCHANGE_KIND
+_DOCCHANGE.fields_by_name[
+ "doc"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1_dot_proto_dot_document__pb2._DOCUMENT
+)
+_DOCCHANGE_KIND.containing_type = _DOCCHANGE
+DESCRIPTOR.message_types_by_name["TestFile"] = _TESTFILE
+DESCRIPTOR.message_types_by_name["Test"] = _TEST
+DESCRIPTOR.message_types_by_name["GetTest"] = _GETTEST
+DESCRIPTOR.message_types_by_name["CreateTest"] = _CREATETEST
+DESCRIPTOR.message_types_by_name["SetTest"] = _SETTEST
+DESCRIPTOR.message_types_by_name["UpdateTest"] = _UPDATETEST
+DESCRIPTOR.message_types_by_name["UpdatePathsTest"] = _UPDATEPATHSTEST
+DESCRIPTOR.message_types_by_name["DeleteTest"] = _DELETETEST
+DESCRIPTOR.message_types_by_name["SetOption"] = _SETOPTION
+DESCRIPTOR.message_types_by_name["QueryTest"] = _QUERYTEST
+DESCRIPTOR.message_types_by_name["Clause"] = _CLAUSE
+DESCRIPTOR.message_types_by_name["Select"] = _SELECT
+DESCRIPTOR.message_types_by_name["Where"] = _WHERE
+DESCRIPTOR.message_types_by_name["OrderBy"] = _ORDERBY
+DESCRIPTOR.message_types_by_name["Cursor"] = _CURSOR
+DESCRIPTOR.message_types_by_name["DocSnapshot"] = _DOCSNAPSHOT
+DESCRIPTOR.message_types_by_name["FieldPath"] = _FIELDPATH
+DESCRIPTOR.message_types_by_name["ListenTest"] = _LISTENTEST
+DESCRIPTOR.message_types_by_name["Snapshot"] = _SNAPSHOT
+DESCRIPTOR.message_types_by_name["DocChange"] = _DOCCHANGE
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
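+
+# The wiring above makes `clause` a proto3 oneof: a Clause message carries at
+# most one of its nine clause fields at a time. A minimal sketch, assuming
+# this module is importable as `tests_pb2`:
+#
+#   clause = tests_pb2.Clause(limit=10)
+#   clause.offset = 5                              # replaces `limit` in the oneof
+#   assert clause.WhichOneof("clause") == "offset"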
+
+TestFile = _reflection.GeneratedProtocolMessageType(
+ "TestFile",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_TESTFILE,
+ __module__="google.cloud.firestore_v1.proto.tests_pb2"
+ # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.TestFile)
+ ),
+)
+_sym_db.RegisterMessage(TestFile)
+
+Test = _reflection.GeneratedProtocolMessageType(
+ "Test",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_TEST,
+ __module__="google.cloud.firestore_v1.proto.tests_pb2"
+ # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.Test)
+ ),
+)
+_sym_db.RegisterMessage(Test)
+
+GetTest = _reflection.GeneratedProtocolMessageType(
+ "GetTest",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_GETTEST,
+ __module__="google.cloud.firestore_v1.proto.tests_pb2"
+ # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.GetTest)
+ ),
+)
+_sym_db.RegisterMessage(GetTest)
+
+CreateTest = _reflection.GeneratedProtocolMessageType(
+ "CreateTest",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_CREATETEST,
+ __module__="google.cloud.firestore_v1.proto.tests_pb2"
+ # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.CreateTest)
+ ),
+)
+_sym_db.RegisterMessage(CreateTest)
+
+SetTest = _reflection.GeneratedProtocolMessageType(
+ "SetTest",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_SETTEST,
+ __module__="google.cloud.firestore_v1.proto.tests_pb2"
+ # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.SetTest)
+ ),
+)
+_sym_db.RegisterMessage(SetTest)
+
+UpdateTest = _reflection.GeneratedProtocolMessageType(
+ "UpdateTest",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_UPDATETEST,
+ __module__="google.cloud.firestore_v1.proto.tests_pb2"
+ # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.UpdateTest)
+ ),
+)
+_sym_db.RegisterMessage(UpdateTest)
+
+UpdatePathsTest = _reflection.GeneratedProtocolMessageType(
+ "UpdatePathsTest",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_UPDATEPATHSTEST,
+ __module__="google.cloud.firestore_v1.proto.tests_pb2"
+ # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.UpdatePathsTest)
+ ),
+)
+_sym_db.RegisterMessage(UpdatePathsTest)
+
+DeleteTest = _reflection.GeneratedProtocolMessageType(
+ "DeleteTest",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_DELETETEST,
+ __module__="google.cloud.firestore_v1.proto.tests_pb2"
+ # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.DeleteTest)
+ ),
+)
+_sym_db.RegisterMessage(DeleteTest)
+
+SetOption = _reflection.GeneratedProtocolMessageType(
+ "SetOption",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_SETOPTION,
+ __module__="google.cloud.firestore_v1.proto.tests_pb2"
+ # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.SetOption)
+ ),
+)
+_sym_db.RegisterMessage(SetOption)
+
+QueryTest = _reflection.GeneratedProtocolMessageType(
+ "QueryTest",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_QUERYTEST,
+ __module__="google.cloud.firestore_v1.proto.tests_pb2"
+ # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.QueryTest)
+ ),
+)
+_sym_db.RegisterMessage(QueryTest)
+
+Clause = _reflection.GeneratedProtocolMessageType(
+ "Clause",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_CLAUSE,
+ __module__="google.cloud.firestore_v1.proto.tests_pb2"
+ # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.Clause)
+ ),
+)
+_sym_db.RegisterMessage(Clause)
+
+Select = _reflection.GeneratedProtocolMessageType(
+ "Select",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_SELECT,
+ __module__="google.cloud.firestore_v1.proto.tests_pb2"
+ # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.Select)
+ ),
+)
+_sym_db.RegisterMessage(Select)
+
+Where = _reflection.GeneratedProtocolMessageType(
+ "Where",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_WHERE,
+ __module__="google.cloud.firestore_v1.proto.tests_pb2"
+ # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.Where)
+ ),
+)
+_sym_db.RegisterMessage(Where)
+
+OrderBy = _reflection.GeneratedProtocolMessageType(
+ "OrderBy",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_ORDERBY,
+ __module__="google.cloud.firestore_v1.proto.tests_pb2"
+ # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.OrderBy)
+ ),
+)
+_sym_db.RegisterMessage(OrderBy)
+
+Cursor = _reflection.GeneratedProtocolMessageType(
+ "Cursor",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_CURSOR,
+ __module__="google.cloud.firestore_v1.proto.tests_pb2"
+ # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.Cursor)
+ ),
+)
+_sym_db.RegisterMessage(Cursor)
+
+DocSnapshot = _reflection.GeneratedProtocolMessageType(
+ "DocSnapshot",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_DOCSNAPSHOT,
+ __module__="google.cloud.firestore_v1.proto.tests_pb2"
+ # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.DocSnapshot)
+ ),
+)
+_sym_db.RegisterMessage(DocSnapshot)
+
+FieldPath = _reflection.GeneratedProtocolMessageType(
+ "FieldPath",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_FIELDPATH,
+ __module__="google.cloud.firestore_v1.proto.tests_pb2"
+ # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.FieldPath)
+ ),
+)
+_sym_db.RegisterMessage(FieldPath)
+
+ListenTest = _reflection.GeneratedProtocolMessageType(
+ "ListenTest",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_LISTENTEST,
+ __module__="google.cloud.firestore_v1.proto.tests_pb2"
+ # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.ListenTest)
+ ),
+)
+_sym_db.RegisterMessage(ListenTest)
+
+Snapshot = _reflection.GeneratedProtocolMessageType(
+ "Snapshot",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_SNAPSHOT,
+ __module__="google.cloud.firestore_v1.proto.tests_pb2"
+ # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.Snapshot)
+ ),
+)
+_sym_db.RegisterMessage(Snapshot)
+
+DocChange = _reflection.GeneratedProtocolMessageType(
+ "DocChange",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_DOCCHANGE,
+ __module__="google.cloud.firestore_v1.proto.tests_pb2"
+ # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.DocChange)
+ ),
+)
+_sym_db.RegisterMessage(DocChange)
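+
+# Once registered, the generated classes round-trip like any protobuf message.
+# A minimal sketch (hypothetical values):
+#
+#   tf = TestFile()
+#   tf.tests.add().get.SetInParent()   # select the `get` branch of the oneof
+#   assert TestFile.FromString(tf.SerializeToString()) == tf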
+
+
+DESCRIPTOR.has_options = True
+DESCRIPTOR._options = _descriptor._ParseOptions(
+ descriptor_pb2.FileOptions(),
+ _b(
+ '\n)com.google.cloud.conformance.firestore.v1B\016TestDefinition\252\002"Google.Cloud.Firestore.Tests.Proto\312\002(Google\\Cloud\\Firestore\\Tests\\Conformance'
+ ),
+)
+# @@protoc_insertion_point(module_scope)
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/proto/write.proto b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/write.proto
new file mode 100644
index 000000000..a6befb0e6
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/write.proto
@@ -0,0 +1,258 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.firestore.v1;
+
+import "google/firestore/v1/common.proto";
+import "google/firestore/v1/document.proto";
+import "google/protobuf/timestamp.proto";
+import "google/api/annotations.proto";
+
+option csharp_namespace = "Google.Cloud.Firestore.V1";
+option go_package = "google.golang.org/genproto/googleapis/firestore/v1;firestore";
+option java_multiple_files = true;
+option java_outer_classname = "WriteProto";
+option java_package = "com.google.firestore.v1";
+option objc_class_prefix = "GCFS";
+option php_namespace = "Google\\Cloud\\Firestore\\V1";
+option ruby_package = "Google::Cloud::Firestore::V1";
+
+// A write on a document.
+message Write {
+ // The operation to execute.
+ oneof operation {
+ // A document to write.
+ Document update = 1;
+
+ // A document name to delete. In the format:
+ // `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
+ string delete = 2;
+
+ // Applies a transformation to a document.
+ DocumentTransform transform = 6;
+ }
+
+ // The fields to update in this write.
+ //
+ // This field can be set only when the operation is `update`.
+ // If the mask is not set for an `update` and the document exists, any
+ // existing data will be overwritten.
+ // If the mask is set and the document on the server has fields not covered by
+ // the mask, they are left unchanged.
+ // Fields referenced in the mask, but not present in the input document, are
+ // deleted from the document on the server.
+ // The field paths in this mask must not contain a reserved field name.
+ DocumentMask update_mask = 3;
+
+ // The transforms to perform after update.
+ //
+ // This field can be set only when the operation is `update`. If present, this
+ // write is equivalent to performing `update` and `transform` to the same
+ // document atomically and in order.
+ repeated DocumentTransform.FieldTransform update_transforms = 7;
+
+ // An optional precondition on the document.
+ //
+ // The write will fail if this is set and not met by the target document.
+ Precondition current_document = 4;
+}
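+
+// A sketch in proto text format (illustrative resource names): a masked
+// update that touches only the `likes` field and requires the target
+// document to already exist:
+//
+//   update {
+//     name: "projects/p/databases/(default)/documents/posts/p1"
+//     fields { key: "likes" value { integer_value: 42 } }
+//   }
+//   update_mask { field_paths: "likes" }
+//   current_document { exists: true }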
+
+// A transformation of a document.
+message DocumentTransform {
+ // A transformation of a field of the document.
+ message FieldTransform {
+ // A value that is calculated by the server.
+ enum ServerValue {
+ // Unspecified. This value must not be used.
+ SERVER_VALUE_UNSPECIFIED = 0;
+
+ // The time at which the server processed the request, with millisecond
+ // precision.
+ REQUEST_TIME = 1;
+ }
+
+ // The path of the field. See [Document.fields][google.firestore.v1.Document.fields] for the field path syntax
+ // reference.
+ string field_path = 1;
+
+ // The transformation to apply on the field.
+ oneof transform_type {
+ // Sets the field to the given server value.
+ ServerValue set_to_server_value = 2;
+
+ // Adds the given value to the field's current value.
+ //
+ // This must be an integer or a double value.
+ // If the field is not an integer or double, or if the field does not yet
+ // exist, the transformation will set the field to the given value.
+ // If either of the given value or the current field value are doubles,
+ // both values will be interpreted as doubles. Double arithmetic and
+ // representation of double values follow IEEE 754 semantics.
+ // If there is positive/negative integer overflow, the field is resolved
+ // to the largest magnitude positive/negative integer.
+ Value increment = 3;
+
+ // Sets the field to the maximum of its current value and the given value.
+ //
+ // This must be an integer or a double value.
+ // If the field is not an integer or double, or if the field does not yet
+ // exist, the transformation will set the field to the given value.
+ // If a maximum operation is applied where the field and the input value
+ // are of mixed types (that is - one is an integer and one is a double)
+ // the field takes on the type of the larger operand. If the operands are
+ // equivalent (e.g. 3 and 3.0), the field does not change.
+ // 0, 0.0, and -0.0 are all zero. The maximum of a zero stored value and
+ // zero input value is always the stored value.
+ // The maximum of any numeric value x and NaN is NaN.
+ Value maximum = 4;
+
+ // Sets the field to the minimum of its current value and the given value.
+ //
+ // This must be an integer or a double value.
+ // If the field is not an integer or double, or if the field does not yet
+ // exist, the transformation will set the field to the input value.
+ // If a minimum operation is applied where the field and the input value
+ // are of mixed types (that is - one is an integer and one is a double)
+ // the field takes on the type of the smaller operand. If the operands are
+ // equivalent (e.g. 3 and 3.0), the field does not change.
+ // 0, 0.0, and -0.0 are all zero. The minimum of a zero stored value and
+ // zero input value is always the stored value.
+ // The minimum of any numeric value x and NaN is NaN.
+ Value minimum = 5;
+
+ // Append the given elements in order if they are not already present in
+ // the current field value.
+ // If the field is not an array, or if the field does not yet exist, it is
+ // first set to the empty array.
+ //
+ // Equivalent numbers of different types (e.g. 3L and 3.0) are
+ // considered equal when checking if a value is missing.
+ // NaN is equal to NaN, and Null is equal to Null.
+ // If the input contains multiple equivalent values, only the first will
+ // be considered.
+ //
+ // The corresponding transform_result will be the null value.
+ ArrayValue append_missing_elements = 6;
+
+ // Remove all of the given elements from the array in the field.
+ // If the field is not an array, or if the field does not yet exist, it is
+ // set to the empty array.
+ //
+ // Equivalent numbers of different types (e.g. 3L and 3.0) are
+ // considered equal when deciding whether an element should be removed.
+ // NaN is equal to NaN, and Null is equal to Null.
+ // This will remove all equivalent values if there are duplicates.
+ //
+ // The corresponding transform_result will be the null value.
+ ArrayValue remove_all_from_array = 7;
+ }
+ }
+
+ // The name of the document to transform.
+ string document = 1;
+
+ // The list of transformations to apply to the fields of the document, in
+ // order.
+ // This must not be empty.
+ repeated FieldTransform field_transforms = 2;
+}
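+
+// A sketch in proto text format (illustrative names): atomically increment a
+// counter and stamp a field with the server's request time:
+//
+//   document: "projects/p/databases/(default)/documents/posts/p1"
+//   field_transforms { field_path: "views" increment { integer_value: 1 } }
+//   field_transforms { field_path: "updated_at" set_to_server_value: REQUEST_TIME }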
+
+// The result of applying a write.
+message WriteResult {
+ // The last update time of the document after applying the write. Not set
+ // after a `delete`.
+ //
+ // If the write did not actually change the document, this will be the
+ // previous update_time.
+ google.protobuf.Timestamp update_time = 1;
+
+ // The results of applying each [DocumentTransform.FieldTransform][google.firestore.v1.DocumentTransform.FieldTransform], in the
+ // same order.
+ repeated Value transform_results = 2;
+}
+
+// A [Document][google.firestore.v1.Document] has changed.
+//
+// May be the result of multiple [writes][google.firestore.v1.Write], including deletes, that
+// ultimately resulted in a new value for the [Document][google.firestore.v1.Document].
+//
+// Multiple [DocumentChange][google.firestore.v1.DocumentChange] messages may be returned for the same logical
+// change, if multiple targets are affected.
+message DocumentChange {
+ // The new state of the [Document][google.firestore.v1.Document].
+ //
+ // If `mask` is set, contains only fields that were updated or added.
+ Document document = 1;
+
+ // A set of target IDs of targets that match this document.
+ repeated int32 target_ids = 5;
+
+ // A set of target IDs for targets that no longer match this document.
+ repeated int32 removed_target_ids = 6;
+}
+
+// A [Document][google.firestore.v1.Document] has been deleted.
+//
+// May be the result of multiple [writes][google.firestore.v1.Write], including updates, the
+// last of which deleted the [Document][google.firestore.v1.Document].
+//
+// Multiple [DocumentDelete][google.firestore.v1.DocumentDelete] messages may be returned for the same logical
+// delete, if multiple targets are affected.
+message DocumentDelete {
+ // The resource name of the [Document][google.firestore.v1.Document] that was deleted.
+ string document = 1;
+
+ // A set of target IDs for targets that previously matched this entity.
+ repeated int32 removed_target_ids = 6;
+
+ // The read timestamp at which the delete was observed.
+ //
+ // Greater or equal to the `commit_time` of the delete.
+ google.protobuf.Timestamp read_time = 4;
+}
+
+// A [Document][google.firestore.v1.Document] has been removed from the view of the targets.
+//
+// Sent if the document is no longer relevant to a target and is out of view.
+// Can be sent instead of a DocumentDelete or a DocumentChange if the server
+// cannot send the new value of the document.
+//
+// Multiple [DocumentRemove][google.firestore.v1.DocumentRemove] messages may be returned for the same logical
+// write or delete, if multiple targets are affected.
+message DocumentRemove {
+ // The resource name of the [Document][google.firestore.v1.Document] that has gone out of view.
+ string document = 1;
+
+ // A set of target IDs for targets that previously matched this document.
+ repeated int32 removed_target_ids = 2;
+
+ // The read timestamp at which the remove was observed.
+ //
+ // Greater or equal to the `commit_time` of the change/delete/remove.
+ google.protobuf.Timestamp read_time = 4;
+}
+
+// A digest of all the documents that match a given target.
+message ExistenceFilter {
+ // The target ID to which this filter applies.
+ int32 target_id = 1;
+
+ // The total count of documents that match [target_id][google.firestore.v1.ExistenceFilter.target_id].
+ //
+ // If different from the count of documents in the client that match, the
+ // client must manually determine which documents no longer match the target.
+ int32 count = 2;
+}
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/proto/write_pb2.py b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/write_pb2.py
new file mode 100644
index 000000000..bcfd76ca1
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1/proto/write_pb2.py
@@ -0,0 +1,1193 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/cloud/firestore_v1/proto/write.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.cloud.firestore_v1.proto import (
+ common_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_common__pb2,
+)
+from google.cloud.firestore_v1.proto import (
+ document_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_document__pb2,
+)
+from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
+from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name="google/cloud/firestore_v1/proto/write.proto",
+ package="google.firestore.v1",
+ syntax="proto3",
+ serialized_options=b"\n\027com.google.firestore.v1B\nWriteProtoP\001Z=google.golang.org/genproto/googleapis/firestore/v1;firestore\242\002\004GCFS\252\002\031Google.Cloud.Firestore.V1\312\002\031Google\\Cloud\\Firestore\\V1\352\002\034Google::Cloud::Firestore::V1",
+ ">=": _operator_enum.GREATER_THAN_OR_EQUAL,
+ ">": _operator_enum.GREATER_THAN,
+ "array_contains": _operator_enum.ARRAY_CONTAINS,
+ "in": _operator_enum.IN,
+ "array_contains_any": _operator_enum.ARRAY_CONTAINS_ANY,
+}
+_BAD_OP_STRING = "Operator string {!r} is invalid. Valid choices are: {}."
+_BAD_OP_NAN_NULL = 'Only an equality filter ("==") can be used with None or NaN values'
+_INVALID_WHERE_TRANSFORM = "Transforms cannot be used as where values."
+_BAD_DIR_STRING = "Invalid direction {!r}. Must be one of {!r} or {!r}."
+_INVALID_CURSOR_TRANSFORM = "Transforms cannot be used as cursor values."
+_MISSING_ORDER_BY = (
+ 'The "order by" field path {!r} is not present in the cursor data {!r}. '
+ "All fields sent to ``order_by()`` must be present in the fields "
+ "if passed to one of ``start_at()`` / ``start_after()`` / "
+ "``end_before()`` / ``end_at()`` to define a cursor."
+)
+_NO_ORDERS_FOR_CURSOR = (
+ "Attempting to create a cursor with no fields to order on. "
+ "When defining a cursor with one of ``start_at()`` / ``start_after()`` / "
+ "``end_before()`` / ``end_at()``, all fields in the cursor must "
+ "come from fields set in ``order_by()``."
+)
+_MISMATCH_CURSOR_W_ORDER_BY = "The cursor {!r} does not match the order fields {!r}."
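+
+# A sketch of the failure modes these messages describe (illustrative; each
+# raises ValueError once the query protobuf is built):
+#
+#   query.start_at({"population": 1})                        # no order_by()
+#   query.order_by("population").start_at({"name": "Oslo"})  # key mismatch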
+
+
+class Query(object):
+ """Represents a query to the Firestore API.
+
+ Instances of this class are considered immutable: all methods that
+ would modify an instance instead return a new instance.
+
+ Args:
+ parent (:class:`~google.cloud.firestore_v1.collection.CollectionReference`):
+ The collection that this query applies to.
+ projection (Optional[:class:`google.cloud.proto.firestore.v1.\
+ query_pb2.StructuredQuery.Projection`]):
+ A projection of document fields to limit the query results to.
+ field_filters (Optional[Tuple[:class:`google.cloud.proto.firestore.v1.\
+ query_pb2.StructuredQuery.FieldFilter`, ...]]):
+ The filters to be applied in the query.
+ orders (Optional[Tuple[:class:`google.cloud.proto.firestore.v1.\
+ query_pb2.StructuredQuery.Order`, ...]]):
+ The "order by" entries to use in the query.
+ limit (Optional[int]):
+ The maximum number of documents the query is allowed to return.
+ limit_to_last (Optional[bool]):
+ Denotes whether a provided limit is applied to the end of the result set.
+ offset (Optional[int]):
+ The number of results to skip.
+ start_at (Optional[Tuple[dict, bool]]):
+ Two-tuple of:
+
+ * a mapping of fields. Any field that is present in this mapping
+ must also be present in ``orders``
+ * an ``after`` flag
+
+ The fields and the flag combine to form a cursor used as
+ a starting point in a query result set. If the ``after``
+ flag is :data:`True`, the results will start just after any
+ documents which have fields matching the cursor, otherwise
+ any matching documents will be included in the result set.
+ When the query is formed, the document values
+ will be used in the order given by ``orders``.
+ end_at (Optional[Tuple[dict, bool]]):
+ Two-tuple of:
+
+ * a mapping of fields. Any field that is present in this mapping
+ must also be present in ``orders``
+ * a ``before`` flag
+
+ The fields and the flag combine to form a cursor used as
+ an ending point in a query result set. If the ``before``
+ flag is :data:`True`, the results will end just before any
+ documents which have fields matching the cursor, otherwise
+ any matching documents will be included in the result set.
+ When the query is formed, the document values
+ will be used in the order given by ``orders``.
+ all_descendants (Optional[bool]):
+ When false, selects only collections that are immediate children
+ of the `parent` specified in the containing `RunQueryRequest`.
+ When true, selects all descendant collections.
+ """
+
+ ASCENDING = "ASCENDING"
+ """str: Sort query results in ascending order on a field."""
+ DESCENDING = "DESCENDING"
+ """str: Sort query results in descending order on a field."""
+
+ def __init__(
+ self,
+ parent,
+ projection=None,
+ field_filters=(),
+ orders=(),
+ limit=None,
+ limit_to_last=False,
+ offset=None,
+ start_at=None,
+ end_at=None,
+ all_descendants=False,
+ ):
+ self._parent = parent
+ self._projection = projection
+ self._field_filters = field_filters
+ self._orders = orders
+ self._limit = limit
+ self._limit_to_last = limit_to_last
+ self._offset = offset
+ self._start_at = start_at
+ self._end_at = end_at
+ self._all_descendants = all_descendants
+
+ def __eq__(self, other):
+ if not isinstance(other, self.__class__):
+ return NotImplemented
+ return (
+ self._parent == other._parent
+ and self._projection == other._projection
+ and self._field_filters == other._field_filters
+ and self._orders == other._orders
+ and self._limit == other._limit
+ and self._limit_to_last == other._limit_to_last
+ and self._offset == other._offset
+ and self._start_at == other._start_at
+ and self._end_at == other._end_at
+ and self._all_descendants == other._all_descendants
+ )
+
+ @property
+ def _client(self):
+ """The client of the parent collection.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.client.Client`:
+ The client that owns this query.
+ """
+ return self._parent._client
+
+ def select(self, field_paths):
+ """Project documents matching query to a limited set of fields.
+
+ See :meth:`~google.cloud.firestore_v1.client.Client.field_path` for
+ more information on **field paths**.
+
+ If the current query already has a projection set (i.e. has already
+ called :meth:`~google.cloud.firestore_v1.query.Query.select`), this
+ will overwrite it.
+
+ Args:
+ field_paths (Iterable[str]): An iterable of field paths
+ (``.``-delimited list of field names) to use as a projection
+ of document fields in the query results.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ A "projected" query. Acts as a copy of the current query,
+ modified with the newly added projection.
+ Raises:
+ ValueError: If any ``field_path`` is invalid.
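+
+ Example (a minimal sketch, assuming an existing ``query``):
+
+ .. code-block:: python
+
+ projected = query.select(["name", "population"])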
+ """
+ field_paths = list(field_paths)
+ for field_path in field_paths:
+ field_path_module.split_field_path(field_path) # raises
+
+ new_projection = query_pb2.StructuredQuery.Projection(
+ fields=[
+ query_pb2.StructuredQuery.FieldReference(field_path=field_path)
+ for field_path in field_paths
+ ]
+ )
+ return self.__class__(
+ self._parent,
+ projection=new_projection,
+ field_filters=self._field_filters,
+ orders=self._orders,
+ limit=self._limit,
+ limit_to_last=self._limit_to_last,
+ offset=self._offset,
+ start_at=self._start_at,
+ end_at=self._end_at,
+ all_descendants=self._all_descendants,
+ )
+
+ def where(self, field_path, op_string, value):
+ """Filter the query on a field.
+
+ See :meth:`~google.cloud.firestore_v1.client.Client.field_path` for
+ more information on **field paths**.
+
+ Returns a new :class:`~google.cloud.firestore_v1.query.Query` that
+ filters on a specific field path, according to an operation (e.g.
+ ``==`` or "equals") and a particular value to be paired with that
+ operation.
+
+ Args:
+ field_path (str): A field path (``.``-delimited list of
+ field names) for the field to filter on.
+ op_string (str): A comparison operation in the form of a string.
+ Acceptable values are ``<``, ``<=``, ``==``, ``>=``, ``>``,
+ ``in``, ``array_contains`` and ``array_contains_any``.
+ value (Any): The value to compare the field against in the filter.
+ If ``value`` is :data:`None` or a NaN, then ``==`` is the only
+ allowed operation.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ A filtered query. Acts as a copy of the current query,
+ modified with the newly added filter.
+
+ Raises:
+ ValueError: If ``field_path`` is invalid.
+ ValueError: If ``value`` is a NaN or :data:`None` and
+ ``op_string`` is not ``==``.
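+
+ Example (a minimal sketch, assuming an existing ``query``):
+
+ .. code-block:: python
+
+ query = query.where("state", "==", "CA")
+ query = query.where("population", ">", 100000)
+ query = query.where("deleted_at", "==", None)  # unary IS_NULL filter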
+ """
+ field_path_module.split_field_path(field_path) # raises
+
+ if value is None:
+ if op_string != _EQ_OP:
+ raise ValueError(_BAD_OP_NAN_NULL)
+ filter_pb = query_pb2.StructuredQuery.UnaryFilter(
+ field=query_pb2.StructuredQuery.FieldReference(field_path=field_path),
+ op=enums.StructuredQuery.UnaryFilter.Operator.IS_NULL,
+ )
+ elif _isnan(value):
+ if op_string != _EQ_OP:
+ raise ValueError(_BAD_OP_NAN_NULL)
+ filter_pb = query_pb2.StructuredQuery.UnaryFilter(
+ field=query_pb2.StructuredQuery.FieldReference(field_path=field_path),
+ op=enums.StructuredQuery.UnaryFilter.Operator.IS_NAN,
+ )
+ elif isinstance(value, (transforms.Sentinel, transforms._ValueList)):
+ raise ValueError(_INVALID_WHERE_TRANSFORM)
+ else:
+ filter_pb = query_pb2.StructuredQuery.FieldFilter(
+ field=query_pb2.StructuredQuery.FieldReference(field_path=field_path),
+ op=_enum_from_op_string(op_string),
+ value=_helpers.encode_value(value),
+ )
+
+ new_filters = self._field_filters + (filter_pb,)
+ return self.__class__(
+ self._parent,
+ projection=self._projection,
+ field_filters=new_filters,
+ orders=self._orders,
+ limit=self._limit,
+ limit_to_last=self._limit_to_last,
+ offset=self._offset,
+ start_at=self._start_at,
+ end_at=self._end_at,
+ all_descendants=self._all_descendants,
+ )
+
+ @staticmethod
+ def _make_order(field_path, direction):
+ """Helper for :meth:`order_by`."""
+ return query_pb2.StructuredQuery.Order(
+ field=query_pb2.StructuredQuery.FieldReference(field_path=field_path),
+ direction=_enum_from_direction(direction),
+ )
+
+ def order_by(self, field_path, direction=ASCENDING):
+ """Modify the query to add an order clause on a specific field.
+
+ See :meth:`~google.cloud.firestore_v1.client.Client.field_path` for
+ more information on **field paths**.
+
+ Successive :meth:`~google.cloud.firestore_v1.query.Query.order_by`
+ calls will further refine the ordering of results returned by the query
+ (i.e. the new "order by" fields will be added to existing ones).
+
+ Args:
+ field_path (str): A field path (``.``-delimited list of
+ field names) on which to order the query results.
+ direction (Optional[str]): The direction to order by. Must be one
+ of :attr:`ASCENDING` or :attr:`DESCENDING`, defaults to
+ :attr:`ASCENDING`.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ An ordered query. Acts as a copy of the current query, modified
+ with the newly added "order by" constraint.
+
+ Raises:
+ ValueError: If ``field_path`` is invalid.
+ ValueError: If ``direction`` is not one of :attr:`ASCENDING` or
+ :attr:`DESCENDING`.
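+
+ Example (a minimal sketch, assuming an existing ``query``):
+
+ .. code-block:: python
+
+ query = query.order_by("population", direction=Query.DESCENDING)
+ query = query.order_by("name")  # secondary sort, ascending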
+ """
+ field_path_module.split_field_path(field_path) # raises
+
+ order_pb = self._make_order(field_path, direction)
+
+ new_orders = self._orders + (order_pb,)
+ return self.__class__(
+ self._parent,
+ projection=self._projection,
+ field_filters=self._field_filters,
+ orders=new_orders,
+ limit=self._limit,
+ limit_to_last=self._limit_to_last,
+ offset=self._offset,
+ start_at=self._start_at,
+ end_at=self._end_at,
+ all_descendants=self._all_descendants,
+ )
+
+ def limit(self, count):
+ """Limit a query to return at most `count` matching results.
+
+ If the current query already has a `limit` set, this will override it.
+
+ .. note::
+
+ `limit` and `limit_to_last` are mutually exclusive.
+ Setting `limit` will drop previously set `limit_to_last`.
+
+ Args:
+ count (int): Maximum number of documents to return that match
+ the query.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ A limited query. Acts as a copy of the current query, modified
+ with the newly added "limit" filter.
+ """
+ return self.__class__(
+ self._parent,
+ projection=self._projection,
+ field_filters=self._field_filters,
+ orders=self._orders,
+ limit=count,
+ limit_to_last=False,
+ offset=self._offset,
+ start_at=self._start_at,
+ end_at=self._end_at,
+ all_descendants=self._all_descendants,
+ )
+
+ def limit_to_last(self, count):
+ """Limit a query to return the last `count` matching results.
+
+ If the current query already has a `limit_to_last`
+ set, this will override it.
+
+ .. note::
+
+ `limit` and `limit_to_last` are mutually exclusive.
+ Setting `limit_to_last` will drop previously set `limit`.
+
+ Args:
+ count (int): Maximum number of documents to return that match
+ the query.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ A limited query. Acts as a copy of the current query, modified
+ with the newly added "limit" filter.
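+
+ Example (a minimal sketch; ``limit`` and ``limit_to_last`` replace each
+ other, so only the last one called takes effect):
+
+ .. code-block:: python
+
+ first_two = query.order_by("population").limit(2)
+ last_two = query.order_by("population").limit_to_last(2)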
+ """
+ return self.__class__(
+ self._parent,
+ projection=self._projection,
+ field_filters=self._field_filters,
+ orders=self._orders,
+ limit=count,
+ limit_to_last=True,
+ offset=self._offset,
+ start_at=self._start_at,
+ end_at=self._end_at,
+ all_descendants=self._all_descendants,
+ )
+
+ def offset(self, num_to_skip):
+ """Skip to an offset in a query.
+
+ If the current query already has specified an offset, this will
+ overwrite it.
+
+ Args:
+ num_to_skip (int): The number of results to skip at the beginning
+ of query results. (Must be non-negative.)
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ An offset query. Acts as a copy of the current query, modified
+ with the newly added "offset" field.
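+
+ Example (a minimal sketch of offset-based paging):
+
+ .. code-block:: python
+
+ page_two = query.order_by("name").offset(10).limit(10)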
+ """
+ return self.__class__(
+ self._parent,
+ projection=self._projection,
+ field_filters=self._field_filters,
+ orders=self._orders,
+ limit=self._limit,
+ limit_to_last=self._limit_to_last,
+ offset=num_to_skip,
+ start_at=self._start_at,
+ end_at=self._end_at,
+ all_descendants=self._all_descendants,
+ )
+
+ def _check_snapshot(self, document_snapshot):
+ """Validate local snapshots for non-collection-group queries.
+
+ Raises:
+ ValueError: for non-collection-group queries, if the snapshot
+ is from a different collection.
+ """
+ if self._all_descendants:
+ return
+
+ if document_snapshot.reference._path[:-1] != self._parent._path:
+ raise ValueError("Cannot use snapshot from another collection as a cursor.")
+
+ def _cursor_helper(self, document_fields_or_snapshot, before, start):
+ """Set values to be used for a ``start_at`` or ``end_at`` cursor.
+
+ The values will later be used in a query protobuf.
+
+ When the query is sent to the server, the ``document_fields_or_snapshot`` will
+ be used in the order given by fields set by
+ :meth:`~google.cloud.firestore_v1.query.Query.order_by`.
+
+ Args:
+ document_fields_or_snapshot
+ (Union[:class:`~google.cloud.firestore_v1.document.DocumentSnapshot`, dict, list, tuple]):
+ a document snapshot or a dictionary/list/tuple of fields
+ representing a query results cursor. A cursor is a collection
+ of values that represent a position in a query result set.
+ before (bool): Flag indicating if the document in
+ ``document_fields_or_snapshot`` should (:data:`False`) or
+ shouldn't (:data:`True`) be included in the result set.
+ start (Optional[bool]): determines if the cursor is a ``start_at``
+ cursor (:data:`True`) or an ``end_at`` cursor (:data:`False`).
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ A query with cursor. Acts as a copy of the current query, modified
+ with the newly added "start at" or "end at" cursor.
+ """
+ if isinstance(document_fields_or_snapshot, tuple):
+ document_fields_or_snapshot = list(document_fields_or_snapshot)
+ elif isinstance(document_fields_or_snapshot, document.DocumentSnapshot):
+ self._check_snapshot(document_fields_or_snapshot)
+ else:
+ # NOTE: We copy so that the caller can't modify after calling.
+ document_fields_or_snapshot = copy.deepcopy(document_fields_or_snapshot)
+
+ cursor_pair = document_fields_or_snapshot, before
+ query_kwargs = {
+ "projection": self._projection,
+ "field_filters": self._field_filters,
+ "orders": self._orders,
+ "limit": self._limit,
+ "offset": self._offset,
+ "all_descendants": self._all_descendants,
+ }
+ if start:
+ query_kwargs["start_at"] = cursor_pair
+ query_kwargs["end_at"] = self._end_at
+ else:
+ query_kwargs["start_at"] = self._start_at
+ query_kwargs["end_at"] = cursor_pair
+
+ return self.__class__(self._parent, **query_kwargs)
+
+ def start_at(self, document_fields_or_snapshot):
+ """Start query results at a particular document value.
+
+ The result set will **include** the document specified by
+ ``document_fields_or_snapshot``.
+
+ If the current query already has specified a start cursor -- either
+ via this method or
+ :meth:`~google.cloud.firestore_v1.query.Query.start_after` -- this
+ will overwrite it.
+
+ When the query is sent to the server, the ``document_fields_or_snapshot`` will
+ be used in the order given by fields set by
+ :meth:`~google.cloud.firestore_v1.query.Query.order_by`.
+
+ Args:
+ document_fields_or_snapshot
+ (Union[:class:`~google.cloud.firestore_v1.document.DocumentSnapshot`, dict, list, tuple]):
+ a document snapshot or a dictionary/list/tuple of fields
+ representing a query results cursor. A cursor is a collection
+ of values that represent a position in a query result set.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ A query with cursor. Acts as
+ a copy of the current query, modified with the newly added
+ "start at" cursor.
+ """
+ return self._cursor_helper(document_fields_or_snapshot, before=True, start=True)
+
+ def start_after(self, document_fields_or_snapshot):
+ """Start query results after a particular document value.
+
+ The result set will **exclude** the document specified by
+ ``document_fields_or_snapshot``.
+
+ If the current query already has specified a start cursor -- either
+ via this method or
+ :meth:`~google.cloud.firestore_v1.query.Query.start_at` -- this will
+ overwrite it.
+
+ When the query is sent to the server, the ``document_fields_or_snapshot`` will
+ be used in the order given by fields set by
+ :meth:`~google.cloud.firestore_v1.query.Query.order_by`.
+
+ Args:
+ document_fields_or_snapshot
+ (Union[:class:`~google.cloud.firestore_v1.document.DocumentSnapshot`, dict, list, tuple]):
+ a document snapshot or a dictionary/list/tuple of fields
+ representing a query results cursor. A cursor is a collection
+ of values that represent a position in a query result set.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ A query with cursor. Acts as a copy of the current query, modified
+ with the newly added "start after" cursor.
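+
+ Example (a minimal sketch of snapshot-based pagination, where ``last_doc``
+ is a :class:`~google.cloud.firestore_v1.document.DocumentSnapshot` from a
+ previous page):
+
+ .. code-block:: python
+
+ next_page = query.start_after(last_doc).limit(10)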
+ """
+ return self._cursor_helper(
+ document_fields_or_snapshot, before=False, start=True
+ )
+
+ def end_before(self, document_fields_or_snapshot):
+ """End query results before a particular document value.
+
+ The result set will **exclude** the document specified by
+ ``document_fields_or_snapshot``.
+
+ If the current query already has specified an end cursor -- either
+ via this method or
+ :meth:`~google.cloud.firestore_v1.query.Query.end_at` -- this will
+ overwrite it.
+
+ When the query is sent to the server, the ``document_fields_or_snapshot`` will
+ be used in the order given by fields set by
+ :meth:`~google.cloud.firestore_v1.query.Query.order_by`.
+
+ Args:
+ document_fields_or_snapshot
+ (Union[:class:`~google.cloud.firestore_v1.document.DocumentSnapshot`, dict, list, tuple]):
+ a document snapshot or a dictionary/list/tuple of fields
+ representing a query results cursor. A cursor is a collection
+ of values that represent a position in a query result set.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ A query with cursor. Acts as a copy of the current query, modified
+ with the newly added "end before" cursor.
+ """
+ return self._cursor_helper(
+ document_fields_or_snapshot, before=True, start=False
+ )
+
+ def end_at(self, document_fields_or_snapshot):
+ """End query results at a particular document value.
+
+ The result set will **include** the document specified by
+ ``document_fields_or_snapshot``.
+
+ If the current query already has specified an end cursor -- either
+ via this method or
+ :meth:`~google.cloud.firestore_v1.query.Query.end_before` -- this will
+ overwrite it.
+
+ When the query is sent to the server, the ``document_fields_or_snapshot`` will
+ be used in the order given by fields set by
+ :meth:`~google.cloud.firestore_v1.query.Query.order_by`.
+
+ Args:
+ document_fields_or_snapshot
+ (Union[:class:`~google.cloud.firestore_v1.document.DocumentSnapshot`, dict, list, tuple]):
+ a document snapshot or a dictionary/list/tuple of fields
+ representing a query results cursor. A cursor is a collection
+ of values that represent a position in a query result set.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ A query with cursor. Acts as a copy of the current query, modified
+ with the newly added "end at" cursor.
+ """
+ return self._cursor_helper(
+ document_fields_or_snapshot, before=False, start=False
+ )
+
+ def _filters_pb(self):
+ """Convert all the filters into a single generic Filter protobuf.
+
+ This may be a lone field filter or unary filter, may be a composite
+ filter or may be :data:`None`.
+
+ Returns:
+ :class:`google.cloud.firestore_v1.types.StructuredQuery.Filter`:
+ A "generic" filter representing the current query's filters.
+ """
+ num_filters = len(self._field_filters)
+ if num_filters == 0:
+ return None
+ elif num_filters == 1:
+ return _filter_pb(self._field_filters[0])
+ else:
+ composite_filter = query_pb2.StructuredQuery.CompositeFilter(
+ op=enums.StructuredQuery.CompositeFilter.Operator.AND,
+ filters=[_filter_pb(filter_) for filter_ in self._field_filters],
+ )
+ return query_pb2.StructuredQuery.Filter(composite_filter=composite_filter)
+
+ @staticmethod
+ def _normalize_projection(projection):
+ """Helper: convert field paths to message."""
+ if projection is not None:
+
+ fields = list(projection.fields)
+
+ if not fields:
+ field_ref = query_pb2.StructuredQuery.FieldReference(
+ field_path="__name__"
+ )
+ return query_pb2.StructuredQuery.Projection(fields=[field_ref])
+
+ return projection
+
+ def _normalize_orders(self):
+ """Helper: adjust orders based on cursors, where clauses."""
+ orders = list(self._orders)
+ _has_snapshot_cursor = False
+
+ if self._start_at:
+ if isinstance(self._start_at[0], document.DocumentSnapshot):
+ _has_snapshot_cursor = True
+
+ if self._end_at:
+ if isinstance(self._end_at[0], document.DocumentSnapshot):
+ _has_snapshot_cursor = True
+
+ if _has_snapshot_cursor:
+ should_order = [
+ _enum_from_op_string(key)
+ for key in _COMPARISON_OPERATORS
+ if key not in (_EQ_OP, "array_contains")
+ ]
+ order_keys = [order.field.field_path for order in orders]
+ for filter_ in self._field_filters:
+ field = filter_.field.field_path
+ if filter_.op in should_order and field not in order_keys:
+ orders.append(self._make_order(field, "ASCENDING"))
+ if not orders:
+ orders.append(self._make_order("__name__", "ASCENDING"))
+ else:
+ order_keys = [order.field.field_path for order in orders]
+ if "__name__" not in order_keys:
+ direction = orders[-1].direction # enum?
+ orders.append(self._make_order("__name__", direction))
+
+ return orders
+
+ def _normalize_cursor(self, cursor, orders):
+ """Helper: convert cursor to a list of values based on orders."""
+ if cursor is None:
+ return
+
+ if not orders:
+ raise ValueError(_NO_ORDERS_FOR_CURSOR)
+
+ document_fields, before = cursor
+
+ order_keys = [order.field.field_path for order in orders]
+
+ if isinstance(document_fields, document.DocumentSnapshot):
+ snapshot = document_fields
+ document_fields = snapshot.to_dict()
+ document_fields["__name__"] = snapshot.reference
+
+ if isinstance(document_fields, dict):
+ # Transform to list using orders
+ values = []
+ data = document_fields
+ for order_key in order_keys:
+ try:
+ if order_key in data:
+ values.append(data[order_key])
+ else:
+ values.append(
+ field_path_module.get_nested_value(order_key, data)
+ )
+ except KeyError:
+ msg = _MISSING_ORDER_BY.format(order_key, data)
+ raise ValueError(msg)
+ document_fields = values
+
+ if len(document_fields) != len(orders):
+ msg = _MISMATCH_CURSOR_W_ORDER_BY.format(document_fields, order_keys)
+ raise ValueError(msg)
+
+ _transform_bases = (transforms.Sentinel, transforms._ValueList)
+
+ for index, key_field in enumerate(zip(order_keys, document_fields)):
+ key, field = key_field
+
+ if isinstance(field, _transform_bases):
+ msg = _INVALID_CURSOR_TRANSFORM
+ raise ValueError(msg)
+
+ if key == "__name__" and isinstance(field, six.string_types):
+ document_fields[index] = self._parent.document(field)
+
+ return document_fields, before
+
+ def _to_protobuf(self):
+ """Convert the current query into the equivalent protobuf.
+
+ Returns:
+ :class:`google.cloud.firestore_v1.types.StructuredQuery`:
+ The query protobuf.
+ """
+ projection = self._normalize_projection(self._projection)
+ orders = self._normalize_orders()
+ start_at = self._normalize_cursor(self._start_at, orders)
+ end_at = self._normalize_cursor(self._end_at, orders)
+
+ query_kwargs = {
+ "select": projection,
+ "from": [
+ query_pb2.StructuredQuery.CollectionSelector(
+ collection_id=self._parent.id, all_descendants=self._all_descendants
+ )
+ ],
+ "where": self._filters_pb(),
+ "order_by": orders,
+ "start_at": _cursor_pb(start_at),
+ "end_at": _cursor_pb(end_at),
+ }
+ if self._offset is not None:
+ query_kwargs["offset"] = self._offset
+ if self._limit is not None:
+ query_kwargs["limit"] = wrappers_pb2.Int32Value(value=self._limit)
+
+ return query_pb2.StructuredQuery(**query_kwargs)
+
+ def get(self, transaction=None):
+ """Read the documents in the collection that match this query.
+
+ This sends a ``RunQuery`` RPC and returns a list of documents
+ returned in the stream of ``RunQueryResponse`` messages.
+
+ Args:
+ transaction
+ (Optional[:class:`~google.cloud.firestore_v1.transaction.Transaction`]):
+ An existing transaction that this query will run in.
+
+ If a ``transaction`` is used and it already has write operations
+ added, this method cannot be used (i.e. read-after-write is not
+ allowed).
+
+ Returns:
+ list: The documents in the collection that match this query.
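+
+        Example (a usage sketch; assumes ``db`` is a
+        :class:`~google.cloud.firestore_v1.client.Client`):
+
+        .. code-block:: python
+
+            docs = db.collection(u'users').where(u'age', u'>=', 18).get()
+            for doc in docs:
+                print(u'{} => {}'.format(doc.id, doc.to_dict()))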
+ """
+ is_limited_to_last = self._limit_to_last
+
+ if self._limit_to_last:
+ # In order to fetch up to `self._limit` results from the end of the
+            # query, flip the defined ordering on the query to start from the
+ # end, retrieving up to `self._limit` results from the backend.
+ for order in self._orders:
+ order.direction = _enum_from_direction(
+ self.DESCENDING
+ if order.direction == self.ASCENDING
+ else self.ASCENDING
+ )
+ self._limit_to_last = False
+
+ result = self.stream(transaction=transaction)
+ if is_limited_to_last:
+ result = reversed(list(result))
+
+ return list(result)
+
+ def stream(self, transaction=None):
+ """Read the documents in the collection that match this query.
+
+ This sends a ``RunQuery`` RPC and then returns an iterator which
+ consumes each document returned in the stream of ``RunQueryResponse``
+ messages.
+
+ .. note::
+
+ The underlying stream of responses will time out after
+ the ``max_rpc_timeout_millis`` value set in the GAPIC
+ client configuration for the ``RunQuery`` API. Snapshots
+ not consumed from the iterator before that point will be lost.
+
+ If a ``transaction`` is used and it already has write operations
+ added, this method cannot be used (i.e. read-after-write is not
+ allowed).
+
+ Args:
+ transaction
+ (Optional[:class:`~google.cloud.firestore_v1.transaction.Transaction`]):
+ An existing transaction that this query will run in.
+
+ Yields:
+ :class:`~google.cloud.firestore_v1.document.DocumentSnapshot`:
+ The next document that fulfills the query.
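+
+        Example (a usage sketch; assumes ``db`` is a
+        :class:`~google.cloud.firestore_v1.client.Client`):
+
+        .. code-block:: python
+
+            # Documents are fetched lazily as the iterator is consumed.
+            for doc in db.collection(u'users').stream():
+                print(u'{} => {}'.format(doc.id, doc.to_dict()))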
+ """
+ if self._limit_to_last:
+ raise ValueError(
+ "Query results for queries that include limit_to_last() "
+ "constraints cannot be streamed. Use Query.get() instead."
+ )
+ parent_path, expected_prefix = self._parent._parent_info()
+ response_iterator = self._client._firestore_api.run_query(
+ parent_path,
+ self._to_protobuf(),
+ transaction=_helpers.get_transaction_id(transaction),
+ metadata=self._client._rpc_metadata,
+ )
+
+ for response in response_iterator:
+ if self._all_descendants:
+ snapshot = _collection_group_query_response_to_snapshot(
+ response, self._parent
+ )
+ else:
+ snapshot = _query_response_to_snapshot(
+ response, self._parent, expected_prefix
+ )
+ if snapshot is not None:
+ yield snapshot
+
+ def on_snapshot(self, callback):
+ """Monitor the documents in this collection that match this query.
+
+ This starts a watch on this query using a background thread. The
+ provided callback is run on the snapshot of the documents.
+
+ Args:
+ callback (Callable[List[:class:`~google.cloud.firestore_v1.query.QuerySnapshot`], \
+ List[:class:`~google.cloud.firestore_v1.watch.DocumentChange`], datetime.datetime], NoneType):
+ a callback to run when a change occurs.
+
+ Example:
+
+ .. code-block:: python
+
+ from google.cloud import firestore_v1
+
+ db = firestore_v1.Client()
+ query_ref = db.collection(u'users').where("user", "==", u'Ada')
+
+ def on_snapshot(docs, changes, read_time):
+ for doc in docs:
+ print(u'{} => {}'.format(doc.id, doc.to_dict()))
+
+ # Watch this query
+ query_watch = query_ref.on_snapshot(on_snapshot)
+
+ # Terminate this watch
+ query_watch.unsubscribe()
+ """
+ return Watch.for_query(
+ self, callback, document.DocumentSnapshot, document.DocumentReference
+ )
+
+ def _comparator(self, doc1, doc2):
+ _orders = self._orders
+
+ # Add implicit sorting by name, using the last specified direction.
+ if len(_orders) == 0:
+ lastDirection = Query.ASCENDING
+ else:
+ if _orders[-1].direction == 1:
+ lastDirection = Query.ASCENDING
+ else:
+ lastDirection = Query.DESCENDING
+
+ orderBys = list(_orders)
+
+ order_pb = query_pb2.StructuredQuery.Order(
+ field=query_pb2.StructuredQuery.FieldReference(field_path="id"),
+ direction=_enum_from_direction(lastDirection),
+ )
+ orderBys.append(order_pb)
+
+ for orderBy in orderBys:
+ if orderBy.field.field_path == "id":
+                # If ordering by document id, compare resource paths.
+ comp = Order()._compare_to(doc1.reference._path, doc2.reference._path)
+ else:
+ if (
+ orderBy.field.field_path not in doc1._data
+ or orderBy.field.field_path not in doc2._data
+ ):
+ raise ValueError(
+ "Can only compare fields that exist in the "
+ "DocumentSnapshot. Please include the fields you are "
+ "ordering on in your select() call."
+ )
+ v1 = doc1._data[orderBy.field.field_path]
+ v2 = doc2._data[orderBy.field.field_path]
+ encoded_v1 = _helpers.encode_value(v1)
+ encoded_v2 = _helpers.encode_value(v2)
+ comp = Order().compare(encoded_v1, encoded_v2)
+
+ if comp != 0:
+ # 1 == Ascending, -1 == Descending
+ return orderBy.direction * comp
+
+ return 0
+
+
+def _enum_from_op_string(op_string):
+ """Convert a string representation of a binary operator to an enum.
+
+ These enums come from the protobuf message definition
+ ``StructuredQuery.FieldFilter.Operator``.
+
+ Args:
+ op_string (str): A comparison operation in the form of a string.
+ Acceptable values are ``<``, ``<=``, ``==``, ``>=``
+ and ``>``.
+
+ Returns:
+ int: The enum corresponding to ``op_string``.
+
+ Raises:
+ ValueError: If ``op_string`` is not a valid operator.
+ """
+ try:
+ return _COMPARISON_OPERATORS[op_string]
+ except KeyError:
+ choices = ", ".join(sorted(_COMPARISON_OPERATORS.keys()))
+ msg = _BAD_OP_STRING.format(op_string, choices)
+ raise ValueError(msg)
+
+
+def _isnan(value):
+ """Check if a value is NaN.
+
+ This differs from ``math.isnan`` in that **any** input type is
+ allowed.
+
+ Args:
+ value (Any): A value to check for NaN-ness.
+
+ Returns:
+ bool: Indicates if the value is the NaN float.
+ """
+ if isinstance(value, float):
+ return math.isnan(value)
+ else:
+ return False
+
+
+def _enum_from_direction(direction):
+ """Convert a string representation of a direction to an enum.
+
+ Args:
+ direction (str): A direction to order by. Must be one of
+ :attr:`~google.cloud.firestore.Query.ASCENDING` or
+ :attr:`~google.cloud.firestore.Query.DESCENDING`.
+
+ Returns:
+ int: The enum corresponding to ``direction``.
+
+ Raises:
+ ValueError: If ``direction`` is not a valid direction.
+ """
+ if isinstance(direction, int):
+ return direction
+
+ if direction == Query.ASCENDING:
+ return enums.StructuredQuery.Direction.ASCENDING
+ elif direction == Query.DESCENDING:
+ return enums.StructuredQuery.Direction.DESCENDING
+ else:
+ msg = _BAD_DIR_STRING.format(direction, Query.ASCENDING, Query.DESCENDING)
+ raise ValueError(msg)
+
+
+def _filter_pb(field_or_unary):
+ """Convert a specific protobuf filter to the generic filter type.
+
+ Args:
+ field_or_unary (Union[google.cloud.proto.firestore.v1.\
+ query_pb2.StructuredQuery.FieldFilter, google.cloud.proto.\
+            firestore.v1.query_pb2.StructuredQuery.UnaryFilter]): A
+ field or unary filter to convert to a generic filter.
+
+ Returns:
+ google.cloud.firestore_v1.types.\
+ StructuredQuery.Filter: A "generic" filter.
+
+ Raises:
+ ValueError: If ``field_or_unary`` is not a field or unary filter.
+ """
+ if isinstance(field_or_unary, query_pb2.StructuredQuery.FieldFilter):
+ return query_pb2.StructuredQuery.Filter(field_filter=field_or_unary)
+ elif isinstance(field_or_unary, query_pb2.StructuredQuery.UnaryFilter):
+ return query_pb2.StructuredQuery.Filter(unary_filter=field_or_unary)
+ else:
+ raise ValueError("Unexpected filter type", type(field_or_unary), field_or_unary)
+
+
+def _cursor_pb(cursor_pair):
+ """Convert a cursor pair to a protobuf.
+
+ If ``cursor_pair`` is :data:`None`, just returns :data:`None`.
+
+ Args:
+ cursor_pair (Optional[Tuple[list, bool]]): Two-tuple of
+
+ * a list of field values.
+ * a ``before`` flag
+
+ Returns:
+ Optional[google.cloud.firestore_v1.types.Cursor]: A
+ protobuf cursor corresponding to the values.
+ """
+ if cursor_pair is not None:
+ data, before = cursor_pair
+ value_pbs = [_helpers.encode_value(value) for value in data]
+ return query_pb2.Cursor(values=value_pbs, before=before)
+
+
+def _query_response_to_snapshot(response_pb, collection, expected_prefix):
+ """Parse a query response protobuf to a document snapshot.
+
+ Args:
+ response_pb (google.cloud.proto.firestore.v1.\
+            firestore_pb2.RunQueryResponse): A response message from the
+            ``RunQuery`` RPC stream.
+ collection (:class:`~google.cloud.firestore_v1.collection.CollectionReference`):
+ A reference to the collection that initiated the query.
+ expected_prefix (str): The expected prefix for fully-qualified
+ document names returned in the query results. This can be computed
+ directly from ``collection`` via :meth:`_parent_info`.
+
+ Returns:
+ Optional[:class:`~google.cloud.firestore.document.DocumentSnapshot`]:
+ A snapshot of the data returned in the query. If
+ ``response_pb.document`` is not set, the snapshot will be :data:`None`.
+ """
+ if not response_pb.HasField("document"):
+ return None
+
+ document_id = _helpers.get_doc_id(response_pb.document, expected_prefix)
+ reference = collection.document(document_id)
+ data = _helpers.decode_dict(response_pb.document.fields, collection._client)
+ snapshot = document.DocumentSnapshot(
+ reference,
+ data,
+ exists=True,
+ read_time=response_pb.read_time,
+ create_time=response_pb.document.create_time,
+ update_time=response_pb.document.update_time,
+ )
+ return snapshot
+
+
+def _collection_group_query_response_to_snapshot(response_pb, collection):
+ """Parse a query response protobuf to a document snapshot.
+
+ Args:
+ response_pb (google.cloud.proto.firestore.v1.\
+            firestore_pb2.RunQueryResponse): A response message from the
+            ``RunQuery`` RPC stream.
+ collection (:class:`~google.cloud.firestore_v1.collection.CollectionReference`):
+ A reference to the collection that initiated the query.
+
+ Returns:
+ Optional[:class:`~google.cloud.firestore.document.DocumentSnapshot`]:
+ A snapshot of the data returned in the query. If
+ ``response_pb.document`` is not set, the snapshot will be :data:`None`.
+ """
+ if not response_pb.HasField("document"):
+ return None
+ reference = collection._client.document(response_pb.document.name)
+ data = _helpers.decode_dict(response_pb.document.fields, collection._client)
+ snapshot = document.DocumentSnapshot(
+ reference,
+ data,
+ exists=True,
+ read_time=response_pb.read_time,
+ create_time=response_pb.document.create_time,
+ update_time=response_pb.document.update_time,
+ )
+ return snapshot
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/transaction.py b/venv/Lib/site-packages/google/cloud/firestore_v1/transaction.py
new file mode 100644
index 000000000..04485a84c
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1/transaction.py
@@ -0,0 +1,442 @@
+# Copyright 2017 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for applying Google Cloud Firestore changes in a transaction."""
+
+
+import random
+import time
+
+import six
+
+from google.api_core import exceptions
+from google.cloud.firestore_v1 import batch
+from google.cloud.firestore_v1 import types
+from google.cloud.firestore_v1.document import DocumentReference
+from google.cloud.firestore_v1.query import Query
+
+
+MAX_ATTEMPTS = 5
+"""int: Default number of transaction attempts (with retries)."""
+_CANT_BEGIN = "The transaction has already begun. Current transaction ID: {!r}."
+_MISSING_ID_TEMPLATE = "The transaction has no transaction ID, so it cannot be {}."
+_CANT_ROLLBACK = _MISSING_ID_TEMPLATE.format("rolled back")
+_CANT_COMMIT = _MISSING_ID_TEMPLATE.format("committed")
+_WRITE_READ_ONLY = "Cannot perform write operation in read-only transaction."
+_INITIAL_SLEEP = 1.0
+"""float: Initial "max" for sleep interval. To be used in :func:`_sleep`."""
+_MAX_SLEEP = 30.0
+"""float: Eventual "max" sleep time. To be used in :func:`_sleep`."""
+_MULTIPLIER = 2.0
+"""float: Multiplier for exponential backoff. To be used in :func:`_sleep`."""
+_EXCEED_ATTEMPTS_TEMPLATE = "Failed to commit transaction in {:d} attempts."
+_CANT_RETRY_READ_ONLY = "Only read-write transactions can be retried."
+
+
+class Transaction(batch.WriteBatch):
+ """Accumulate read-and-write operations to be sent in a transaction.
+
+ Args:
+ client (:class:`~google.cloud.firestore_v1.client.Client`):
+ The client that created this transaction.
+ max_attempts (Optional[int]): The maximum number of attempts for
+ the transaction (i.e. allowing retries). Defaults to
+ :attr:`~google.cloud.firestore_v1.transaction.MAX_ATTEMPTS`.
+ read_only (Optional[bool]): Flag indicating if the transaction
+ should be read-only or should allow writes. Defaults to
+ :data:`False`.
+ """
+
+ def __init__(self, client, max_attempts=MAX_ATTEMPTS, read_only=False):
+ super(Transaction, self).__init__(client)
+ self._max_attempts = max_attempts
+ self._read_only = read_only
+ self._id = None
+
+ def _add_write_pbs(self, write_pbs):
+ """Add `Write`` protobufs to this transaction.
+
+ Args:
+ write_pbs (List[google.cloud.proto.firestore.v1.\
+ write_pb2.Write]): A list of write protobufs to be added.
+
+ Raises:
+ ValueError: If this transaction is read-only.
+ """
+ if self._read_only:
+ raise ValueError(_WRITE_READ_ONLY)
+
+ super(Transaction, self)._add_write_pbs(write_pbs)
+
+ def _options_protobuf(self, retry_id):
+ """Convert the current object to protobuf.
+
+ The ``retry_id`` value is used when retrying a transaction that
+ failed (e.g. due to contention). It is intended to be the "first"
+ transaction that failed (i.e. if multiple retries are needed).
+
+ Args:
+ retry_id (Union[bytes, NoneType]): Transaction ID of a transaction
+ to be retried.
+
+ Returns:
+ Optional[google.cloud.firestore_v1.types.TransactionOptions]:
+ The protobuf ``TransactionOptions`` if ``read_only==True`` or if
+ there is a transaction ID to be retried, else :data:`None`.
+
+ Raises:
+ ValueError: If ``retry_id`` is not :data:`None` but the
+ transaction is read-only.
+ """
+ if retry_id is not None:
+ if self._read_only:
+ raise ValueError(_CANT_RETRY_READ_ONLY)
+
+ return types.TransactionOptions(
+ read_write=types.TransactionOptions.ReadWrite(
+ retry_transaction=retry_id
+ )
+ )
+ elif self._read_only:
+ return types.TransactionOptions(
+ read_only=types.TransactionOptions.ReadOnly()
+ )
+ else:
+ return None
+
+ @property
+ def in_progress(self):
+ """Determine if this transaction has already begun.
+
+ Returns:
+ bool: Indicates if the transaction has started.
+ """
+ return self._id is not None
+
+ @property
+ def id(self):
+ """Get the current transaction ID.
+
+ Returns:
+ Optional[bytes]: The transaction ID (or :data:`None` if the
+ current transaction is not in progress).
+ """
+ return self._id
+
+ def _begin(self, retry_id=None):
+ """Begin the transaction.
+
+ Args:
+ retry_id (Optional[bytes]): Transaction ID of a transaction to be
+ retried.
+
+ Raises:
+ ValueError: If the current transaction has already begun.
+ """
+ if self.in_progress:
+ msg = _CANT_BEGIN.format(self._id)
+ raise ValueError(msg)
+
+ transaction_response = self._client._firestore_api.begin_transaction(
+ self._client._database_string,
+ options_=self._options_protobuf(retry_id),
+ metadata=self._client._rpc_metadata,
+ )
+ self._id = transaction_response.transaction
+
+ def _clean_up(self):
+ """Clean up the instance after :meth:`_rollback`` or :meth:`_commit``.
+
+ This intended to occur on success or failure of the associated RPCs.
+ """
+ self._write_pbs = []
+ self._id = None
+
+ def _rollback(self):
+ """Roll back the transaction.
+
+ Raises:
+ ValueError: If no transaction is in progress.
+ """
+ if not self.in_progress:
+ raise ValueError(_CANT_ROLLBACK)
+
+ try:
+ # NOTE: The response is just ``google.protobuf.Empty``.
+ self._client._firestore_api.rollback(
+ self._client._database_string,
+ self._id,
+ metadata=self._client._rpc_metadata,
+ )
+ finally:
+ self._clean_up()
+
+ def _commit(self):
+ """Transactionally commit the changes accumulated.
+
+ Returns:
+ List[:class:`google.cloud.proto.firestore.v1.write_pb2.WriteResult`, ...]:
+ The write results corresponding to the changes committed, returned
+ in the same order as the changes were applied to this transaction.
+ A write result contains an ``update_time`` field.
+
+ Raises:
+ ValueError: If no transaction is in progress.
+ """
+ if not self.in_progress:
+ raise ValueError(_CANT_COMMIT)
+
+ commit_response = _commit_with_retry(self._client, self._write_pbs, self._id)
+
+ self._clean_up()
+ return list(commit_response.write_results)
+
+ def get_all(self, references):
+ """Retrieves multiple documents from Firestore.
+
+ Args:
+ references (List[.DocumentReference, ...]): Iterable of document
+ references to be retrieved.
+
+ Yields:
+ .DocumentSnapshot: The next document snapshot that fulfills the
+ query, or :data:`None` if the document does not exist.
+ """
+ return self._client.get_all(references, transaction=self)
+
+ def get(self, ref_or_query):
+ """
+ Retrieve a document or a query result from the database.
+ Args:
+            ref_or_query: The document reference or query object to retrieve.
+ Yields:
+ .DocumentSnapshot: The next document snapshot that fulfills the
+ query, or :data:`None` if the document does not exist.
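+
+        Example (a usage sketch inside a transactional callable; assumes
+        ``doc_ref`` points at a document with a numeric ``count`` field):
+
+        .. code-block:: python
+
+            @firestore_v1.transactional
+            def read_then_write(transaction, doc_ref):
+                snapshot = list(transaction.get(doc_ref))[0]
+                transaction.update(
+                    doc_ref, {u'count': snapshot.get(u'count') + 1}
+                )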
+ """
+ if isinstance(ref_or_query, DocumentReference):
+ return self._client.get_all([ref_or_query], transaction=self)
+ elif isinstance(ref_or_query, Query):
+ return ref_or_query.stream(transaction=self)
+ else:
+ raise ValueError(
+ 'Value for argument "ref_or_query" must be a DocumentReference or a Query.'
+ )
+
+
+class _Transactional(object):
+ """Provide a callable object to use as a transactional decorater.
+
+ This is surfaced via
+ :func:`~google.cloud.firestore_v1.transaction.transactional`.
+
+ Args:
+ to_wrap (Callable[[:class:`~google.cloud.firestore_v1.transaction.Transaction`, ...], Any]):
+ A callable that should be run (and retried) in a transaction.
+ """
+
+ def __init__(self, to_wrap):
+ self.to_wrap = to_wrap
+ self.current_id = None
+ """Optional[bytes]: The current transaction ID."""
+ self.retry_id = None
+ """Optional[bytes]: The ID of the first attempted transaction."""
+
+ def _reset(self):
+ """Unset the transaction IDs."""
+ self.current_id = None
+ self.retry_id = None
+
+ def _pre_commit(self, transaction, *args, **kwargs):
+ """Begin transaction and call the wrapped callable.
+
+ If the callable raises an exception, the transaction will be rolled
+ back. If not, the transaction will be "ready" for ``Commit`` (i.e.
+ it will have staged writes).
+
+ Args:
+ transaction
+ (:class:`~google.cloud.firestore_v1.transaction.Transaction`):
+ A transaction to execute the callable within.
+ args (Tuple[Any, ...]): The extra positional arguments to pass
+ along to the wrapped callable.
+ kwargs (Dict[str, Any]): The extra keyword arguments to pass
+ along to the wrapped callable.
+
+ Returns:
+ Any: result of the wrapped callable.
+
+ Raises:
+ Exception: Any failure caused by ``to_wrap``.
+ """
+ # Force the ``transaction`` to be not "in progress".
+ transaction._clean_up()
+ transaction._begin(retry_id=self.retry_id)
+
+ # Update the stored transaction IDs.
+ self.current_id = transaction._id
+ if self.retry_id is None:
+ self.retry_id = self.current_id
+ try:
+ return self.to_wrap(transaction, *args, **kwargs)
+ except: # noqa
+ # NOTE: If ``rollback`` fails this will lose the information
+ # from the original failure.
+ transaction._rollback()
+ raise
+
+ def _maybe_commit(self, transaction):
+ """Try to commit the transaction.
+
+ If the transaction is read-write and the ``Commit`` fails with the
+ ``ABORTED`` status code, it will be retried. Any other failure will
+ not be caught.
+
+ Args:
+ transaction
+ (:class:`~google.cloud.firestore_v1.transaction.Transaction`):
+ The transaction to be ``Commit``-ed.
+
+ Returns:
+ bool: Indicating if the commit succeeded.
+ """
+ try:
+ transaction._commit()
+ return True
+ except exceptions.GoogleAPICallError as exc:
+ if transaction._read_only:
+ raise
+
+ if isinstance(exc, exceptions.Aborted):
+ # If a read-write transaction returns ABORTED, retry.
+ return False
+ else:
+ raise
+
+ def __call__(self, transaction, *args, **kwargs):
+ """Execute the wrapped callable within a transaction.
+
+ Args:
+ transaction
+ (:class:`~google.cloud.firestore_v1.transaction.Transaction`):
+ A transaction to execute the callable within.
+ args (Tuple[Any, ...]): The extra positional arguments to pass
+ along to the wrapped callable.
+ kwargs (Dict[str, Any]): The extra keyword arguments to pass
+ along to the wrapped callable.
+
+ Returns:
+ Any: The result of the wrapped callable.
+
+ Raises:
+ ValueError: If the transaction does not succeed in
+ ``max_attempts``.
+ """
+ self._reset()
+
+ for attempt in six.moves.xrange(transaction._max_attempts):
+ result = self._pre_commit(transaction, *args, **kwargs)
+ succeeded = self._maybe_commit(transaction)
+ if succeeded:
+ return result
+
+ # Subsequent requests will use the failed transaction ID as part of
+ # the ``BeginTransactionRequest`` when restarting this transaction
+ # (via ``options.retry_transaction``). This preserves the "spot in
+ # line" of the transaction, so exponential backoff is not required
+ # in this case.
+
+ transaction._rollback()
+ msg = _EXCEED_ATTEMPTS_TEMPLATE.format(transaction._max_attempts)
+ raise ValueError(msg)
+
+
+def transactional(to_wrap):
+ """Decorate a callable so that it runs in a transaction.
+
+ Args:
+ to_wrap
+ (Callable[[:class:`~google.cloud.firestore_v1.transaction.Transaction`, ...], Any]):
+ A callable that should be run (and retried) in a transaction.
+
+ Returns:
+ Callable[[:class:`~google.cloud.firestore_v1.transaction.Transaction`, ...], Any]:
+ the wrapped callable.
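+
+    Example (a usage sketch; assumes a ``cities`` collection whose
+    documents have a numeric ``population`` field):
+
+    .. code-block:: python
+
+        from google.cloud import firestore_v1
+
+        db = firestore_v1.Client()
+
+        @firestore_v1.transactional
+        def increment_population(transaction, city_ref):
+            snapshot = city_ref.get(transaction=transaction)
+            transaction.update(
+                city_ref, {u'population': snapshot.get(u'population') + 1}
+            )
+
+        # Commits are retried on ABORTED, up to ``max_attempts`` times.
+        increment_population(db.transaction(), db.collection(u'cities').document(u'SF'))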
+ """
+ return _Transactional(to_wrap)
+
+
+def _commit_with_retry(client, write_pbs, transaction_id):
+ """Call ``Commit`` on the GAPIC client with retry / sleep.
+
+ Retries the ``Commit`` RPC on Unavailable. Usually this RPC-level
+    retry is handled by the underlying GAPIC client, but in this case it
+ doesn't because ``Commit`` is not always idempotent. But here we know it
+ is "idempotent"-like because it has a transaction ID. We also need to do
+ our own retry to special-case the ``INVALID_ARGUMENT`` error.
+
+ Args:
+ client (:class:`~google.cloud.firestore_v1.client.Client`):
+ A client with GAPIC client and configuration details.
+ write_pbs (List[:class:`google.cloud.proto.firestore.v1.write_pb2.Write`, ...]):
+            The ``Write`` protobufs to be committed.
+ transaction_id (bytes):
+ ID of an existing transaction that this commit will run in.
+
+ Returns:
+ :class:`google.cloud.firestore_v1.types.CommitResponse`:
+ The protobuf response from ``Commit``.
+
+ Raises:
+ ~google.api_core.exceptions.GoogleAPICallError: If a non-retryable
+ exception is encountered.
+ """
+ current_sleep = _INITIAL_SLEEP
+ while True:
+ try:
+ return client._firestore_api.commit(
+ client._database_string,
+ write_pbs,
+ transaction=transaction_id,
+ metadata=client._rpc_metadata,
+ )
+ except exceptions.ServiceUnavailable:
+ # Retry
+ pass
+
+ current_sleep = _sleep(current_sleep)
+
+
+def _sleep(current_sleep, max_sleep=_MAX_SLEEP, multiplier=_MULTIPLIER):
+ """Sleep and produce a new sleep time.
+
+ .. _Exponential Backoff And Jitter: https://www.awsarchitectureblog.com/\
+ 2015/03/backoff.html
+
+ Select a duration between zero and ``current_sleep``. It might seem
+ counterintuitive to have so much jitter, but
+ `Exponential Backoff And Jitter`_ argues that "full jitter" is
+ the best strategy.
+
+ Args:
+ current_sleep (float): The current "max" for sleep interval.
+ max_sleep (Optional[float]): Eventual "max" sleep time
+ multiplier (Optional[float]): Multiplier for exponential backoff.
+
+ Returns:
+ float: Newly doubled ``current_sleep`` or ``max_sleep`` (whichever
+ is smaller)
+ """
+ actual_sleep = random.uniform(0.0, current_sleep)
+ time.sleep(actual_sleep)
+ return min(multiplier * current_sleep, max_sleep)
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/transforms.py b/venv/Lib/site-packages/google/cloud/firestore_v1/transforms.py
new file mode 100644
index 000000000..83b644608
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1/transforms.py
@@ -0,0 +1,151 @@
+# Copyright 2017 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpful constants to use for Google Cloud Firestore."""
+
+
+class Sentinel(object):
+ """Sentinel objects used to signal special handling."""
+
+ __slots__ = ("description",)
+
+ def __init__(self, description):
+ self.description = description
+
+ def __repr__(self):
+ return "Sentinel: {}".format(self.description)
+
+
+DELETE_FIELD = Sentinel("Value used to delete a field in a document.")
+
+
+SERVER_TIMESTAMP = Sentinel(
+ "Value used to set a document field to the server timestamp."
+)
+
+
+class _ValueList(object):
+ """Read-only list of values.
+
+ Args:
+ values (List | Tuple): values held in the helper.
+ """
+
+ slots = ("_values",)
+
+ def __init__(self, values):
+ if not isinstance(values, (list, tuple)):
+ raise ValueError("'values' must be a list or tuple.")
+
+ if len(values) == 0:
+ raise ValueError("'values' must be non-empty.")
+
+ self._values = list(values)
+
+ def __eq__(self, other):
+ if not isinstance(other, self.__class__):
+ return NotImplemented
+ return self._values == other._values
+
+ @property
+ def values(self):
+ """Values to append.
+
+ Returns (List):
+ values to be appended by the transform.
+ """
+ return self._values
+
+
+class ArrayUnion(_ValueList):
+ """Field transform: appends missing values to an array field.
+
+ See:
+ https://cloud.google.com/firestore/docs/reference/rpc/google.firestore.v1#google.firestore.v1.DocumentTransform.FieldTransform.FIELDS.google.firestore.v1.ArrayValue.google.firestore.v1.DocumentTransform.FieldTransform.append_missing_elements
+
+ Args:
+ values (List | Tuple): values to append.
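+
+    Example (a usage sketch; assumes ``doc_ref`` is an existing
+    :class:`~google.cloud.firestore_v1.document.DocumentReference` whose
+    document has an array ``tags`` field):
+
+    .. code-block:: python
+
+        # Appends u'new-tag' only if it is not already present.
+        doc_ref.update({u'tags': ArrayUnion([u'new-tag'])})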
+ """
+
+
+class ArrayRemove(_ValueList):
+ """Field transform: remove values from an array field.
+
+ See:
+ https://cloud.google.com/firestore/docs/reference/rpc/google.firestore.v1#google.firestore.v1.DocumentTransform.FieldTransform.FIELDS.google.firestore.v1.ArrayValue.google.firestore.v1.DocumentTransform.FieldTransform.remove_all_from_array
+
+ Args:
+ values (List | Tuple): values to remove.
+ """
+
+
+class _NumericValue(object):
+ """Hold a single integer / float value.
+
+ Args:
+ value (int | float): value held in the helper.
+ """
+
+ def __init__(self, value):
+ if not isinstance(value, (int, float)):
+ raise ValueError("Pass an integer / float value.")
+
+ self._value = value
+
+ @property
+ def value(self):
+ """Value used by the transform.
+
+ Returns:
+ (Integer | Float) value passed in the constructor.
+ """
+ return self._value
+
+ def __eq__(self, other):
+ if not isinstance(other, self.__class__):
+ return NotImplemented
+ return self._value == other._value
+
+
+class Increment(_NumericValue):
+ """Field transform: increment a numeric field with specified value.
+
+ See:
+ https://cloud.google.com/firestore/docs/reference/rpc/google.firestore.v1#google.firestore.v1.DocumentTransform.FieldTransform.FIELDS.google.firestore.v1.ArrayValue.google.firestore.v1.DocumentTransform.FieldTransform.increment
+
+ Args:
+ value (int | float): value used to increment the field.
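+
+    Example (a usage sketch; assumes ``doc_ref`` is an existing
+    :class:`~google.cloud.firestore_v1.document.DocumentReference` whose
+    document has a numeric ``likes`` field):
+
+    .. code-block:: python
+
+        # Atomically adds 1 to the stored value on the server.
+        doc_ref.update({u'likes': Increment(1)})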
+ """
+
+
+class Maximum(_NumericValue):
+ """Field transform: bound numeric field with specified value.
+
+ See:
+ https://cloud.google.com/firestore/docs/reference/rpc/google.firestore.v1#google.firestore.v1.DocumentTransform.FieldTransform.FIELDS.google.firestore.v1.ArrayValue.google.firestore.v1.DocumentTransform.FieldTransform.maximum
+
+ Args:
+ value (int | float): value used to bound the field.
+ """
+
+
+class Minimum(_NumericValue):
+ """Field transform: bound numeric field with specified value.
+
+ See:
+ https://cloud.google.com/firestore/docs/reference/rpc/google.firestore.v1#google.firestore.v1.DocumentTransform.FieldTransform.FIELDS.google.firestore.v1.ArrayValue.google.firestore.v1.DocumentTransform.FieldTransform.minimum
+
+ Args:
+ value (int | float): value used to bound the field.
+ """
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/types.py b/venv/Lib/site-packages/google/cloud/firestore_v1/types.py
new file mode 100644
index 000000000..c4e7c3507
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1/types.py
@@ -0,0 +1,63 @@
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import absolute_import
+import sys
+
+from google.api import http_pb2
+from google.protobuf import any_pb2
+from google.protobuf import descriptor_pb2
+from google.protobuf import empty_pb2
+from google.protobuf import struct_pb2
+from google.protobuf import timestamp_pb2
+from google.protobuf import wrappers_pb2
+from google.rpc import status_pb2
+from google.type import latlng_pb2
+
+from google.api_core.protobuf_helpers import get_messages
+from google.cloud.firestore_v1.proto import common_pb2
+from google.cloud.firestore_v1.proto import document_pb2
+from google.cloud.firestore_v1.proto import firestore_pb2
+from google.cloud.firestore_v1.proto import query_pb2
+from google.cloud.firestore_v1.proto import write_pb2
+
+
+_shared_modules = [
+ http_pb2,
+ any_pb2,
+ descriptor_pb2,
+ empty_pb2,
+ struct_pb2,
+ timestamp_pb2,
+ wrappers_pb2,
+ status_pb2,
+ latlng_pb2,
+]
+
+_local_modules = [common_pb2, document_pb2, firestore_pb2, query_pb2, write_pb2]
+
+names = []
+
+for module in _shared_modules:
+ for name, message in get_messages(module).items():
+ setattr(sys.modules[__name__], name, message)
+ names.append(name)
+
+for module in _local_modules:
+ for name, message in get_messages(module).items():
+ message.__module__ = "google.cloud.firestore_v1.types"
+ setattr(sys.modules[__name__], name, message)
+ names.append(name)
+
+__all__ = tuple(sorted(names))
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1/watch.py b/venv/Lib/site-packages/google/cloud/firestore_v1/watch.py
new file mode 100644
index 000000000..103732223
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1/watch.py
@@ -0,0 +1,743 @@
+# Copyright 2017 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import collections
+import threading
+import datetime
+from enum import Enum
+import functools
+
+import pytz
+
+from google.api_core.bidi import ResumableBidiRpc
+from google.api_core.bidi import BackgroundConsumer
+from google.cloud.firestore_v1.proto import firestore_pb2
+from google.cloud.firestore_v1 import _helpers
+
+from google.api_core import exceptions
+
+import grpc
+
+"""Python client for Google Cloud Firestore Watch."""
+
+_LOGGER = logging.getLogger(__name__)
+
+WATCH_TARGET_ID = 0x5079 # "Py"
+
+GRPC_STATUS_CODE = {
+ "OK": 0,
+ "CANCELLED": 1,
+ "UNKNOWN": 2,
+ "INVALID_ARGUMENT": 3,
+ "DEADLINE_EXCEEDED": 4,
+ "NOT_FOUND": 5,
+ "ALREADY_EXISTS": 6,
+ "PERMISSION_DENIED": 7,
+ "UNAUTHENTICATED": 16,
+ "RESOURCE_EXHAUSTED": 8,
+ "FAILED_PRECONDITION": 9,
+ "ABORTED": 10,
+ "OUT_OF_RANGE": 11,
+ "UNIMPLEMENTED": 12,
+ "INTERNAL": 13,
+ "UNAVAILABLE": 14,
+ "DATA_LOSS": 15,
+ "DO_NOT_USE": -1,
+}
+_RPC_ERROR_THREAD_NAME = "Thread-OnRpcTerminated"
+_RECOVERABLE_STREAM_EXCEPTIONS = (
+ exceptions.Aborted,
+ exceptions.Cancelled,
+ exceptions.Unknown,
+ exceptions.DeadlineExceeded,
+ exceptions.ResourceExhausted,
+ exceptions.InternalServerError,
+ exceptions.ServiceUnavailable,
+ exceptions.Unauthenticated,
+)
+_TERMINATING_STREAM_EXCEPTIONS = (exceptions.Cancelled,)
+
+DocTreeEntry = collections.namedtuple("DocTreeEntry", ["value", "index"])
+
+
+class WatchDocTree(object):
+    # TODO: Currently this uses a dict. Other implementations use an rbtree.
+    # The performance of this implementation should be investigated and may
+    # require modifying the underlying data structure to an rbtree.
+ def __init__(self):
+ self._dict = {}
+ self._index = 0
+
+ def keys(self):
+ return list(self._dict.keys())
+
+ def _copy(self):
+ wdt = WatchDocTree()
+ wdt._dict = self._dict.copy()
+ wdt._index = self._index
+        return wdt
+
+ def insert(self, key, value):
+ self = self._copy()
+ self._dict[key] = DocTreeEntry(value, self._index)
+ self._index += 1
+ return self
+
+ def find(self, key):
+ return self._dict[key]
+
+ def remove(self, key):
+ self = self._copy()
+ del self._dict[key]
+ return self
+
+ def __iter__(self):
+ for k in self._dict:
+ yield k
+
+ def __len__(self):
+ return len(self._dict)
+
+ def __contains__(self, k):
+ return k in self._dict
+
+
+class ChangeType(Enum):
+ ADDED = 1
+ REMOVED = 2
+ MODIFIED = 3
+
+
+class DocumentChange(object):
+ def __init__(self, type, document, old_index, new_index):
+ """DocumentChange
+
+ Args:
+ type (ChangeType):
+ document (document.DocumentSnapshot):
+ old_index (int):
+ new_index (int):
+ """
+ # TODO: spec indicated an isEqual param also
+ self.type = type
+ self.document = document
+ self.old_index = old_index
+ self.new_index = new_index
+
+
+class WatchResult(object):
+ def __init__(self, snapshot, name, change_type):
+ self.snapshot = snapshot
+ self.name = name
+ self.change_type = change_type
+
+
+def _maybe_wrap_exception(exception):
+ """Wraps a gRPC exception class, if needed."""
+ if isinstance(exception, grpc.RpcError):
+ return exceptions.from_grpc_error(exception)
+ return exception
+
+
+def document_watch_comparator(doc1, doc2):
+ assert doc1 == doc2, "Document watches only support one document."
+ return 0
+
+
+def _should_recover(exception):
+ wrapped = _maybe_wrap_exception(exception)
+ return isinstance(wrapped, _RECOVERABLE_STREAM_EXCEPTIONS)
+
+
+def _should_terminate(exception):
+ wrapped = _maybe_wrap_exception(exception)
+ return isinstance(wrapped, _TERMINATING_STREAM_EXCEPTIONS)
+
+
+class Watch(object):
+
+ BackgroundConsumer = BackgroundConsumer # FBO unit tests
+ ResumableBidiRpc = ResumableBidiRpc # FBO unit tests
+
+ def __init__(
+ self,
+ document_reference,
+ firestore,
+ target,
+ comparator,
+ snapshot_callback,
+ document_snapshot_cls,
+ document_reference_cls,
+ BackgroundConsumer=None, # FBO unit testing
+ ResumableBidiRpc=None, # FBO unit testing
+ ):
+ """
+ Args:
+            firestore: the client that created this watch
+            target: the ``Target`` mapping used for the listen request
+            comparator: a comparator used to order document snapshots
+ snapshot_callback: Callback method to process snapshots.
+ Args:
+                    docs (List(DocumentSnapshot)): The ordered list of
+                        documents stored in this snapshot.
+                    changes (List(str)): The list of changed documents
+                        since the last snapshot delivered for this watch.
+                    read_time (string): The ISO 8601 time at which this
+                        snapshot was obtained.
+
+            document_snapshot_cls: the DocumentSnapshot class
+            document_reference_cls: the DocumentReference class
+ """
+ self._document_reference = document_reference
+ self._firestore = firestore
+ self._api = firestore._firestore_api
+ self._targets = target
+ self._comparator = comparator
+ self.DocumentSnapshot = document_snapshot_cls
+ self.DocumentReference = document_reference_cls
+ self._snapshot_callback = snapshot_callback
+ self._closing = threading.Lock()
+ self._closed = False
+
+ self.resume_token = None
+
+ rpc_request = self._get_rpc_request
+
+ if ResumableBidiRpc is None:
+ ResumableBidiRpc = self.ResumableBidiRpc # FBO unit tests
+
+ self._rpc = ResumableBidiRpc(
+ self._api.transport.listen,
+ should_recover=_should_recover,
+ should_terminate=_should_terminate,
+ initial_request=rpc_request,
+ metadata=self._firestore._rpc_metadata,
+ )
+
+ self._rpc.add_done_callback(self._on_rpc_done)
+
+ # Initialize state for on_snapshot
+ # The sorted tree of QueryDocumentSnapshots as sent in the last
+ # snapshot. We only look at the keys.
+ self.doc_tree = WatchDocTree()
+
+ # A map of document names to QueryDocumentSnapshots for the last sent
+ # snapshot.
+ self.doc_map = {}
+
+        # The accumulated map of document changes (keyed by document name) for
+ # the current snapshot.
+ self.change_map = {}
+
+ # The current state of the query results.
+ self.current = False
+
+ # We need this to track whether we've pushed an initial set of changes,
+        # since we should push an initial snapshot even when it contains no
+        # documents.
+ self.has_pushed = False
+
+ # The server assigns and updates the resume token.
+ if BackgroundConsumer is None: # FBO unit tests
+ BackgroundConsumer = self.BackgroundConsumer
+
+ self._consumer = BackgroundConsumer(self._rpc, self.on_snapshot)
+ self._consumer.start()
+
+ def _get_rpc_request(self):
+ if self.resume_token is not None:
+ self._targets["resume_token"] = self.resume_token
+ return firestore_pb2.ListenRequest(
+ database=self._firestore._database_string, add_target=self._targets
+ )
+
+ @property
+ def is_active(self):
+ """bool: True if this manager is actively streaming.
+
+        Note that ``False`` does not indicate a complete shutdown,
+ just that it stopped getting new messages.
+ """
+ return self._consumer is not None and self._consumer.is_active
+
+ def close(self, reason=None):
+ """Stop consuming messages and shutdown all helper threads.
+
+ This method is idempotent. Additional calls will have no effect.
+
+ Args:
+ reason (Any): The reason to close this. If None, this is considered
+ an "intentional" shutdown.
+ """
+ with self._closing:
+ if self._closed:
+ return
+
+ # Stop consuming messages.
+ if self.is_active:
+ _LOGGER.debug("Stopping consumer.")
+ self._consumer.stop()
+ self._consumer = None
+
+ self._rpc.close()
+ self._rpc = None
+ self._closed = True
+ _LOGGER.debug("Finished stopping manager.")
+
+ if reason:
+ # Raise an exception if a reason is provided
+ _LOGGER.debug("reason for closing: %s" % reason)
+ if isinstance(reason, Exception):
+ raise reason
+ raise RuntimeError(reason)
+
+ def _on_rpc_done(self, future):
+ """Triggered whenever the underlying RPC terminates without recovery.
+
+ This is typically triggered from one of two threads: the background
+ consumer thread (when calling ``recv()`` produces a non-recoverable
+ error) or the grpc management thread (when cancelling the RPC).
+
+ This method is *non-blocking*. It will start another thread to deal
+        with shutting everything down. This avoids blocking in the
+        background consumer, which would prevent it from being ``join()``-ed.
+ """
+ _LOGGER.info("RPC termination has signaled manager shutdown.")
+ future = _maybe_wrap_exception(future)
+ thread = threading.Thread(
+ name=_RPC_ERROR_THREAD_NAME, target=self.close, kwargs={"reason": future}
+ )
+ thread.daemon = True
+ thread.start()
+
+ def unsubscribe(self):
+ self.close()
+
+ @classmethod
+ def for_document(
+ cls,
+ document_ref,
+ snapshot_callback,
+ snapshot_class_instance,
+ reference_class_instance,
+ ):
+ """
+        Creates a watch snapshot listener for a document. ``snapshot_callback``
+        receives a DocumentChange object; it may also begin to receive
+        targetChange events in the future.
+
+ Args:
+ document_ref: Reference to Document
+ snapshot_callback: callback to be called on snapshot
+            snapshot_class_instance: the DocumentSnapshot class used to make
+                the snapshots passed to snapshot_callback
+            reference_class_instance: the DocumentReference class used to make
+                document references
+
+ """
+ return cls(
+ document_ref,
+ document_ref._client,
+ {
+ "documents": {"documents": [document_ref._document_path]},
+ "target_id": WATCH_TARGET_ID,
+ },
+ document_watch_comparator,
+ snapshot_callback,
+ snapshot_class_instance,
+ reference_class_instance,
+ )
+
+ @classmethod
+ def for_query(
+ cls, query, snapshot_callback, snapshot_class_instance, reference_class_instance
+ ):
+ parent_path, _ = query._parent._parent_info()
+ query_target = firestore_pb2.Target.QueryTarget(
+ parent=parent_path, structured_query=query._to_protobuf()
+ )
+
+ return cls(
+ query,
+ query._client,
+ {"query": query_target, "target_id": WATCH_TARGET_ID},
+ query._comparator,
+ snapshot_callback,
+ snapshot_class_instance,
+ reference_class_instance,
+ )
+
+ def _on_snapshot_target_change_no_change(self, proto):
+ _LOGGER.debug("on_snapshot: target change: NO_CHANGE")
+ change = proto.target_change
+
+ no_target_ids = change.target_ids is None or len(change.target_ids) == 0
+ if no_target_ids and change.read_time and self.current:
+ # TargetChange.CURRENT followed by TargetChange.NO_CHANGE
+ # signals a consistent state. Invoke the onSnapshot
+ # callback as specified by the user.
+ self.push(change.read_time, change.resume_token)
+
+ def _on_snapshot_target_change_add(self, proto):
+ _LOGGER.debug("on_snapshot: target change: ADD")
+ target_id = proto.target_change.target_ids[0]
+ if target_id != WATCH_TARGET_ID:
+ raise RuntimeError("Unexpected target ID %s sent by server" % target_id)
+
+ def _on_snapshot_target_change_remove(self, proto):
+ _LOGGER.debug("on_snapshot: target change: REMOVE")
+ change = proto.target_change
+
+ code = 13
+ message = "internal error"
+ if change.cause:
+ code = change.cause.code
+ message = change.cause.message
+
+ message = "Error %s: %s" % (code, message)
+
+ raise RuntimeError(message)
+
+ def _on_snapshot_target_change_reset(self, proto):
+ # Whatever changes have happened so far no longer matter.
+ _LOGGER.debug("on_snapshot: target change: RESET")
+ self._reset_docs()
+
+ def _on_snapshot_target_change_current(self, proto):
+ _LOGGER.debug("on_snapshot: target change: CURRENT")
+ self.current = True
+
+ def on_snapshot(self, proto):
+ """
+        Called every time there is a response from listen. Collects changes
+        and 'pushes' them in a batch to the caller when we receive 'current'
+        from the listen response.
+
+        Args:
+            proto (`google.cloud.firestore_v1.types.ListenResponse`):
+                The response message received from the ``Listen`` stream.
+ """
+ TargetChange = firestore_pb2.TargetChange
+
+ target_changetype_dispatch = {
+ TargetChange.NO_CHANGE: self._on_snapshot_target_change_no_change,
+ TargetChange.ADD: self._on_snapshot_target_change_add,
+ TargetChange.REMOVE: self._on_snapshot_target_change_remove,
+ TargetChange.RESET: self._on_snapshot_target_change_reset,
+ TargetChange.CURRENT: self._on_snapshot_target_change_current,
+ }
+
+ target_change = getattr(proto, "target_change", "")
+ document_change = getattr(proto, "document_change", "")
+ document_delete = getattr(proto, "document_delete", "")
+ document_remove = getattr(proto, "document_remove", "")
+ filter_ = getattr(proto, "filter", "")
+
+ if str(target_change):
+ target_change_type = target_change.target_change_type
+ _LOGGER.debug("on_snapshot: target change: " + str(target_change_type))
+ meth = target_changetype_dispatch.get(target_change_type)
+ if meth is None:
+ _LOGGER.info(
+ "on_snapshot: Unknown target change " + str(target_change_type)
+ )
+ self.close(
+ reason="Unknown target change type: %s " % str(target_change_type)
+ )
+ else:
+ try:
+ meth(proto)
+ except Exception as exc2:
+ _LOGGER.debug("meth(proto) exc: " + str(exc2))
+ raise
+
+ # NOTE:
+ # in other implementations, such as node, the backoff is reset here
+ # in this version bidi rpc is just used and will control this.
+
+ elif str(document_change):
+ _LOGGER.debug("on_snapshot: document change")
+
+ # No other target_ids can show up here, but we still need to see
+ # if the targetId was in the added list or removed list.
+ target_ids = document_change.target_ids or []
+ removed_target_ids = document_change.removed_target_ids or []
+ changed = False
+ removed = False
+
+ if WATCH_TARGET_ID in target_ids:
+ changed = True
+
+ if WATCH_TARGET_ID in removed_target_ids:
+ removed = True
+
+ if changed:
+ _LOGGER.debug("on_snapshot: document change: CHANGED")
+
+ # google.cloud.firestore_v1.types.Document
+ document = document_change.document
+
+ data = _helpers.decode_dict(document.fields, self._firestore)
+
+ # Create a snapshot. As Document and Query objects can be
+ # passed we need to get a Document Reference in a more manual
+ # fashion than self._document_reference
+ document_name = document.name
+ db_str = self._firestore._database_string
+ db_str_documents = db_str + "/documents/"
+ if document_name.startswith(db_str_documents):
+ document_name = document_name[len(db_str_documents) :]
+
+ document_ref = self._firestore.document(document_name)
+
+ snapshot = self.DocumentSnapshot(
+ reference=document_ref,
+ data=data,
+ exists=True,
+ read_time=None,
+ create_time=document.create_time,
+ update_time=document.update_time,
+ )
+ self.change_map[document.name] = snapshot
+
+ elif removed:
+ _LOGGER.debug("on_snapshot: document change: REMOVED")
+ document = document_change.document
+ self.change_map[document.name] = ChangeType.REMOVED
+
+ # NB: document_delete and document_remove (as far as we, the client,
+ # are concerned) are functionally equivalent
+
+ elif str(document_delete):
+ _LOGGER.debug("on_snapshot: document change: DELETE")
+ name = document_delete.document
+ self.change_map[name] = ChangeType.REMOVED
+
+ elif str(document_remove):
+ _LOGGER.debug("on_snapshot: document change: REMOVE")
+ name = document_remove.document
+ self.change_map[name] = ChangeType.REMOVED
+
+ elif filter_:
+ _LOGGER.debug("on_snapshot: filter update")
+ if filter_.count != self._current_size():
+ # We need to remove all the current results.
+ self._reset_docs()
+ # The filter didn't match, so re-issue the query.
+ # TODO: reset stream method?
+ # self._reset_stream();
+
+ elif proto is None:
+ self.close()
+ else:
+ _LOGGER.debug("UNKNOWN TYPE. UHOH")
+ self.close(reason=ValueError("Unknown listen response type: %s" % proto))
+
+ def push(self, read_time, next_resume_token):
+ """
+ Assembles a new snapshot from the current set of changes and invokes
+ the user's callback. Clears the current changes on completion.
+ """
+ deletes, adds, updates = Watch._extract_changes(
+ self.doc_map, self.change_map, read_time
+ )
+
+ updated_tree, updated_map, appliedChanges = self._compute_snapshot(
+ self.doc_tree, self.doc_map, deletes, adds, updates
+ )
+
+ if not self.has_pushed or len(appliedChanges):
+ # TODO: It is possible in the future we will have the tree order
+ # on insert. For now, we sort here.
+ key = functools.cmp_to_key(self._comparator)
+ keys = sorted(updated_tree.keys(), key=key)
+
+ self._snapshot_callback(
+ keys,
+ appliedChanges,
+ datetime.datetime.fromtimestamp(read_time.seconds, pytz.utc),
+ )
+ self.has_pushed = True
+
+ self.doc_tree = updated_tree
+ self.doc_map = updated_map
+ self.change_map.clear()
+ self.resume_token = next_resume_token
+
+ @staticmethod
+ def _extract_changes(doc_map, changes, read_time):
+ deletes = []
+ adds = []
+ updates = []
+
+ for name, value in changes.items():
+ if value == ChangeType.REMOVED:
+ if name in doc_map:
+ deletes.append(name)
+ elif name in doc_map:
+ if read_time is not None:
+ value.read_time = read_time
+ updates.append(value)
+ else:
+ if read_time is not None:
+ value.read_time = read_time
+ adds.append(value)
+
+ return (deletes, adds, updates)
+
+ def _compute_snapshot(
+ self, doc_tree, doc_map, delete_changes, add_changes, update_changes
+ ):
+ updated_tree = doc_tree
+ updated_map = doc_map
+
+ assert len(doc_tree) == len(doc_map), (
+ "The document tree and document map should have the same "
+ + "number of entries."
+ )
+
+ def delete_doc(name, updated_tree, updated_map):
+ """
+ Applies a document delete to the document tree and document map.
+ Returns the corresponding DocumentChange event.
+ """
+ assert name in updated_map, "Document to delete does not exist"
+ old_document = updated_map.get(name)
+            # TODO: If a document doesn't exist this raises KeyError. Handle?
+ existing = updated_tree.find(old_document)
+ old_index = existing.index
+ updated_tree = updated_tree.remove(old_document)
+ del updated_map[name]
+ return (
+ DocumentChange(ChangeType.REMOVED, old_document, old_index, -1),
+ updated_tree,
+ updated_map,
+ )
+
+ def add_doc(new_document, updated_tree, updated_map):
+ """
+ Applies a document add to the document tree and the document map.
+ Returns the corresponding DocumentChange event.
+ """
+ name = new_document.reference._document_path
+ assert name not in updated_map, "Document to add already exists"
+ updated_tree = updated_tree.insert(new_document, None)
+ new_index = updated_tree.find(new_document).index
+ updated_map[name] = new_document
+ return (
+ DocumentChange(ChangeType.ADDED, new_document, -1, new_index),
+ updated_tree,
+ updated_map,
+ )
+
+ def modify_doc(new_document, updated_tree, updated_map):
+ """
+ Applies a document modification to the document tree and the
+ document map.
+ Returns the DocumentChange event for successful modifications.
+ """
+ name = new_document.reference._document_path
+ assert name in updated_map, "Document to modify does not exist"
+ old_document = updated_map.get(name)
+ if old_document.update_time != new_document.update_time:
+ remove_change, updated_tree, updated_map = delete_doc(
+ name, updated_tree, updated_map
+ )
+ add_change, updated_tree, updated_map = add_doc(
+ new_document, updated_tree, updated_map
+ )
+ return (
+ DocumentChange(
+ ChangeType.MODIFIED,
+ new_document,
+ remove_change.old_index,
+ add_change.new_index,
+ ),
+ updated_tree,
+ updated_map,
+ )
+
+ return None, updated_tree, updated_map
+
+ # Process the sorted changes in the order that is expected by our
+ # clients (removals, additions, and then modifications). We also need
+ # to sort the individual changes to assure that old_index/new_index
+ # keep incrementing.
+ appliedChanges = []
+
+ key = functools.cmp_to_key(self._comparator)
+
+ # Deletes are sorted based on the order of the existing document.
+ delete_changes = sorted(delete_changes)
+ for name in delete_changes:
+ change, updated_tree, updated_map = delete_doc(
+ name, updated_tree, updated_map
+ )
+ appliedChanges.append(change)
+
+ add_changes = sorted(add_changes, key=key)
+ _LOGGER.debug("walk over add_changes")
+ for snapshot in add_changes:
+ _LOGGER.debug("in add_changes")
+ change, updated_tree, updated_map = add_doc(
+ snapshot, updated_tree, updated_map
+ )
+ appliedChanges.append(change)
+
+ update_changes = sorted(update_changes, key=key)
+ for snapshot in update_changes:
+ change, updated_tree, updated_map = modify_doc(
+ snapshot, updated_tree, updated_map
+ )
+ if change is not None:
+ appliedChanges.append(change)
+
+ assert len(updated_tree) == len(updated_map), (
+            "The updated document tree and document map should have the "
+            + "same number of entries."
+ )
+ return (updated_tree, updated_map, appliedChanges)
+
+ def _affects_target(self, target_ids, current_id):
+ if target_ids is None:
+ return True
+
+ return current_id in target_ids
+
+ def _current_size(self):
+ """
+        Returns the current count of all documents, including the changes
+        from the current ``change_map``.
+ """
+ deletes, adds, _ = Watch._extract_changes(self.doc_map, self.change_map, None)
+ return len(self.doc_map) + len(adds) - len(deletes)
+
+ def _reset_docs(self):
+ """
+ Helper to clear the docs on RESET or filter mismatch.
+ """
+ _LOGGER.debug("resetting documents")
+ self.change_map.clear()
+ self.resume_token = None
+
+ # Mark each document as deleted. If documents are not deleted
+ # they will be sent again by the server.
+ for snapshot in self.doc_tree.keys():
+ name = snapshot.reference._document_path
+ self.change_map[name] = ChangeType.REMOVED
+
+ self.current = False
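+
+# Editor note: a hedged usage sketch. These internals are normally
+# reached through the public listener entry points (method names assumed
+# from the released library; they are not part of this diff):
+#
+#     watch = query.on_snapshot(on_snapshot)   # or a document reference
+#     ...
+#     watch.unsubscribe()                      # stop listening when done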
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__init__.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__init__.py
new file mode 100644
index 000000000..a1d80278f
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__init__.py
@@ -0,0 +1,73 @@
+# Copyright 2017 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Python idiomatic client for Google Cloud Firestore."""
+
+from pkg_resources import get_distribution
+import warnings
+
+__version__ = get_distribution("google-cloud-firestore").version
+
+from google.cloud.firestore_v1beta1 import types
+from google.cloud.firestore_v1beta1._helpers import GeoPoint
+from google.cloud.firestore_v1beta1._helpers import ExistsOption
+from google.cloud.firestore_v1beta1._helpers import LastUpdateOption
+from google.cloud.firestore_v1beta1._helpers import ReadAfterWriteError
+from google.cloud.firestore_v1beta1._helpers import WriteOption
+from google.cloud.firestore_v1beta1.batch import WriteBatch
+from google.cloud.firestore_v1beta1.client import Client
+from google.cloud.firestore_v1beta1.collection import CollectionReference
+from google.cloud.firestore_v1beta1.transforms import ArrayRemove
+from google.cloud.firestore_v1beta1.transforms import ArrayUnion
+from google.cloud.firestore_v1beta1.transforms import DELETE_FIELD
+from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP
+from google.cloud.firestore_v1beta1.document import DocumentReference
+from google.cloud.firestore_v1beta1.document import DocumentSnapshot
+from google.cloud.firestore_v1beta1.gapic import enums
+from google.cloud.firestore_v1beta1.query import Query
+from google.cloud.firestore_v1beta1.transaction import Transaction
+from google.cloud.firestore_v1beta1.transaction import transactional
+from google.cloud.firestore_v1beta1.watch import Watch
+
+
+_V1BETA1_DEPRECATED_MESSAGE = (
+ "The 'v1beta1' API endpoint is deprecated. "
+ "The client/library which supports it will be removed in a future release."
+)
+warnings.warn(_V1BETA1_DEPRECATED_MESSAGE, DeprecationWarning)
+
+
+__all__ = [
+ "__version__",
+ "ArrayRemove",
+ "ArrayUnion",
+ "Client",
+ "CollectionReference",
+ "DELETE_FIELD",
+ "DocumentReference",
+ "DocumentSnapshot",
+ "enums",
+ "ExistsOption",
+ "GeoPoint",
+ "LastUpdateOption",
+ "Query",
+ "ReadAfterWriteError",
+ "SERVER_TIMESTAMP",
+ "Transaction",
+ "transactional",
+ "types",
+ "Watch",
+ "WriteBatch",
+ "WriteOption",
+]
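+
+# Editor note (hedged): importing this package emits the
+# DeprecationWarning declared above, once per interpreter session.
+# Callers who want it surfaced explicitly can do, for example:
+#
+#     import warnings
+#     warnings.simplefilter("default", DeprecationWarning)
+#     from google.cloud import firestore_v1beta1
+#     client = firestore_v1beta1.Client()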
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..8da2c9dd9
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/_helpers.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/_helpers.cpython-36.pyc
new file mode 100644
index 000000000..1ad1be306
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/_helpers.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/batch.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/batch.cpython-36.pyc
new file mode 100644
index 000000000..e0322f473
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/batch.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/client.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/client.cpython-36.pyc
new file mode 100644
index 000000000..3c8cd6bd2
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/client.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/collection.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/collection.cpython-36.pyc
new file mode 100644
index 000000000..e0721837d
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/collection.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/document.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/document.cpython-36.pyc
new file mode 100644
index 000000000..7670e5dc0
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/document.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/field_path.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/field_path.cpython-36.pyc
new file mode 100644
index 000000000..c74816282
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/field_path.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/order.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/order.cpython-36.pyc
new file mode 100644
index 000000000..cddda79d9
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/order.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/query.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/query.cpython-36.pyc
new file mode 100644
index 000000000..da331026f
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/query.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/transaction.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/transaction.cpython-36.pyc
new file mode 100644
index 000000000..56c93ef48
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/transaction.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/transforms.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/transforms.cpython-36.pyc
new file mode 100644
index 000000000..ae60d6607
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/transforms.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/types.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/types.cpython-36.pyc
new file mode 100644
index 000000000..fe9c5b686
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/types.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/watch.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/watch.cpython-36.pyc
new file mode 100644
index 000000000..645cfcb29
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/__pycache__/watch.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/_helpers.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/_helpers.py
new file mode 100644
index 000000000..11dcefc98
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/_helpers.py
@@ -0,0 +1,998 @@
+# Copyright 2017 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Common helpers shared across Google Cloud Firestore modules."""
+
+import datetime
+
+from google.protobuf import struct_pb2
+from google.type import latlng_pb2
+import grpc
+import six
+
+from google.cloud import exceptions
+from google.cloud._helpers import _datetime_to_pb_timestamp
+from google.api_core.datetime_helpers import DatetimeWithNanoseconds
+from google.cloud.firestore_v1beta1 import transforms
+from google.cloud.firestore_v1beta1 import types
+from google.cloud.firestore_v1beta1.field_path import FieldPath
+from google.cloud.firestore_v1beta1.field_path import parse_field_path
+from google.cloud.firestore_v1beta1.gapic import enums
+from google.cloud.firestore_v1beta1.proto import common_pb2
+from google.cloud.firestore_v1beta1.proto import document_pb2
+from google.cloud.firestore_v1beta1.proto import write_pb2
+
+
+BAD_PATH_TEMPLATE = "A path element must be a string. Received {}, which is a {}."
+DOCUMENT_PATH_DELIMITER = "/"
+INACTIVE_TXN = "Transaction not in progress, cannot be used in API requests."
+READ_AFTER_WRITE_ERROR = "Attempted read after write in a transaction."
+BAD_REFERENCE_ERROR = (
+ "Reference value {!r} in unexpected format, expected to be of the form "
+ "``projects/{{project}}/databases/{{database}}/"
+ "documents/{{document_path}}``."
+)
+WRONG_APP_REFERENCE = (
+ "Document {!r} does not correspond to the same database " "({!r}) as the client."
+)
+REQUEST_TIME_ENUM = enums.DocumentTransform.FieldTransform.ServerValue.REQUEST_TIME
+_GRPC_ERROR_MAPPING = {
+ grpc.StatusCode.ALREADY_EXISTS: exceptions.Conflict,
+ grpc.StatusCode.NOT_FOUND: exceptions.NotFound,
+}
+
+
+class GeoPoint(object):
+ """Simple container for a geo point value.
+
+ Args:
+ latitude (float): Latitude of a point.
+ longitude (float): Longitude of a point.
+ """
+
+ def __init__(self, latitude, longitude):
+ self.latitude = latitude
+ self.longitude = longitude
+
+ def to_protobuf(self):
+ """Convert the current object to protobuf.
+
+ Returns:
+ google.type.latlng_pb2.LatLng: The current point as a protobuf.
+ """
+ return latlng_pb2.LatLng(latitude=self.latitude, longitude=self.longitude)
+
+ def __eq__(self, other):
+ """Compare two geo points for equality.
+
+ Returns:
+ Union[bool, NotImplemented]: :data:`True` if the points compare
+ equal, else :data:`False`. (Or :data:`NotImplemented` if
+ ``other`` is not a geo point.)
+ """
+ if not isinstance(other, GeoPoint):
+ return NotImplemented
+
+ return self.latitude == other.latitude and self.longitude == other.longitude
+
+ def __ne__(self, other):
+ """Compare two geo points for inequality.
+
+ Returns:
+ Union[bool, NotImplemented]: :data:`False` if the points compare
+ equal, else :data:`True`. (Or :data:`NotImplemented` if
+ ``other`` is not a geo point.)
+ """
+ equality_val = self.__eq__(other)
+ if equality_val is NotImplemented:
+ return NotImplemented
+ else:
+ return not equality_val
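+
+# Editor note: a minimal sketch of the container above:
+#
+#     point = GeoPoint(52.5200, 13.4050)
+#     point.to_protobuf()                   # google.type.latlng_pb2.LatLng
+#     point == GeoPoint(52.5200, 13.4050)   # True
+#     point != GeoPoint(0.0, 0.0)           # True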
+
+
+def verify_path(path, is_collection):
+ """Verifies that a ``path`` has the correct form.
+
+ Checks that all of the elements in ``path`` are strings.
+
+ Args:
+ path (Tuple[str, ...]): The components in a collection or
+ document path.
+ is_collection (bool): Indicates if the ``path`` represents
+ a document or a collection.
+
+ Raises:
+ ValueError: if
+
+ * the ``path`` is empty
+ * ``is_collection=True`` and there are an even number of elements
+ * ``is_collection=False`` and there are an odd number of elements
+ * an element is not a string
+ """
+ num_elements = len(path)
+ if num_elements == 0:
+ raise ValueError("Document or collection path cannot be empty")
+
+ if is_collection:
+ if num_elements % 2 == 0:
+ raise ValueError("A collection must have an odd number of path elements")
+ else:
+ if num_elements % 2 == 1:
+ raise ValueError("A document must have an even number of path elements")
+
+ for element in path:
+ if not isinstance(element, six.string_types):
+ msg = BAD_PATH_TEMPLATE.format(element, type(element))
+ raise ValueError(msg)
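+
+# Editor note (illustrative): per the parity rules above, collection
+# paths have an odd number of elements and document paths an even one:
+#
+#     verify_path(("users",), is_collection=True)            # OK
+#     verify_path(("users", "alice"), is_collection=False)   # OK
+#     verify_path(("users", "alice"), is_collection=True)    # ValueError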
+
+
+def encode_value(value):
+ """Converts a native Python value into a Firestore protobuf ``Value``.
+
+ Args:
+ value (Union[NoneType, bool, int, float, datetime.datetime, \
+ str, bytes, dict, ~google.cloud.Firestore.GeoPoint]): A native
+ Python value to convert to a protobuf field.
+
+ Returns:
+ ~google.cloud.firestore_v1beta1.types.Value: A
+ value encoded as a Firestore protobuf.
+
+ Raises:
+ TypeError: If the ``value`` is not one of the accepted types.
+ """
+ if value is None:
+ return document_pb2.Value(null_value=struct_pb2.NULL_VALUE)
+
+ # Must come before six.integer_types since ``bool`` is an integer subtype.
+ if isinstance(value, bool):
+ return document_pb2.Value(boolean_value=value)
+
+ if isinstance(value, six.integer_types):
+ return document_pb2.Value(integer_value=value)
+
+ if isinstance(value, float):
+ return document_pb2.Value(double_value=value)
+
+ if isinstance(value, DatetimeWithNanoseconds):
+ return document_pb2.Value(timestamp_value=value.timestamp_pb())
+
+ if isinstance(value, datetime.datetime):
+ return document_pb2.Value(timestamp_value=_datetime_to_pb_timestamp(value))
+
+ if isinstance(value, six.text_type):
+ return document_pb2.Value(string_value=value)
+
+ if isinstance(value, six.binary_type):
+ return document_pb2.Value(bytes_value=value)
+
+ # NOTE: We avoid doing an isinstance() check for a Document
+ # here to avoid import cycles.
+ document_path = getattr(value, "_document_path", None)
+ if document_path is not None:
+ return document_pb2.Value(reference_value=document_path)
+
+ if isinstance(value, GeoPoint):
+ return document_pb2.Value(geo_point_value=value.to_protobuf())
+
+ if isinstance(value, list):
+ value_list = [encode_value(element) for element in value]
+ value_pb = document_pb2.ArrayValue(values=value_list)
+ return document_pb2.Value(array_value=value_pb)
+
+ if isinstance(value, dict):
+ value_dict = encode_dict(value)
+ value_pb = document_pb2.MapValue(fields=value_dict)
+ return document_pb2.Value(map_value=value_pb)
+
+ raise TypeError(
+ "Cannot convert to a Firestore Value", value, "Invalid type", type(value)
+ )
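+
+# Editor note: a few representative conversions (sketch; each result is
+# a ``document_pb2.Value`` message):
+#
+#     encode_value(None)       # Value(null_value=NULL_VALUE)
+#     encode_value(True)       # Value(boolean_value=True) -- checked
+#                              # before integers, since bool is an int
+#     encode_value(3.14)       # Value(double_value=3.14)
+#     encode_value([1, "a"])   # Value(array_value=ArrayValue(...))
+#     encode_value({"a": 1})   # Value(map_value=MapValue(fields=...))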
+
+
+def encode_dict(values_dict):
+ """Encode a dictionary into protobuf ``Value``-s.
+
+ Args:
+ values_dict (dict): The dictionary to encode as protobuf fields.
+
+ Returns:
+ Dict[str, ~google.cloud.firestore_v1beta1.types.Value]: A
+ dictionary of string keys and ``Value`` protobufs as dictionary
+ values.
+ """
+ return {key: encode_value(value) for key, value in six.iteritems(values_dict)}
+
+
+def reference_value_to_document(reference_value, client):
+ """Convert a reference value string to a document.
+
+ Args:
+ reference_value (str): A document reference value.
+ client (~.firestore_v1beta1.client.Client): A client that has
+ a document factory.
+
+ Returns:
+ ~.firestore_v1beta1.document.DocumentReference: The document
+ corresponding to ``reference_value``.
+
+ Raises:
+ ValueError: If the ``reference_value`` is not of the expected
+ format: ``projects/{project}/databases/{database}/documents/...``.
+ ValueError: If the ``reference_value`` does not come from the same
+ project / database combination as the ``client``.
+ """
+ # The first 5 parts are
+ # projects, {project}, databases, {database}, documents
+ parts = reference_value.split(DOCUMENT_PATH_DELIMITER, 5)
+ if len(parts) != 6:
+ msg = BAD_REFERENCE_ERROR.format(reference_value)
+ raise ValueError(msg)
+
+ # The sixth part is `a/b/c/d` (i.e. the document path)
+ document = client.document(parts[-1])
+ if document._document_path != reference_value:
+ msg = WRONG_APP_REFERENCE.format(reference_value, client._database_string)
+ raise ValueError(msg)
+
+ return document
+
+
+def decode_value(value, client):
+ """Converts a Firestore protobuf ``Value`` to a native Python value.
+
+ Args:
+ value (google.cloud.firestore_v1beta1.types.Value): A
+ Firestore protobuf to be decoded / parsed / converted.
+ client (~.firestore_v1beta1.client.Client): A client that has
+ a document factory.
+
+ Returns:
+ Union[NoneType, bool, int, float, datetime.datetime, \
+ str, bytes, dict, ~google.cloud.Firestore.GeoPoint]: A native
+ Python value converted from the ``value``.
+
+ Raises:
+ NotImplementedError: If the ``value_type`` is ``reference_value``.
+ ValueError: If the ``value_type`` is unknown.
+ """
+ value_type = value.WhichOneof("value_type")
+
+ if value_type == "null_value":
+ return None
+ elif value_type == "boolean_value":
+ return value.boolean_value
+ elif value_type == "integer_value":
+ return value.integer_value
+ elif value_type == "double_value":
+ return value.double_value
+ elif value_type == "timestamp_value":
+ return DatetimeWithNanoseconds.from_timestamp_pb(value.timestamp_value)
+ elif value_type == "string_value":
+ return value.string_value
+ elif value_type == "bytes_value":
+ return value.bytes_value
+ elif value_type == "reference_value":
+ return reference_value_to_document(value.reference_value, client)
+ elif value_type == "geo_point_value":
+ return GeoPoint(value.geo_point_value.latitude, value.geo_point_value.longitude)
+ elif value_type == "array_value":
+ return [decode_value(element, client) for element in value.array_value.values]
+ elif value_type == "map_value":
+ return decode_dict(value.map_value.fields, client)
+ else:
+ raise ValueError("Unknown ``value_type``", value_type)
+
+
+def decode_dict(value_fields, client):
+ """Converts a protobuf map of Firestore ``Value``-s.
+
+ Args:
+ value_fields (google.protobuf.pyext._message.MessageMapContainer): A
+ protobuf map of Firestore ``Value``-s.
+ client (~.firestore_v1beta1.client.Client): A client that has
+ a document factory.
+
+ Returns:
+ Dict[str, Union[NoneType, bool, int, float, datetime.datetime, \
+ str, bytes, dict, ~google.cloud.Firestore.GeoPoint]]: A dictionary
+ of native Python values converted from the ``value_fields``.
+ """
+ return {
+ key: decode_value(value, client) for key, value in six.iteritems(value_fields)
+ }
+
+
+def get_doc_id(document_pb, expected_prefix):
+ """Parse a document ID from a document protobuf.
+
+ Args:
+ document_pb (google.cloud.proto.firestore.v1beta1.\
+ document_pb2.Document): A protobuf for a document that
+ was created in a ``CreateDocument`` RPC.
+ expected_prefix (str): The expected collection prefix for the
+ fully-qualified document name.
+
+ Returns:
+ str: The document ID from the protobuf.
+
+ Raises:
+ ValueError: If the name does not begin with the prefix.
+ """
+ prefix, document_id = document_pb.name.rsplit(DOCUMENT_PATH_DELIMITER, 1)
+ if prefix != expected_prefix:
+ raise ValueError(
+ "Unexpected document name",
+ document_pb.name,
+ "Expected to begin with",
+ expected_prefix,
+ )
+
+ return document_id
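+
+# Editor note (illustrative): with
+# document_pb.name == "projects/p/databases/d/documents/users/alice" and
+# expected_prefix == "projects/p/databases/d/documents/users", the
+# ``rsplit`` above produces
+# ("projects/p/databases/d/documents/users", "alice"), so the returned
+# document ID is "alice".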
+
+
+_EmptyDict = transforms.Sentinel("Marker for an empty dict value")
+
+
+def extract_fields(document_data, prefix_path, expand_dots=False):
+ """Do depth-first walk of tree, yielding field_path, value"""
+ if not document_data:
+ yield prefix_path, _EmptyDict
+ else:
+ for key, value in sorted(six.iteritems(document_data)):
+
+ if expand_dots:
+ sub_key = FieldPath.from_string(key)
+ else:
+ sub_key = FieldPath(key)
+
+ field_path = FieldPath(*(prefix_path.parts + sub_key.parts))
+
+ if isinstance(value, dict):
+ for s_path, s_value in extract_fields(value, field_path):
+ yield s_path, s_value
+ else:
+ yield field_path, value
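+
+# Editor note: a sketch of the depth-first walk above:
+#
+#     dict(extract_fields({"a": {"b": 1}, "c": {}}, FieldPath()))
+#     # {FieldPath("a", "b"): 1, FieldPath("c"): _EmptyDict}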
+
+
+def set_field_value(document_data, field_path, value):
+ """Set a value into a document for a field_path"""
+ current = document_data
+ for element in field_path.parts[:-1]:
+ current = current.setdefault(element, {})
+ if value is _EmptyDict:
+ value = {}
+ current[field_path.parts[-1]] = value
+
+
+def get_field_value(document_data, field_path):
+ if not field_path.parts:
+ raise ValueError("Empty path")
+
+ current = document_data
+ for element in field_path.parts[:-1]:
+ current = current[element]
+ return current[field_path.parts[-1]]
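+
+# Editor note (sketch): the two helpers above are inverses over nested
+# dictionaries:
+#
+#     data = {}
+#     set_field_value(data, FieldPath("a", "b"), 1)  # data == {"a": {"b": 1}}
+#     get_field_value(data, FieldPath("a", "b"))     # returns 1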
+
+
+class DocumentExtractor(object):
+ """ Break document data up into actual data and transforms.
+
+ Handle special values such as ``DELETE_FIELD``, ``SERVER_TIMESTAMP``.
+
+ Args:
+ document_data (dict):
+ Property names and values to use for sending a change to
+ a document.
+ """
+
+ def __init__(self, document_data):
+ self.document_data = document_data
+ self.field_paths = []
+ self.deleted_fields = []
+ self.server_timestamps = []
+ self.array_removes = {}
+ self.array_unions = {}
+ self.set_fields = {}
+ self.empty_document = False
+
+ prefix_path = FieldPath()
+ iterator = self._get_document_iterator(prefix_path)
+
+ for field_path, value in iterator:
+
+ if field_path == prefix_path and value is _EmptyDict:
+ self.empty_document = True
+
+ elif value is transforms.DELETE_FIELD:
+ self.deleted_fields.append(field_path)
+
+ elif value is transforms.SERVER_TIMESTAMP:
+ self.server_timestamps.append(field_path)
+
+ elif isinstance(value, transforms.ArrayRemove):
+ self.array_removes[field_path] = value.values
+
+ elif isinstance(value, transforms.ArrayUnion):
+ self.array_unions[field_path] = value.values
+
+ else:
+ self.field_paths.append(field_path)
+ set_field_value(self.set_fields, field_path, value)
+
+ def _get_document_iterator(self, prefix_path):
+ return extract_fields(self.document_data, prefix_path)
+
+ @property
+ def has_transforms(self):
+ return bool(self.server_timestamps or self.array_removes or self.array_unions)
+
+ @property
+ def transform_paths(self):
+ return sorted(
+ self.server_timestamps + list(self.array_removes) + list(self.array_unions)
+ )
+
+ def _get_update_mask(self, allow_empty_mask=False):
+ return None
+
+ def get_update_pb(self, document_path, exists=None, allow_empty_mask=False):
+
+ if exists is not None:
+ current_document = common_pb2.Precondition(exists=exists)
+ else:
+ current_document = None
+
+ update_pb = write_pb2.Write(
+ update=document_pb2.Document(
+ name=document_path, fields=encode_dict(self.set_fields)
+ ),
+ update_mask=self._get_update_mask(allow_empty_mask),
+ current_document=current_document,
+ )
+
+ return update_pb
+
+ def get_transform_pb(self, document_path, exists=None):
+ def make_array_value(values):
+ value_list = [encode_value(element) for element in values]
+ return document_pb2.ArrayValue(values=value_list)
+
+ path_field_transforms = (
+ [
+ (
+ path,
+ write_pb2.DocumentTransform.FieldTransform(
+ field_path=path.to_api_repr(),
+ set_to_server_value=REQUEST_TIME_ENUM,
+ ),
+ )
+ for path in self.server_timestamps
+ ]
+ + [
+ (
+ path,
+ write_pb2.DocumentTransform.FieldTransform(
+ field_path=path.to_api_repr(),
+ remove_all_from_array=make_array_value(values),
+ ),
+ )
+ for path, values in self.array_removes.items()
+ ]
+ + [
+ (
+ path,
+ write_pb2.DocumentTransform.FieldTransform(
+ field_path=path.to_api_repr(),
+ append_missing_elements=make_array_value(values),
+ ),
+ )
+ for path, values in self.array_unions.items()
+ ]
+ )
+ field_transforms = [
+ transform for path, transform in sorted(path_field_transforms)
+ ]
+ transform_pb = write_pb2.Write(
+ transform=write_pb2.DocumentTransform(
+ document=document_path, field_transforms=field_transforms
+ )
+ )
+ if exists is not None:
+ transform_pb.current_document.CopyFrom(
+ common_pb2.Precondition(exists=exists)
+ )
+
+ return transform_pb
+
+
+def pbs_for_create(document_path, document_data):
+ """Make ``Write`` protobufs for ``create()`` methods.
+
+ Args:
+ document_path (str): A fully-qualified document path.
+ document_data (dict): Property names and values to use for
+ creating a document.
+
+ Returns:
+ List[google.cloud.firestore_v1beta1.types.Write]: One or two
+ ``Write`` protobuf instances for ``create()``.
+ """
+ extractor = DocumentExtractor(document_data)
+
+ if extractor.deleted_fields:
+ raise ValueError("Cannot apply DELETE_FIELD in a create request.")
+
+ write_pbs = []
+
+ # Conformance tests require skipping the 'update_pb' if the document
+ # contains only transforms.
+ if extractor.empty_document or extractor.set_fields:
+ write_pbs.append(extractor.get_update_pb(document_path, exists=False))
+
+ if extractor.has_transforms:
+ exists = None if write_pbs else False
+ transform_pb = extractor.get_transform_pb(document_path, exists)
+ write_pbs.append(transform_pb)
+
+ return write_pbs
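+
+# Editor note (illustrative): document data holding only transforms,
+# e.g. ``{"created": SERVER_TIMESTAMP}``, yields a single transform
+# ``Write`` carrying the ``exists=False`` precondition; mixed data
+# yields an update ``Write`` (with the precondition) plus a transform
+# ``Write``.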
+
+
+def pbs_for_set_no_merge(document_path, document_data):
+ """Make ``Write`` protobufs for ``set()`` methods.
+
+ Args:
+ document_path (str): A fully-qualified document path.
+ document_data (dict): Property names and values to use for
+ replacing a document.
+
+ Returns:
+ List[google.cloud.firestore_v1beta1.types.Write]: One
+ or two ``Write`` protobuf instances for ``set()``.
+ """
+ extractor = DocumentExtractor(document_data)
+
+ if extractor.deleted_fields:
+ raise ValueError(
+ "Cannot apply DELETE_FIELD in a set request without "
+ "specifying 'merge=True' or 'merge=[field_paths]'."
+ )
+
+    # Conformance tests require sending the 'update_pb' even if the document
+ # contains only transforms.
+ write_pbs = [extractor.get_update_pb(document_path)]
+
+ if extractor.has_transforms:
+ transform_pb = extractor.get_transform_pb(document_path)
+ write_pbs.append(transform_pb)
+
+ return write_pbs
+
+
+class DocumentExtractorForMerge(DocumentExtractor):
+ """ Break document data up into actual data and transforms.
+ """
+
+ def __init__(self, document_data):
+ super(DocumentExtractorForMerge, self).__init__(document_data)
+ self.data_merge = []
+ self.transform_merge = []
+ self.merge = []
+
+ @property
+ def has_updates(self):
+ # for whatever reason, the conformance tests want to see the parent
+ # of nested transform paths in the update mask
+ # (see set-st-merge-nonleaf-alone.textproto)
+ update_paths = set(self.data_merge)
+
+ for transform_path in self.transform_paths:
+ if len(transform_path.parts) > 1:
+ parent_fp = FieldPath(*transform_path.parts[:-1])
+ update_paths.add(parent_fp)
+
+ return bool(update_paths)
+
+ def _apply_merge_all(self):
+ self.data_merge = sorted(self.field_paths + self.deleted_fields)
+ # TODO: other transforms
+ self.transform_merge = self.transform_paths
+ self.merge = sorted(self.data_merge + self.transform_paths)
+
+ def _construct_merge_paths(self, merge):
+ for merge_field in merge:
+ if isinstance(merge_field, FieldPath):
+ yield merge_field
+ else:
+ yield FieldPath(*parse_field_path(merge_field))
+
+ def _normalize_merge_paths(self, merge):
+ merge_paths = sorted(self._construct_merge_paths(merge))
+
+ # Raise if any merge path is a parent of another. Leverage sorting
+ # to avoid quadratic behavior.
+ for index in range(len(merge_paths) - 1):
+ lhs, rhs = merge_paths[index], merge_paths[index + 1]
+ if lhs.eq_or_parent(rhs):
+ raise ValueError("Merge paths overlap: {}, {}".format(lhs, rhs))
+
+ for merge_path in merge_paths:
+ if merge_path in self.deleted_fields:
+ continue
+ try:
+ get_field_value(self.document_data, merge_path)
+ except KeyError:
+ raise ValueError("Invalid merge path: {}".format(merge_path))
+
+ return merge_paths
+
+ def _apply_merge_paths(self, merge):
+
+ if self.empty_document:
+ raise ValueError("Cannot merge specific fields with empty document.")
+
+ merge_paths = self._normalize_merge_paths(merge)
+
+ del self.data_merge[:]
+ del self.transform_merge[:]
+ self.merge = merge_paths
+
+ for merge_path in merge_paths:
+
+ if merge_path in self.transform_paths:
+ self.transform_merge.append(merge_path)
+
+ for field_path in self.field_paths:
+ if merge_path.eq_or_parent(field_path):
+ self.data_merge.append(field_path)
+
+ # Clear out data for fields not merged.
+ merged_set_fields = {}
+ for field_path in self.data_merge:
+ value = get_field_value(self.document_data, field_path)
+ set_field_value(merged_set_fields, field_path, value)
+ self.set_fields = merged_set_fields
+
+ unmerged_deleted_fields = [
+ field_path
+ for field_path in self.deleted_fields
+ if field_path not in self.merge
+ ]
+ if unmerged_deleted_fields:
+ raise ValueError(
+ "Cannot delete unmerged fields: {}".format(unmerged_deleted_fields)
+ )
+ self.data_merge = sorted(self.data_merge + self.deleted_fields)
+
+ # Keep only transforms which are within merge.
+ merged_transform_paths = set()
+ for merge_path in self.merge:
+            transform_merge_paths = [
+                transform_path
+                for transform_path in self.transform_paths
+                if merge_path.eq_or_parent(transform_path)
+            ]
+            merged_transform_paths.update(transform_merge_paths)
+
+ self.server_timestamps = [
+ path for path in self.server_timestamps if path in merged_transform_paths
+ ]
+
+ self.array_removes = {
+ path: values
+ for path, values in self.array_removes.items()
+ if path in merged_transform_paths
+ }
+
+ self.array_unions = {
+ path: values
+ for path, values in self.array_unions.items()
+ if path in merged_transform_paths
+ }
+
+ def apply_merge(self, merge):
+ if merge is True: # merge all fields
+ self._apply_merge_all()
+ else:
+ self._apply_merge_paths(merge)
+
+ def _get_update_mask(self, allow_empty_mask=False):
+ # Mask uses dotted / quoted paths.
+ mask_paths = [
+ field_path.to_api_repr()
+ for field_path in self.merge
+ if field_path not in self.transform_merge
+ ]
+
+ if mask_paths or allow_empty_mask:
+ return common_pb2.DocumentMask(field_paths=mask_paths)
+
+
+def pbs_for_set_with_merge(document_path, document_data, merge):
+ """Make ``Write`` protobufs for ``set()`` methods.
+
+ Args:
+ document_path (str): A fully-qualified document path.
+ document_data (dict): Property names and values to use for
+ replacing a document.
+ merge (Optional[bool] or Optional[List]):
+ If True, merge all fields; else, merge only the named fields.
+
+ Returns:
+ List[google.cloud.firestore_v1beta1.types.Write]: One
+ or two ``Write`` protobuf instances for ``set()``.
+ """
+ extractor = DocumentExtractorForMerge(document_data)
+ extractor.apply_merge(merge)
+
+ merge_empty = not document_data
+
+ write_pbs = []
+
+ if extractor.has_updates or merge_empty:
+ write_pbs.append(
+ extractor.get_update_pb(document_path, allow_empty_mask=merge_empty)
+ )
+
+ if extractor.transform_paths:
+ transform_pb = extractor.get_transform_pb(document_path)
+ write_pbs.append(transform_pb)
+
+ return write_pbs
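+
+# Editor note (sketch): ``merge=True`` merges every field present in
+# ``document_data``; ``merge=["a"]`` restricts the update mask to field
+# "a" and raises ValueError if "a" is absent from the data or if merge
+# paths overlap (e.g. ``["a", "a.b"]``).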
+
+
+class DocumentExtractorForUpdate(DocumentExtractor):
+ """ Break document data up into actual data and transforms.
+ """
+
+ def __init__(self, document_data):
+ super(DocumentExtractorForUpdate, self).__init__(document_data)
+ self.top_level_paths = sorted(
+ [FieldPath.from_string(key) for key in document_data]
+ )
+ tops = set(self.top_level_paths)
+ for top_level_path in self.top_level_paths:
+ for ancestor in top_level_path.lineage():
+ if ancestor in tops:
+ raise ValueError(
+ "Conflicting field path: {}, {}".format(
+ top_level_path, ancestor
+ )
+ )
+
+ for field_path in self.deleted_fields:
+ if field_path not in tops:
+ raise ValueError(
+                    "Cannot update with nested delete: {}".format(field_path)
+ )
+
+ def _get_document_iterator(self, prefix_path):
+ return extract_fields(self.document_data, prefix_path, expand_dots=True)
+
+ def _get_update_mask(self, allow_empty_mask=False):
+ mask_paths = []
+ for field_path in self.top_level_paths:
+ if field_path not in self.transform_paths:
+ mask_paths.append(field_path.to_api_repr())
+
+ return common_pb2.DocumentMask(field_paths=mask_paths)
+
+
+def pbs_for_update(document_path, field_updates, option):
+ """Make ``Write`` protobufs for ``update()`` methods.
+
+ Args:
+ document_path (str): A fully-qualified document path.
+ field_updates (dict): Field names or paths to update and values
+ to update with.
+ option (optional[~.firestore_v1beta1.client.WriteOption]): A
+ write option to make assertions / preconditions on the server
+ state of the document before applying changes.
+
+ Returns:
+ List[google.cloud.firestore_v1beta1.types.Write]: One
+ or two ``Write`` protobuf instances for ``update()``.
+ """
+ extractor = DocumentExtractorForUpdate(field_updates)
+
+ if extractor.empty_document:
+ raise ValueError("Cannot update with an empty document.")
+
+ if option is None: # Default is to use ``exists=True``.
+ option = ExistsOption(exists=True)
+
+ write_pbs = []
+
+ if extractor.field_paths or extractor.deleted_fields:
+ update_pb = extractor.get_update_pb(document_path)
+ option.modify_write(update_pb)
+ write_pbs.append(update_pb)
+
+ if extractor.has_transforms:
+ transform_pb = extractor.get_transform_pb(document_path)
+ if not write_pbs:
+ # NOTE: set the write option on the ``transform_pb`` only if there
+ # is no ``update_pb``
+ option.modify_write(transform_pb)
+ write_pbs.append(transform_pb)
+
+ return write_pbs
+
+
+def pb_for_delete(document_path, option):
+ """Make a ``Write`` protobuf for ``delete()`` methods.
+
+ Args:
+ document_path (str): A fully-qualified document path.
+ option (optional[~.firestore_v1beta1.client.WriteOption]): A
+ write option to make assertions / preconditions on the server
+ state of the document before applying changes.
+
+ Returns:
+ google.cloud.firestore_v1beta1.types.Write: A
+ ``Write`` protobuf instance for the ``delete()``.
+ """
+ write_pb = write_pb2.Write(delete=document_path)
+ if option is not None:
+ option.modify_write(write_pb)
+
+ return write_pb
+
+
+class ReadAfterWriteError(Exception):
+ """Raised when a read is attempted after a write.
+
+ Raised by "read" methods that use transactions.
+ """
+
+
+def get_transaction_id(transaction, read_operation=True):
+ """Get the transaction ID from a ``Transaction`` object.
+
+ Args:
+ transaction (Optional[~.firestore_v1beta1.transaction.\
+ Transaction]): An existing transaction that this query will
+ run in.
+ read_operation (Optional[bool]): Indicates if the transaction ID
+ will be used in a read operation. Defaults to :data:`True`.
+
+ Returns:
+ Optional[bytes]: The ID of the transaction, or :data:`None` if the
+ ``transaction`` is :data:`None`.
+
+ Raises:
+ ValueError: If the ``transaction`` is not in progress (only if
+ ``transaction`` is not :data:`None`).
+ ReadAfterWriteError: If the ``transaction`` has writes stored on
+ it and ``read_operation`` is :data:`True`.
+ """
+ if transaction is None:
+ return None
+ else:
+ if not transaction.in_progress:
+ raise ValueError(INACTIVE_TXN)
+ if read_operation and len(transaction._write_pbs) > 0:
+ raise ReadAfterWriteError(READ_AFTER_WRITE_ERROR)
+ return transaction.id
+
+
+def metadata_with_prefix(prefix, **kw):
+ """Create RPC metadata containing a prefix.
+
+ Args:
+ prefix (str): appropriate resource path.
+
+ Returns:
+ List[Tuple[str, str]]: RPC metadata with supplied prefix
+ """
+ return [("google-cloud-resource-prefix", prefix)]
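+
+# Editor note (illustrative):
+#
+#     metadata_with_prefix("projects/p/databases/(default)")
+#     # [("google-cloud-resource-prefix", "projects/p/databases/(default)")]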
+
+
+class WriteOption(object):
+ """Option used to assert a condition on a write operation."""
+
+ def modify_write(self, write_pb, no_create_msg=None):
+ """Modify a ``Write`` protobuf based on the state of this write option.
+
+ This is a virtual method intended to be implemented by subclasses.
+
+ Args:
+ write_pb (google.cloud.firestore_v1beta1.types.Write): A
+ ``Write`` protobuf instance to be modified with a precondition
+ determined by the state of this option.
+ no_create_msg (Optional[str]): A message to use to indicate that
+ a create operation is not allowed.
+
+ Raises:
+ NotImplementedError: Always, this method is virtual.
+ """
+ raise NotImplementedError
+
+
+class LastUpdateOption(WriteOption):
+ """Option used to assert a "last update" condition on a write operation.
+
+ This will typically be created by
+ :meth:`~google.cloud.firestore_v1beta1.client.Client.write_option`.
+
+ Args:
+ last_update_time (google.protobuf.timestamp_pb2.Timestamp): A
+ timestamp. When set, the target document must exist and have
+ been last updated at that time. Protobuf ``update_time`` timestamps
+ are typically returned from methods that perform write operations
+ as part of a "write result" protobuf or directly.
+ """
+
+ def __init__(self, last_update_time):
+ self._last_update_time = last_update_time
+
+ def __eq__(self, other):
+ if not isinstance(other, self.__class__):
+ return NotImplemented
+ return self._last_update_time == other._last_update_time
+
+ def modify_write(self, write_pb, **unused_kwargs):
+ """Modify a ``Write`` protobuf based on the state of this write option.
+
+ The ``last_update_time`` is added to ``write_pb`` as an "update time"
+ precondition. When set, the target document must exist and have been
+ last updated at that time.
+
+ Args:
+ write_pb (google.cloud.firestore_v1beta1.types.Write): A
+ ``Write`` protobuf instance to be modified with a precondition
+ determined by the state of this option.
+ unused_kwargs (Dict[str, Any]): Keyword arguments accepted by
+ other subclasses that are unused here.
+ """
+ current_doc = types.Precondition(update_time=self._last_update_time)
+ write_pb.current_document.CopyFrom(current_doc)
+
+
+class ExistsOption(WriteOption):
+ """Option used to assert existence on a write operation.
+
+ This will typically be created by
+ :meth:`~google.cloud.firestore_v1beta1.client.Client.write_option`.
+
+ Args:
+ exists (bool): Indicates if the document being modified
+ should already exist.
+ """
+
+ def __init__(self, exists):
+ self._exists = exists
+
+ def __eq__(self, other):
+ if not isinstance(other, self.__class__):
+ return NotImplemented
+ return self._exists == other._exists
+
+ def modify_write(self, write_pb, **unused_kwargs):
+ """Modify a ``Write`` protobuf based on the state of this write option.
+
+ If:
+
+ * ``exists=True``, adds a precondition that requires existence
+ * ``exists=False``, adds a precondition that requires non-existence
+
+ Args:
+ write_pb (google.cloud.firestore_v1beta1.types.Write): A
+ ``Write`` protobuf instance to be modified with a precondition
+ determined by the state of this option.
+ unused_kwargs (Dict[str, Any]): Keyword arguments accepted by
+ other subclasses that are unused here.
+ """
+ current_doc = types.Precondition(exists=self._exists)
+ write_pb.current_document.CopyFrom(current_doc)
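+
+# Editor note: options are normally constructed via
+# ``Client.write_option`` (defined later in this diff); a hedged sketch:
+#
+#     option = client.write_option(exists=True)
+#     option.modify_write(write_pb)  # attaches Precondition(exists=True)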
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/batch.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/batch.py
new file mode 100644
index 000000000..f3e1018ab
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/batch.py
@@ -0,0 +1,162 @@
+# Copyright 2017 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for batch requests to the Google Cloud Firestore API."""
+
+
+from google.cloud.firestore_v1beta1 import _helpers
+
+
+class WriteBatch(object):
+ """Accumulate write operations to be sent in a batch.
+
+ This has the same set of methods for write operations that
+ :class:`~google.cloud.firestore_v1beta1.document.DocumentReference`
+ does, e.g.
+ :meth:`~google.cloud.firestore_v1beta1.document.DocumentReference.create`.
+
+ Args:
+ client (~.firestore_v1beta1.client.Client): The client that
+ created this batch.
+ """
+
+ def __init__(self, client):
+ self._client = client
+ self._write_pbs = []
+ self.write_results = None
+ self.commit_time = None
+
+ def _add_write_pbs(self, write_pbs):
+        """Add ``Write`` protobufs to this batch.
+
+        This method is intended to be overridden by subclasses.
+
+ Args:
+ write_pbs (List[google.cloud.proto.firestore.v1beta1.\
+ write_pb2.Write]): A list of write protobufs to be added.
+ """
+ self._write_pbs.extend(write_pbs)
+
+ def create(self, reference, document_data):
+ """Add a "change" to this batch to create a document.
+
+ If the document given by ``reference`` already exists, then this
+ batch will fail when :meth:`commit`-ed.
+
+ Args:
+ reference (~.firestore_v1beta1.document.DocumentReference): A
+ document reference to be created in this batch.
+ document_data (dict): Property names and values to use for
+ creating a document.
+ """
+ write_pbs = _helpers.pbs_for_create(reference._document_path, document_data)
+ self._add_write_pbs(write_pbs)
+
+ def set(self, reference, document_data, merge=False):
+ """Add a "change" to replace a document.
+
+ See
+ :meth:`~google.cloud.firestore_v1beta1.document.DocumentReference.set`
+ for more information on how ``option`` determines how the change is
+ applied.
+
+ Args:
+ reference (~.firestore_v1beta1.document.DocumentReference):
+ A document reference that will have values set in this batch.
+ document_data (dict):
+ Property names and values to use for replacing a document.
+ merge (Optional[bool] or Optional[List]):
+ If True, apply merging instead of overwriting the state
+ of the document.
+ """
+ if merge is not False:
+ write_pbs = _helpers.pbs_for_set_with_merge(
+ reference._document_path, document_data, merge
+ )
+ else:
+ write_pbs = _helpers.pbs_for_set_no_merge(
+ reference._document_path, document_data
+ )
+
+ self._add_write_pbs(write_pbs)
+
+ def update(self, reference, field_updates, option=None):
+ """Add a "change" to update a document.
+
+ See
+ :meth:`~google.cloud.firestore_v1beta1.document.DocumentReference.update`
+ for more information on ``field_updates`` and ``option``.
+
+ Args:
+ reference (~.firestore_v1beta1.document.DocumentReference): A
+                document reference that will be updated in this batch.
+ field_updates (dict): Field names or paths to update and values
+ to update with.
+ option (Optional[~.firestore_v1beta1.client.WriteOption]): A
+ write option to make assertions / preconditions on the server
+ state of the document before applying changes.
+ """
+ if option.__class__.__name__ == "ExistsOption":
+ raise ValueError("you must not pass an explicit write option to " "update.")
+ write_pbs = _helpers.pbs_for_update(
+ reference._document_path, field_updates, option
+ )
+ self._add_write_pbs(write_pbs)
+
+ def delete(self, reference, option=None):
+ """Add a "change" to delete a document.
+
+ See
+ :meth:`~google.cloud.firestore_v1beta1.document.DocumentReference.delete`
+ for more information on how ``option`` determines how the change is
+ applied.
+
+ Args:
+ reference (~.firestore_v1beta1.document.DocumentReference): A
+ document reference that will be deleted in this batch.
+ option (Optional[~.firestore_v1beta1.client.WriteOption]): A
+ write option to make assertions / preconditions on the server
+ state of the document before applying changes.
+ """
+ write_pb = _helpers.pb_for_delete(reference._document_path, option)
+ self._add_write_pbs([write_pb])
+
+ def commit(self):
+ """Commit the changes accumulated in this batch.
+
+ Returns:
+ List[google.cloud.proto.firestore.v1beta1.\
+ write_pb2.WriteResult, ...]: The write results corresponding
+ to the changes committed, returned in the same order as the
+ changes were applied to this batch. A write result contains an
+ ``update_time`` field.
+ """
+ commit_response = self._client._firestore_api.commit(
+ self._client._database_string,
+ self._write_pbs,
+ transaction=None,
+ metadata=self._client._rpc_metadata,
+ )
+
+ self._write_pbs = []
+ self.write_results = results = list(commit_response.write_results)
+ self.commit_time = commit_response.commit_time
+ return results
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ if exc_type is None:
+ self.commit()
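+
+# Editor note: a minimal usage sketch of the batch as a context manager
+# (the ``client`` and document paths are assumed for illustration):
+#
+#     with client.batch() as batch:
+#         batch.set(client.document("users", "alice"), {"age": 30})
+#         batch.delete(client.document("users", "bob"))
+#     # commit() ran automatically because the block raised no exception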
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/client.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/client.py
new file mode 100644
index 000000000..50036f0ad
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/client.py
@@ -0,0 +1,542 @@
+# Copyright 2017 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Client for interacting with the Google Cloud Firestore API.
+
+This is the base from which all interactions with the API occur.
+
+In the hierarchy of API concepts
+
+* a :class:`~google.cloud.firestore_v1beta1.client.Client` owns a
+ :class:`~google.cloud.firestore_v1beta1.collection.CollectionReference`
+* a :class:`~google.cloud.firestore_v1beta1.client.Client` owns a
+ :class:`~google.cloud.firestore_v1beta1.document.DocumentReference`
+"""
+import warnings
+
+from google.cloud.client import ClientWithProject
+
+from google.cloud.firestore_v1beta1 import _helpers
+from google.cloud.firestore_v1beta1 import types
+from google.cloud.firestore_v1beta1.batch import WriteBatch
+from google.cloud.firestore_v1beta1.collection import CollectionReference
+from google.cloud.firestore_v1beta1.document import DocumentReference
+from google.cloud.firestore_v1beta1.document import DocumentSnapshot
+from google.cloud.firestore_v1beta1.field_path import render_field_path
+from google.cloud.firestore_v1beta1.gapic import firestore_client
+from google.cloud.firestore_v1beta1.gapic.transports import firestore_grpc_transport
+from google.cloud.firestore_v1beta1.transaction import Transaction
+
+
+DEFAULT_DATABASE = "(default)"
+"""str: The default database used in a :class:`~google.cloud.firestore.client.Client`."""
+_BAD_OPTION_ERR = (
+ "Exactly one of ``last_update_time`` or ``exists`` " "must be provided."
+)
+_BAD_DOC_TEMPLATE = (
+ "Document {!r} appeared in response but was not present among references"
+)
+_ACTIVE_TXN = "There is already an active transaction."
+_INACTIVE_TXN = "There is no active transaction."
+_V1BETA1_DEPRECATED_MESSAGE = (
+ "The 'v1beta1' API endpoint is deprecated. "
+ "The client/library which supports it will be removed in a future release."
+)
+
+
+class Client(ClientWithProject):
+ """Client for interacting with Google Cloud Firestore API.
+
+ .. note::
+
+ Since the Cloud Firestore API requires the gRPC transport, no
+ ``_http`` argument is accepted by this class.
+
+ Args:
+ project (Optional[str]): The project which the client acts on behalf
+ of. If not passed, falls back to the default inferred
+ from the environment.
+ credentials (Optional[~google.auth.credentials.Credentials]): The
+ OAuth2 Credentials to use for this client. If not passed, falls
+ back to the default inferred from the environment.
+ database (Optional[str]): The database name that the client targets.
+ For now, :attr:`DEFAULT_DATABASE` (the default value) is the
+ only valid database.
+ """
+
+ SCOPE = (
+ "https://www.googleapis.com/auth/cloud-platform",
+ "https://www.googleapis.com/auth/datastore",
+ )
+ """The scopes required for authenticating with the Firestore service."""
+
+ _firestore_api_internal = None
+ _database_string_internal = None
+ _rpc_metadata_internal = None
+
+ def __init__(self, project=None, credentials=None, database=DEFAULT_DATABASE):
+ warnings.warn(_V1BETA1_DEPRECATED_MESSAGE, DeprecationWarning, stacklevel=2)
+ # NOTE: This API has no use for the _http argument, but sending it
+ # will have no impact since the _http() @property only lazily
+ # creates a working HTTP object.
+ super(Client, self).__init__(
+ project=project, credentials=credentials, _http=None
+ )
+ self._database = database
+
+ @property
+ def _firestore_api(self):
+        """Lazy-loading getter for the GAPIC Firestore API.
+
+ Returns:
+ ~.gapic.firestore.v1beta1.firestore_client.FirestoreClient: The
+ GAPIC client with the credentials of the current client.
+ """
+ if self._firestore_api_internal is None:
+ # Use a custom channel.
+ # We need this in order to set appropriate keepalive options.
+ channel = firestore_grpc_transport.FirestoreGrpcTransport.create_channel(
+ self._target,
+ credentials=self._credentials,
+ options={"grpc.keepalive_time_ms": 30000}.items(),
+ )
+
+ self._transport = firestore_grpc_transport.FirestoreGrpcTransport(
+ address=self._target, channel=channel
+ )
+
+ self._firestore_api_internal = firestore_client.FirestoreClient(
+ transport=self._transport
+ )
+
+ return self._firestore_api_internal
+
+ @property
+ def _target(self):
+ """Return the target (where the API is).
+
+ Returns:
+ str: The location of the API.
+ """
+ return firestore_client.FirestoreClient.SERVICE_ADDRESS
+
+ @property
+ def _database_string(self):
+ """The database string corresponding to this client's project.
+
+ This value is lazy-loaded and cached.
+
+ Will be of the form
+
+ ``projects/{project_id}/databases/{database_id}``
+
+ but ``database_id == '(default)'`` for the time being.
+
+ Returns:
+ str: The fully-qualified database string for the current
+ project. (The default database is also in this string.)
+ """
+ if self._database_string_internal is None:
+ # NOTE: database_root_path() is a classmethod, so we don't use
+ # self._firestore_api (it isn't necessary).
+ db_str = firestore_client.FirestoreClient.database_root_path(
+ self.project, self._database
+ )
+ self._database_string_internal = db_str
+
+ return self._database_string_internal
+
+ @property
+ def _rpc_metadata(self):
+ """The RPC metadata for this client's associated database.
+
+ Returns:
+ Sequence[Tuple(str, str)]: RPC metadata with resource prefix
+ for the database associated with this client.
+ """
+ if self._rpc_metadata_internal is None:
+ self._rpc_metadata_internal = _helpers.metadata_with_prefix(
+ self._database_string
+ )
+
+ return self._rpc_metadata_internal
+
+ def collection(self, *collection_path):
+ """Get a reference to a collection.
+
+ For a top-level collection:
+
+ .. code-block:: python
+
+ >>> client.collection('top')
+
+ For a sub-collection:
+
+ .. code-block:: python
+
+ >>> client.collection('mydocs/doc/subcol')
+ >>> # is the same as
+ >>> client.collection('mydocs', 'doc', 'subcol')
+
+ Sub-collections can be nested deeper in a similar fashion.
+
+ Args:
+ collection_path (Tuple[str, ...]): Can either be
+
+ * A single ``/``-delimited path to a collection
+ * A tuple of collection path segments
+
+ Returns:
+ ~.firestore_v1beta1.collection.CollectionReference: A reference
+ to a collection in the Firestore database.
+ """
+ if len(collection_path) == 1:
+ path = collection_path[0].split(_helpers.DOCUMENT_PATH_DELIMITER)
+ else:
+ path = collection_path
+
+ return CollectionReference(*path, client=self)
+
+ def document(self, *document_path):
+ """Get a reference to a document in a collection.
+
+ For a top-level document:
+
+ .. code-block:: python
+
+ >>> client.document('collek/shun')
+ >>> # is the same as
+ >>> client.document('collek', 'shun')
+
+ For a document in a sub-collection:
+
+ .. code-block:: python
+
+ >>> client.document('mydocs/doc/subcol/child')
+ >>> # is the same as
+ >>> client.document('mydocs', 'doc', 'subcol', 'child')
+
+ Documents in sub-collections can be nested deeper in a similar fashion.
+
+ Args:
+ document_path (Tuple[str, ...]): Can either be
+
+ * A single ``/``-delimited path to a document
+ * A tuple of document path segments
+
+ Returns:
+ ~.firestore_v1beta1.document.DocumentReference: A reference
+ to a document in a collection.
+ """
+ if len(document_path) == 1:
+ path = document_path[0].split(_helpers.DOCUMENT_PATH_DELIMITER)
+ else:
+ path = document_path
+
+ return DocumentReference(*path, client=self)
+
+ @staticmethod
+ def field_path(*field_names):
+ """Create a **field path** from a list of nested field names.
+
+ A **field path** is a ``.``-delimited concatenation of the field
+ names. It is used to represent a nested field. For example,
+ in the data
+
+ .. code-block:: python
+
+ data = {
+ 'aa': {
+ 'bb': {
+ 'cc': 10,
+ },
+ },
+ }
+
+ the field path ``'aa.bb.cc'`` represents the data stored in
+ ``data['aa']['bb']['cc']``.
+
+ Args:
+ field_names (Tuple[str, ...]): The list of field names.
+
+ Returns:
+ str: The ``.``-delimited field path.
+ """
+ return render_field_path(field_names)
+
+ @staticmethod
+ def write_option(**kwargs):
+ """Create a write option for write operations.
+
+ Write operations include :meth:`~google.cloud.DocumentReference.set`,
+ :meth:`~google.cloud.DocumentReference.update` and
+ :meth:`~google.cloud.DocumentReference.delete`.
+
+ One of the following keyword arguments must be provided:
+
+ * ``last_update_time`` (:class:`google.protobuf.timestamp_pb2.\
+ Timestamp`): A timestamp. When set, the target document must
+ exist and have been last updated at that time. Protobuf
+ ``update_time`` timestamps are typically returned from methods
+ that perform write operations as part of a "write result"
+ protobuf or directly.
+ * ``exists`` (:class:`bool`): Indicates if the document being modified
+ should already exist.
+
+        Providing no argument would make the option have no effect (so
+        it is not allowed). Providing more than one would be an apparent
+        contradiction, since ``last_update_time`` assumes that the
+        document **was** updated (it can't have been updated if it
+        doesn't exist) and ``exists`` indicates that it is unknown
+        whether the document exists.
+
+ Args:
+ kwargs (Dict[str, Any]): The keyword arguments described above.
+
+ Raises:
+ TypeError: If anything other than exactly one argument is
+ provided by the caller.
+ """
+ if len(kwargs) != 1:
+ raise TypeError(_BAD_OPTION_ERR)
+
+ name, value = kwargs.popitem()
+ if name == "last_update_time":
+ return _helpers.LastUpdateOption(value)
+ elif name == "exists":
+ return _helpers.ExistsOption(value)
+ else:
+ extra = "{!r} was provided".format(name)
+ raise TypeError(_BAD_OPTION_ERR, extra)
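+
+ # Illustrative sketch (editor's addition, not part of the library): the two
+ # accepted forms, assuming `client` is a configured Client and `snapshot`
+ # is a previously fetched DocumentSnapshot.
+ #
+ # >>> client.write_option(exists=True)
+ # >>> client.write_option(last_update_time=snapshot.update_time)
+ # >>> client.write_option()  # zero (or two or more) keywords raise TypeError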
+
+ def get_all(self, references, field_paths=None, transaction=None):
+ """Retrieve a batch of documents.
+
+ .. note::
+
+ Documents returned by this method are not guaranteed to be
+ returned in the same order that they are given in ``references``.
+
+ .. note::
+
+ If multiple ``references`` refer to the same document, the server
+ will only return one result.
+
+ See :meth:`~google.cloud.firestore_v1beta1.client.Client.field_path`
+ for more information on **field paths**.
+
+ If a ``transaction`` is used and it already has write operations
+ added, this method cannot be used (i.e. read-after-write is not
+ allowed).
+
+ Args:
+ references (List[.DocumentReference, ...]): Iterable of document
+ references to be retrieved.
+ field_paths (Optional[Iterable[str, ...]]): An iterable of field
+ paths (``.``-delimited list of field names) to use as a
+ projection of document fields in the returned results. If
+ no value is provided, all fields will be returned.
+ transaction (Optional[~.firestore_v1beta1.transaction.\
+ Transaction]): An existing transaction that these
+ ``references`` will be retrieved in.
+
+ Yields:
+ .DocumentSnapshot: The next document snapshot that fulfills the
+ request; if a requested document does not exist, the yielded
+ snapshot has ``exists`` set to :data:`False`.
+ """
+ document_paths, reference_map = _reference_info(references)
+ mask = _get_doc_mask(field_paths)
+ response_iterator = self._firestore_api.batch_get_documents(
+ self._database_string,
+ document_paths,
+ mask,
+ transaction=_helpers.get_transaction_id(transaction),
+ metadata=self._rpc_metadata,
+ )
+
+ for get_doc_response in response_iterator:
+ yield _parse_batch_get(get_doc_response, reference_map, self)
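+
+ # Illustrative sketch (editor's addition, not part of the library): batch
+ # retrieval with a projection; the collection, document IDs and field name
+ # are hypothetical.
+ #
+ # >>> refs = [client.document('users', 'ada'), client.document('users', 'alan')]
+ # >>> for snapshot in client.get_all(refs, field_paths=['born']):
+ # ...     print(snapshot.id if snapshot.exists else 'missing')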
+
+ def collections(self):
+ """List top-level collections of the client's database.
+
+ Returns:
+ Sequence[~.firestore_v1beta1.collection.CollectionReference]:
+ iterator of the top-level collections of the client's database.
+ """
+ iterator = self._firestore_api.list_collection_ids(
+ self._database_string, metadata=self._rpc_metadata
+ )
+ iterator.client = self
+ iterator.item_to_value = _item_to_collection_ref
+ return iterator
+
+ def batch(self):
+ """Get a batch instance from this client.
+
+ Returns:
+ ~.firestore_v1beta1.batch.WriteBatch: A "write" batch to be
+ used for accumulating document changes and sending the changes
+ all at once.
+ """
+ return WriteBatch(self)
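+
+ # Illustrative sketch (editor's addition, not part of the library):
+ # accumulating writes in a batch and committing them at once; document
+ # names and data are hypothetical.
+ #
+ # >>> batch = client.batch()
+ # >>> batch.set(client.document('users', 'ada'), {'born': 1815})
+ # >>> batch.delete(client.document('users', 'alan'))
+ # >>> write_results = batch.commit()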
+
+ def transaction(self, **kwargs):
+ """Get a transaction that uses this client.
+
+ See :class:`~google.cloud.firestore_v1beta1.transaction.Transaction`
+ for more information on transactions and the constructor arguments.
+
+ Args:
+ kwargs (Dict[str, Any]): The keyword arguments (other than
+ ``client``) to pass along to the
+ :class:`~google.cloud.firestore_v1beta1.transaction.Transaction`
+ constructor.
+
+ Returns:
+ ~.firestore_v1beta1.transaction.Transaction: A transaction
+ attached to this client.
+ """
+ return Transaction(self, **kwargs)
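+
+ # Illustrative sketch (editor's addition, not part of the library),
+ # assuming the Transaction constructor in this version accepts the
+ # `max_attempts` and `read_only` keywords:
+ #
+ # >>> transaction = client.transaction(max_attempts=3, read_only=True)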
+
+
+def _reference_info(references):
+ """Get information about document references.
+
+ Helper for :meth:`~google.cloud.firestore_v1beta1.client.Client.get_all`.
+
+ Args:
+ references (List[.DocumentReference, ...]): Iterable of document
+ references.
+
+ Returns:
+ Tuple[List[str, ...], Dict[str, .DocumentReference]]: A two-tuple of
+
+ * fully-qualified documents paths for each reference in ``references``
+ * a mapping from the paths to the original reference. (If
+ ``references`` contains multiple references to the same document,
+ that key will be overwritten in the result.)
+ """
+ document_paths = []
+ reference_map = {}
+ for reference in references:
+ doc_path = reference._document_path
+ document_paths.append(doc_path)
+ reference_map[doc_path] = reference
+
+ return document_paths, reference_map
+
+
+def _get_reference(document_path, reference_map):
+ """Get a document reference from a dictionary.
+
+ This just wraps a simple dictionary look-up with a helpful error that is
+ specific to :meth:`~google.cloud.firestore.client.Client.get_all`, the
+ **public** caller of this function.
+
+ Args:
+ document_path (str): A fully-qualified document path.
+ reference_map (Dict[str, .DocumentReference]): A mapping (produced
+ by :func:`_reference_info`) of fully-qualified document paths to
+ document references.
+
+ Returns:
+ .DocumentReference: The matching reference.
+
+ Raises:
+ ValueError: If ``document_path`` has not been encountered.
+ """
+ try:
+ return reference_map[document_path]
+ except KeyError:
+ msg = _BAD_DOC_TEMPLATE.format(document_path)
+ raise ValueError(msg)
+
+
+def _parse_batch_get(get_doc_response, reference_map, client):
+ """Parse a `BatchGetDocumentsResponse` protobuf.
+
+ Args:
+ get_doc_response (~google.cloud.proto.firestore.v1beta1.\
+ firestore_pb2.BatchGetDocumentsResponse): A single response (from
+ a stream) containing the "get" response for a document.
+ reference_map (Dict[str, .DocumentReference]): A mapping (produced
+ by :func:`_reference_info`) of fully-qualified document paths to
+ document references.
+ client (~.firestore_v1beta1.client.Client): A client that has
+ a document factory.
+
+ Returns:
+ .DocumentSnapshot: The retrieved snapshot.
+
+ Raises:
+ ValueError: If the response has a ``result`` field (a oneof) other
+ than ``found`` or ``missing``.
+ """
+ result_type = get_doc_response.WhichOneof("result")
+ if result_type == "found":
+ reference = _get_reference(get_doc_response.found.name, reference_map)
+ data = _helpers.decode_dict(get_doc_response.found.fields, client)
+ snapshot = DocumentSnapshot(
+ reference,
+ data,
+ exists=True,
+ read_time=get_doc_response.read_time,
+ create_time=get_doc_response.found.create_time,
+ update_time=get_doc_response.found.update_time,
+ )
+ elif result_type == "missing":
+ snapshot = DocumentSnapshot(
+ None,
+ None,
+ exists=False,
+ read_time=get_doc_response.read_time,
+ create_time=None,
+ update_time=None,
+ )
+ else:
+ raise ValueError(
+ "`BatchGetDocumentsResponse.result` (a oneof) had a field other "
+ "than `found` or `missing` set, or was unset"
+ )
+ return snapshot
+
+
+def _get_doc_mask(field_paths):
+ """Get a document mask if field paths are provided.
+
+ Args:
+ field_paths (Optional[Iterable[str, ...]]): An iterable of field
+ paths (``.``-delimited list of field names) to use as a
+ projection of document fields in the returned results.
+
+ Returns:
+ Optional[google.cloud.firestore_v1beta1.types.DocumentMask]: A mask
+ to project documents to a restricted set of field paths.
+ """
+ if field_paths is None:
+ return None
+ else:
+ return types.DocumentMask(field_paths=field_paths)
+
+
+def _item_to_collection_ref(iterator, item):
+ """Convert collection ID to collection ref.
+
+ Args:
+ iterator (google.api_core.page_iterator.GRPCIterator):
+ iterator response
+ item (str): ID of the collection
+ """
+ return iterator.client.collection(item)
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/collection.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/collection.py
new file mode 100644
index 000000000..45b1ddae0
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/collection.py
@@ -0,0 +1,478 @@
+# Copyright 2017 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Classes for representing collections for the Google Cloud Firestore API."""
+import random
+import warnings
+
+import six
+
+from google.cloud.firestore_v1beta1 import _helpers
+from google.cloud.firestore_v1beta1 import query as query_mod
+from google.cloud.firestore_v1beta1.proto import document_pb2
+from google.cloud.firestore_v1beta1.watch import Watch
+from google.cloud.firestore_v1beta1 import document
+
+_AUTO_ID_CHARS = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"
+
+
+class CollectionReference(object):
+ """A reference to a collection in a Firestore database.
+
+ The collection may already exist or this class can facilitate creation
+ of documents within the collection.
+
+ Args:
+ path (Tuple[str, ...]): The components in the collection path.
+ This is a series of strings representing each collection and
+ sub-collection ID, as well as the document IDs for any documents
+ that contain a sub-collection.
+ kwargs (dict): The keyword arguments for the constructor. The only
+ supported keyword is ``client`` and it must be a
+ :class:`~google.cloud.firestore_v1beta1.client.Client` if
+ provided. It represents the client that created this collection
+ reference.
+
+ Raises:
+ ValueError: if
+
+ * the ``path`` is empty
+ * there is an even number of elements
+ * a collection ID in ``path`` is not a string
+ * a document ID in ``path`` is not a string
+ TypeError: If a keyword other than ``client`` is used.
+ """
+
+ def __init__(self, *path, **kwargs):
+ _helpers.verify_path(path, is_collection=True)
+ self._path = path
+ self._client = kwargs.pop("client", None)
+ if kwargs:
+ raise TypeError(
+ "Received unexpected arguments", kwargs, "Only `client` is supported"
+ )
+
+ def __eq__(self, other):
+ if not isinstance(other, self.__class__):
+ return NotImplemented
+ return self._path == other._path and self._client == other._client
+
+ @property
+ def id(self):
+ """The collection identifier.
+
+ Returns:
+ str: The last component of the path.
+ """
+ return self._path[-1]
+
+ @property
+ def parent(self):
+ """Document that owns the current collection.
+
+ Returns:
+ Optional[~.firestore_v1beta1.document.DocumentReference]: The
+ parent document, if the current collection is not a
+ top-level collection.
+ """
+ if len(self._path) == 1:
+ return None
+ else:
+ parent_path = self._path[:-1]
+ return self._client.document(*parent_path)
+
+ def document(self, document_id=None):
+ """Create a sub-document underneath the current collection.
+
+ Args:
+ document_id (Optional[str]): The document identifier
+ within the current collection. If not provided, will default
+ to a random 20 character string composed of digits and
+ uppercase and lowercase letters.
+
+ Returns:
+ ~.firestore_v1beta1.document.DocumentReference: The child
+ document.
+ """
+ if document_id is None:
+ document_id = _auto_id()
+
+ child_path = self._path + (document_id,)
+ return self._client.document(*child_path)
+
+ def _parent_info(self):
+ """Get fully-qualified parent path and prefix for this collection.
+
+ Returns:
+ Tuple[str, str]: Pair of
+
+ * the fully-qualified (with database and project) path to the
+ parent of this collection (will either be the database path
+ or a document path).
+ * the prefix to a document in this collection.
+ """
+ parent_doc = self.parent
+ if parent_doc is None:
+ parent_path = _helpers.DOCUMENT_PATH_DELIMITER.join(
+ (self._client._database_string, "documents")
+ )
+ else:
+ parent_path = parent_doc._document_path
+
+ expected_prefix = _helpers.DOCUMENT_PATH_DELIMITER.join((parent_path, self.id))
+ return parent_path, expected_prefix
+
+ def add(self, document_data, document_id=None):
+ """Create a document in the Firestore database with the provided data.
+
+ Args:
+ document_data (dict): Property names and values to use for
+ creating the document.
+ document_id (Optional[str]): The document identifier within the
+ current collection. If not provided, an ID will be
+ automatically assigned by the server (the assigned ID will be
+ a random 20 character string composed of digits,
+ uppercase and lowercase letters).
+
+ Returns:
+ Tuple[google.protobuf.timestamp_pb2.Timestamp, \
+ ~.firestore_v1beta1.document.DocumentReference]: Pair of
+
+ * The ``update_time`` when the document was created (or
+ overwritten).
+ * A document reference for the created document.
+
+ Raises:
+ ~google.cloud.exceptions.Conflict: If ``document_id`` is provided
+ and the document already exists.
+ """
+ if document_id is None:
+ parent_path, expected_prefix = self._parent_info()
+
+ document_pb = document_pb2.Document()
+
+ created_document_pb = self._client._firestore_api.create_document(
+ parent_path,
+ collection_id=self.id,
+ document_id=None,
+ document=document_pb,
+ mask=None,
+ metadata=self._client._rpc_metadata,
+ )
+
+ new_document_id = _helpers.get_doc_id(created_document_pb, expected_prefix)
+ document_ref = self.document(new_document_id)
+ set_result = document_ref.set(document_data)
+ return set_result.update_time, document_ref
+ else:
+ document_ref = self.document(document_id)
+ write_result = document_ref.create(document_data)
+ return write_result.update_time, document_ref
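+
+ # Illustrative sketch (editor's addition, not part of the library): both
+ # branches above; the collection, data and document ID are hypothetical.
+ #
+ # >>> update_time, doc_ref = collection.add({'born': 1815})  # server-assigned ID
+ # >>> update_time, doc_ref = collection.add({'born': 1912}, document_id='alan')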
+
+ def list_documents(self, page_size=None):
+ """List all subdocuments of the current collection.
+
+ Args:
+ page_size (Optional[int]): The maximum number of documents
+ in each page of results from this request. Non-positive values
+ are ignored. Defaults to a sensible value set by the API.
+
+ Returns:
+ Sequence[~.firestore_v1beta1.document.DocumentReference]:
+ iterator of subdocuments of the current collection. If the
+ collection does not exist at the time of the call, the
+ iterator will be empty.
+ """
+ parent, _ = self._parent_info()
+
+ iterator = self._client._firestore_api.list_documents(
+ parent,
+ self.id,
+ page_size=page_size,
+ show_missing=True,
+ metadata=self._client._rpc_metadata,
+ )
+ iterator.collection = self
+ iterator.item_to_value = _item_to_document_ref
+ return iterator
+
+ def select(self, field_paths):
+ """Create a "select" query with this collection as parent.
+
+ See
+ :meth:`~google.cloud.firestore_v1beta1.query.Query.select` for
+ more information on this method.
+
+ Args:
+ field_paths (Iterable[str, ...]): An iterable of field paths
+ (``.``-delimited list of field names) to use as a projection
+ of document fields in the query results.
+
+ Returns:
+ ~.firestore_v1beta1.query.Query: A "projected" query.
+ """
+ query = query_mod.Query(self)
+ return query.select(field_paths)
+
+ def where(self, field_path, op_string, value):
+ """Create a "where" query with this collection as parent.
+
+ See
+ :meth:`~google.cloud.firestore_v1beta1.query.Query.where` for
+ more information on this method.
+
+ Args:
+ field_path (str): A field path (``.``-delimited list of
+ field names) for the field to filter on.
+ op_string (str): A comparison operation in the form of a string.
+ Acceptable values are ``<``, ``<=``, ``==``, ``>=``
+ and ``>``.
+ value (Any): The value to compare the field against in the filter.
+ If ``value`` is :data:`None` or a NaN, then ``==`` is the only
+ allowed operation.
+
+ Returns:
+ ~.firestore_v1beta1.query.Query: A filtered query.
+ """
+ query = query_mod.Query(self)
+ return query.where(field_path, op_string, value)
+
+ def order_by(self, field_path, **kwargs):
+ """Create an "order by" query with this collection as parent.
+
+ See
+ :meth:`~google.cloud.firestore_v1beta1.query.Query.order_by` for
+ more information on this method.
+
+ Args:
+ field_path (str): A field path (``.``-delimited list of
+ field names) on which to order the query results.
+ kwargs (Dict[str, Any]): The keyword arguments to pass along
+ to the query. The only supported keyword is ``direction``, see
+ :meth:`~google.cloud.firestore_v1beta1.query.Query.order_by`
+ for more information.
+
+ Returns:
+ ~.firestore_v1beta1.query.Query: An "order by" query.
+ """
+ query = query_mod.Query(self)
+ return query.order_by(field_path, **kwargs)
+
+ def limit(self, count):
+ """Create a limited query with this collection as parent.
+
+ See
+ :meth:`~google.cloud.firestore_v1beta1.query.Query.limit` for
+ more information on this method.
+
+ Args:
+ count (int): Maximum number of documents to return that match
+ the query.
+
+ Returns:
+ ~.firestore_v1beta1.query.Query: A limited query.
+ """
+ query = query_mod.Query(self)
+ return query.limit(count)
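+
+ # Illustrative sketch (editor's addition, not part of the library): the
+ # builders above compose, since each returns a new Query; the field names
+ # are hypothetical (see also `offset` and the cursor methods below).
+ #
+ # >>> query = (collection
+ # ...     .where('born', '<', 1900)
+ # ...     .order_by('born')
+ # ...     .limit(5))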
+
+ def offset(self, num_to_skip):
+ """Skip to an offset in a query with this collection as parent.
+
+ See
+ :meth:`~google.cloud.firestore_v1beta1.query.Query.offset` for
+ more information on this method.
+
+ Args:
+ num_to_skip (int): The number of results to skip at the beginning
+ of query results. (Must be non-negative.)
+
+ Returns:
+ ~.firestore_v1beta1.query.Query: An offset query.
+ """
+ query = query_mod.Query(self)
+ return query.offset(num_to_skip)
+
+ def start_at(self, document_fields):
+ """Start query at a cursor with this collection as parent.
+
+ See
+ :meth:`~google.cloud.firestore_v1beta1.query.Query.start_at` for
+ more information on this method.
+
+ Args:
+ document_fields (Union[~.firestore_v1beta1.\
+ document.DocumentSnapshot, dict, list, tuple]): a document
+ snapshot or a dictionary/list/tuple of fields representing a
+ query results cursor. A cursor is a collection of values that
+ represent a position in a query result set.
+
+ Returns:
+ ~.firestore_v1beta1.query.Query: A query with cursor.
+ """
+ query = query_mod.Query(self)
+ return query.start_at(document_fields)
+
+ def start_after(self, document_fields):
+ """Start query after a cursor with this collection as parent.
+
+ See
+ :meth:`~google.cloud.firestore_v1beta1.query.Query.start_after` for
+ more information on this method.
+
+ Args:
+ document_fields (Union[~.firestore_v1beta1.\
+ document.DocumentSnapshot, dict, list, tuple]): a document
+ snapshot or a dictionary/list/tuple of fields representing a
+ query results cursor. A cursor is a collection of values that
+ represent a position in a query result set.
+
+ Returns:
+ ~.firestore_v1beta1.query.Query: A query with cursor.
+ """
+ query = query_mod.Query(self)
+ return query.start_after(document_fields)
+
+ def end_before(self, document_fields):
+ """End query before a cursor with this collection as parent.
+
+ See
+ :meth:`~google.cloud.firestore_v1beta1.query.Query.end_before` for
+ more information on this method.
+
+ Args:
+ document_fields (Union[~.firestore_v1beta1.\
+ document.DocumentSnapshot, dict, list, tuple]): a document
+ snapshot or a dictionary/list/tuple of fields representing a
+ query results cursor. A cursor is a collection of values that
+ represent a position in a query result set.
+
+ Returns:
+ ~.firestore_v1beta1.query.Query: A query with cursor.
+ """
+ query = query_mod.Query(self)
+ return query.end_before(document_fields)
+
+ def end_at(self, document_fields):
+ """End query at a cursor with this collection as parent.
+
+ See
+ :meth:`~google.cloud.firestore_v1beta1.query.Query.end_at` for
+ more information on this method.
+
+ Args:
+ document_fields (Union[~.firestore_v1beta1.\
+ document.DocumentSnapshot, dict, list, tuple]): a document
+ snapshot or a dictionary/list/tuple of fields representing a
+ query results cursor. A cursor is a collection of values that
+ represent a position in a query result set.
+
+ Returns:
+ ~.firestore_v1beta1.query.Query: A query with cursor.
+ """
+ query = query_mod.Query(self)
+ return query.end_at(document_fields)
+
+ def get(self, transaction=None):
+ """Deprecated alias for :meth:`stream`."""
+ warnings.warn(
+ "'Collection.get' is deprecated: please use 'Collection.stream' instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return self.stream(transaction=transaction)
+
+ def stream(self, transaction=None):
+ """Read the documents in this collection.
+
+ This sends a ``RunQuery`` RPC and then returns an iterator which
+ consumes each document returned in the stream of ``RunQueryResponse``
+ messages.
+
+ .. note::
+
+ The underlying stream of responses will time out after
+ the ``max_rpc_timeout_millis`` value set in the GAPIC
+ client configuration for the ``RunQuery`` API. Snapshots
+ not consumed from the iterator before that point will be lost.
+
+ If a ``transaction`` is used and it already has write operations
+ added, this method cannot be used (i.e. read-after-write is not
+ allowed).
+
+ Args:
+ transaction (Optional[~.firestore_v1beta1.transaction.\
+ Transaction]): An existing transaction that the query will
+ run in.
+
+ Yields:
+ ~.firestore_v1beta1.document.DocumentSnapshot: The next
+ document that fulfills the query.
+ """
+ query = query_mod.Query(self)
+ return query.stream(transaction=transaction)
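+
+ # Illustrative sketch (editor's addition, not part of the library):
+ # consuming the stream; the collection name is hypothetical.
+ #
+ # >>> for snapshot in client.collection(u'users').stream():
+ # ...     print(u'{} => {}'.format(snapshot.id, snapshot.to_dict()))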
+
+ def on_snapshot(self, callback):
+ """Monitor the documents in this collection.
+
+ This starts a watch on this collection using a background thread. The
+ provided callback is run on the snapshot of the documents.
+
+ Args:
+ callback (~.firestore.collection.CollectionSnapshot): a callback
+ to run when a change occurs.
+
+ Example:
+ from google.cloud import firestore_v1beta1
+
+ db = firestore_v1beta1.Client()
+ collection_ref = db.collection(u'users')
+
+ def on_snapshot(collection_snapshot):
+ for doc in collection_snapshot.documents:
+ print(u'{} => {}'.format(doc.id, doc.to_dict()))
+
+ # Watch this collection
+ collection_watch = collection_ref.on_snapshot(on_snapshot)
+
+ # Terminate this watch
+ collection_watch.unsubscribe()
+ """
+ return Watch.for_query(
+ query_mod.Query(self),
+ callback,
+ document.DocumentSnapshot,
+ document.DocumentReference,
+ )
+
+
+def _auto_id():
+ """Generate a "random" automatically generated ID.
+
+ Returns:
+ str: A 20 character string composed of digits and uppercase and
+ lowercase letters.
+ """
+ return "".join(random.choice(_AUTO_ID_CHARS) for _ in six.moves.xrange(20))
+
+
+def _item_to_document_ref(iterator, item):
+ """Convert Document resource to document ref.
+
+ Args:
+ iterator (google.api_core.page_iterator.GRPCIterator):
+ iterator response
+ item (dict): document resource
+ """
+ document_id = item.name.split(_helpers.DOCUMENT_PATH_DELIMITER)[-1]
+ return iterator.collection.document(document_id)
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/document.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/document.py
new file mode 100644
index 000000000..8efd45255
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/document.py
@@ -0,0 +1,780 @@
+# Copyright 2017 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Classes for representing documents for the Google Cloud Firestore API."""
+
+import copy
+
+import six
+
+from google.api_core import exceptions
+from google.cloud.firestore_v1beta1 import _helpers
+from google.cloud.firestore_v1beta1 import field_path as field_path_module
+from google.cloud.firestore_v1beta1.proto import common_pb2
+from google.cloud.firestore_v1beta1.watch import Watch
+
+
+class DocumentReference(object):
+ """A reference to a document in a Firestore database.
+
+ The document may already exist or can be created by this class.
+
+ Args:
+ path (Tuple[str, ...]): The components in the document path.
+ This is a series of strings representing each collection and
+ sub-collection ID, as well as the document IDs for any documents
+ that contain a sub-collection (as well as the base document).
+ kwargs (dict): The keyword arguments for the constructor. The only
+ supported keyword is ``client`` and it must be a
+ :class:`~google.cloud.firestore_v1beta1.client.Client`.
+ It represents the client that created this document reference.
+
+ Raises:
+ ValueError: if
+
+ * the ``path`` is empty
+ * there is an odd number of elements
+ * a collection ID in ``path`` is not a string
+ * a document ID in ``path`` is not a string
+ TypeError: If a keyword other than ``client`` is used.
+ """
+
+ _document_path_internal = None
+
+ def __init__(self, *path, **kwargs):
+ _helpers.verify_path(path, is_collection=False)
+ self._path = path
+ self._client = kwargs.pop("client", None)
+ if kwargs:
+ raise TypeError(
+ "Received unexpected arguments", kwargs, "Only `client` is supported"
+ )
+
+ def __copy__(self):
+ """Shallow copy the instance.
+
+ We leave the client "as-is" but tuple-unpack the path.
+
+ Returns:
+ .DocumentReference: A copy of the current document.
+ """
+ result = self.__class__(*self._path, client=self._client)
+ result._document_path_internal = self._document_path_internal
+ return result
+
+ def __deepcopy__(self, unused_memo):
+ """Deep copy the instance.
+
+ This isn't a true deep copy: we leave the client "as-is" but
+ tuple-unpack the path.
+
+ Returns:
+ .DocumentReference: A copy of the current document.
+ """
+ return self.__copy__()
+
+ def __eq__(self, other):
+ """Equality check against another instance.
+
+ Args:
+ other (Any): A value to compare against.
+
+ Returns:
+ Union[bool, NotImplementedType]: Indicating if the values are
+ equal.
+ """
+ if isinstance(other, DocumentReference):
+ return self._client == other._client and self._path == other._path
+ else:
+ return NotImplemented
+
+ def __hash__(self):
+ return hash(self._path) + hash(self._client)
+
+ def __ne__(self, other):
+ """Inequality check against another instance.
+
+ Args:
+ other (Any): A value to compare against.
+
+ Returns:
+ Union[bool, NotImplementedType]: Indicating if the values are
+ not equal.
+ """
+ if isinstance(other, DocumentReference):
+ return self._client != other._client or self._path != other._path
+ else:
+ return NotImplemented
+
+ @property
+ def path(self):
+ """Database-relative for this document.
+
+ Returns:
+ str: The document's relative path.
+ """
+ return "/".join(self._path)
+
+ @property
+ def _document_path(self):
+ """Create and cache the full path for this document.
+
+ Of the form:
+
+ ``projects/{project_id}/databases/{database_id}/...
+ documents/{document_path}``
+
+ Returns:
+ str: The full document path.
+
+ Raises:
+ ValueError: If the current document reference has no ``client``.
+ """
+ if self._document_path_internal is None:
+ if self._client is None:
+ raise ValueError("A document reference requires a `client`.")
+ self._document_path_internal = _get_document_path(self._client, self._path)
+
+ return self._document_path_internal
+
+ @property
+ def id(self):
+ """The document identifier (within its collection).
+
+ Returns:
+ str: The last component of the path.
+ """
+ return self._path[-1]
+
+ @property
+ def parent(self):
+ """Collection that owns the current document.
+
+ Returns:
+ ~.firestore_v1beta1.collection.CollectionReference: The
+ parent collection.
+ """
+ parent_path = self._path[:-1]
+ return self._client.collection(*parent_path)
+
+ def collection(self, collection_id):
+ """Create a sub-collection underneath the current document.
+
+ Args:
+ collection_id (str): The sub-collection identifier (sometimes
+ referred to as the "kind").
+
+ Returns:
+ ~.firestore_v1beta1.collection.CollectionReference: The
+ child collection.
+ """
+ child_path = self._path + (collection_id,)
+ return self._client.collection(*child_path)
+
+ def create(self, document_data):
+ """Create the current document in the Firestore database.
+
+ Args:
+ document_data (dict): Property names and values to use for
+ creating a document.
+
+ Returns:
+ google.cloud.firestore_v1beta1.types.WriteResult: The
+ write result corresponding to the committed document. A write
+ result contains an ``update_time`` field.
+
+ Raises:
+ ~google.cloud.exceptions.Conflict: If the document already exists.
+ """
+ batch = self._client.batch()
+ batch.create(self, document_data)
+ write_results = batch.commit()
+ return _first_write_result(write_results)
+
+ def set(self, document_data, merge=False):
+ """Replace the current document in the Firestore database.
+
+ If the document doesn't exist yet, this method creates it.
+
+ Overwrites all content for the document with the fields in
+ ``document_data``, unless ``merge`` is used, in which case only
+ the provided fields are written. This method performs almost the
+ same functionality as :meth:`create`. The only difference is that
+ this method doesn't make any requirements on the existence of the
+ document, whereas :meth:`create` will fail if the document
+ already exists.
+
+ Args:
+ document_data (dict): Property names and values to use for
+ replacing a document.
+ merge (Optional[bool] or Optional[List]):
+ If True, merge ``document_data`` into the existing document
+ instead of overwriting it; if a list of field paths, merge
+ only the named fields.
+
+ Returns:
+ google.cloud.firestore_v1beta1.types.WriteResult: The
+ write result corresponding to the committed document. A write
+ result contains an ``update_time`` field.
+ """
+ batch = self._client.batch()
+ batch.set(self, document_data, merge=merge)
+ write_results = batch.commit()
+ return _first_write_result(write_results)
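+
+ # Illustrative sketch (editor's addition, not part of the library):
+ # overwrite vs. merge semantics of `set`; the data is hypothetical.
+ #
+ # >>> doc_ref.set({'born': 1815})              # replaces the whole document
+ # >>> doc_ref.set({'died': 1852}, merge=True)  # keeps 'born', adds 'died'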
+
+ def update(self, field_updates, option=None):
+ """Update an existing document in the Firestore database.
+
+ By default, this method verifies that the document exists on the
+ server before making updates. A write ``option`` can be specified to
+ override these preconditions.
+
+ Each key in ``field_updates`` can either be a field name or a
+ **field path** (For more information on **field paths**, see
+ :meth:`~google.cloud.firestore_v1beta1.client.Client.field_path`.) To
+ illustrate this, consider a document with
+
+ .. code-block:: python
+
+ >>> snapshot = document.get()
+ >>> snapshot.to_dict()
+ {
+ 'foo': {
+ 'bar': 'baz',
+ },
+ 'other': True,
+ }
+
+ stored on the server. If the field name is used in the update:
+
+ .. code-block:: python
+
+ >>> field_updates = {
+ ... 'foo': {
+ ... 'quux': 800,
+ ... },
+ ... }
+ >>> document.update(field_updates)
+
+ then all of ``foo`` will be overwritten on the server and the new
+ value will be
+
+ .. code-block:: python
+
+ >>> snapshot = document.get()
+ >>> snapshot.to_dict()
+ {
+ 'foo': {
+ 'quux': 800,
+ },
+ 'other': True,
+ }
+
+ On the other hand, if a ``.``-delimited **field path** is used in the
+ update:
+
+ .. code-block:: python
+
+ >>> field_updates = {
+ ... 'foo.quux': 800,
+ ... }
+ >>> document.update(field_updates)
+
+ then only ``foo.quux`` will be updated on the server and the
+ field ``foo.bar`` will remain intact:
+
+ .. code-block:: python
+
+ >>> snapshot = document.get()
+ >>> snapshot.to_dict()
+ {
+ 'foo': {
+ 'bar': 'baz',
+ 'quux': 800,
+ },
+ 'other': True,
+ }
+
+ .. warning::
+
+ A **field path** can only be used as a top-level key in
+ ``field_updates``.
+
+ To delete / remove a field from an existing document, use the
+ :attr:`~google.cloud.firestore_v1beta1.transforms.DELETE_FIELD`
+ sentinel. So with the example above, sending
+
+ .. code-block:: python
+
+ >>> field_updates = {
+ ... 'other': firestore.DELETE_FIELD,
+ ... }
+ >>> document.update(field_updates)
+
+ would update the value on the server to:
+
+ .. code-block:: python
+
+ >>> snapshot = document.get()
+ >>> snapshot.to_dict()
+ {
+ 'foo': {
+ 'bar': 'baz',
+ },
+ }
+
+ To set a field to the current time on the server when the
+ update is received, use the
+ :attr:`~google.cloud.firestore_v1beta1.transforms.SERVER_TIMESTAMP`
+ sentinel. Sending
+
+ .. code-block:: python
+
+ >>> field_updates = {
+ ... 'foo.now': firestore.SERVER_TIMESTAMP,
+ ... }
+ >>> document.update(field_updates)
+
+ would update the value on the server to:
+
+ .. code-block:: python
+
+ >>> snapshot = document.get()
+ >>> snapshot.to_dict()
+ {
+ 'foo': {
+ 'bar': 'baz',
+ 'now': datetime.datetime(2012, ...),
+ },
+ 'other': True,
+ }
+
+ Args:
+ field_updates (dict): Field names or paths to update and values
+ to update with.
+ option (Optional[~.firestore_v1beta1.client.WriteOption]): A
+ write option to make assertions / preconditions on the server
+ state of the document before applying changes.
+
+ Returns:
+ google.cloud.firestore_v1beta1.types.WriteResult: The
+ write result corresponding to the updated document. A write
+ result contains an ``update_time`` field.
+
+ Raises:
+ ~google.cloud.exceptions.NotFound: If the document does not exist.
+ """
+ batch = self._client.batch()
+ batch.update(self, field_updates, option=option)
+ write_results = batch.commit()
+ return _first_write_result(write_results)
+
+ def delete(self, option=None):
+ """Delete the current document in the Firestore database.
+
+ Args:
+ option (Optional[~.firestore_v1beta1.client.WriteOption]): A
+ write option to make assertions / preconditions on the server
+ state of the document before applying changes.
+
+ Returns:
+ google.protobuf.timestamp_pb2.Timestamp: The time that the delete
+ request was received by the server. If the document did not exist
+ when the delete was sent (i.e. nothing was deleted), this method
+ will still succeed and will still return the time that the
+ request was received by the server.
+ """
+ write_pb = _helpers.pb_for_delete(self._document_path, option)
+ commit_response = self._client._firestore_api.commit(
+ self._client._database_string,
+ [write_pb],
+ transaction=None,
+ metadata=self._client._rpc_metadata,
+ )
+
+ return commit_response.commit_time
+
+ def get(self, field_paths=None, transaction=None):
+ """Retrieve a snapshot of the current document.
+
+ See :meth:`~google.cloud.firestore_v1beta1.client.Client.field_path`
+ for more information on **field paths**.
+
+ If a ``transaction`` is used and it already has write operations
+ added, this method cannot be used (i.e. read-after-write is not
+ allowed).
+
+ Args:
+ field_paths (Optional[Iterable[str, ...]]): An iterable of field
+ paths (``.``-delimited list of field names) to use as a
+ projection of document fields in the returned results. If
+ no value is provided, all fields will be returned.
+ transaction (Optional[~.firestore_v1beta1.transaction.\
+ Transaction]): An existing transaction that this reference
+ will be retrieved in.
+
+ Returns:
+ ~.firestore_v1beta1.document.DocumentSnapshot: A snapshot of
+ the current document. If the document does not exist at
+ the time of `snapshot`, the snapshot `reference`, `data`,
+ `update_time`, and `create_time` attributes will all be
+ `None` and `exists` will be `False`.
+ """
+ if isinstance(field_paths, six.string_types):
+ raise ValueError("'field_paths' must be a sequence of paths, not a string.")
+
+ if field_paths is not None:
+ mask = common_pb2.DocumentMask(field_paths=sorted(field_paths))
+ else:
+ mask = None
+
+ firestore_api = self._client._firestore_api
+ try:
+ document_pb = firestore_api.get_document(
+ self._document_path,
+ mask=mask,
+ transaction=_helpers.get_transaction_id(transaction),
+ metadata=self._client._rpc_metadata,
+ )
+ except exceptions.NotFound:
+ data = None
+ exists = False
+ create_time = None
+ update_time = None
+ else:
+ data = _helpers.decode_dict(document_pb.fields, self._client)
+ exists = True
+ create_time = document_pb.create_time
+ update_time = document_pb.update_time
+
+ return DocumentSnapshot(
+ reference=self,
+ data=data,
+ exists=exists,
+ read_time=None, # No server read_time available
+ create_time=create_time,
+ update_time=update_time,
+ )
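+
+ # Illustrative sketch (editor's addition, not part of the library):
+ # fetching a projection of a document; the field name is hypothetical.
+ #
+ # >>> snapshot = doc_ref.get(field_paths=['born'])
+ # >>> snapshot.to_dict() if snapshot.exists else None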
+
+ def collections(self, page_size=None):
+ """List subcollections of the current document.
+
+ Args:
+ page_size (Optional[int]): The maximum number of collections
+ in each page of results from this request. Non-positive values
+ are ignored. Defaults to a sensible value set by the API.
+
+ Returns:
+ Sequence[~.firestore_v1beta1.collection.CollectionReference]:
+ iterator of subcollections of the current document. If the
+ document does not exist at the time of the call, the
+ iterator will be empty.
+ """
+ iterator = self._client._firestore_api.list_collection_ids(
+ self._document_path,
+ page_size=page_size,
+ metadata=self._client._rpc_metadata,
+ )
+ iterator.document = self
+ iterator.item_to_value = _item_to_collection_ref
+ return iterator
+
+ def on_snapshot(self, callback):
+ """Watch this document.
+
+ This starts a watch on this document using a background thread. The
+ provided callback is run on the snapshot.
+
+ Args:
+ callback (~.firestore.document.DocumentSnapshot): a callback to run
+ when a change occurs.
+
+ Example:
+ from google.cloud import firestore_v1beta1
+
+ db = firestore_v1beta1.Client()
+ collection_ref = db.collection(u'users')
+
+ def on_snapshot(document_snapshot):
+ doc = document_snapshot
+ print(u'{} => {}'.format(doc.id, doc.to_dict()))
+
+ doc_ref = db.collection(u'users').document(
+ u'alovelace' + unique_resource_id())
+
+ # Watch this document
+ doc_watch = doc_ref.on_snapshot(on_snapshot)
+
+ # Terminate this watch
+ doc_watch.unsubscribe()
+ """
+ return Watch.for_document(self, callback, DocumentSnapshot, DocumentReference)
+
+
+class DocumentSnapshot(object):
+ """A snapshot of document data in a Firestore database.
+
+ This represents data retrieved at a specific time and may not contain
+ all fields stored for the document (i.e. a hand-picked selection of
+ fields may have been retrieved).
+
+ Instances of this class are not intended to be constructed by hand,
+ rather they'll be returned as responses to various methods, such as
+ :meth:`~google.cloud.DocumentReference.get`.
+
+ Args:
+ reference (~.firestore_v1beta1.document.DocumentReference): A
+ document reference corresponding to the document that contains
+ the data in this snapshot.
+ data (Dict[str, Any]): The data retrieved in the snapshot.
+ exists (bool): Indicates if the document existed at the time the
+ snapshot was retrieved.
+ read_time (google.protobuf.timestamp_pb2.Timestamp): The time that
+ this snapshot was read from the server.
+ create_time (google.protobuf.timestamp_pb2.Timestamp): The time that
+ this document was created.
+ update_time (google.protobuf.timestamp_pb2.Timestamp): The time that
+ this document was last updated.
+ """
+
+ def __init__(self, reference, data, exists, read_time, create_time, update_time):
+ self._reference = reference
+ # We want immutable data, so callers can't modify this value
+ # out from under us.
+ self._data = copy.deepcopy(data)
+ self._exists = exists
+ self.read_time = read_time
+ """google.protobuf.timestamp_pb2.Timestamp: Time snapshot was read."""
+ self.create_time = create_time
+ """google.protobuf.timestamp_pb2.Timestamp: Document's creation."""
+ self.update_time = update_time
+ """google.protobuf.timestamp_pb2.Timestamp: Document's last update."""
+
+ def __eq__(self, other):
+ if not isinstance(other, self.__class__):
+ return NotImplemented
+ return self._reference == other._reference and self._data == other._data
+
+ def __hash__(self):
+ seconds = self.update_time.seconds
+ nanos = self.update_time.nanos
+ return hash(self._reference) + hash(seconds) + hash(nanos)
+
+ @property
+ def _client(self):
+ """The client that owns the document reference for this snapshot.
+
+ Returns:
+ ~.firestore_v1beta1.client.Client: The client that owns this
+ document.
+ """
+ return self._reference._client
+
+ @property
+ def exists(self):
+ """Existence flag.
+
+ Indicates if the document existed at the time this snapshot
+ was retrieved.
+
+ Returns:
+ bool: The existence flag.
+ """
+ return self._exists
+
+ @property
+ def id(self):
+ """The document identifier (within its collection).
+
+ Returns:
+ str: The last component of the path of the document.
+ """
+ return self._reference.id
+
+ @property
+ def reference(self):
+ """Document reference corresponding to document that owns this data.
+
+ Returns:
+ ~.firestore_v1beta1.document.DocumentReference: A document
+ reference corresponding to this document.
+ """
+ return self._reference
+
+ def get(self, field_path):
+ """Get a value from the snapshot data.
+
+ If the data is nested, for example:
+
+ .. code-block:: python
+
+ >>> snapshot.to_dict()
+ {
+ 'top1': {
+ 'middle2': {
+ 'bottom3': 20,
+ 'bottom4': 22,
+ },
+ 'middle5': True,
+ },
+ 'top6': b'\x00\x01 foo',
+ }
+
+ a **field path** can be used to access the nested data. For
+ example:
+
+ .. code-block:: python
+
+ >>> snapshot.get('top1')
+ {
+ 'middle2': {
+ 'bottom3': 20,
+ 'bottom4': 22,
+ },
+ 'middle5': True,
+ }
+ >>> snapshot.get('top1.middle2')
+ {
+ 'bottom3': 20,
+ 'bottom4': 22,
+ }
+ >>> snapshot.get('top1.middle2.bottom3')
+ 20
+
+ See :meth:`~google.cloud.firestore_v1beta1.client.Client.field_path`
+ for more information on **field paths**.
+
+ A copy is returned since the data may contain mutable values,
+ but the data stored in the snapshot must remain immutable.
+
+ Args:
+ field_path (str): A field path (``.``-delimited list of
+ field names).
+
+ Returns:
+ Any or None:
+ (A copy of) the value stored for the ``field_path`` or
+ None if the snapshot's document does not exist.
+
+ Raises:
+ KeyError: If the ``field_path`` does not match nested data
+ in the snapshot.
+ """
+ if not self._exists:
+ return None
+ nested_data = field_path_module.get_nested_value(field_path, self._data)
+ return copy.deepcopy(nested_data)
+
+ def to_dict(self):
+ """Retrieve the data contained in this snapshot.
+
+ A copy is returned since the data may contain mutable values,
+ but the data stored in the snapshot must remain immutable.
+
+ Returns:
+ Dict[str, Any] or None:
+ The data in the snapshot. Returns None if the referenced
+ document does not exist.
+ """
+ if not self._exists:
+ return None
+ return copy.deepcopy(self._data)
+
+
+def _get_document_path(client, path):
+ """Convert a path tuple into a full path string.
+
+ Of the form:
+
+ ``projects/{project_id}/databases/{database_id}/...
+ documents/{document_path}``
+
+ Args:
+ client (~.firestore_v1beta1.client.Client): The client that holds
+ configuration details and a GAPIC client object.
+ path (Tuple[str, ...]): The components in a document path.
+
+ Returns:
+ str: The fully-qualified document path.
+ """
+ parts = (client._database_string, "documents") + path
+ return _helpers.DOCUMENT_PATH_DELIMITER.join(parts)
+
+
+def _consume_single_get(response_iterator):
+ """Consume a gRPC stream that should contain a single response.
+
+ The stream will correspond to a ``BatchGetDocuments`` request made
+ for a single document.
+
+ Args:
+ response_iterator (~google.cloud.exceptions.GrpcRendezvous): A
+ streaming iterator returned from a ``BatchGetDocuments``
+ request.
+
+ Returns:
+ ~google.cloud.proto.firestore.v1beta1.\
+ firestore_pb2.BatchGetDocumentsResponse: The single "get"
+ response in the batch.
+
+ Raises:
+ ValueError: If anything other than exactly one response is returned.
+ """
+ # Calling ``list()`` consumes the entire iterator.
+ all_responses = list(response_iterator)
+ if len(all_responses) != 1:
+ raise ValueError(
+ "Unexpected response from `BatchGetDocumentsResponse`",
+ all_responses,
+ "Expected only one result",
+ )
+
+ return all_responses[0]
+
+
+def _first_write_result(write_results):
+ """Get first write result from list.
+
+ For cases where ``len(write_results) > 1``, this assumes the writes
+ occurred at the same time (e.g. if an update and transform are sent
+ at the same time).
+
+ Args:
+ write_results (List[google.cloud.proto.firestore.v1beta1.\
+ write_pb2.WriteResult, ...]): The write results from a
+ ``CommitResponse``.
+
+ Returns:
+ google.cloud.firestore_v1beta1.types.WriteResult: The
+ lone write result from ``write_results``.
+
+ Raises:
+ ValueError: If there are zero write results. This is likely to
+ **never** occur, since the backend should be stable.
+ """
+ if not write_results:
+ raise ValueError("Expected at least one write result")
+
+ return write_results[0]
+
+
+def _item_to_collection_ref(iterator, item):
+ """Convert collection ID to collection ref.
+
+ Args:
+ iterator (google.api_core.page_iterator.GRPCIterator):
+ iterator response
+ item (str): ID of the collection
+ """
+ return iterator.document.collection(item)
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/field_path.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/field_path.py
new file mode 100644
index 000000000..1570aefb5
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/field_path.py
@@ -0,0 +1,386 @@
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Utilities for managing / converting field paths to / from strings."""
+
+try:
+ from collections import abc as collections_abc
+except ImportError: # Python 2.7
+ import collections as collections_abc
+
+import re
+
+import six
+
+
+_FIELD_PATH_MISSING_TOP = "{!r} is not contained in the data"
+_FIELD_PATH_MISSING_KEY = "{!r} is not contained in the data for the key {!r}"
+_FIELD_PATH_WRONG_TYPE = (
+ "The data at {!r} is not a dictionary, so it cannot contain the key {!r}"
+)
+
+_FIELD_PATH_DELIMITER = "."
+_BACKSLASH = "\\"
+_ESCAPED_BACKSLASH = _BACKSLASH * 2
+_BACKTICK = "`"
+_ESCAPED_BACKTICK = _BACKSLASH + _BACKTICK
+
+_SIMPLE_FIELD_NAME = re.compile("^[_a-zA-Z][_a-zA-Z0-9]*$")
+_LEADING_ALPHA_INVALID = re.compile("^[_a-zA-Z][_a-zA-Z0-9]*[^_a-zA-Z0-9]")
+PATH_ELEMENT_TOKENS = [
+ ("SIMPLE", r"[_a-zA-Z][_a-zA-Z0-9]*"), # unquoted elements
+ ("QUOTED", r"`(?:\\`|[^`])*?`"), # quoted elements, unquoted
+ ("DOT", r"\."), # separator
+]
+TOKENS_PATTERN = "|".join("(?P<{}>{})".format(*pair) for pair in PATH_ELEMENT_TOKENS)
+TOKENS_REGEX = re.compile(TOKENS_PATTERN)
+
+
+def _tokenize_field_path(path):
+ """Lex a field path into tokens (including dots).
+
+ Args:
+ path (str): field path to be lexed.
+ Yields:
+ str: The next token (an element or a ``.`` separator).
+ """
+ pos = 0
+ get_token = TOKENS_REGEX.match
+ match = get_token(path)
+ while match is not None:
+ type_ = match.lastgroup
+ value = match.group(type_)
+ yield value
+ pos = match.end()
+ match = get_token(path, pos)
+ if pos != len(path):
+ raise ValueError("Path {} not consumed, residue: {}".format(path, path[pos:]))
+
+
+def split_field_path(path):
+ """Split a field path into valid elements (without dots).
+
+ Args:
+ path (str): field path to be lexed.
+ Returns:
+ List[str]: The path elements (with the ``.`` separators removed).
+ Raises:
+ ValueError: if the path does not match the elements-interspersed-
+ with-dots pattern.
+ """
+ if not path:
+ return []
+
+ elements = []
+ want_dot = False
+
+ for element in _tokenize_field_path(path):
+ if want_dot:
+ if element != ".":
+ raise ValueError("Invalid path: {}".format(path))
+ else:
+ want_dot = False
+ else:
+ if element == ".":
+ raise ValueError("Invalid path: {}".format(path))
+ elements.append(element)
+ want_dot = True
+
+ if not want_dot or not elements:
+ raise ValueError("Invalid path: {}".format(path))
+
+ return elements
+
+
+def parse_field_path(api_repr):
+ """Parse a **field path** from into a list of nested field names.
+
+ See :func:`field_path` for more on **field paths**.
+
+ Args:
+ api_repr (str):
+ The unique Firestore api representation which consists of
+ either simple or UTF-8 field names. It cannot exceed
+ 1500 bytes, and cannot be empty. Simple field names match
+ ``'^[_a-zA-Z][_a-zA-Z0-9]*$'``. All other field names are
+ escaped by surrounding them with backticks.
+
+ Returns:
+ List[str, ...]: The list of field names in the field path.
+ """
+ # code dredged back up from
+ # https://github.com/googleapis/google-cloud-python/pull/5109/files
+ field_names = []
+ for field_name in split_field_path(api_repr):
+ # non-simple field name
+ if field_name[0] == "`" and field_name[-1] == "`":
+ field_name = field_name[1:-1]
+ field_name = field_name.replace(_ESCAPED_BACKTICK, _BACKTICK)
+ field_name = field_name.replace(_ESCAPED_BACKSLASH, _BACKSLASH)
+ field_names.append(field_name)
+ return field_names
+
+
+def render_field_path(field_names):
+ """Create a **field path** from a list of nested field names.
+
+ A **field path** is a ``.``-delimited concatenation of the field
+ names. It is used to represent a nested field. For example,
+ in the data
+
+ .. code-block:: python
+
+ data = {
+ 'aa': {
+ 'bb': {
+ 'cc': 10,
+ },
+ },
+ }
+
+ the field path ``'aa.bb.cc'`` represents the data stored in
+ ``data['aa']['bb']['cc']``.
+
+ Args:
+ field_names (Iterable[str, ...]): The list of field names.
+
+ Returns:
+ str: The ``.``-delimited field path.
+ """
+ result = []
+
+ for field_name in field_names:
+ match = _SIMPLE_FIELD_NAME.match(field_name)
+ if match and match.group(0) == field_name:
+ result.append(field_name)
+ else:
+ replaced = field_name.replace(_BACKSLASH, _ESCAPED_BACKSLASH).replace(
+ _BACKTICK, _ESCAPED_BACKTICK
+ )
+ result.append(_BACKTICK + replaced + _BACKTICK)
+
+ return _FIELD_PATH_DELIMITER.join(result)
+
+
+get_field_path = render_field_path # backward-compatibility
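+
+# Illustrative sketch (editor's addition, not part of the library): the two
+# helpers above are inverses of each other; the field names are hypothetical.
+#
+# >>> render_field_path(['data', 'odd.key'])
+# 'data.`odd.key`'
+# >>> parse_field_path('data.`odd.key`')
+# ['data', 'odd.key']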
+
+
+def get_nested_value(field_path, data):
+ """Get a (potentially nested) value from a dictionary.
+
+ If the data is nested, for example:
+
+ .. code-block:: python
+
+ >>> data
+ {
+ 'top1': {
+ 'middle2': {
+ 'bottom3': 20,
+ 'bottom4': 22,
+ },
+ 'middle5': True,
+ },
+ 'top6': b'\x00\x01 foo',
+ }
+
+ a **field path** can be used to access the nested data. For
+ example:
+
+ .. code-block:: python
+
+ >>> get_nested_value('top1', data)
+ {
+ 'middle2': {
+ 'bottom3': 20,
+ 'bottom4': 22,
+ },
+ 'middle5': True,
+ }
+ >>> get_nested_value('top1.middle2', data)
+ {
+ 'bottom3': 20,
+ 'bottom4': 22,
+ }
+ >>> get_nested_value('top1.middle2.bottom3', data)
+ 20
+
+ See :meth:`~google.cloud.firestore_v1beta1.client.Client.field_path` for
+ more information on **field paths**.
+
+ Args:
+ field_path (str): A field path (``.``-delimited list of
+ field names).
+ data (Dict[str, Any]): The (possibly nested) data.
+
+ Returns:
+ Any: (A copy of) the value stored for the ``field_path``.
+
+ Raises:
+ KeyError: If the ``field_path`` does not match nested data.
+ """
+ field_names = parse_field_path(field_path)
+
+ nested_data = data
+ for index, field_name in enumerate(field_names):
+ if isinstance(nested_data, collections_abc.Mapping):
+ if field_name in nested_data:
+ nested_data = nested_data[field_name]
+ else:
+ if index == 0:
+ msg = _FIELD_PATH_MISSING_TOP.format(field_name)
+ raise KeyError(msg)
+ else:
+ partial = render_field_path(field_names[:index])
+ msg = _FIELD_PATH_MISSING_KEY.format(field_name, partial)
+ raise KeyError(msg)
+ else:
+ partial = render_field_path(field_names[:index])
+ msg = _FIELD_PATH_WRONG_TYPE.format(partial, field_name)
+ raise KeyError(msg)
+
+ return nested_data
+
+
+class FieldPath(object):
+ """Field Path object for client use.
+
+ A field path is a sequence of element keys, separated by periods.
+ Each element key can be either a simple identifier, or a full unicode
+ string.
+
+ In the string representation of a field path, non-identifier elements
+ must be quoted using backticks, with internal backticks and backslashes
+ escaped with a backslash.
+
+ Args:
+ parts (Tuple[str, ...]): One or more strings giving the path of
+ the key to be used.
+ """
+
+ def __init__(self, *parts):
+ for part in parts:
+ if not isinstance(part, six.string_types) or not part:
+ error = "One or more components is not a string or is empty."
+ raise ValueError(error)
+ self.parts = tuple(parts)
+
+ @classmethod
+ def from_api_repr(cls, api_repr):
+ """Factory: create a FieldPath from the string formatted per the API.
+
+ Args:
+ api_repr (str): a string path, with non-identifier elements quoted
+ It cannot exceed 1500 characters, and cannot be empty.
+ Returns:
+ (:class:`FieldPath`) An instance parsed from ``api_repr``.
+ Raises:
+ ValueError: If the parsing fails.
+ """
+ api_repr = api_repr.strip()
+ if not api_repr:
+ raise ValueError("Field path API representation cannot be empty.")
+ return cls(*parse_field_path(api_repr))
+
+ @classmethod
+ def from_string(cls, path_string):
+ """Factory: create a FieldPath from a unicode string representation.
+
+ This method splits on the character `.` and disallows the
+ characters `~*/[]`. To create a FieldPath whose components have
+ those characters, call the constructor.
+
+ Args:
+ path_string (str): A unicode string which cannot contain
+ `~*/[]` characters, cannot exceed 1500 bytes, and cannot be empty.
+
+ Returns:
+ (:class:`FieldPath`) An instance parsed from ``path_string``.
+ """
+ try:
+ return cls.from_api_repr(path_string)
+ except ValueError:
+ elements = path_string.split(".")
+ for element in elements:
+ if not element:
+ raise ValueError("Empty element")
+ if _LEADING_ALPHA_INVALID.match(element):
+ raise ValueError(
+ "Non-alphanum char in element with leading alpha: {}".format(
+ element
+ )
+ )
+ return FieldPath(*elements)
+
+ def __repr__(self):
+ paths = ",".join("'" + part + "'" for part in self.parts)
+ return "FieldPath({})".format(paths)
+
+ def __hash__(self):
+ return hash(self.to_api_repr())
+
+ def __eq__(self, other):
+ if isinstance(other, FieldPath):
+ return self.parts == other.parts
+ return NotImplemented
+
+ def __lt__(self, other):
+ if isinstance(other, FieldPath):
+ return self.parts < other.parts
+ return NotImplemented
+
+ def __add__(self, other):
+ """Adds `other` field path to end of this field path.
+
+ Args:
+ other (~google.cloud.firestore_v1beta1._helpers.FieldPath, str):
+ The field path to add to the end of this `FieldPath`.
+ """
+ if isinstance(other, FieldPath):
+ parts = self.parts + other.parts
+ return FieldPath(*parts)
+ elif isinstance(other, six.string_types):
+ parts = self.parts + FieldPath.from_string(other).parts
+ return FieldPath(*parts)
+ else:
+ return NotImplemented
+
+ def to_api_repr(self):
+ """Render a quoted string representation of the FieldPath
+
+ Returns:
+ (str) Quoted string representation of the path stored
+ within this FieldPath.
+ """
+ return render_field_path(self.parts)
+
+ def eq_or_parent(self, other):
+ """Check whether ``other`` is an ancestor.
+
+ Returns:
+ (bool) True IFF ``other`` is an ancestor or equal to ``self``,
+ else False.
+ """
+ return self.parts[: len(other.parts)] == other.parts[: len(self.parts)]
+
+ def lineage(self):
+ """Return field paths for all parents.
+
+ Returns: Set[:class:`FieldPath`]
+ """
+ indexes = six.moves.range(1, len(self.parts))
+ return {FieldPath(*self.parts[:index]) for index in indexes}
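+
+# Illustrative sketch (editor's addition, not part of the library): basic
+# FieldPath usage; the parts are hypothetical.
+#
+# >>> fp = FieldPath('data', 'odd.key')
+# >>> fp.to_api_repr()
+# 'data.`odd.key`'
+# >>> FieldPath.from_string('a.b.c').parts
+# ('a', 'b', 'c')
+# >>> sorted(FieldPath('a', 'b', 'c').lineage())
+# [FieldPath('a'), FieldPath('a','b')]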
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/gapic/__init__.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/gapic/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/gapic/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/gapic/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..dc57c21c0
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/gapic/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/gapic/__pycache__/enums.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/gapic/__pycache__/enums.cpython-36.pyc
new file mode 100644
index 000000000..4432e7476
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/gapic/__pycache__/enums.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/gapic/__pycache__/firestore_client.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/gapic/__pycache__/firestore_client.cpython-36.pyc
new file mode 100644
index 000000000..57ca3bf9b
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/gapic/__pycache__/firestore_client.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/gapic/__pycache__/firestore_client_config.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/gapic/__pycache__/firestore_client_config.cpython-36.pyc
new file mode 100644
index 000000000..aaf496fcc
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/gapic/__pycache__/firestore_client_config.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/gapic/enums.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/gapic/enums.py
new file mode 100644
index 000000000..69b8c5d06
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/gapic/enums.py
@@ -0,0 +1,156 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Wrappers for protocol buffer enum types."""
+
+import enum
+
+
+class NullValue(enum.IntEnum):
+ """
+ ``NullValue`` is a singleton enumeration to represent the null value
+ for the ``Value`` type union.
+
+ The JSON representation for ``NullValue`` is JSON ``null``.
+
+ Attributes:
+ NULL_VALUE (int): Null value.
+ """
+
+ NULL_VALUE = 0
+
+
+class DocumentTransform(object):
+ class FieldTransform(object):
+ class ServerValue(enum.IntEnum):
+ """
+ A value that is calculated by the server.
+
+ Attributes:
+ SERVER_VALUE_UNSPECIFIED (int): Unspecified. This value must not be used.
+ REQUEST_TIME (int): The time at which the server processed the request, with millisecond
+ precision.
+ """
+
+ SERVER_VALUE_UNSPECIFIED = 0
+ REQUEST_TIME = 1
+
+
+class StructuredQuery(object):
+ class Direction(enum.IntEnum):
+ """
+ A sort direction.
+
+ Attributes:
+ DIRECTION_UNSPECIFIED (int): Unspecified.
+ ASCENDING (int): Ascending.
+ DESCENDING (int): Descending.
+ """
+
+ DIRECTION_UNSPECIFIED = 0
+ ASCENDING = 1
+ DESCENDING = 2
+
+ class CompositeFilter(object):
+ class Operator(enum.IntEnum):
+ """
+ A composite filter operator.
+
+ Attributes:
+ OPERATOR_UNSPECIFIED (int): Unspecified. This value must not be used.
+ AND (int): The results are required to satisfy each of the combined filters.
+ """
+
+ OPERATOR_UNSPECIFIED = 0
+ AND = 1
+
+ class FieldFilter(object):
+ class Operator(enum.IntEnum):
+ """
+ A field filter operator.
+
+ Attributes:
+ OPERATOR_UNSPECIFIED (int): Unspecified. This value must not be used.
+ LESS_THAN (int): Less than. Requires that the field come first in ``order_by``.
+ LESS_THAN_OR_EQUAL (int): Less than or equal. Requires that the field come first in
+ ``order_by``.
+ GREATER_THAN (int): Greater than. Requires that the field come first in ``order_by``.
+ GREATER_THAN_OR_EQUAL (int): Greater than or equal. Requires that the field come first in
+ ``order_by``.
+ EQUAL (int): Equal.
+ ARRAY_CONTAINS (int): Contains. Requires that the field is an array.
+ IN (int): In. Requires that ``value`` is a non-empty ArrayValue with at most
+ 10 values.
+ ARRAY_CONTAINS_ANY (int): Contains any. Requires that the field is an array and ``value`` is a
+ non-empty ArrayValue with at most 10 values.
+ """
+
+ OPERATOR_UNSPECIFIED = 0
+ LESS_THAN = 1
+ LESS_THAN_OR_EQUAL = 2
+ GREATER_THAN = 3
+ GREATER_THAN_OR_EQUAL = 4
+ EQUAL = 5
+ ARRAY_CONTAINS = 7
+ IN = 8
+ ARRAY_CONTAINS_ANY = 9
+
+ class UnaryFilter(object):
+ class Operator(enum.IntEnum):
+ """
+ A unary operator.
+
+ Attributes:
+ OPERATOR_UNSPECIFIED (int): Unspecified. This value must not be used.
+ IS_NAN (int): Test if a field is equal to NaN.
+ IS_NULL (int): Test if an expression evaluates to Null.
+ """
+
+ OPERATOR_UNSPECIFIED = 0
+ IS_NAN = 2
+ IS_NULL = 3
+
+
+class TargetChange(object):
+ class TargetChangeType(enum.IntEnum):
+ """
+ The type of change.
+
+ Attributes:
+ NO_CHANGE (int): No change has occurred. Used only to send an updated
+ ``resume_token``.
+ ADD (int): The targets have been added.
+ REMOVE (int): The targets have been removed.
+ CURRENT (int): The targets reflect all changes committed before the targets were
+ added to the stream.
+
+ This will be sent after or with a ``read_time`` that is greater than or
+ equal to the time at which the targets were added.
+
+ Listeners can wait for this change if read-after-write semantics are
+ desired.
+ RESET (int): The targets have been reset, and a new initial state for the targets
+ will be returned in subsequent changes.
+
+ After the initial state is complete, ``CURRENT`` will be returned even
+ if the target was previously indicated to be ``CURRENT``.
+ """
+
+ NO_CHANGE = 0
+ ADD = 1
+ REMOVE = 2
+ CURRENT = 3
+ RESET = 4
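+
+
+# Usage sketch (illustrative, not part of the generated file): these IntEnum
+# wrappers interoperate with the raw protobuf enum fields, e.g.:
+#
+# from google.cloud.firestore_v1beta1.gapic import enums
+# from google.cloud.firestore_v1beta1.proto import document_pb2, query_pb2
+#
+# f = query_pb2.StructuredQuery.FieldFilter(
+# field=query_pb2.StructuredQuery.FieldReference(field_path="age"),
+# op=enums.StructuredQuery.FieldFilter.Operator.GREATER_THAN,
+# value=document_pb2.Value(integer_value=18),
+# )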
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/gapic/firestore_client.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/gapic/firestore_client.py
new file mode 100644
index 000000000..b222ded19
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/gapic/firestore_client.py
@@ -0,0 +1,1457 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Accesses the google.firestore.v1beta1 Firestore API."""
+
+import functools
+import pkg_resources
+import warnings
+
+from google.oauth2 import service_account
+import google.api_core.client_options
+import google.api_core.gapic_v1.client_info
+import google.api_core.gapic_v1.config
+import google.api_core.gapic_v1.method
+import google.api_core.gapic_v1.routing_header
+import google.api_core.grpc_helpers
+import google.api_core.page_iterator
+import google.api_core.path_template
+import google.api_core.protobuf_helpers
+import grpc
+
+from google.cloud.firestore_v1beta1.gapic import enums
+from google.cloud.firestore_v1beta1.gapic import firestore_client_config
+from google.cloud.firestore_v1beta1.gapic.transports import firestore_grpc_transport
+from google.cloud.firestore_v1beta1.proto import common_pb2
+from google.cloud.firestore_v1beta1.proto import document_pb2
+from google.cloud.firestore_v1beta1.proto import firestore_pb2
+from google.cloud.firestore_v1beta1.proto import firestore_pb2_grpc
+from google.cloud.firestore_v1beta1.proto import query_pb2
+from google.cloud.firestore_v1beta1.proto import write_pb2
+from google.protobuf import empty_pb2
+from google.protobuf import timestamp_pb2
+
+
+_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(
+ "google-cloud-firestore",
+).version
+
+
+class FirestoreClient(object):
+ """
+ The Cloud Firestore service.
+
+ This service exposes several types of comparable timestamps:
+
+ - ``create_time`` - The time at which a document was created. Changes
+ only when a document is deleted, then re-created. Increases in a
+ strict monotonic fashion.
+ - ``update_time`` - The time at which a document was last updated.
+ Changes every time a document is modified. Does not change when a
+ write results in no modifications. Increases in a strict monotonic
+ fashion.
+ - ``read_time`` - The time at which a particular state was observed.
+ Used to denote a consistent snapshot of the database or the time at
+ which a Document was observed to not exist.
+ - ``commit_time`` - The time at which the writes in a transaction were
+ committed. Any read with an equal or greater ``read_time`` is
+ guaranteed to see the effects of the transaction.
+ """
+
+ SERVICE_ADDRESS = "firestore.googleapis.com:443"
+ """The default address of the service."""
+
+ # The name of the interface for this client. This is the key used to
+ # find the method configuration in the client_config dictionary.
+ _INTERFACE_NAME = "google.firestore.v1beta1.Firestore"
+
+ @classmethod
+ def from_service_account_file(cls, filename, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ FirestoreClient: The constructed client.
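+
+ Example:
+ >>> # hypothetical path to a service account key file
+ >>> client = FirestoreClient.from_service_account_file(
+ ... 'service-account.json')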
+ """
+ credentials = service_account.Credentials.from_service_account_file(filename)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ from_service_account_json = from_service_account_file
+
+ @classmethod
+ def any_path_path(cls, project, database, document, any_path):
+ """Return a fully-qualified any_path string."""
+ return google.api_core.path_template.expand(
+ "projects/{project}/databases/{database}/documents/{document}/{any_path=**}",
+ project=project,
+ database=database,
+ document=document,
+ any_path=any_path,
+ )
+
+ @classmethod
+ def database_root_path(cls, project, database):
+ """Return a fully-qualified database_root string."""
+ return google.api_core.path_template.expand(
+ "projects/{project}/databases/{database}",
+ project=project,
+ database=database,
+ )
+
+ @classmethod
+ def document_path_path(cls, project, database, document_path):
+ """Return a fully-qualified document_path string."""
+ return google.api_core.path_template.expand(
+ "projects/{project}/databases/{database}/documents/{document_path=**}",
+ project=project,
+ database=database,
+ document_path=document_path,
+ )
+
+ @classmethod
+ def document_root_path(cls, project, database):
+ """Return a fully-qualified document_root string."""
+ return google.api_core.path_template.expand(
+ "projects/{project}/databases/{database}/documents",
+ project=project,
+ database=database,
+ )
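+
+ # Illustrative expansions of the helpers above (hypothetical values):
+ # database_root_path("my-project", "(default)")
+ # -> "projects/my-project/databases/(default)"
+ # document_path_path("my-project", "(default)", "users/alovelace")
+ # -> "projects/my-project/databases/(default)/documents/users/alovelace"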
+
+ def __init__(
+ self,
+ transport=None,
+ channel=None,
+ credentials=None,
+ client_config=None,
+ client_info=None,
+ client_options=None,
+ ):
+ """Constructor.
+
+ Args:
+ transport (Union[~.FirestoreGrpcTransport,
+ Callable[[~.Credentials, type], ~.FirestoreGrpcTransport]): A transport
+ instance, responsible for actually making the API calls.
+ The default transport uses the gRPC protocol.
+ This argument may also be a callable which returns a
+ transport instance. Callables will be sent the credentials
+ as the first argument and the default transport class as
+ the second argument.
+ channel (grpc.Channel): DEPRECATED. A ``Channel`` instance
+ through which to make calls. This argument is mutually exclusive
+ with ``credentials``; providing both will raise an exception.
+ credentials (google.auth.credentials.Credentials): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is mutually exclusive with providing a
+ transport instance to ``transport``; doing so will raise
+ an exception.
+ client_config (dict): DEPRECATED. A dictionary of call options for
+ each method. If not specified, the default configuration is used.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+ client_options (Union[dict, google.api_core.client_options.ClientOptions]):
+ Client options used to set user options on the client. API Endpoint
+ should be set through client_options.
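+
+ Example:
+ >>> # illustrative; assumes default credentials in the environment
+ >>> client = FirestoreClient()
+ >>> # or with an explicit endpoint via ``client_options``:
+ >>> client = FirestoreClient(
+ ... client_options={'api_endpoint': 'firestore.googleapis.com:443'})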
+ """
+ # Raise deprecation warnings for things we want to go away.
+ if client_config is not None:
+ warnings.warn(
+ "The `client_config` argument is deprecated.",
+ PendingDeprecationWarning,
+ stacklevel=2,
+ )
+ else:
+ client_config = firestore_client_config.config
+
+ if channel:
+ warnings.warn(
+ "The `channel` argument is deprecated; use " "`transport` instead.",
+ PendingDeprecationWarning,
+ stacklevel=2,
+ )
+
+ api_endpoint = self.SERVICE_ADDRESS
+ if client_options:
+ if type(client_options) == dict:
+ client_options = google.api_core.client_options.from_dict(
+ client_options
+ )
+ if client_options.api_endpoint:
+ api_endpoint = client_options.api_endpoint
+
+ # Instantiate the transport.
+ # The transport is responsible for handling serialization and
+ # deserialization and actually sending data to the service.
+ if transport:
+ if callable(transport):
+ self.transport = transport(
+ credentials=credentials,
+ default_class=firestore_grpc_transport.FirestoreGrpcTransport,
+ address=api_endpoint,
+ )
+ else:
+ if credentials:
+ raise ValueError(
+ "Received both a transport instance and "
+ "credentials; these are mutually exclusive."
+ )
+ self.transport = transport
+ else:
+ self.transport = firestore_grpc_transport.FirestoreGrpcTransport(
+ address=api_endpoint, channel=channel, credentials=credentials,
+ )
+
+ if client_info is None:
+ client_info = google.api_core.gapic_v1.client_info.ClientInfo(
+ gapic_version=_GAPIC_LIBRARY_VERSION,
+ )
+ else:
+ client_info.gapic_version = _GAPIC_LIBRARY_VERSION
+ self._client_info = client_info
+
+ # Parse out the default settings for retry and timeout for each RPC
+ # from the client configuration.
+ # (Ordinarily, these are the defaults specified in the `*_config.py`
+ # file next to this one.)
+ self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
+ client_config["interfaces"][self._INTERFACE_NAME],
+ )
+
+ # Save a dictionary of cached API call functions.
+ # These are the actual callables which invoke the proper
+ # transport methods, wrapped with `wrap_method` to add retry,
+ # timeout, and the like.
+ self._inner_api_calls = {}
+
+ # Service calls
+ def delete_document(
+ self,
+ name,
+ current_document=None,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Deletes a document.
+
+ Example:
+ >>> from google.cloud import firestore_v1beta1
+ >>>
+ >>> client = firestore_v1beta1.FirestoreClient()
+ >>>
+ >>> # TODO: Initialize `name`:
+ >>> name = ''
+ >>>
+ >>> client.delete_document(name)
+
+ Args:
+ name (str): Required. The resource name of the Document to delete. In the
+ format:
+ ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
+ current_document (Union[dict, ~google.cloud.firestore_v1beta1.types.Precondition]): An optional precondition on the document.
+ The request will fail if this is set and not met by the target document.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1beta1.types.Precondition`
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "delete_document" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "delete_document"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.delete_document,
+ default_retry=self._method_configs["DeleteDocument"].retry,
+ default_timeout=self._method_configs["DeleteDocument"].timeout,
+ client_info=self._client_info,
+ )
+
+ request = firestore_pb2.DeleteDocumentRequest(
+ name=name, current_document=current_document,
+ )
+ if metadata is None:
+ metadata = []
+ metadata = list(metadata)
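+ # Best-effort routing header: derived from the resource name so the
+ # backend can route the call; the AttributeError guard mirrors the
+ # generated pattern used by the RPCs below.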
+ try:
+ routing_header = [("name", name)]
+ except AttributeError:
+ pass
+ else:
+ routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
+ routing_header
+ )
+ metadata.append(routing_metadata)
+
+ self._inner_api_calls["delete_document"](
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
+
+ def batch_get_documents(
+ self,
+ database,
+ documents=None,
+ mask=None,
+ transaction=None,
+ new_transaction=None,
+ read_time=None,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Gets multiple documents.
+
+ Documents returned by this method are not guaranteed to be returned in the
+ same order that they were requested.
+
+ Example:
+ >>> from google.cloud import firestore_v1beta1
+ >>>
+ >>> client = firestore_v1beta1.FirestoreClient()
+ >>>
+ >>> # TODO: Initialize `database`:
+ >>> database = ''
+ >>>
+ >>> for element in client.batch_get_documents(database):
+ ... # process element
+ ... pass
+
+ Args:
+ database (str): Required. The database name. In the format:
+ ``projects/{project_id}/databases/{database_id}``.
+ documents (list[str]): The names of the documents to retrieve. In the format:
+ ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
+ The request will fail if any of the documents is not a child resource
+ of the given ``database``. Duplicate names will be elided.
+ mask (Union[dict, ~google.cloud.firestore_v1beta1.types.DocumentMask]): The fields to return. If not set, returns all fields.
+
+ If a document has a field that is not present in this mask, that field will
+ not be returned in the response.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1beta1.types.DocumentMask`
+ transaction (bytes): Reads documents in a transaction.
+ new_transaction (Union[dict, ~google.cloud.firestore_v1beta1.types.TransactionOptions]): Starts a new transaction and reads the documents.
+ Defaults to a read-only transaction.
+ The new transaction ID will be returned as the first response in the
+ stream.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1beta1.types.TransactionOptions`
+ read_time (Union[dict, ~google.cloud.firestore_v1beta1.types.Timestamp]): Reads documents as they were at the given time.
+ This may not be older than 60 seconds.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1beta1.types.Timestamp`
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ Iterable[~google.cloud.firestore_v1beta1.types.BatchGetDocumentsResponse].
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "batch_get_documents" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "batch_get_documents"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.batch_get_documents,
+ default_retry=self._method_configs["BatchGetDocuments"].retry,
+ default_timeout=self._method_configs["BatchGetDocuments"].timeout,
+ client_info=self._client_info,
+ )
+
+ # Sanity check: We have some fields which are mutually exclusive;
+ # raise ValueError if more than one is sent.
+ google.api_core.protobuf_helpers.check_oneof(
+ transaction=transaction,
+ new_transaction=new_transaction,
+ read_time=read_time,
+ )
+
+ request = firestore_pb2.BatchGetDocumentsRequest(
+ database=database,
+ documents=documents,
+ mask=mask,
+ transaction=transaction,
+ new_transaction=new_transaction,
+ read_time=read_time,
+ )
+ if metadata is None:
+ metadata = []
+ metadata = list(metadata)
+ try:
+ routing_header = [("database", database)]
+ except AttributeError:
+ pass
+ else:
+ routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
+ routing_header
+ )
+ metadata.append(routing_metadata)
+
+ return self._inner_api_calls["batch_get_documents"](
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
+
+ def begin_transaction(
+ self,
+ database,
+ options_=None,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Starts a new transaction.
+
+ Example:
+ >>> from google.cloud import firestore_v1beta1
+ >>>
+ >>> client = firestore_v1beta1.FirestoreClient()
+ >>>
+ >>> # TODO: Initialize `database`:
+ >>> database = ''
+ >>>
+ >>> response = client.begin_transaction(database)
+
+ Args:
+ database (str): Required. The database name. In the format:
+ ``projects/{project_id}/databases/{database_id}``.
+ options_ (Union[dict, ~google.cloud.firestore_v1beta1.types.TransactionOptions]): The options for the transaction.
+ Defaults to a read-write transaction.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1beta1.types.TransactionOptions`
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ A :class:`~google.cloud.firestore_v1beta1.types.BeginTransactionResponse` instance.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "begin_transaction" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "begin_transaction"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.begin_transaction,
+ default_retry=self._method_configs["BeginTransaction"].retry,
+ default_timeout=self._method_configs["BeginTransaction"].timeout,
+ client_info=self._client_info,
+ )
+
+ request = firestore_pb2.BeginTransactionRequest(
+ database=database, options=options_,
+ )
+ if metadata is None:
+ metadata = []
+ metadata = list(metadata)
+ try:
+ routing_header = [("database", database)]
+ except AttributeError:
+ pass
+ else:
+ routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
+ routing_header
+ )
+ metadata.append(routing_metadata)
+
+ return self._inner_api_calls["begin_transaction"](
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
+
+ def rollback(
+ self,
+ database,
+ transaction,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Rolls back a transaction.
+
+ Example:
+ >>> from google.cloud import firestore_v1beta1
+ >>>
+ >>> client = firestore_v1beta1.FirestoreClient()
+ >>>
+ >>> # TODO: Initialize `database`:
+ >>> database = ''
+ >>>
+ >>> # TODO: Initialize `transaction`:
+ >>> transaction = b''
+ >>>
+ >>> client.rollback(database, transaction)
+
+ Args:
+ database (str): Required. The database name. In the format:
+ ``projects/{project_id}/databases/{database_id}``.
+ transaction (bytes): Required. The transaction to roll back.
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "rollback" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "rollback"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.rollback,
+ default_retry=self._method_configs["Rollback"].retry,
+ default_timeout=self._method_configs["Rollback"].timeout,
+ client_info=self._client_info,
+ )
+
+ request = firestore_pb2.RollbackRequest(
+ database=database, transaction=transaction,
+ )
+ if metadata is None:
+ metadata = []
+ metadata = list(metadata)
+ try:
+ routing_header = [("database", database)]
+ except AttributeError:
+ pass
+ else:
+ routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
+ routing_header
+ )
+ metadata.append(routing_metadata)
+
+ self._inner_api_calls["rollback"](
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
+
+ def run_query(
+ self,
+ parent,
+ structured_query=None,
+ transaction=None,
+ new_transaction=None,
+ read_time=None,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Runs a query.
+
+ Example:
+ >>> from google.cloud import firestore_v1beta1
+ >>>
+ >>> client = firestore_v1beta1.FirestoreClient()
+ >>>
+ >>> # TODO: Initialize `parent`:
+ >>> parent = ''
+ >>>
+ >>> for element in client.run_query(parent):
+ ... # process element
+ ... pass
+
+ Args:
+ parent (str): Required. The parent resource name. In the format:
+ ``projects/{project_id}/databases/{database_id}/documents`` or
+ ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
+ For example: ``projects/my-project/databases/my-database/documents`` or
+ ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom``
+ structured_query (Union[dict, ~google.cloud.firestore_v1beta1.types.StructuredQuery]): A structured query.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1beta1.types.StructuredQuery`
+ transaction (bytes): Reads documents in a transaction.
+ new_transaction (Union[dict, ~google.cloud.firestore_v1beta1.types.TransactionOptions]): Starts a new transaction and reads the documents.
+ Defaults to a read-only transaction.
+ The new transaction ID will be returned as the first response in the
+ stream.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1beta1.types.TransactionOptions`
+ read_time (Union[dict, ~google.cloud.firestore_v1beta1.types.Timestamp]): Reads documents as they were at the given time.
+ This may not be older than 60 seconds.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1beta1.types.Timestamp`
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ Iterable[~google.cloud.firestore_v1beta1.types.RunQueryResponse].
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "run_query" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "run_query"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.run_query,
+ default_retry=self._method_configs["RunQuery"].retry,
+ default_timeout=self._method_configs["RunQuery"].timeout,
+ client_info=self._client_info,
+ )
+
+ # Sanity check: We have some fields which are mutually exclusive;
+ # raise ValueError if more than one is sent.
+ google.api_core.protobuf_helpers.check_oneof(structured_query=structured_query,)
+
+ # Sanity check: We have some fields which are mutually exclusive;
+ # raise ValueError if more than one is sent.
+ google.api_core.protobuf_helpers.check_oneof(
+ transaction=transaction,
+ new_transaction=new_transaction,
+ read_time=read_time,
+ )
+
+ request = firestore_pb2.RunQueryRequest(
+ parent=parent,
+ structured_query=structured_query,
+ transaction=transaction,
+ new_transaction=new_transaction,
+ read_time=read_time,
+ )
+ if metadata is None:
+ metadata = []
+ metadata = list(metadata)
+ try:
+ routing_header = [("parent", parent)]
+ except AttributeError:
+ pass
+ else:
+ routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
+ routing_header
+ )
+ metadata.append(routing_metadata)
+
+ return self._inner_api_calls["run_query"](
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
+
+ def write(
+ self,
+ requests,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Streams batches of document updates and deletes, in order.
+
+ Example:
+ >>> from google.cloud import firestore_v1beta1
+ >>>
+ >>> client = firestore_v1beta1.FirestoreClient()
+ >>>
+ >>> # TODO: Initialize `database`:
+ >>> database = ''
+ >>> request = {'database': database}
+ >>>
+ >>> requests = [request]
+ >>> for element in client.write(requests):
+ ... # process element
+ ... pass
+
+ Args:
+ requests (iterator[dict|google.cloud.firestore_v1beta1.proto.firestore_pb2.WriteRequest]): The input objects. If a dict is provided, it must be of the
+ same form as the protobuf message :class:`~google.cloud.firestore_v1beta1.types.WriteRequest`
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ Iterable[~google.cloud.firestore_v1beta1.types.WriteResponse].
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "write" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "write"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.write,
+ default_retry=self._method_configs["Write"].retry,
+ default_timeout=self._method_configs["Write"].timeout,
+ client_info=self._client_info,
+ )
+
+ return self._inner_api_calls["write"](
+ requests, retry=retry, timeout=timeout, metadata=metadata
+ )
+
+ def listen(
+ self,
+ requests,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Listens to changes.
+
+ Example:
+ >>> from google.cloud import firestore_v1beta1
+ >>>
+ >>> client = firestore_v1beta1.FirestoreClient()
+ >>>
+ >>> # TODO: Initialize `database`:
+ >>> database = ''
+ >>> request = {'database': database}
+ >>>
+ >>> requests = [request]
+ >>> for element in client.listen(requests):
+ ... # process element
+ ... pass
+
+ Args:
+ requests (iterator[dict|google.cloud.firestore_v1beta1.proto.firestore_pb2.ListenRequest]): The input objects. If a dict is provided, it must be of the
+ same form as the protobuf message :class:`~google.cloud.firestore_v1beta1.types.ListenRequest`
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ Iterable[~google.cloud.firestore_v1beta1.types.ListenResponse].
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "listen" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "listen"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.listen,
+ default_retry=self._method_configs["Listen"].retry,
+ default_timeout=self._method_configs["Listen"].timeout,
+ client_info=self._client_info,
+ )
+
+ return self._inner_api_calls["listen"](
+ requests, retry=retry, timeout=timeout, metadata=metadata
+ )
+
+ def list_collection_ids(
+ self,
+ parent,
+ page_size=None,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Lists all the collection IDs underneath a document.
+
+ Example:
+ >>> from google.cloud import firestore_v1beta1
+ >>>
+ >>> client = firestore_v1beta1.FirestoreClient()
+ >>>
+ >>> # TODO: Initialize `parent`:
+ >>> parent = ''
+ >>>
+ >>> # Iterate over all results
+ >>> for element in client.list_collection_ids(parent):
+ ... # process element
+ ... pass
+ >>>
+ >>>
+ >>> # Alternatively:
+ >>>
+ >>> # Iterate over results one page at a time
+ >>> for page in client.list_collection_ids(parent).pages:
+ ... for element in page:
+ ... # process element
+ ... pass
+
+ Args:
+ parent (str): Required. The parent document. In the format:
+ ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
+ For example:
+ ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom``
+ page_size (int): The maximum number of resources contained in the
+ underlying API response. If page streaming is performed per-
+ resource, this parameter does not affect the return value. If page
+ streaming is performed per-page, this determines the maximum number
+ of resources in a page.
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ A :class:`~google.api_core.page_iterator.PageIterator` instance.
+ An iterable of :class:`str` instances.
+ You can also iterate over the pages of the response
+ using its `pages` property.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "list_collection_ids" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "list_collection_ids"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.list_collection_ids,
+ default_retry=self._method_configs["ListCollectionIds"].retry,
+ default_timeout=self._method_configs["ListCollectionIds"].timeout,
+ client_info=self._client_info,
+ )
+
+ request = firestore_pb2.ListCollectionIdsRequest(
+ parent=parent, page_size=page_size,
+ )
+ if metadata is None:
+ metadata = []
+ metadata = list(metadata)
+ try:
+ routing_header = [("parent", parent)]
+ except AttributeError:
+ pass
+ else:
+ routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
+ routing_header
+ )
+ metadata.append(routing_metadata)
+
+ iterator = google.api_core.page_iterator.GRPCIterator(
+ client=None,
+ method=functools.partial(
+ self._inner_api_calls["list_collection_ids"],
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ ),
+ request=request,
+ items_field="collection_ids",
+ request_token_field="page_token",
+ response_token_field="next_page_token",
+ )
+ return iterator
+
+ def get_document(
+ self,
+ name,
+ mask=None,
+ transaction=None,
+ read_time=None,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Gets a single document.
+
+ Example:
+ >>> from google.cloud import firestore_v1beta1
+ >>>
+ >>> client = firestore_v1beta1.FirestoreClient()
+ >>>
+ >>> # TODO: Initialize `name`:
+ >>> name = ''
+ >>>
+ >>> response = client.get_document(name)
+
+ Args:
+ name (str): Required. The resource name of the Document to get. In the format:
+ ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
+ mask (Union[dict, ~google.cloud.firestore_v1beta1.types.DocumentMask]): The fields to return. If not set, returns all fields.
+
+ If the document has a field that is not present in this mask, that field
+ will not be returned in the response.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1beta1.types.DocumentMask`
+ transaction (bytes): Reads the document in a transaction.
+ read_time (Union[dict, ~google.cloud.firestore_v1beta1.types.Timestamp]): Reads the version of the document at the given time.
+ This may not be older than 60 seconds.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1beta1.types.Timestamp`
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ A :class:`~google.cloud.firestore_v1beta1.types.Document` instance.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "get_document" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "get_document"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.get_document,
+ default_retry=self._method_configs["GetDocument"].retry,
+ default_timeout=self._method_configs["GetDocument"].timeout,
+ client_info=self._client_info,
+ )
+
+ # Sanity check: We have some fields which are mutually exclusive;
+ # raise ValueError if more than one is sent.
+ google.api_core.protobuf_helpers.check_oneof(
+ transaction=transaction, read_time=read_time,
+ )
+
+ request = firestore_pb2.GetDocumentRequest(
+ name=name, mask=mask, transaction=transaction, read_time=read_time,
+ )
+ if metadata is None:
+ metadata = []
+ metadata = list(metadata)
+ try:
+ routing_header = [("name", name)]
+ except AttributeError:
+ pass
+ else:
+ routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
+ routing_header
+ )
+ metadata.append(routing_metadata)
+
+ return self._inner_api_calls["get_document"](
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
+
+ def list_documents(
+ self,
+ parent,
+ collection_id,
+ page_size=None,
+ order_by=None,
+ mask=None,
+ transaction=None,
+ read_time=None,
+ show_missing=None,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Lists documents.
+
+ Example:
+ >>> from google.cloud import firestore_v1beta1
+ >>>
+ >>> client = firestore_v1beta1.FirestoreClient()
+ >>>
+ >>> # TODO: Initialize `parent`:
+ >>> parent = ''
+ >>>
+ >>> # TODO: Initialize `collection_id`:
+ >>> collection_id = ''
+ >>>
+ >>> # Iterate over all results
+ >>> for element in client.list_documents(parent, collection_id):
+ ... # process element
+ ... pass
+ >>>
+ >>>
+ >>> # Alternatively:
+ >>>
+ >>> # Iterate over results one page at a time
+ >>> for page in client.list_documents(parent, collection_id).pages:
+ ... for element in page:
+ ... # process element
+ ... pass
+
+ Args:
+ parent (str): Required. The parent resource name. In the format:
+ ``projects/{project_id}/databases/{database_id}/documents`` or
+ ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
+ For example: ``projects/my-project/databases/my-database/documents`` or
+ ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom``
+ collection_id (str): Required. The collection ID, relative to ``parent``, to list. For
+ example: ``chatrooms`` or ``messages``.
+ page_size (int): The maximum number of resources contained in the
+ underlying API response. If page streaming is performed per-
+ resource, this parameter does not affect the return value. If page
+ streaming is performed per-page, this determines the maximum number
+ of resources in a page.
+ order_by (str): The order to sort results by. For example: ``priority desc, name``.
+ mask (Union[dict, ~google.cloud.firestore_v1beta1.types.DocumentMask]): The fields to return. If not set, returns all fields.
+
+ If a document has a field that is not present in this mask, that field
+ will not be returned in the response.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1beta1.types.DocumentMask`
+ transaction (bytes): Reads documents in a transaction.
+ read_time (Union[dict, ~google.cloud.firestore_v1beta1.types.Timestamp]): Reads documents as they were at the given time.
+ This may not be older than 60 seconds.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1beta1.types.Timestamp`
+ show_missing (bool): If the list should show missing documents. A missing document is a
+ document that does not exist but has sub-documents. These documents will
+ be returned with a key but will not have fields,
+ ``Document.create_time``, or ``Document.update_time`` set.
+
+ Requests with ``show_missing`` may not specify ``where`` or
+ ``order_by``.
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ A :class:`~google.api_core.page_iterator.PageIterator` instance.
+ An iterable of :class:`~google.cloud.firestore_v1beta1.types.Document` instances.
+ You can also iterate over the pages of the response
+ using its `pages` property.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "list_documents" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "list_documents"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.list_documents,
+ default_retry=self._method_configs["ListDocuments"].retry,
+ default_timeout=self._method_configs["ListDocuments"].timeout,
+ client_info=self._client_info,
+ )
+
+ # Sanity check: We have some fields which are mutually exclusive;
+ # raise ValueError if more than one is sent.
+ google.api_core.protobuf_helpers.check_oneof(
+ transaction=transaction, read_time=read_time,
+ )
+
+ request = firestore_pb2.ListDocumentsRequest(
+ parent=parent,
+ collection_id=collection_id,
+ page_size=page_size,
+ order_by=order_by,
+ mask=mask,
+ transaction=transaction,
+ read_time=read_time,
+ show_missing=show_missing,
+ )
+ iterator = google.api_core.page_iterator.GRPCIterator(
+ client=None,
+ method=functools.partial(
+ self._inner_api_calls["list_documents"],
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ ),
+ request=request,
+ items_field="documents",
+ request_token_field="page_token",
+ response_token_field="next_page_token",
+ )
+ return iterator
+
+ def create_document(
+ self,
+ parent,
+ collection_id,
+ document_id,
+ document,
+ mask=None,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Creates a new document.
+
+ Example:
+ >>> from google.cloud import firestore_v1beta1
+ >>>
+ >>> client = firestore_v1beta1.FirestoreClient()
+ >>>
+ >>> # TODO: Initialize `parent`:
+ >>> parent = ''
+ >>>
+ >>> # TODO: Initialize `collection_id`:
+ >>> collection_id = ''
+ >>>
+ >>> # TODO: Initialize `document_id`:
+ >>> document_id = ''
+ >>>
+ >>> # TODO: Initialize `document`:
+ >>> document = {}
+ >>>
+ >>> response = client.create_document(parent, collection_id, document_id, document)
+
+ Args:
+ parent (str): Required. The parent resource. For example:
+ ``projects/{project_id}/databases/{database_id}/documents`` or
+ ``projects/{project_id}/databases/{database_id}/documents/chatrooms/{chatroom_id}``
+ collection_id (str): Required. The collection ID, relative to ``parent``, to list. For
+ example: ``chatrooms``.
+ document_id (str): The client-assigned document ID to use for this
+ document. Optional. If not specified, an ID will be assigned by the
+ service.
+ document (Union[dict, ~google.cloud.firestore_v1beta1.types.Document]): Required. The document to create. ``name`` must not be set.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1beta1.types.Document`
+ mask (Union[dict, ~google.cloud.firestore_v1beta1.types.DocumentMask]): The fields to return. If not set, returns all fields.
+
+ If the document has a field that is not present in this mask, that field
+ will not be returned in the response.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1beta1.types.DocumentMask`
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ A :class:`~google.cloud.firestore_v1beta1.types.Document` instance.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "create_document" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "create_document"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.create_document,
+ default_retry=self._method_configs["CreateDocument"].retry,
+ default_timeout=self._method_configs["CreateDocument"].timeout,
+ client_info=self._client_info,
+ )
+
+ request = firestore_pb2.CreateDocumentRequest(
+ parent=parent,
+ collection_id=collection_id,
+ document=document,
+ document_id=document_id,
+ mask=mask,
+ )
+ return self._inner_api_calls["create_document"](
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
+
+ def update_document(
+ self,
+ document,
+ update_mask=None,
+ mask=None,
+ current_document=None,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Updates or inserts a document.
+
+ Example:
+ >>> from google.cloud import firestore_v1beta1
+ >>>
+ >>> client = firestore_v1beta1.FirestoreClient()
+ >>>
+ >>> # TODO: Initialize `document`:
+ >>> document = {}
+ >>>
+ >>> response = client.update_document(document)
+
+ Args:
+ document (Union[dict, ~google.cloud.firestore_v1beta1.types.Document]): Required. The updated document.
+ Creates the document if it does not already exist.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1beta1.types.Document`
+ update_mask (Union[dict, ~google.cloud.firestore_v1beta1.types.DocumentMask]): The fields to update.
+ None of the field paths in the mask may contain a reserved name.
+
+ If the document exists on the server and has fields not referenced in the
+ mask, they are left unchanged.
+ Fields referenced in the mask, but not present in the input document, are
+ deleted from the document on the server.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1beta1.types.DocumentMask`
+ mask (Union[dict, ~google.cloud.firestore_v1beta1.types.DocumentMask]): The fields to return. If not set, returns all fields.
+
+ If the document has a field that is not present in this mask, that field
+ will not be returned in the response.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1beta1.types.DocumentMask`
+ current_document (Union[dict, ~google.cloud.firestore_v1beta1.types.Precondition]): An optional precondition on the document.
+ The request will fail if this is set and not met by the target document.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1beta1.types.Precondition`
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ A :class:`~google.cloud.firestore_v1beta1.types.Document` instance.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "update_document" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "update_document"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.update_document,
+ default_retry=self._method_configs["UpdateDocument"].retry,
+ default_timeout=self._method_configs["UpdateDocument"].timeout,
+ client_info=self._client_info,
+ )
+
+ request = firestore_pb2.UpdateDocumentRequest(
+ document=document,
+ update_mask=update_mask,
+ mask=mask,
+ current_document=current_document,
+ )
+ if metadata is None:
+ metadata = []
+ metadata = list(metadata)
+ try:
+ routing_header = [("document.name", document.name)]
+ except AttributeError:
+ pass
+ else:
+ routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
+ routing_header
+ )
+ metadata.append(routing_metadata)
+
+ return self._inner_api_calls["update_document"](
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
+
+ def commit(
+ self,
+ database,
+ writes=None,
+ transaction=None,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Commits a transaction, while optionally updating documents.
+
+ Example:
+ >>> from google.cloud import firestore_v1beta1
+ >>>
+ >>> client = firestore_v1beta1.FirestoreClient()
+ >>>
+ >>> # TODO: Initialize `database`:
+ >>> database = ''
+ >>>
+ >>> response = client.commit(database)
+
+ Args:
+ database (str): Required. The database name. In the format:
+ ``projects/{project_id}/databases/{database_id}``.
+ writes (list[Union[dict, ~google.cloud.firestore_v1beta1.types.Write]]): The writes to apply.
+
+ Always executed atomically and in order.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.firestore_v1beta1.types.Write`
+ transaction (bytes): If set, applies all writes in this transaction, and commits it.
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ A :class:`~google.cloud.firestore_v1beta1.types.CommitResponse` instance.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "commit" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "commit"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.commit,
+ default_retry=self._method_configs["Commit"].retry,
+ default_timeout=self._method_configs["Commit"].timeout,
+ client_info=self._client_info,
+ )
+
+ request = firestore_pb2.CommitRequest(
+ database=database, writes=writes, transaction=transaction,
+ )
+ if metadata is None:
+ metadata = []
+ metadata = list(metadata)
+ try:
+ routing_header = [("database", database)]
+ except AttributeError:
+ pass
+ else:
+ routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
+ routing_header
+ )
+ metadata.append(routing_metadata)
+
+ return self._inner_api_calls["commit"](
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/gapic/firestore_client_config.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/gapic/firestore_client_config.py
new file mode 100644
index 000000000..46067cef5
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/gapic/firestore_client_config.py
@@ -0,0 +1,97 @@
+config = {
+ "interfaces": {
+ "google.firestore.v1beta1.Firestore": {
+ "retry_codes": {
+ "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"],
+ "non_idempotent": [],
+ },
+ "retry_params": {
+ "default": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 20000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 20000,
+ "total_timeout_millis": 600000,
+ },
+ "streaming": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 300000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 300000,
+ "total_timeout_millis": 600000,
+ },
+ },
+ "methods": {
+ "DeleteDocument": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "default",
+ },
+ "BatchGetDocuments": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "streaming",
+ },
+ "BeginTransaction": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "default",
+ },
+ "Rollback": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "default",
+ },
+ "RunQuery": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "streaming",
+ },
+ "Write": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "non_idempotent",
+ "retry_params_name": "streaming",
+ },
+ "Listen": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "streaming",
+ },
+ "ListCollectionIds": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "default",
+ },
+ "GetDocument": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "default",
+ },
+ "ListDocuments": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "default",
+ },
+ "CreateDocument": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "non_idempotent",
+ "retry_params_name": "default",
+ },
+ "UpdateDocument": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "non_idempotent",
+ "retry_params_name": "default",
+ },
+ "Commit": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "non_idempotent",
+ "retry_params_name": "default",
+ },
+ },
+ }
+ }
+}
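+
+# Illustrative note (an assumption about how the gapic layer consumes these
+# values): with the "default" retry_params, the delay before retry k is
+# roughly min(initial_retry_delay_millis * retry_delay_multiplier ** k,
+# max_retry_delay_millis), i.e. ~100 ms, 130 ms, 169 ms, ... capped at 60 s,
+# until total_timeout_millis (600 s) is exhausted.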
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/gapic/transports/__init__.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/gapic/transports/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/gapic/transports/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/gapic/transports/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..4fdedb1e9
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/gapic/transports/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/gapic/transports/__pycache__/firestore_grpc_transport.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/gapic/transports/__pycache__/firestore_grpc_transport.cpython-36.pyc
new file mode 100644
index 000000000..65c224c1d
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/gapic/transports/__pycache__/firestore_grpc_transport.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/gapic/transports/firestore_grpc_transport.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/gapic/transports/firestore_grpc_transport.py
new file mode 100644
index 000000000..c13f53332
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/gapic/transports/firestore_grpc_transport.py
@@ -0,0 +1,283 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import google.api_core.grpc_helpers
+
+from google.cloud.firestore_v1beta1.proto import firestore_pb2_grpc
+
+
+class FirestoreGrpcTransport(object):
+ """gRPC transport class providing stubs for
+ google.firestore.v1beta1 Firestore API.
+
+ The transport provides access to the raw gRPC stubs,
+ which can be used to take advantage of advanced
+ features of gRPC.
+ """
+
+ # The scopes needed to make gRPC calls to all of the methods defined
+ # in this service.
+ _OAUTH_SCOPES = (
+ "https://www.googleapis.com/auth/cloud-platform",
+ "https://www.googleapis.com/auth/datastore",
+ )
+
+ def __init__(
+ self, channel=None, credentials=None, address="firestore.googleapis.com:443"
+ ):
+ """Instantiate the transport class.
+
+ Args:
+ channel (grpc.Channel): A ``Channel`` instance through
+ which to make calls. This argument is mutually exclusive
+ with ``credentials``; providing both will raise an exception.
+ credentials (google.auth.credentials.Credentials): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ address (str): The address where the service is hosted.
+ """
+ # If both `channel` and `credentials` are specified, raise an
+ # exception (channels come with credentials baked in already).
+ if channel is not None and credentials is not None:
+ raise ValueError(
+ "The `channel` and `credentials` arguments are mutually " "exclusive.",
+ )
+
+ # Create the channel.
+ if channel is None:
+ channel = self.create_channel(
+ address=address,
+ credentials=credentials,
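+                # -1 disables gRPC's per-message size limits (the receive
+                # default is 4 MiB).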
+ options={
+ "grpc.max_send_message_length": -1,
+ "grpc.max_receive_message_length": -1,
+ }.items(),
+ )
+
+ self._channel = channel
+
+ # gRPC uses objects called "stubs" that are bound to the
+ # channel and provide a basic method for each RPC.
+ self._stubs = {
+ "firestore_stub": firestore_pb2_grpc.FirestoreStub(channel),
+ }
+
+ @classmethod
+ def create_channel(
+ cls, address="firestore.googleapis.com:443", credentials=None, **kwargs
+ ):
+ """Create and return a gRPC channel object.
+
+ Args:
+ address (str): The host for the channel to use.
+ credentials (~.Credentials): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ kwargs (dict): Keyword arguments, which are passed to the
+ channel creation.
+
+ Returns:
+ grpc.Channel: A gRPC channel object.
+ """
+ return google.api_core.grpc_helpers.create_channel(
+ address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs
+ )
+
+ @property
+ def channel(self):
+ """The gRPC channel used by the transport.
+
+ Returns:
+ grpc.Channel: A gRPC channel object.
+ """
+ return self._channel
+
+ @property
+ def delete_document(self):
+ """Return the gRPC stub for :meth:`FirestoreClient.delete_document`.
+
+ Deletes a document.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["firestore_stub"].DeleteDocument
+
+ @property
+ def batch_get_documents(self):
+ """Return the gRPC stub for :meth:`FirestoreClient.batch_get_documents`.
+
+ Gets multiple documents.
+
+ Documents returned by this method are not guaranteed to be returned in the
+ same order that they were requested.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["firestore_stub"].BatchGetDocuments
+
+ @property
+ def begin_transaction(self):
+ """Return the gRPC stub for :meth:`FirestoreClient.begin_transaction`.
+
+ Starts a new transaction.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["firestore_stub"].BeginTransaction
+
+ @property
+ def rollback(self):
+ """Return the gRPC stub for :meth:`FirestoreClient.rollback`.
+
+ Rolls back a transaction.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["firestore_stub"].Rollback
+
+ @property
+ def run_query(self):
+ """Return the gRPC stub for :meth:`FirestoreClient.run_query`.
+
+ Runs a query.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["firestore_stub"].RunQuery
+
+ @property
+ def write(self):
+ """Return the gRPC stub for :meth:`FirestoreClient.write`.
+
+ Streams batches of document updates and deletes, in order.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["firestore_stub"].Write
+
+ @property
+ def listen(self):
+ """Return the gRPC stub for :meth:`FirestoreClient.listen`.
+
+ Listens to changes.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["firestore_stub"].Listen
+
+ @property
+ def list_collection_ids(self):
+ """Return the gRPC stub for :meth:`FirestoreClient.list_collection_ids`.
+
+ Lists all the collection IDs underneath a document.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["firestore_stub"].ListCollectionIds
+
+ @property
+ def get_document(self):
+ """Return the gRPC stub for :meth:`FirestoreClient.get_document`.
+
+ Gets a single document.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["firestore_stub"].GetDocument
+
+ @property
+ def list_documents(self):
+ """Return the gRPC stub for :meth:`FirestoreClient.list_documents`.
+
+ Lists documents.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["firestore_stub"].ListDocuments
+
+ @property
+ def create_document(self):
+ """Return the gRPC stub for :meth:`FirestoreClient.create_document`.
+
+ Creates a new document.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["firestore_stub"].CreateDocument
+
+ @property
+ def update_document(self):
+ """Return the gRPC stub for :meth:`FirestoreClient.update_document`.
+
+ Updates or inserts a document.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["firestore_stub"].UpdateDocument
+
+ @property
+ def commit(self):
+ """Return the gRPC stub for :meth:`FirestoreClient.commit`.
+
+ Commits a transaction, while optionally updating documents.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["firestore_stub"].Commit
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/order.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/order.py
new file mode 100644
index 000000000..79207f530
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/order.py
@@ -0,0 +1,207 @@
+# Copyright 2017 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from enum import Enum
+from google.cloud.firestore_v1beta1._helpers import decode_value
+import math
+
+
+class TypeOrder(Enum):
+ # NOTE: This order is defined by the backend and cannot be changed.
+ NULL = 0
+ BOOLEAN = 1
+ NUMBER = 2
+ TIMESTAMP = 3
+ STRING = 4
+ BLOB = 5
+ REF = 6
+ GEO_POINT = 7
+ ARRAY = 8
+ OBJECT = 9
+
+ @staticmethod
+ def from_value(value):
+ v = value.WhichOneof("value_type")
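+        # WhichOneof returns the name of the populated oneof field, or None
+        # when the value is unset.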
+
+ lut = {
+ "null_value": TypeOrder.NULL,
+ "boolean_value": TypeOrder.BOOLEAN,
+ "integer_value": TypeOrder.NUMBER,
+ "double_value": TypeOrder.NUMBER,
+ "timestamp_value": TypeOrder.TIMESTAMP,
+ "string_value": TypeOrder.STRING,
+ "bytes_value": TypeOrder.BLOB,
+ "reference_value": TypeOrder.REF,
+ "geo_point_value": TypeOrder.GEO_POINT,
+ "array_value": TypeOrder.ARRAY,
+ "map_value": TypeOrder.OBJECT,
+ }
+
+ if v not in lut:
+            raise ValueError("Could not detect value type for " + str(v))
+ return lut[v]
+
+
+class Order(object):
+ """
+ Order implements the ordering semantics of the backend.
+ """
+
+ @classmethod
+ def compare(cls, left, right):
+ """
+ Main comparison function for all Firestore types.
+        @return -1 if left < right, 0 if left == right, otherwise 1
+ """
+ # First compare the types.
+ leftType = TypeOrder.from_value(left).value
+ rightType = TypeOrder.from_value(right).value
+
+ if leftType != rightType:
+ if leftType < rightType:
+ return -1
+ return 1
+
+ value_type = left.WhichOneof("value_type")
+
+ if value_type == "null_value":
+ return 0 # nulls are all equal
+ elif value_type == "boolean_value":
+ return cls._compare_to(left.boolean_value, right.boolean_value)
+ elif value_type == "integer_value":
+ return cls.compare_numbers(left, right)
+ elif value_type == "double_value":
+ return cls.compare_numbers(left, right)
+ elif value_type == "timestamp_value":
+ return cls.compare_timestamps(left, right)
+ elif value_type == "string_value":
+ return cls._compare_to(left.string_value, right.string_value)
+ elif value_type == "bytes_value":
+ return cls.compare_blobs(left, right)
+ elif value_type == "reference_value":
+ return cls.compare_resource_paths(left, right)
+ elif value_type == "geo_point_value":
+ return cls.compare_geo_points(left, right)
+ elif value_type == "array_value":
+ return cls.compare_arrays(left, right)
+ elif value_type == "map_value":
+ return cls.compare_objects(left, right)
+ else:
+ raise ValueError("Unknown ``value_type``", str(value_type))
+
+ @staticmethod
+ def compare_blobs(left, right):
+ left_bytes = left.bytes_value
+ right_bytes = right.bytes_value
+
+ return Order._compare_to(left_bytes, right_bytes)
+
+ @staticmethod
+ def compare_timestamps(left, right):
+ left = left.timestamp_value
+ right = right.timestamp_value
+
+ seconds = Order._compare_to(left.seconds or 0, right.seconds or 0)
+ if seconds != 0:
+ return seconds
+
+ return Order._compare_to(left.nanos or 0, right.nanos or 0)
+
+ @staticmethod
+ def compare_geo_points(left, right):
+ left_value = decode_value(left, None)
+ right_value = decode_value(right, None)
+ cmp = (left_value.latitude > right_value.latitude) - (
+ left_value.latitude < right_value.latitude
+ )
+
+ if cmp != 0:
+ return cmp
+ return (left_value.longitude > right_value.longitude) - (
+ left_value.longitude < right_value.longitude
+ )
+
+ @staticmethod
+ def compare_resource_paths(left, right):
+ left = left.reference_value
+ right = right.reference_value
+
+ left_segments = left.split("/")
+ right_segments = right.split("/")
+ shorter = min(len(left_segments), len(right_segments))
+ # compare segments
+ for i in range(shorter):
+ if left_segments[i] < right_segments[i]:
+ return -1
+ if left_segments[i] > right_segments[i]:
+ return 1
+
+ left_length = len(left)
+ right_length = len(right)
+ return (left_length > right_length) - (left_length < right_length)
+
+ @staticmethod
+ def compare_arrays(left, right):
+ l_values = left.array_value.values
+ r_values = right.array_value.values
+
+ length = min(len(l_values), len(r_values))
+ for i in range(length):
+ cmp = Order.compare(l_values[i], r_values[i])
+ if cmp != 0:
+ return cmp
+
+ return Order._compare_to(len(l_values), len(r_values))
+
+ @staticmethod
+ def compare_objects(left, right):
+ left_fields = left.map_value.fields
+ right_fields = right.map_value.fields
+
+ for left_key, right_key in zip(sorted(left_fields), sorted(right_fields)):
+ keyCompare = Order._compare_to(left_key, right_key)
+ if keyCompare != 0:
+ return keyCompare
+
+ value_compare = Order.compare(
+ left_fields[left_key], right_fields[right_key]
+ )
+ if value_compare != 0:
+ return value_compare
+
+ return Order._compare_to(len(left_fields), len(right_fields))
+
+ @staticmethod
+ def compare_numbers(left, right):
+ left_value = decode_value(left, None)
+ right_value = decode_value(right, None)
+ return Order.compare_doubles(left_value, right_value)
+
+ @staticmethod
+ def compare_doubles(left, right):
+ if math.isnan(left):
+ if math.isnan(right):
+ return 0
+ return -1
+ if math.isnan(right):
+ return 1
+
+ return Order._compare_to(left, right)
+
+ @staticmethod
+ def _compare_to(left, right):
+ # We can't just use cmp(left, right) because cmp doesn't exist
+ # in Python 3, so this is an equivalent suggested by
+ # https://docs.python.org/3.0/whatsnew/3.0.html#ordering-comparisons
+ return (left > right) - (left < right)
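+
+# Illustrative sketch (not part of the library surface): ``_compare_to``
+# yields the classic -1/0/1 trichotomy, and ``Order.compare`` consumes
+# ``Value`` protobufs from document_pb2, so mixed int/double comparisons
+# both land in the NUMBER type order.
+#
+#   from google.cloud.firestore_v1beta1.proto import document_pb2
+#   left = document_pb2.Value(integer_value=1)
+#   right = document_pb2.Value(double_value=2.5)
+#   Order.compare(left, right)  # -1, since 1 < 2.5
+#   Order._compare_to(3, 2)     # 1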
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__init__.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..d1d91cd85
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/common_pb2.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/common_pb2.cpython-36.pyc
new file mode 100644
index 000000000..7a4951aef
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/common_pb2.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/common_pb2_grpc.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/common_pb2_grpc.cpython-36.pyc
new file mode 100644
index 000000000..332fb7609
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/common_pb2_grpc.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/document_pb2.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/document_pb2.cpython-36.pyc
new file mode 100644
index 000000000..da18eebc1
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/document_pb2.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/document_pb2_grpc.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/document_pb2_grpc.cpython-36.pyc
new file mode 100644
index 000000000..8ee5d494c
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/document_pb2_grpc.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/event_flow_document_change_pb2.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/event_flow_document_change_pb2.cpython-36.pyc
new file mode 100644
index 000000000..9d365df6f
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/event_flow_document_change_pb2.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/event_flow_document_change_pb2_grpc.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/event_flow_document_change_pb2_grpc.cpython-36.pyc
new file mode 100644
index 000000000..24ff1b5fc
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/event_flow_document_change_pb2_grpc.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/firestore_pb2.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/firestore_pb2.cpython-36.pyc
new file mode 100644
index 000000000..8f564063f
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/firestore_pb2.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/firestore_pb2_grpc.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/firestore_pb2_grpc.cpython-36.pyc
new file mode 100644
index 000000000..33d3f82c3
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/firestore_pb2_grpc.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/query_pb2.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/query_pb2.cpython-36.pyc
new file mode 100644
index 000000000..2e11d63af
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/query_pb2.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/query_pb2_grpc.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/query_pb2_grpc.cpython-36.pyc
new file mode 100644
index 000000000..d69cf7460
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/query_pb2_grpc.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/test_v1beta1_pb2.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/test_v1beta1_pb2.cpython-36.pyc
new file mode 100644
index 000000000..bd6dc6502
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/test_v1beta1_pb2.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/write_pb2.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/write_pb2.cpython-36.pyc
new file mode 100644
index 000000000..a11cd55bc
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/write_pb2.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/write_pb2_grpc.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/write_pb2_grpc.cpython-36.pyc
new file mode 100644
index 000000000..76d153c12
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/__pycache__/write_pb2_grpc.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/admin/__init__.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/admin/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/admin/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/admin/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..a5242dc46
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/admin/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/admin/__pycache__/firestore_admin_pb2.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/admin/__pycache__/firestore_admin_pb2.cpython-36.pyc
new file mode 100644
index 000000000..ffc327ec4
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/admin/__pycache__/firestore_admin_pb2.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/admin/__pycache__/firestore_admin_pb2_grpc.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/admin/__pycache__/firestore_admin_pb2_grpc.cpython-36.pyc
new file mode 100644
index 000000000..29f8f93e6
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/admin/__pycache__/firestore_admin_pb2_grpc.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/admin/__pycache__/index_pb2.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/admin/__pycache__/index_pb2.cpython-36.pyc
new file mode 100644
index 000000000..7e3447595
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/admin/__pycache__/index_pb2.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/admin/__pycache__/index_pb2_grpc.cpython-36.pyc b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/admin/__pycache__/index_pb2_grpc.cpython-36.pyc
new file mode 100644
index 000000000..055723dd6
Binary files /dev/null and b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/admin/__pycache__/index_pb2_grpc.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/admin/firestore_admin_pb2.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/admin/firestore_admin_pb2.py
new file mode 100644
index 000000000..9bb7f6553
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/admin/firestore_admin_pb2.py
@@ -0,0 +1,1343 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/cloud/firestore_v1beta1/proto/admin/firestore_admin.proto
+
+import sys
+
+_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
+from google.cloud.firestore_v1beta1.proto.admin import (
+ index_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2,
+)
+from google.longrunning import (
+ operations_pb2 as google_dot_longrunning_dot_operations__pb2,
+)
+from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
+from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name="google/cloud/firestore_v1beta1/proto/admin/firestore_admin.proto",
+ package="google.firestore.admin.v1beta1",
+ syntax="proto3",
+ serialized_pb=_b(
+ '\n@google/cloud/firestore_v1beta1/proto/admin/firestore_admin.proto\x12\x1egoogle.firestore.admin.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x36google/cloud/firestore_v1beta1/proto/admin/index.proto\x1a#google/longrunning/operations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\x80\x03\n\x16IndexOperationMetadata\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\r\n\x05index\x18\x03 \x01(\t\x12\\\n\x0eoperation_type\x18\x04 \x01(\x0e\x32\x44.google.firestore.admin.v1beta1.IndexOperationMetadata.OperationType\x12\x11\n\tcancelled\x18\x05 \x01(\x08\x12\x43\n\x11\x64ocument_progress\x18\x06 \x01(\x0b\x32(.google.firestore.admin.v1beta1.Progress"C\n\rOperationType\x12\x1e\n\x1aOPERATION_TYPE_UNSPECIFIED\x10\x00\x12\x12\n\x0e\x43REATING_INDEX\x10\x01":\n\x08Progress\x12\x16\n\x0ework_completed\x18\x01 \x01(\x03\x12\x16\n\x0ework_estimated\x18\x02 \x01(\x03"Z\n\x12\x43reateIndexRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x34\n\x05index\x18\x02 \x01(\x0b\x32%.google.firestore.admin.v1beta1.Index"\x1f\n\x0fGetIndexRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"[\n\x12ListIndexesRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t""\n\x12\x44\x65leteIndexRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"f\n\x13ListIndexesResponse\x12\x36\n\x07indexes\x18\x01 \x03(\x0b\x32%.google.firestore.admin.v1beta1.Index\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t2\x9c\x05\n\x0e\x46irestoreAdmin\x12\xa1\x01\n\x0b\x43reateIndex\x12\x32.google.firestore.admin.v1beta1.CreateIndexRequest\x1a\x1d.google.longrunning.Operation"?\x82\xd3\xe4\x93\x02\x39"0/v1beta1/{parent=projects/*/databases/*}/indexes:\x05index\x12\xb0\x01\n\x0bListIndexes\x12\x32.google.firestore.admin.v1beta1.ListIndexesRequest\x1a\x33.google.firestore.admin.v1beta1.ListIndexesResponse"8\x82\xd3\xe4\x93\x02\x32\x12\x30/v1beta1/{parent=projects/*/databases/*}/indexes\x12\x9c\x01\n\x08GetIndex\x12/.google.firestore.admin.v1beta1.GetIndexRequest\x1a%.google.firestore.admin.v1beta1.Index"8\x82\xd3\xe4\x93\x02\x32\x12\x30/v1beta1/{name=projects/*/databases/*/indexes/*}\x12\x93\x01\n\x0b\x44\x65leteIndex\x12\x32.google.firestore.admin.v1beta1.DeleteIndexRequest\x1a\x16.google.protobuf.Empty"8\x82\xd3\xe4\x93\x02\x32*0/v1beta1/{name=projects/*/databases/*/indexes/*}B\xae\x01\n"com.google.firestore.admin.v1beta1B\x13\x46irestoreAdminProtoP\x01ZCgoogle.golang.org/genproto/googleapis/firestore/admin/v1beta1;admin\xa2\x02\x04GCFS\xaa\x02$Google.Cloud.Firestore.Admin.V1Beta1b\x06proto3'
+ ),
+ dependencies=[
+ google_dot_api_dot_annotations__pb2.DESCRIPTOR,
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2.DESCRIPTOR,
+ google_dot_longrunning_dot_operations__pb2.DESCRIPTOR,
+ google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,
+ google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,
+ ],
+)
+
+
+_INDEXOPERATIONMETADATA_OPERATIONTYPE = _descriptor.EnumDescriptor(
+ name="OperationType",
+ full_name="google.firestore.admin.v1beta1.IndexOperationMetadata.OperationType",
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name="OPERATION_TYPE_UNSPECIFIED",
+ index=0,
+ number=0,
+ options=None,
+ type=None,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="CREATING_INDEX", index=1, number=1, options=None, type=None
+ ),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=603,
+ serialized_end=670,
+)
+_sym_db.RegisterEnumDescriptor(_INDEXOPERATIONMETADATA_OPERATIONTYPE)
+
+
+_INDEXOPERATIONMETADATA = _descriptor.Descriptor(
+ name="IndexOperationMetadata",
+ full_name="google.firestore.admin.v1beta1.IndexOperationMetadata",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="start_time",
+ full_name="google.firestore.admin.v1beta1.IndexOperationMetadata.start_time",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ ),
+ _descriptor.FieldDescriptor(
+ name="end_time",
+ full_name="google.firestore.admin.v1beta1.IndexOperationMetadata.end_time",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ ),
+ _descriptor.FieldDescriptor(
+ name="index",
+ full_name="google.firestore.admin.v1beta1.IndexOperationMetadata.index",
+ index=2,
+ number=3,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ ),
+ _descriptor.FieldDescriptor(
+ name="operation_type",
+ full_name="google.firestore.admin.v1beta1.IndexOperationMetadata.operation_type",
+ index=3,
+ number=4,
+ type=14,
+ cpp_type=8,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ ),
+ _descriptor.FieldDescriptor(
+ name="cancelled",
+ full_name="google.firestore.admin.v1beta1.IndexOperationMetadata.cancelled",
+ index=4,
+ number=5,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ ),
+ _descriptor.FieldDescriptor(
+ name="document_progress",
+ full_name="google.firestore.admin.v1beta1.IndexOperationMetadata.document_progress",
+ index=5,
+ number=6,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[_INDEXOPERATIONMETADATA_OPERATIONTYPE],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=286,
+ serialized_end=670,
+)
+
+
+_PROGRESS = _descriptor.Descriptor(
+ name="Progress",
+ full_name="google.firestore.admin.v1beta1.Progress",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="work_completed",
+ full_name="google.firestore.admin.v1beta1.Progress.work_completed",
+ index=0,
+ number=1,
+ type=3,
+ cpp_type=2,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ ),
+ _descriptor.FieldDescriptor(
+ name="work_estimated",
+ full_name="google.firestore.admin.v1beta1.Progress.work_estimated",
+ index=1,
+ number=2,
+ type=3,
+ cpp_type=2,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=672,
+ serialized_end=730,
+)
+
+
+_CREATEINDEXREQUEST = _descriptor.Descriptor(
+ name="CreateIndexRequest",
+ full_name="google.firestore.admin.v1beta1.CreateIndexRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="parent",
+ full_name="google.firestore.admin.v1beta1.CreateIndexRequest.parent",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ ),
+ _descriptor.FieldDescriptor(
+ name="index",
+ full_name="google.firestore.admin.v1beta1.CreateIndexRequest.index",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=732,
+ serialized_end=822,
+)
+
+
+_GETINDEXREQUEST = _descriptor.Descriptor(
+ name="GetIndexRequest",
+ full_name="google.firestore.admin.v1beta1.GetIndexRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="name",
+ full_name="google.firestore.admin.v1beta1.GetIndexRequest.name",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ )
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=824,
+ serialized_end=855,
+)
+
+
+_LISTINDEXESREQUEST = _descriptor.Descriptor(
+ name="ListIndexesRequest",
+ full_name="google.firestore.admin.v1beta1.ListIndexesRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="parent",
+ full_name="google.firestore.admin.v1beta1.ListIndexesRequest.parent",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ ),
+ _descriptor.FieldDescriptor(
+ name="filter",
+ full_name="google.firestore.admin.v1beta1.ListIndexesRequest.filter",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ ),
+ _descriptor.FieldDescriptor(
+ name="page_size",
+ full_name="google.firestore.admin.v1beta1.ListIndexesRequest.page_size",
+ index=2,
+ number=3,
+ type=5,
+ cpp_type=1,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ ),
+ _descriptor.FieldDescriptor(
+ name="page_token",
+ full_name="google.firestore.admin.v1beta1.ListIndexesRequest.page_token",
+ index=3,
+ number=4,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=857,
+ serialized_end=948,
+)
+
+
+_DELETEINDEXREQUEST = _descriptor.Descriptor(
+ name="DeleteIndexRequest",
+ full_name="google.firestore.admin.v1beta1.DeleteIndexRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="name",
+ full_name="google.firestore.admin.v1beta1.DeleteIndexRequest.name",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ )
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=950,
+ serialized_end=984,
+)
+
+
+_LISTINDEXESRESPONSE = _descriptor.Descriptor(
+ name="ListIndexesResponse",
+ full_name="google.firestore.admin.v1beta1.ListIndexesResponse",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="indexes",
+ full_name="google.firestore.admin.v1beta1.ListIndexesResponse.indexes",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ ),
+ _descriptor.FieldDescriptor(
+ name="next_page_token",
+ full_name="google.firestore.admin.v1beta1.ListIndexesResponse.next_page_token",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=986,
+ serialized_end=1088,
+)
+
+_INDEXOPERATIONMETADATA.fields_by_name[
+ "start_time"
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_INDEXOPERATIONMETADATA.fields_by_name[
+ "end_time"
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_INDEXOPERATIONMETADATA.fields_by_name[
+ "operation_type"
+].enum_type = _INDEXOPERATIONMETADATA_OPERATIONTYPE
+_INDEXOPERATIONMETADATA.fields_by_name["document_progress"].message_type = _PROGRESS
+_INDEXOPERATIONMETADATA_OPERATIONTYPE.containing_type = _INDEXOPERATIONMETADATA
+_CREATEINDEXREQUEST.fields_by_name[
+ "index"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2._INDEX
+)
+_LISTINDEXESRESPONSE.fields_by_name[
+ "indexes"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2._INDEX
+)
+DESCRIPTOR.message_types_by_name["IndexOperationMetadata"] = _INDEXOPERATIONMETADATA
+DESCRIPTOR.message_types_by_name["Progress"] = _PROGRESS
+DESCRIPTOR.message_types_by_name["CreateIndexRequest"] = _CREATEINDEXREQUEST
+DESCRIPTOR.message_types_by_name["GetIndexRequest"] = _GETINDEXREQUEST
+DESCRIPTOR.message_types_by_name["ListIndexesRequest"] = _LISTINDEXESREQUEST
+DESCRIPTOR.message_types_by_name["DeleteIndexRequest"] = _DELETEINDEXREQUEST
+DESCRIPTOR.message_types_by_name["ListIndexesResponse"] = _LISTINDEXESRESPONSE
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+IndexOperationMetadata = _reflection.GeneratedProtocolMessageType(
+ "IndexOperationMetadata",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_INDEXOPERATIONMETADATA,
+ __module__="google.cloud.firestore_v1beta1.proto.admin.firestore_admin_pb2",
+ __doc__="""Metadata for index operations. This metadata populates the metadata
+ field of [google.longrunning.Operation][google.longrunning.Operation].
+
+
+ Attributes:
+ start_time:
+ The time that work began on the operation.
+ end_time:
+ The time the operation ended, either successfully or
+ otherwise. Unset if the operation is still active.
+ index:
+ The index resource that this operation is acting on. For
+        example:
+        ``projects/{project_id}/databases/{database_id}/indexes/{index_id}``
+ operation_type:
+ The type of index operation.
+ cancelled:
+ True if the [google.longrunning.Operation] was cancelled. If
+        the cancellation is in progress, cancelled will be true but
+        [google.longrunning.Operation.done][google.longrunning.Operation.done]
+        will be false.
+ document_progress:
+ Progress of the existing operation, measured in number of
+ documents.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.IndexOperationMetadata)
+ ),
+)
+_sym_db.RegisterMessage(IndexOperationMetadata)
+
+Progress = _reflection.GeneratedProtocolMessageType(
+ "Progress",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_PROGRESS,
+ __module__="google.cloud.firestore_v1beta1.proto.admin.firestore_admin_pb2",
+ __doc__="""Measures the progress of a particular metric.
+
+
+ Attributes:
+ work_completed:
+ An estimate of how much work has been completed. Note that
+ this may be greater than ``work_estimated``.
+ work_estimated:
+ An estimate of how much work needs to be performed. Zero if
+ the work estimate is unavailable. May change as work
+ progresses.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.Progress)
+ ),
+)
+_sym_db.RegisterMessage(Progress)
+
+CreateIndexRequest = _reflection.GeneratedProtocolMessageType(
+ "CreateIndexRequest",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_CREATEINDEXREQUEST,
+ __module__="google.cloud.firestore_v1beta1.proto.admin.firestore_admin_pb2",
+ __doc__="""The request for
+ [FirestoreAdmin.CreateIndex][google.firestore.admin.v1beta1.FirestoreAdmin.CreateIndex].
+
+
+ Attributes:
+ parent:
+ The name of the database this index will apply to. For
+ example: ``projects/{project_id}/databases/{database_id}``
+ index:
+ The index to create. The name and state should not be
+ specified. Certain single field indexes cannot be created or
+ deleted.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.CreateIndexRequest)
+ ),
+)
+_sym_db.RegisterMessage(CreateIndexRequest)
+
+GetIndexRequest = _reflection.GeneratedProtocolMessageType(
+ "GetIndexRequest",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_GETINDEXREQUEST,
+ __module__="google.cloud.firestore_v1beta1.proto.admin.firestore_admin_pb2",
+ __doc__="""The request for
+ [FirestoreAdmin.GetIndex][google.firestore.admin.v1beta1.FirestoreAdmin.GetIndex].
+
+
+ Attributes:
+ name:
+        The name of the index. For example:
+        ``projects/{project_id}/databases/{database_id}/indexes/{index_id}``
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.GetIndexRequest)
+ ),
+)
+_sym_db.RegisterMessage(GetIndexRequest)
+
+ListIndexesRequest = _reflection.GeneratedProtocolMessageType(
+ "ListIndexesRequest",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_LISTINDEXESREQUEST,
+ __module__="google.cloud.firestore_v1beta1.proto.admin.firestore_admin_pb2",
+ __doc__="""The request for
+ [FirestoreAdmin.ListIndexes][google.firestore.admin.v1beta1.FirestoreAdmin.ListIndexes].
+
+
+ Attributes:
+ parent:
+ The database name. For example:
+ ``projects/{project_id}/databases/{database_id}``
+ page_size:
+ The standard List page size.
+ page_token:
+ The standard List page token.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.ListIndexesRequest)
+ ),
+)
+_sym_db.RegisterMessage(ListIndexesRequest)
+
+DeleteIndexRequest = _reflection.GeneratedProtocolMessageType(
+ "DeleteIndexRequest",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_DELETEINDEXREQUEST,
+ __module__="google.cloud.firestore_v1beta1.proto.admin.firestore_admin_pb2",
+ __doc__="""The request for
+ [FirestoreAdmin.DeleteIndex][google.firestore.admin.v1beta1.FirestoreAdmin.DeleteIndex].
+
+
+ Attributes:
+ name:
+        The index name. For example:
+        ``projects/{project_id}/databases/{database_id}/indexes/{index_id}``
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.DeleteIndexRequest)
+ ),
+)
+_sym_db.RegisterMessage(DeleteIndexRequest)
+
+ListIndexesResponse = _reflection.GeneratedProtocolMessageType(
+ "ListIndexesResponse",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_LISTINDEXESRESPONSE,
+ __module__="google.cloud.firestore_v1beta1.proto.admin.firestore_admin_pb2",
+ __doc__="""The response for
+ [FirestoreAdmin.ListIndexes][google.firestore.admin.v1beta1.FirestoreAdmin.ListIndexes].
+
+
+ Attributes:
+ indexes:
+ The indexes.
+ next_page_token:
+ The standard List next-page token.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.ListIndexesResponse)
+ ),
+)
+_sym_db.RegisterMessage(ListIndexesResponse)
+
+
+DESCRIPTOR.has_options = True
+DESCRIPTOR._options = _descriptor._ParseOptions(
+ descriptor_pb2.FileOptions(),
+ _b(
+ '\n"com.google.firestore.admin.v1beta1B\023FirestoreAdminProtoP\001ZCgoogle.golang.org/genproto/googleapis/firestore/admin/v1beta1;admin\242\002\004GCFS\252\002$Google.Cloud.Firestore.Admin.V1Beta1'
+ ),
+)
+
+_FIRESTOREADMIN = _descriptor.ServiceDescriptor(
+ name="FirestoreAdmin",
+ full_name="google.firestore.admin.v1beta1.FirestoreAdmin",
+ file=DESCRIPTOR,
+ index=0,
+ options=None,
+ serialized_start=1091,
+ serialized_end=1759,
+ methods=[
+ _descriptor.MethodDescriptor(
+ name="CreateIndex",
+ full_name="google.firestore.admin.v1beta1.FirestoreAdmin.CreateIndex",
+ index=0,
+ containing_service=None,
+ input_type=_CREATEINDEXREQUEST,
+ output_type=google_dot_longrunning_dot_operations__pb2._OPERATION,
+ options=_descriptor._ParseOptions(
+ descriptor_pb2.MethodOptions(),
+ _b(
+ '\202\323\344\223\0029"0/v1beta1/{parent=projects/*/databases/*}/indexes:\005index'
+ ),
+ ),
+ ),
+ _descriptor.MethodDescriptor(
+ name="ListIndexes",
+ full_name="google.firestore.admin.v1beta1.FirestoreAdmin.ListIndexes",
+ index=1,
+ containing_service=None,
+ input_type=_LISTINDEXESREQUEST,
+ output_type=_LISTINDEXESRESPONSE,
+ options=_descriptor._ParseOptions(
+ descriptor_pb2.MethodOptions(),
+ _b(
+ "\202\323\344\223\0022\0220/v1beta1/{parent=projects/*/databases/*}/indexes"
+ ),
+ ),
+ ),
+ _descriptor.MethodDescriptor(
+ name="GetIndex",
+ full_name="google.firestore.admin.v1beta1.FirestoreAdmin.GetIndex",
+ index=2,
+ containing_service=None,
+ input_type=_GETINDEXREQUEST,
+ output_type=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2._INDEX,
+ options=_descriptor._ParseOptions(
+ descriptor_pb2.MethodOptions(),
+ _b(
+ "\202\323\344\223\0022\0220/v1beta1/{name=projects/*/databases/*/indexes/*}"
+ ),
+ ),
+ ),
+ _descriptor.MethodDescriptor(
+ name="DeleteIndex",
+ full_name="google.firestore.admin.v1beta1.FirestoreAdmin.DeleteIndex",
+ index=3,
+ containing_service=None,
+ input_type=_DELETEINDEXREQUEST,
+ output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
+ options=_descriptor._ParseOptions(
+ descriptor_pb2.MethodOptions(),
+ _b(
+ "\202\323\344\223\0022*0/v1beta1/{name=projects/*/databases/*/indexes/*}"
+ ),
+ ),
+ ),
+ ],
+)
+_sym_db.RegisterServiceDescriptor(_FIRESTOREADMIN)
+
+DESCRIPTOR.services_by_name["FirestoreAdmin"] = _FIRESTOREADMIN
+
+try:
+ # THESE ELEMENTS WILL BE DEPRECATED.
+ # Please use the generated *_pb2_grpc.py files instead.
+ import grpc
+ from grpc.beta import implementations as beta_implementations
+ from grpc.beta import interfaces as beta_interfaces
+ from grpc.framework.common import cardinality
+ from grpc.framework.interfaces.face import utilities as face_utilities
+
+ class FirestoreAdminStub(object):
+ """The Cloud Firestore Admin API.
+
+ This API provides several administrative services for Cloud Firestore.
+
+ # Concepts
+
+ Project, Database, Namespace, Collection, and Document are used as defined in
+ the Google Cloud Firestore API.
+
+ Operation: An Operation represents work being performed in the background.
+
+
+ # Services
+
+ ## Index
+
+ The index service manages Cloud Firestore indexes.
+
+ Index creation is performed asynchronously.
+ An Operation resource is created for each such asynchronous operation.
+ The state of the operation (including any errors encountered)
+ may be queried via the Operation resource.
+
+ ## Metadata
+
+ Provides metadata and statistical information about data in Cloud Firestore.
+ The data provided as part of this API may be stale.
+
+ ## Operation
+
+ The Operations collection provides a record of actions performed for the
+ specified Project (including any Operations in progress). Operations are not
+ created directly but through calls on other collections or resources.
+
+ An Operation that is not yet done may be cancelled. The request to cancel is
+ asynchronous and the Operation may continue to run for some time after the
+ request to cancel is made.
+
+ An Operation that is done may be deleted so that it is no longer listed as
+ part of the Operation collection.
+
+ Operations are created by service `FirestoreAdmin`, but are accessed via
+ service `google.longrunning.Operations`.
+ """
+
+ def __init__(self, channel):
+ """Constructor.
+
+ Args:
+ channel: A grpc.Channel.
+ """
+ self.CreateIndex = channel.unary_unary(
+ "/google.firestore.admin.v1beta1.FirestoreAdmin/CreateIndex",
+ request_serializer=CreateIndexRequest.SerializeToString,
+ response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ )
+ self.ListIndexes = channel.unary_unary(
+ "/google.firestore.admin.v1beta1.FirestoreAdmin/ListIndexes",
+ request_serializer=ListIndexesRequest.SerializeToString,
+ response_deserializer=ListIndexesResponse.FromString,
+ )
+ self.GetIndex = channel.unary_unary(
+ "/google.firestore.admin.v1beta1.FirestoreAdmin/GetIndex",
+ request_serializer=GetIndexRequest.SerializeToString,
+ response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2.Index.FromString,
+ )
+ self.DeleteIndex = channel.unary_unary(
+ "/google.firestore.admin.v1beta1.FirestoreAdmin/DeleteIndex",
+ request_serializer=DeleteIndexRequest.SerializeToString,
+ response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
+ )
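+
+        # Illustrative usage sketch (assumption: ``channel`` is an authorized
+        # gRPC channel, e.g. from google.api_core.grpc_helpers.create_channel):
+        #
+        #   stub = FirestoreAdminStub(channel)
+        #   response = stub.ListIndexes(
+        #       ListIndexesRequest(parent="projects/my-project/databases/(default)")
+        #   )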
+
+ class FirestoreAdminServicer(object):
+ """The Cloud Firestore Admin API.
+
+ This API provides several administrative services for Cloud Firestore.
+
+ # Concepts
+
+ Project, Database, Namespace, Collection, and Document are used as defined in
+ the Google Cloud Firestore API.
+
+ Operation: An Operation represents work being performed in the background.
+
+
+ # Services
+
+ ## Index
+
+ The index service manages Cloud Firestore indexes.
+
+ Index creation is performed asynchronously.
+ An Operation resource is created for each such asynchronous operation.
+ The state of the operation (including any errors encountered)
+ may be queried via the Operation resource.
+
+ ## Metadata
+
+ Provides metadata and statistical information about data in Cloud Firestore.
+ The data provided as part of this API may be stale.
+
+ ## Operation
+
+ The Operations collection provides a record of actions performed for the
+ specified Project (including any Operations in progress). Operations are not
+ created directly but through calls on other collections or resources.
+
+ An Operation that is not yet done may be cancelled. The request to cancel is
+ asynchronous and the Operation may continue to run for some time after the
+ request to cancel is made.
+
+ An Operation that is done may be deleted so that it is no longer listed as
+ part of the Operation collection.
+
+ Operations are created by service `FirestoreAdmin`, but are accessed via
+ service `google.longrunning.Operations`.
+ """
+
+ def CreateIndex(self, request, context):
+ """Creates the specified index.
+ A newly created index's initial state is `CREATING`. On completion of the
+ returned [google.longrunning.Operation][google.longrunning.Operation], the state will be `READY`.
+ If the index already exists, the call will return an `ALREADY_EXISTS`
+ status.
+
+ During creation, the process could result in an error, in which case the
+ index will move to the `ERROR` state. The process can be recovered by
+ fixing the data that caused the error, removing the index with
+ [delete][google.firestore.admin.v1beta1.FirestoreAdmin.DeleteIndex], then re-creating the index with
+ [create][google.firestore.admin.v1beta1.FirestoreAdmin.CreateIndex].
+
+ Indexes with a single field cannot be created.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def ListIndexes(self, request, context):
+ """Lists the indexes that match the specified filters.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def GetIndex(self, request, context):
+ """Gets an index.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def DeleteIndex(self, request, context):
+ """Deletes an index.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def add_FirestoreAdminServicer_to_server(servicer, server):
+ rpc_method_handlers = {
+ "CreateIndex": grpc.unary_unary_rpc_method_handler(
+ servicer.CreateIndex,
+ request_deserializer=CreateIndexRequest.FromString,
+ response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
+ ),
+ "ListIndexes": grpc.unary_unary_rpc_method_handler(
+ servicer.ListIndexes,
+ request_deserializer=ListIndexesRequest.FromString,
+ response_serializer=ListIndexesResponse.SerializeToString,
+ ),
+ "GetIndex": grpc.unary_unary_rpc_method_handler(
+ servicer.GetIndex,
+ request_deserializer=GetIndexRequest.FromString,
+ response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2.Index.SerializeToString,
+ ),
+ "DeleteIndex": grpc.unary_unary_rpc_method_handler(
+ servicer.DeleteIndex,
+ request_deserializer=DeleteIndexRequest.FromString,
+ response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
+ ),
+ }
+ generic_handler = grpc.method_handlers_generic_handler(
+ "google.firestore.admin.v1beta1.FirestoreAdmin", rpc_method_handlers
+ )
+ server.add_generic_rpc_handlers((generic_handler,))
+
+ class BetaFirestoreAdminServicer(object):
+ """The Beta API is deprecated for 0.15.0 and later.
+
+ It is recommended to use the GA API (classes and functions in this
+ file not marked beta) for all further purposes. This class was generated
+ only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0."""
+
+ """The Cloud Firestore Admin API.
+
+ This API provides several administrative services for Cloud Firestore.
+
+ # Concepts
+
+ Project, Database, Namespace, Collection, and Document are used as defined in
+ the Google Cloud Firestore API.
+
+ Operation: An Operation represents work being performed in the background.
+
+
+ # Services
+
+ ## Index
+
+ The index service manages Cloud Firestore indexes.
+
+ Index creation is performed asynchronously.
+ An Operation resource is created for each such asynchronous operation.
+ The state of the operation (including any errors encountered)
+ may be queried via the Operation resource.
+
+ ## Metadata
+
+ Provides metadata and statistical information about data in Cloud Firestore.
+ The data provided as part of this API may be stale.
+
+ ## Operation
+
+ The Operations collection provides a record of actions performed for the
+ specified Project (including any Operations in progress). Operations are not
+ created directly but through calls on other collections or resources.
+
+ An Operation that is not yet done may be cancelled. The request to cancel is
+ asynchronous and the Operation may continue to run for some time after the
+ request to cancel is made.
+
+ An Operation that is done may be deleted so that it is no longer listed as
+ part of the Operation collection.
+
+ Operations are created by service `FirestoreAdmin`, but are accessed via
+ service `google.longrunning.Operations`.
+ """
+
+ def CreateIndex(self, request, context):
+ """Creates the specified index.
+ A newly created index's initial state is `CREATING`. On completion of the
+ returned [google.longrunning.Operation][google.longrunning.Operation], the state will be `READY`.
+ If the index already exists, the call will return an `ALREADY_EXISTS`
+ status.
+
+ During creation, the process could result in an error, in which case the
+ index will move to the `ERROR` state. The process can be recovered by
+ fixing the data that caused the error, removing the index with
+ [delete][google.firestore.admin.v1beta1.FirestoreAdmin.DeleteIndex], then re-creating the index with
+ [create][google.firestore.admin.v1beta1.FirestoreAdmin.CreateIndex].
+
+ Indexes with a single field cannot be created.
+ """
+ context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
+
+ def ListIndexes(self, request, context):
+ """Lists the indexes that match the specified filters.
+ """
+ context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
+
+ def GetIndex(self, request, context):
+ """Gets an index.
+ """
+ context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
+
+ def DeleteIndex(self, request, context):
+ """Deletes an index.
+ """
+ context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
+
+ class BetaFirestoreAdminStub(object):
+ """The Beta API is deprecated for 0.15.0 and later.
+
+ It is recommended to use the GA API (classes and functions in this
+ file not marked beta) for all further purposes. This class was generated
+ only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0."""
+
+ """The Cloud Firestore Admin API.
+
+ This API provides several administrative services for Cloud Firestore.
+
+ # Concepts
+
+ Project, Database, Namespace, Collection, and Document are used as defined in
+ the Google Cloud Firestore API.
+
+ Operation: An Operation represents work being performed in the background.
+
+
+ # Services
+
+ ## Index
+
+ The index service manages Cloud Firestore indexes.
+
+ Index creation is performed asynchronously.
+ An Operation resource is created for each such asynchronous operation.
+ The state of the operation (including any errors encountered)
+ may be queried via the Operation resource.
+
+ ## Metadata
+
+ Provides metadata and statistical information about data in Cloud Firestore.
+ The data provided as part of this API may be stale.
+
+ ## Operation
+
+ The Operations collection provides a record of actions performed for the
+ specified Project (including any Operations in progress). Operations are not
+ created directly but through calls on other collections or resources.
+
+ An Operation that is not yet done may be cancelled. The request to cancel is
+ asynchronous and the Operation may continue to run for some time after the
+ request to cancel is made.
+
+ An Operation that is done may be deleted so that it is no longer listed as
+ part of the Operation collection.
+
+ Operations are created by service `FirestoreAdmin`, but are accessed via
+ service `google.longrunning.Operations`.
+ """
+
+ def CreateIndex(
+ self,
+ request,
+ timeout,
+ metadata=None,
+ with_call=False,
+ protocol_options=None,
+ ):
+ """Creates the specified index.
+ A newly created index's initial state is `CREATING`. On completion of the
+ returned [google.longrunning.Operation][google.longrunning.Operation], the state will be `READY`.
+ If the index already exists, the call will return an `ALREADY_EXISTS`
+ status.
+
+ During creation, the process could result in an error, in which case the
+ index will move to the `ERROR` state. The process can be recovered by
+ fixing the data that caused the error, removing the index with
+ [delete][google.firestore.admin.v1beta1.FirestoreAdmin.DeleteIndex], then re-creating the index with
+ [create][google.firestore.admin.v1beta1.FirestoreAdmin.CreateIndex].
+
+ Indexes with a single field cannot be created.
+ """
+ raise NotImplementedError()
+
+ CreateIndex.future = None
+
+ def ListIndexes(
+ self,
+ request,
+ timeout,
+ metadata=None,
+ with_call=False,
+ protocol_options=None,
+ ):
+ """Lists the indexes that match the specified filters.
+ """
+ raise NotImplementedError()
+
+ ListIndexes.future = None
+
+ def GetIndex(
+ self,
+ request,
+ timeout,
+ metadata=None,
+ with_call=False,
+ protocol_options=None,
+ ):
+ """Gets an index.
+ """
+ raise NotImplementedError()
+
+ GetIndex.future = None
+
+ def DeleteIndex(
+ self,
+ request,
+ timeout,
+ metadata=None,
+ with_call=False,
+ protocol_options=None,
+ ):
+ """Deletes an index.
+ """
+ raise NotImplementedError()
+
+ DeleteIndex.future = None
+
+ def beta_create_FirestoreAdmin_server(
+ servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None
+ ):
+ """The Beta API is deprecated for 0.15.0 and later.
+
+ It is recommended to use the GA API (classes and functions in this
+ file not marked beta) for all further purposes. This function was
+ generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0"""
+ request_deserializers = {
+ (
+ "google.firestore.admin.v1beta1.FirestoreAdmin",
+ "CreateIndex",
+ ): CreateIndexRequest.FromString,
+ (
+ "google.firestore.admin.v1beta1.FirestoreAdmin",
+ "DeleteIndex",
+ ): DeleteIndexRequest.FromString,
+ (
+ "google.firestore.admin.v1beta1.FirestoreAdmin",
+ "GetIndex",
+ ): GetIndexRequest.FromString,
+ (
+ "google.firestore.admin.v1beta1.FirestoreAdmin",
+ "ListIndexes",
+ ): ListIndexesRequest.FromString,
+ }
+ response_serializers = {
+ (
+ "google.firestore.admin.v1beta1.FirestoreAdmin",
+ "CreateIndex",
+ ): google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
+ (
+ "google.firestore.admin.v1beta1.FirestoreAdmin",
+ "DeleteIndex",
+ ): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
+ (
+ "google.firestore.admin.v1beta1.FirestoreAdmin",
+ "GetIndex",
+ ): google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2.Index.SerializeToString,
+ (
+ "google.firestore.admin.v1beta1.FirestoreAdmin",
+ "ListIndexes",
+ ): ListIndexesResponse.SerializeToString,
+ }
+ method_implementations = {
+ (
+ "google.firestore.admin.v1beta1.FirestoreAdmin",
+ "CreateIndex",
+ ): face_utilities.unary_unary_inline(servicer.CreateIndex),
+ (
+ "google.firestore.admin.v1beta1.FirestoreAdmin",
+ "DeleteIndex",
+ ): face_utilities.unary_unary_inline(servicer.DeleteIndex),
+ (
+ "google.firestore.admin.v1beta1.FirestoreAdmin",
+ "GetIndex",
+ ): face_utilities.unary_unary_inline(servicer.GetIndex),
+ (
+ "google.firestore.admin.v1beta1.FirestoreAdmin",
+ "ListIndexes",
+ ): face_utilities.unary_unary_inline(servicer.ListIndexes),
+ }
+ server_options = beta_implementations.server_options(
+ request_deserializers=request_deserializers,
+ response_serializers=response_serializers,
+ thread_pool=pool,
+ thread_pool_size=pool_size,
+ default_timeout=default_timeout,
+ maximum_timeout=maximum_timeout,
+ )
+ return beta_implementations.server(
+ method_implementations, options=server_options
+ )
+
+ def beta_create_FirestoreAdmin_stub(
+ channel, host=None, metadata_transformer=None, pool=None, pool_size=None
+ ):
+ """The Beta API is deprecated for 0.15.0 and later.
+
+ It is recommended to use the GA API (classes and functions in this
+ file not marked beta) for all further purposes. This function was
+ generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0"""
+ request_serializers = {
+ (
+ "google.firestore.admin.v1beta1.FirestoreAdmin",
+ "CreateIndex",
+ ): CreateIndexRequest.SerializeToString,
+ (
+ "google.firestore.admin.v1beta1.FirestoreAdmin",
+ "DeleteIndex",
+ ): DeleteIndexRequest.SerializeToString,
+ (
+ "google.firestore.admin.v1beta1.FirestoreAdmin",
+ "GetIndex",
+ ): GetIndexRequest.SerializeToString,
+ (
+ "google.firestore.admin.v1beta1.FirestoreAdmin",
+ "ListIndexes",
+ ): ListIndexesRequest.SerializeToString,
+ }
+ response_deserializers = {
+ (
+ "google.firestore.admin.v1beta1.FirestoreAdmin",
+ "CreateIndex",
+ ): google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ (
+ "google.firestore.admin.v1beta1.FirestoreAdmin",
+ "DeleteIndex",
+ ): google_dot_protobuf_dot_empty__pb2.Empty.FromString,
+ (
+ "google.firestore.admin.v1beta1.FirestoreAdmin",
+ "GetIndex",
+ ): google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2.Index.FromString,
+ (
+ "google.firestore.admin.v1beta1.FirestoreAdmin",
+ "ListIndexes",
+ ): ListIndexesResponse.FromString,
+ }
+ cardinalities = {
+ "CreateIndex": cardinality.Cardinality.UNARY_UNARY,
+ "DeleteIndex": cardinality.Cardinality.UNARY_UNARY,
+ "GetIndex": cardinality.Cardinality.UNARY_UNARY,
+ "ListIndexes": cardinality.Cardinality.UNARY_UNARY,
+ }
+ stub_options = beta_implementations.stub_options(
+ host=host,
+ metadata_transformer=metadata_transformer,
+ request_serializers=request_serializers,
+ response_deserializers=response_deserializers,
+ thread_pool=pool,
+ thread_pool_size=pool_size,
+ )
+ return beta_implementations.dynamic_stub(
+ channel,
+ "google.firestore.admin.v1beta1.FirestoreAdmin",
+ cardinalities,
+ options=stub_options,
+ )
+
+
+except ImportError:
+ pass
+# @@protoc_insertion_point(module_scope)
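
A minimal sketch (not part of the diff) of the long-running `CreateIndex` flow described in the docstrings above, using the GA stub from the companion `*_pb2_grpc` module added in the next hunk. The channel target, project, and collection names are illustrative; a real call needs a secure, authenticated channel.

```python
import grpc
from google.cloud.firestore_v1beta1.proto.admin import (
    firestore_admin_pb2,
    firestore_admin_pb2_grpc,
    index_pb2,
)

channel = grpc.insecure_channel("localhost:50051")  # illustrative target
stub = firestore_admin_pb2_grpc.FirestoreAdminStub(channel)

request = firestore_admin_pb2.CreateIndexRequest(
    parent="projects/my-project/databases/(default)",
    index=index_pb2.Index(
        collection_id="users",
        fields=[
            index_pb2.IndexField(
                field_path="last_name",
                mode=index_pb2.IndexField.ASCENDING,
            ),
        ],
    ),
)
# Returns a google.longrunning.Operation: the index starts in CREATING and
# becomes READY once the operation completes (ALREADY_EXISTS if it existed).
operation = stub.CreateIndex(request)
```
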
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/admin/firestore_admin_pb2_grpc.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/admin/firestore_admin_pb2_grpc.py
new file mode 100644
index 000000000..81eaad7ad
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/admin/firestore_admin_pb2_grpc.py
@@ -0,0 +1,203 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+import grpc
+
+from google.cloud.firestore_v1beta1.proto.admin import (
+ firestore_admin_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2,
+)
+from google.cloud.firestore_v1beta1.proto.admin import (
+ index_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2,
+)
+from google.longrunning import (
+ operations_pb2 as google_dot_longrunning_dot_operations__pb2,
+)
+from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
+
+
+class FirestoreAdminStub(object):
+ """The Cloud Firestore Admin API.
+
+ This API provides several administrative services for Cloud Firestore.
+
+ # Concepts
+
+ Project, Database, Namespace, Collection, and Document are used as defined in
+ the Google Cloud Firestore API.
+
+ Operation: An Operation represents work being performed in the background.
+
+
+ # Services
+
+ ## Index
+
+ The index service manages Cloud Firestore indexes.
+
+ Index creation is performed asynchronously.
+ An Operation resource is created for each such asynchronous operation.
+ The state of the operation (including any errors encountered)
+ may be queried via the Operation resource.
+
+ ## Metadata
+
+ Provides metadata and statistical information about data in Cloud Firestore.
+ The data provided as part of this API may be stale.
+
+ ## Operation
+
+ The Operations collection provides a record of actions performed for the
+ specified Project (including any Operations in progress). Operations are not
+ created directly but through calls on other collections or resources.
+
+ An Operation that is not yet done may be cancelled. The request to cancel is
+ asynchronous and the Operation may continue to run for some time after the
+ request to cancel is made.
+
+ An Operation that is done may be deleted so that it is no longer listed as
+ part of the Operation collection.
+
+ Operations are created by service `FirestoreAdmin`, but are accessed via
+ service `google.longrunning.Operations`.
+ """
+
+ def __init__(self, channel):
+ """Constructor.
+
+ Args:
+ channel: A grpc.Channel.
+ """
+ self.CreateIndex = channel.unary_unary(
+ "/google.firestore.admin.v1beta1.FirestoreAdmin/CreateIndex",
+ request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.CreateIndexRequest.SerializeToString,
+ response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ )
+ self.ListIndexes = channel.unary_unary(
+ "/google.firestore.admin.v1beta1.FirestoreAdmin/ListIndexes",
+ request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.ListIndexesRequest.SerializeToString,
+ response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.ListIndexesResponse.FromString,
+ )
+ self.GetIndex = channel.unary_unary(
+ "/google.firestore.admin.v1beta1.FirestoreAdmin/GetIndex",
+ request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.GetIndexRequest.SerializeToString,
+ response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2.Index.FromString,
+ )
+ self.DeleteIndex = channel.unary_unary(
+ "/google.firestore.admin.v1beta1.FirestoreAdmin/DeleteIndex",
+ request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.DeleteIndexRequest.SerializeToString,
+ response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
+ )
+
+
+class FirestoreAdminServicer(object):
+ """The Cloud Firestore Admin API.
+
+ This API provides several administrative services for Cloud Firestore.
+
+ # Concepts
+
+ Project, Database, Namespace, Collection, and Document are used as defined in
+ the Google Cloud Firestore API.
+
+ Operation: An Operation represents work being performed in the background.
+
+
+ # Services
+
+ ## Index
+
+ The index service manages Cloud Firestore indexes.
+
+ Index creation is performed asynchronously.
+ An Operation resource is created for each such asynchronous operation.
+ The state of the operation (including any errors encountered)
+ may be queried via the Operation resource.
+
+ ## Metadata
+
+ Provides metadata and statistical information about data in Cloud Firestore.
+ The data provided as part of this API may be stale.
+
+ ## Operation
+
+ The Operations collection provides a record of actions performed for the
+ specified Project (including any Operations in progress). Operations are not
+ created directly but through calls on other collections or resources.
+
+ An Operation that is not yet done may be cancelled. The request to cancel is
+ asynchronous and the Operation may continue to run for some time after the
+ request to cancel is made.
+
+ An Operation that is done may be deleted so that it is no longer listed as
+ part of the Operation collection.
+
+ Operations are created by service `FirestoreAdmin`, but are accessed via
+ service `google.longrunning.Operations`.
+ """
+
+ def CreateIndex(self, request, context):
+ """Creates the specified index.
+ A newly created index's initial state is `CREATING`. On completion of the
+ returned [google.longrunning.Operation][google.longrunning.Operation], the state will be `READY`.
+ If the index already exists, the call will return an `ALREADY_EXISTS`
+ status.
+
+ During creation, the process could result in an error, in which case the
+ index will move to the `ERROR` state. The process can be recovered by
+ fixing the data that caused the error, removing the index with
+ [delete][google.firestore.admin.v1beta1.FirestoreAdmin.DeleteIndex], then re-creating the index with
+ [create][google.firestore.admin.v1beta1.FirestoreAdmin.CreateIndex].
+
+ Indexes with a single field cannot be created.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def ListIndexes(self, request, context):
+ """Lists the indexes that match the specified filters.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def GetIndex(self, request, context):
+ """Gets an index.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def DeleteIndex(self, request, context):
+ """Deletes an index.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+
+def add_FirestoreAdminServicer_to_server(servicer, server):
+ rpc_method_handlers = {
+ "CreateIndex": grpc.unary_unary_rpc_method_handler(
+ servicer.CreateIndex,
+ request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.CreateIndexRequest.FromString,
+ response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
+ ),
+ "ListIndexes": grpc.unary_unary_rpc_method_handler(
+ servicer.ListIndexes,
+ request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.ListIndexesRequest.FromString,
+ response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.ListIndexesResponse.SerializeToString,
+ ),
+ "GetIndex": grpc.unary_unary_rpc_method_handler(
+ servicer.GetIndex,
+ request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.GetIndexRequest.FromString,
+ response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2.Index.SerializeToString,
+ ),
+ "DeleteIndex": grpc.unary_unary_rpc_method_handler(
+ servicer.DeleteIndex,
+ request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.DeleteIndexRequest.FromString,
+ response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
+ ),
+ }
+ generic_handler = grpc.method_handlers_generic_handler(
+ "google.firestore.admin.v1beta1.FirestoreAdmin", rpc_method_handlers
+ )
+ server.add_generic_rpc_handlers((generic_handler,))
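
On the server side, `add_FirestoreAdminServicer_to_server` registers a servicer subclass with a `grpc.Server`. A hedged sketch follows; the port, executor size, and echoed response are illustrative, not part of this diff.

```python
from concurrent import futures

import grpc
from google.cloud.firestore_v1beta1.proto.admin import (
    firestore_admin_pb2_grpc,
    index_pb2,
)


class EchoFirestoreAdmin(firestore_admin_pb2_grpc.FirestoreAdminServicer):
    def GetIndex(self, request, context):
        # Override just one RPC; the others keep the UNIMPLEMENTED default.
        return index_pb2.Index(name=request.name)


server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
firestore_admin_pb2_grpc.add_FirestoreAdminServicer_to_server(
    EchoFirestoreAdmin(), server
)
server.add_insecure_port("localhost:50051")
server.start()
server.wait_for_termination()  # needs a reasonably recent grpcio
```
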
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/admin/index_pb2.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/admin/index_pb2.py
new file mode 100644
index 000000000..de43ee88e
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/admin/index_pb2.py
@@ -0,0 +1,300 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/cloud/firestore_v1beta1/proto/admin/index.proto
+
+import sys
+
+_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name="google/cloud/firestore_v1beta1/proto/admin/index.proto",
+ package="google.firestore.admin.v1beta1",
+ syntax="proto3",
+ serialized_pb=_b(
+ '\n6google/cloud/firestore_v1beta1/proto/admin/index.proto\x12\x1egoogle.firestore.admin.v1beta1\x1a\x1cgoogle/api/annotations.proto"\x9c\x01\n\nIndexField\x12\x12\n\nfield_path\x18\x01 \x01(\t\x12=\n\x04mode\x18\x02 \x01(\x0e\x32/.google.firestore.admin.v1beta1.IndexField.Mode";\n\x04Mode\x12\x14\n\x10MODE_UNSPECIFIED\x10\x00\x12\r\n\tASCENDING\x10\x02\x12\x0e\n\nDESCENDING\x10\x03"\xe8\x01\n\x05Index\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x15\n\rcollection_id\x18\x02 \x01(\t\x12:\n\x06\x66ields\x18\x03 \x03(\x0b\x32*.google.firestore.admin.v1beta1.IndexField\x12:\n\x05state\x18\x06 \x01(\x0e\x32+.google.firestore.admin.v1beta1.Index.State"B\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0c\n\x08\x43REATING\x10\x03\x12\t\n\x05READY\x10\x02\x12\t\n\x05\x45RROR\x10\x05\x42\xa5\x01\n"com.google.firestore.admin.v1beta1B\nIndexProtoP\x01ZCgoogle.golang.org/genproto/googleapis/firestore/admin/v1beta1;admin\xa2\x02\x04GCFS\xaa\x02$Google.Cloud.Firestore.Admin.V1Beta1b\x06proto3'
+ ),
+ dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR],
+)
+
+
+_INDEXFIELD_MODE = _descriptor.EnumDescriptor(
+ name="Mode",
+ full_name="google.firestore.admin.v1beta1.IndexField.Mode",
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name="MODE_UNSPECIFIED", index=0, number=0, options=None, type=None
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="ASCENDING", index=1, number=2, options=None, type=None
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="DESCENDING", index=2, number=3, options=None, type=None
+ ),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=218,
+ serialized_end=277,
+)
+_sym_db.RegisterEnumDescriptor(_INDEXFIELD_MODE)
+
+_INDEX_STATE = _descriptor.EnumDescriptor(
+ name="State",
+ full_name="google.firestore.admin.v1beta1.Index.State",
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name="STATE_UNSPECIFIED", index=0, number=0, options=None, type=None
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="CREATING", index=1, number=3, options=None, type=None
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="READY", index=2, number=2, options=None, type=None
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="ERROR", index=3, number=5, options=None, type=None
+ ),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=446,
+ serialized_end=512,
+)
+_sym_db.RegisterEnumDescriptor(_INDEX_STATE)
+
+
+_INDEXFIELD = _descriptor.Descriptor(
+ name="IndexField",
+ full_name="google.firestore.admin.v1beta1.IndexField",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="field_path",
+ full_name="google.firestore.admin.v1beta1.IndexField.field_path",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ ),
+ _descriptor.FieldDescriptor(
+ name="mode",
+ full_name="google.firestore.admin.v1beta1.IndexField.mode",
+ index=1,
+ number=2,
+ type=14,
+ cpp_type=8,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[_INDEXFIELD_MODE],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=121,
+ serialized_end=277,
+)
+
+
+_INDEX = _descriptor.Descriptor(
+ name="Index",
+ full_name="google.firestore.admin.v1beta1.Index",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="name",
+ full_name="google.firestore.admin.v1beta1.Index.name",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ ),
+ _descriptor.FieldDescriptor(
+ name="collection_id",
+ full_name="google.firestore.admin.v1beta1.Index.collection_id",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ ),
+ _descriptor.FieldDescriptor(
+ name="fields",
+ full_name="google.firestore.admin.v1beta1.Index.fields",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ ),
+ _descriptor.FieldDescriptor(
+ name="state",
+ full_name="google.firestore.admin.v1beta1.Index.state",
+ index=3,
+ number=6,
+ type=14,
+ cpp_type=8,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[_INDEX_STATE],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=280,
+ serialized_end=512,
+)
+
+_INDEXFIELD.fields_by_name["mode"].enum_type = _INDEXFIELD_MODE
+_INDEXFIELD_MODE.containing_type = _INDEXFIELD
+_INDEX.fields_by_name["fields"].message_type = _INDEXFIELD
+_INDEX.fields_by_name["state"].enum_type = _INDEX_STATE
+_INDEX_STATE.containing_type = _INDEX
+DESCRIPTOR.message_types_by_name["IndexField"] = _INDEXFIELD
+DESCRIPTOR.message_types_by_name["Index"] = _INDEX
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+IndexField = _reflection.GeneratedProtocolMessageType(
+ "IndexField",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_INDEXFIELD,
+ __module__="google.cloud.firestore_v1beta1.proto.admin.index_pb2",
+ __doc__="""A field of an index.
+
+
+ Attributes:
+ field_path:
+ The path of the field. Must match the field path specification
+ described by
+ [google.firestore.v1beta1.Document.fields][fields]. Special
+ field path ``__name__`` may be used by itself or at the end of
+        a path. ``__type__`` may be used only at the end of a path.
+ mode:
+ The field's mode.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.IndexField)
+ ),
+)
+_sym_db.RegisterMessage(IndexField)
+
+Index = _reflection.GeneratedProtocolMessageType(
+ "Index",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_INDEX,
+ __module__="google.cloud.firestore_v1beta1.proto.admin.index_pb2",
+ __doc__="""An index definition.
+
+
+ Attributes:
+ name:
+ The resource name of the index.
+ collection_id:
+ The collection ID to which this index applies. Required.
+ fields:
+ The fields to index.
+ state:
+ The state of the index. The state is read-only. @OutputOnly
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.Index)
+ ),
+)
+_sym_db.RegisterMessage(Index)
+
+
+DESCRIPTOR.has_options = True
+DESCRIPTOR._options = _descriptor._ParseOptions(
+ descriptor_pb2.FileOptions(),
+ _b(
+ '\n"com.google.firestore.admin.v1beta1B\nIndexProtoP\001ZCgoogle.golang.org/genproto/googleapis/firestore/admin/v1beta1;admin\242\002\004GCFS\252\002$Google.Cloud.Firestore.Admin.V1Beta1'
+ ),
+)
+try:
+ # THESE ELEMENTS WILL BE DEPRECATED.
+ # Please use the generated *_pb2_grpc.py files instead.
+ import grpc
+ from grpc.beta import implementations as beta_implementations
+ from grpc.beta import interfaces as beta_interfaces
+ from grpc.framework.common import cardinality
+ from grpc.framework.interfaces.face import utilities as face_utilities
+except ImportError:
+ pass
+# @@protoc_insertion_point(module_scope)
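
The messages generated above are ordinary protobuf classes. A small sketch of building an `Index` (names illustrative); note that `state` is output-only and stays at its default until the server reports progress.

```python
from google.cloud.firestore_v1beta1.proto.admin import index_pb2

index = index_pb2.Index(
    collection_id="events",
    fields=[
        index_pb2.IndexField(
            field_path="created_at",
            mode=index_pb2.IndexField.DESCENDING,
        ),
    ],
)
# Output-only: remains STATE_UNSPECIFIED (0) on a locally built message.
assert index.state == index_pb2.Index.STATE_UNSPECIFIED
```
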
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/admin/index_pb2_grpc.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/admin/index_pb2_grpc.py
new file mode 100644
index 000000000..07cb78fe0
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/admin/index_pb2_grpc.py
@@ -0,0 +1,2 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+import grpc
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/common.proto b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/common.proto
new file mode 100644
index 000000000..b71a2e32e
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/common.proto
@@ -0,0 +1,84 @@
+// Copyright 2019 Google LLC.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+syntax = "proto3";
+
+package google.firestore.v1beta1;
+
+import "google/protobuf/timestamp.proto";
+import "google/api/annotations.proto";
+
+option csharp_namespace = "Google.Cloud.Firestore.V1Beta1";
+option go_package = "google.golang.org/genproto/googleapis/firestore/v1beta1;firestore";
+option java_multiple_files = true;
+option java_outer_classname = "CommonProto";
+option java_package = "com.google.firestore.v1beta1";
+option objc_class_prefix = "GCFS";
+option php_namespace = "Google\\Cloud\\Firestore\\V1beta1";
+option ruby_package = "Google::Cloud::Firestore::V1beta1";
+
+// A set of field paths on a document.
+// Used to restrict a get or update operation on a document to a subset of its
+// fields.
+// This is different from standard field masks, as this is always scoped to a
+// [Document][google.firestore.v1beta1.Document], and takes into account the dynamic nature of [Value][google.firestore.v1beta1.Value].
+message DocumentMask {
+ // The list of field paths in the mask. See [Document.fields][google.firestore.v1beta1.Document.fields] for a field
+ // path syntax reference.
+ repeated string field_paths = 1;
+}
+
+// A precondition on a document, used for conditional operations.
+message Precondition {
+ // The type of precondition.
+ oneof condition_type {
+ // When set to `true`, the target document must exist.
+ // When set to `false`, the target document must not exist.
+ bool exists = 1;
+
+ // When set, the target document must exist and have been last updated at
+ // that time.
+ google.protobuf.Timestamp update_time = 2;
+ }
+}
+
+// Options for creating a new transaction.
+message TransactionOptions {
+ // Options for a transaction that can be used to read and write documents.
+ message ReadWrite {
+ // An optional transaction to retry.
+ bytes retry_transaction = 1;
+ }
+
+ // Options for a transaction that can only be used to read documents.
+ message ReadOnly {
+ // The consistency mode for this transaction. If not set, defaults to strong
+ // consistency.
+ oneof consistency_selector {
+ // Reads documents at the given time.
+ // This may not be older than 60 seconds.
+ google.protobuf.Timestamp read_time = 2;
+ }
+ }
+
+ // The mode of the transaction.
+ oneof mode {
+ // The transaction can only be used for read operations.
+ ReadOnly read_only = 2;
+
+ // The transaction can be used for both read and write operations.
+ ReadWrite read_write = 3;
+ }
+}
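
A short sketch of the three messages defined in common.proto, via the generated `common_pb2` module added in the next hunk; the field names and snapshot time are illustrative.

```python
from google.protobuf import timestamp_pb2

from google.cloud.firestore_v1beta1.proto import common_pb2

# Restrict a get or update to two field paths.
mask = common_pb2.DocumentMask(field_paths=["first_name", "phone"])

# Conditional operation: the target document must already exist.
precondition = common_pb2.Precondition(exists=True)

# Read-only transaction pinned to a snapshot time (at most 60s old).
read_time = timestamp_pb2.Timestamp()
read_time.GetCurrentTime()
options = common_pb2.TransactionOptions(
    read_only=common_pb2.TransactionOptions.ReadOnly(read_time=read_time),
)
```
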
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/common_pb2.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/common_pb2.py
new file mode 100644
index 000000000..4ac2d2d1f
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/common_pb2.py
@@ -0,0 +1,456 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/cloud/firestore_v1beta1/proto/common.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
+from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name="google/cloud/firestore_v1beta1/proto/common.proto",
+ package="google.firestore.v1beta1",
+ syntax="proto3",
+ serialized_options=b"\n\034com.google.firestore.v1beta1B\013CommonProtoP\001ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\242\002\004GCFS\252\002\036Google.Cloud.Firestore.V1Beta1\312\002\036Google\\Cloud\\Firestore\\V1beta1\352\002!Google::Cloud::Firestore::V1beta1",
+ create_key=_descriptor._internal_create_key,
+ serialized_pb=b'\n1google/cloud/firestore_v1beta1/proto/common.proto\x12\x18google.firestore.v1beta1\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/api/annotations.proto"#\n\x0c\x44ocumentMask\x12\x13\n\x0b\x66ield_paths\x18\x01 \x03(\t"e\n\x0cPrecondition\x12\x10\n\x06\x65xists\x18\x01 \x01(\x08H\x00\x12\x31\n\x0bupdate_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x42\x10\n\x0e\x63ondition_type"\xb3\x02\n\x12TransactionOptions\x12J\n\tread_only\x18\x02 \x01(\x0b\x32\x35.google.firestore.v1beta1.TransactionOptions.ReadOnlyH\x00\x12L\n\nread_write\x18\x03 \x01(\x0b\x32\x36.google.firestore.v1beta1.TransactionOptions.ReadWriteH\x00\x1a&\n\tReadWrite\x12\x19\n\x11retry_transaction\x18\x01 \x01(\x0c\x1aS\n\x08ReadOnly\x12/\n\tread_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x42\x16\n\x14\x63onsistency_selectorB\x06\n\x04modeB\xdd\x01\n\x1c\x63om.google.firestore.v1beta1B\x0b\x43ommonProtoP\x01ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\xa2\x02\x04GCFS\xaa\x02\x1eGoogle.Cloud.Firestore.V1Beta1\xca\x02\x1eGoogle\\Cloud\\Firestore\\V1beta1\xea\x02!Google::Cloud::Firestore::V1beta1b\x06proto3',
+ dependencies=[
+ google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,
+ google_dot_api_dot_annotations__pb2.DESCRIPTOR,
+ ],
+)
+
+
+_DOCUMENTMASK = _descriptor.Descriptor(
+ name="DocumentMask",
+ full_name="google.firestore.v1beta1.DocumentMask",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="field_paths",
+ full_name="google.firestore.v1beta1.DocumentMask.field_paths",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=142,
+ serialized_end=177,
+)
+
+
+_PRECONDITION = _descriptor.Descriptor(
+ name="Precondition",
+ full_name="google.firestore.v1beta1.Precondition",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="exists",
+ full_name="google.firestore.v1beta1.Precondition.exists",
+ index=0,
+ number=1,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="update_time",
+ full_name="google.firestore.v1beta1.Precondition.update_time",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name="condition_type",
+ full_name="google.firestore.v1beta1.Precondition.condition_type",
+ index=0,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[],
+ ),
+ ],
+ serialized_start=179,
+ serialized_end=280,
+)
+
+
+_TRANSACTIONOPTIONS_READWRITE = _descriptor.Descriptor(
+ name="ReadWrite",
+ full_name="google.firestore.v1beta1.TransactionOptions.ReadWrite",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="retry_transaction",
+ full_name="google.firestore.v1beta1.TransactionOptions.ReadWrite.retry_transaction",
+ index=0,
+ number=1,
+ type=12,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"",
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=459,
+ serialized_end=497,
+)
+
+_TRANSACTIONOPTIONS_READONLY = _descriptor.Descriptor(
+ name="ReadOnly",
+ full_name="google.firestore.v1beta1.TransactionOptions.ReadOnly",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="read_time",
+ full_name="google.firestore.v1beta1.TransactionOptions.ReadOnly.read_time",
+ index=0,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name="consistency_selector",
+ full_name="google.firestore.v1beta1.TransactionOptions.ReadOnly.consistency_selector",
+ index=0,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[],
+ ),
+ ],
+ serialized_start=499,
+ serialized_end=582,
+)
+
+_TRANSACTIONOPTIONS = _descriptor.Descriptor(
+ name="TransactionOptions",
+ full_name="google.firestore.v1beta1.TransactionOptions",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="read_only",
+ full_name="google.firestore.v1beta1.TransactionOptions.read_only",
+ index=0,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="read_write",
+ full_name="google.firestore.v1beta1.TransactionOptions.read_write",
+ index=1,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[_TRANSACTIONOPTIONS_READWRITE, _TRANSACTIONOPTIONS_READONLY,],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name="mode",
+ full_name="google.firestore.v1beta1.TransactionOptions.mode",
+ index=0,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[],
+ ),
+ ],
+ serialized_start=283,
+ serialized_end=590,
+)
+
+_PRECONDITION.fields_by_name[
+ "update_time"
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_PRECONDITION.oneofs_by_name["condition_type"].fields.append(
+ _PRECONDITION.fields_by_name["exists"]
+)
+_PRECONDITION.fields_by_name["exists"].containing_oneof = _PRECONDITION.oneofs_by_name[
+ "condition_type"
+]
+_PRECONDITION.oneofs_by_name["condition_type"].fields.append(
+ _PRECONDITION.fields_by_name["update_time"]
+)
+_PRECONDITION.fields_by_name[
+ "update_time"
+].containing_oneof = _PRECONDITION.oneofs_by_name["condition_type"]
+_TRANSACTIONOPTIONS_READWRITE.containing_type = _TRANSACTIONOPTIONS
+_TRANSACTIONOPTIONS_READONLY.fields_by_name[
+ "read_time"
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_TRANSACTIONOPTIONS_READONLY.containing_type = _TRANSACTIONOPTIONS
+_TRANSACTIONOPTIONS_READONLY.oneofs_by_name["consistency_selector"].fields.append(
+ _TRANSACTIONOPTIONS_READONLY.fields_by_name["read_time"]
+)
+_TRANSACTIONOPTIONS_READONLY.fields_by_name[
+ "read_time"
+].containing_oneof = _TRANSACTIONOPTIONS_READONLY.oneofs_by_name["consistency_selector"]
+_TRANSACTIONOPTIONS.fields_by_name[
+ "read_only"
+].message_type = _TRANSACTIONOPTIONS_READONLY
+_TRANSACTIONOPTIONS.fields_by_name[
+ "read_write"
+].message_type = _TRANSACTIONOPTIONS_READWRITE
+_TRANSACTIONOPTIONS.oneofs_by_name["mode"].fields.append(
+ _TRANSACTIONOPTIONS.fields_by_name["read_only"]
+)
+_TRANSACTIONOPTIONS.fields_by_name[
+ "read_only"
+].containing_oneof = _TRANSACTIONOPTIONS.oneofs_by_name["mode"]
+_TRANSACTIONOPTIONS.oneofs_by_name["mode"].fields.append(
+ _TRANSACTIONOPTIONS.fields_by_name["read_write"]
+)
+_TRANSACTIONOPTIONS.fields_by_name[
+ "read_write"
+].containing_oneof = _TRANSACTIONOPTIONS.oneofs_by_name["mode"]
+DESCRIPTOR.message_types_by_name["DocumentMask"] = _DOCUMENTMASK
+DESCRIPTOR.message_types_by_name["Precondition"] = _PRECONDITION
+DESCRIPTOR.message_types_by_name["TransactionOptions"] = _TRANSACTIONOPTIONS
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+DocumentMask = _reflection.GeneratedProtocolMessageType(
+ "DocumentMask",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _DOCUMENTMASK,
+ "__module__": "google.cloud.firestore_v1beta1.proto.common_pb2",
+ "__doc__": """A set of field paths on a document. Used to restrict a get or update
+ operation on a document to a subset of its fields. This is different
+ from standard field masks, as this is always scoped to a
+    [Document][google.firestore.v1beta1.Document], and takes into account
+ the dynamic nature of [Value][google.firestore.v1beta1.Value].
+
+ Attributes:
+ field_paths:
+ The list of field paths in the mask. See
+ [Document.fields][google.firestore.v1beta1.Document.fields]
+ for a field path syntax reference.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.DocumentMask)
+ },
+)
+_sym_db.RegisterMessage(DocumentMask)
+
+Precondition = _reflection.GeneratedProtocolMessageType(
+ "Precondition",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _PRECONDITION,
+ "__module__": "google.cloud.firestore_v1beta1.proto.common_pb2",
+ "__doc__": """A precondition on a document, used for conditional operations.
+
+ Attributes:
+ condition_type:
+ The type of precondition.
+ exists:
+ When set to ``true``, the target document must exist. When set
+ to ``false``, the target document must not exist.
+ update_time:
+ When set, the target document must exist and have been last
+ updated at that time.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.Precondition)
+ },
+)
+_sym_db.RegisterMessage(Precondition)
+
+TransactionOptions = _reflection.GeneratedProtocolMessageType(
+ "TransactionOptions",
+ (_message.Message,),
+ {
+ "ReadWrite": _reflection.GeneratedProtocolMessageType(
+ "ReadWrite",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _TRANSACTIONOPTIONS_READWRITE,
+ "__module__": "google.cloud.firestore_v1beta1.proto.common_pb2",
+ "__doc__": """Options for a transaction that can be used to read and write
+ documents.
+
+ Attributes:
+ retry_transaction:
+ An optional transaction to retry.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.TransactionOptions.ReadWrite)
+ },
+ ),
+ "ReadOnly": _reflection.GeneratedProtocolMessageType(
+ "ReadOnly",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _TRANSACTIONOPTIONS_READONLY,
+ "__module__": "google.cloud.firestore_v1beta1.proto.common_pb2",
+ "__doc__": """Options for a transaction that can only be used to read documents.
+
+ Attributes:
+ consistency_selector:
+ The consistency mode for this transaction. If not set,
+ defaults to strong consistency.
+ read_time:
+ Reads documents at the given time. This may not be older than
+ 60 seconds.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.TransactionOptions.ReadOnly)
+ },
+ ),
+ "DESCRIPTOR": _TRANSACTIONOPTIONS,
+ "__module__": "google.cloud.firestore_v1beta1.proto.common_pb2",
+ "__doc__": """Options for creating a new transaction.
+
+ Attributes:
+ mode:
+ The mode of the transaction.
+ read_only:
+ The transaction can only be used for read operations.
+ read_write:
+ The transaction can be used for both read and write
+ operations.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.TransactionOptions)
+ },
+)
+_sym_db.RegisterMessage(TransactionOptions)
+_sym_db.RegisterMessage(TransactionOptions.ReadWrite)
+_sym_db.RegisterMessage(TransactionOptions.ReadOnly)
+
+
+DESCRIPTOR._options = None
+# @@protoc_insertion_point(module_scope)
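
Both `condition_type` and `mode` are oneofs, so the generated classes enforce mutual exclusion: setting one member clears the other. A brief sketch of that behavior:

```python
from google.cloud.firestore_v1beta1.proto import common_pb2

opts = common_pb2.TransactionOptions()
opts.read_write.retry_transaction = b"txn-token"  # selects read_write
assert opts.WhichOneof("mode") == "read_write"

opts.read_only.SetInParent()                      # switching modes...
assert opts.WhichOneof("mode") == "read_only"
assert not opts.HasField("read_write")            # ...clears the other arm
```
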
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/common_pb2_grpc.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/common_pb2_grpc.py
new file mode 100644
index 000000000..8a9393943
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/common_pb2_grpc.py
@@ -0,0 +1,3 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/document.proto b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/document.proto
new file mode 100644
index 000000000..38d81af96
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/document.proto
@@ -0,0 +1,151 @@
+// Copyright 2019 Google LLC.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+syntax = "proto3";
+
+package google.firestore.v1beta1;
+
+import "google/protobuf/struct.proto";
+import "google/protobuf/timestamp.proto";
+import "google/type/latlng.proto";
+import "google/api/annotations.proto";
+
+option csharp_namespace = "Google.Cloud.Firestore.V1Beta1";
+option go_package = "google.golang.org/genproto/googleapis/firestore/v1beta1;firestore";
+option java_multiple_files = true;
+option java_outer_classname = "DocumentProto";
+option java_package = "com.google.firestore.v1beta1";
+option objc_class_prefix = "GCFS";
+option php_namespace = "Google\\Cloud\\Firestore\\V1beta1";
+option ruby_package = "Google::Cloud::Firestore::V1beta1";
+
+// A Firestore document.
+//
+// Must not exceed 1 MiB - 4 bytes.
+message Document {
+ // The resource name of the document, for example
+ // `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
+ string name = 1;
+
+ // The document's fields.
+ //
+ // The map keys represent field names.
+ //
+ // A simple field name contains only characters `a` to `z`, `A` to `Z`,
+ // `0` to `9`, or `_`, and must not start with `0` to `9`. For example,
+ // `foo_bar_17`.
+ //
+ // Field names matching the regular expression `__.*__` are reserved. Reserved
+ // field names are forbidden except in certain documented contexts. The map
+ // keys, represented as UTF-8, must not exceed 1,500 bytes and cannot be
+ // empty.
+ //
+ // Field paths may be used in other contexts to refer to structured fields
+ // defined here. For `map_value`, the field path is represented by the simple
+ // or quoted field names of the containing fields, delimited by `.`. For
+ // example, the structured field
+ // `"foo" : { map_value: { "x&y" : { string_value: "hello" }}}` would be
+ // represented by the field path `foo.x&y`.
+ //
+ // Within a field path, a quoted field name starts and ends with `` ` `` and
+ // may contain any character. Some characters, including `` ` ``, must be
+ // escaped using a `\`. For example, `` `x&y` `` represents `x&y` and
+ // `` `bak\`tik` `` represents `` bak`tik ``.
+  map<string, Value> fields = 2;
+
+ // Output only. The time at which the document was created.
+ //
+ // This value increases monotonically when a document is deleted then
+ // recreated. It can also be compared to values from other documents and
+ // the `read_time` of a query.
+ google.protobuf.Timestamp create_time = 3;
+
+ // Output only. The time at which the document was last changed.
+ //
+ // This value is initially set to the `create_time` then increases
+ // monotonically with each change to the document. It can also be
+ // compared to values from other documents and the `read_time` of a query.
+ google.protobuf.Timestamp update_time = 4;
+}
+
+// A message that can hold any of the supported value types.
+message Value {
+ // Must have a value set.
+ oneof value_type {
+ // A null value.
+ google.protobuf.NullValue null_value = 11;
+
+ // A boolean value.
+ bool boolean_value = 1;
+
+ // An integer value.
+ int64 integer_value = 2;
+
+ // A double value.
+ double double_value = 3;
+
+ // A timestamp value.
+ //
+ // Precise only to microseconds. When stored, any additional precision is
+ // rounded down.
+ google.protobuf.Timestamp timestamp_value = 10;
+
+ // A string value.
+ //
+ // The string, represented as UTF-8, must not exceed 1 MiB - 89 bytes.
+ // Only the first 1,500 bytes of the UTF-8 representation are considered by
+ // queries.
+ string string_value = 17;
+
+ // A bytes value.
+ //
+ // Must not exceed 1 MiB - 89 bytes.
+ // Only the first 1,500 bytes are considered by queries.
+ bytes bytes_value = 18;
+
+ // A reference to a document. For example:
+ // `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
+ string reference_value = 5;
+
+ // A geo point value representing a point on the surface of Earth.
+ google.type.LatLng geo_point_value = 8;
+
+ // An array value.
+ //
+  // Cannot directly contain another array value, though it can contain a
+  // map which contains another array.
+ ArrayValue array_value = 9;
+
+ // A map value.
+ MapValue map_value = 6;
+ }
+}
+
+// An array value.
+message ArrayValue {
+ // Values in the array.
+ repeated Value values = 1;
+}
+
+// A map value.
+message MapValue {
+ // The map's fields.
+ //
+ // The map keys represent field names. Field names matching the regular
+ // expression `__.*__` are reserved. Reserved field names are forbidden except
+ // in certain documented contexts. The map keys, represented as UTF-8, must
+ // not exceed 1,500 bytes and cannot be empty.
+  map<string, Value> fields = 1;
+}
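
A brief sketch of building a `Document` from these messages once the generated `document_pb2` module (added in the next hunk) is importable; names and values are illustrative. Message-valued maps auto-create a `Value` on first access.

```python
from google.cloud.firestore_v1beta1.proto import document_pb2

doc = document_pb2.Document(
    name="projects/p/databases/(default)/documents/users/alovelace",
)
doc.fields["first"].string_value = "Ada"  # a simple field name
doc.fields["born"].integer_value = 1815
# A nested map; the inner key is addressed by the field path `address.city`.
doc.fields["address"].map_value.fields["city"].string_value = "London"
```
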
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/document_pb2.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/document_pb2.py
new file mode 100644
index 000000000..b4b150e6c
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/document_pb2.py
@@ -0,0 +1,815 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/cloud/firestore_v1beta1/proto/document.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2
+from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
+from google.type import latlng_pb2 as google_dot_type_dot_latlng__pb2
+from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name="google/cloud/firestore_v1beta1/proto/document.proto",
+ package="google.firestore.v1beta1",
+ syntax="proto3",
+ serialized_options=b"\n\034com.google.firestore.v1beta1B\rDocumentProtoP\001ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\242\002\004GCFS\252\002\036Google.Cloud.Firestore.V1Beta1\312\002\036Google\\Cloud\\Firestore\\V1beta1\352\002!Google::Cloud::Firestore::V1beta1",
+ create_key=_descriptor._internal_create_key,
+ serialized_pb=b'\n3google/cloud/firestore_v1beta1/proto/document.proto\x12\x18google.firestore.v1beta1\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x18google/type/latlng.proto\x1a\x1cgoogle/api/annotations.proto"\x8a\x02\n\x08\x44ocument\x12\x0c\n\x04name\x18\x01 \x01(\t\x12>\n\x06\x66ields\x18\x02 \x03(\x0b\x32..google.firestore.v1beta1.Document.FieldsEntry\x12/\n\x0b\x63reate_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1aN\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12.\n\x05value\x18\x02 \x01(\x0b\x32\x1f.google.firestore.v1beta1.Value:\x02\x38\x01"\xb8\x03\n\x05Value\x12\x30\n\nnull_value\x18\x0b \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x17\n\rboolean_value\x18\x01 \x01(\x08H\x00\x12\x17\n\rinteger_value\x18\x02 \x01(\x03H\x00\x12\x16\n\x0c\x64ouble_value\x18\x03 \x01(\x01H\x00\x12\x35\n\x0ftimestamp_value\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x16\n\x0cstring_value\x18\x11 \x01(\tH\x00\x12\x15\n\x0b\x62ytes_value\x18\x12 \x01(\x0cH\x00\x12\x19\n\x0freference_value\x18\x05 \x01(\tH\x00\x12.\n\x0fgeo_point_value\x18\x08 \x01(\x0b\x32\x13.google.type.LatLngH\x00\x12;\n\x0b\x61rray_value\x18\t \x01(\x0b\x32$.google.firestore.v1beta1.ArrayValueH\x00\x12\x37\n\tmap_value\x18\x06 \x01(\x0b\x32".google.firestore.v1beta1.MapValueH\x00\x42\x0c\n\nvalue_type"=\n\nArrayValue\x12/\n\x06values\x18\x01 \x03(\x0b\x32\x1f.google.firestore.v1beta1.Value"\x9a\x01\n\x08MapValue\x12>\n\x06\x66ields\x18\x01 \x03(\x0b\x32..google.firestore.v1beta1.MapValue.FieldsEntry\x1aN\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12.\n\x05value\x18\x02 \x01(\x0b\x32\x1f.google.firestore.v1beta1.Value:\x02\x38\x01\x42\xdf\x01\n\x1c\x63om.google.firestore.v1beta1B\rDocumentProtoP\x01ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\xa2\x02\x04GCFS\xaa\x02\x1eGoogle.Cloud.Firestore.V1Beta1\xca\x02\x1eGoogle\\Cloud\\Firestore\\V1beta1\xea\x02!Google::Cloud::Firestore::V1beta1b\x06proto3',
+ dependencies=[
+ google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,
+ google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,
+ google_dot_type_dot_latlng__pb2.DESCRIPTOR,
+ google_dot_api_dot_annotations__pb2.DESCRIPTOR,
+ ],
+)
+
+
+_DOCUMENT_FIELDSENTRY = _descriptor.Descriptor(
+ name="FieldsEntry",
+ full_name="google.firestore.v1beta1.Document.FieldsEntry",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="key",
+ full_name="google.firestore.v1beta1.Document.FieldsEntry.key",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="value",
+ full_name="google.firestore.v1beta1.Document.FieldsEntry.value",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=b"8\001",
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=389,
+ serialized_end=467,
+)
+
+_DOCUMENT = _descriptor.Descriptor(
+ name="Document",
+ full_name="google.firestore.v1beta1.Document",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="name",
+ full_name="google.firestore.v1beta1.Document.name",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="fields",
+ full_name="google.firestore.v1beta1.Document.fields",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="create_time",
+ full_name="google.firestore.v1beta1.Document.create_time",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="update_time",
+ full_name="google.firestore.v1beta1.Document.update_time",
+ index=3,
+ number=4,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[_DOCUMENT_FIELDSENTRY,],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=201,
+ serialized_end=467,
+)
+
+
+_VALUE = _descriptor.Descriptor(
+ name="Value",
+ full_name="google.firestore.v1beta1.Value",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="null_value",
+ full_name="google.firestore.v1beta1.Value.null_value",
+ index=0,
+ number=11,
+ type=14,
+ cpp_type=8,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="boolean_value",
+ full_name="google.firestore.v1beta1.Value.boolean_value",
+ index=1,
+ number=1,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="integer_value",
+ full_name="google.firestore.v1beta1.Value.integer_value",
+ index=2,
+ number=2,
+ type=3,
+ cpp_type=2,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="double_value",
+ full_name="google.firestore.v1beta1.Value.double_value",
+ index=3,
+ number=3,
+ type=1,
+ cpp_type=5,
+ label=1,
+ has_default_value=False,
+ default_value=float(0),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="timestamp_value",
+ full_name="google.firestore.v1beta1.Value.timestamp_value",
+ index=4,
+ number=10,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="string_value",
+ full_name="google.firestore.v1beta1.Value.string_value",
+ index=5,
+ number=17,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="bytes_value",
+ full_name="google.firestore.v1beta1.Value.bytes_value",
+ index=6,
+ number=18,
+ type=12,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"",
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="reference_value",
+ full_name="google.firestore.v1beta1.Value.reference_value",
+ index=7,
+ number=5,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="geo_point_value",
+ full_name="google.firestore.v1beta1.Value.geo_point_value",
+ index=8,
+ number=8,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="array_value",
+ full_name="google.firestore.v1beta1.Value.array_value",
+ index=9,
+ number=9,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="map_value",
+ full_name="google.firestore.v1beta1.Value.map_value",
+ index=10,
+ number=6,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name="value_type",
+ full_name="google.firestore.v1beta1.Value.value_type",
+ index=0,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[],
+ ),
+ ],
+ serialized_start=470,
+ serialized_end=910,
+)
+
+
+_ARRAYVALUE = _descriptor.Descriptor(
+ name="ArrayValue",
+ full_name="google.firestore.v1beta1.ArrayValue",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="values",
+ full_name="google.firestore.v1beta1.ArrayValue.values",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=912,
+ serialized_end=973,
+)
+
+
+_MAPVALUE_FIELDSENTRY = _descriptor.Descriptor(
+ name="FieldsEntry",
+ full_name="google.firestore.v1beta1.MapValue.FieldsEntry",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="key",
+ full_name="google.firestore.v1beta1.MapValue.FieldsEntry.key",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="value",
+ full_name="google.firestore.v1beta1.MapValue.FieldsEntry.value",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=b"8\001",
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=389,
+ serialized_end=467,
+)
+
+_MAPVALUE = _descriptor.Descriptor(
+ name="MapValue",
+ full_name="google.firestore.v1beta1.MapValue",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="fields",
+ full_name="google.firestore.v1beta1.MapValue.fields",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[_MAPVALUE_FIELDSENTRY,],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=976,
+ serialized_end=1130,
+)
+
+_DOCUMENT_FIELDSENTRY.fields_by_name["value"].message_type = _VALUE
+_DOCUMENT_FIELDSENTRY.containing_type = _DOCUMENT
+_DOCUMENT.fields_by_name["fields"].message_type = _DOCUMENT_FIELDSENTRY
+_DOCUMENT.fields_by_name[
+ "create_time"
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_DOCUMENT.fields_by_name[
+ "update_time"
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_VALUE.fields_by_name[
+ "null_value"
+].enum_type = google_dot_protobuf_dot_struct__pb2._NULLVALUE
+_VALUE.fields_by_name[
+ "timestamp_value"
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_VALUE.fields_by_name[
+ "geo_point_value"
+].message_type = google_dot_type_dot_latlng__pb2._LATLNG
+_VALUE.fields_by_name["array_value"].message_type = _ARRAYVALUE
+_VALUE.fields_by_name["map_value"].message_type = _MAPVALUE
+_VALUE.oneofs_by_name["value_type"].fields.append(_VALUE.fields_by_name["null_value"])
+_VALUE.fields_by_name["null_value"].containing_oneof = _VALUE.oneofs_by_name[
+ "value_type"
+]
+_VALUE.oneofs_by_name["value_type"].fields.append(
+ _VALUE.fields_by_name["boolean_value"]
+)
+_VALUE.fields_by_name["boolean_value"].containing_oneof = _VALUE.oneofs_by_name[
+ "value_type"
+]
+_VALUE.oneofs_by_name["value_type"].fields.append(
+ _VALUE.fields_by_name["integer_value"]
+)
+_VALUE.fields_by_name["integer_value"].containing_oneof = _VALUE.oneofs_by_name[
+ "value_type"
+]
+_VALUE.oneofs_by_name["value_type"].fields.append(_VALUE.fields_by_name["double_value"])
+_VALUE.fields_by_name["double_value"].containing_oneof = _VALUE.oneofs_by_name[
+ "value_type"
+]
+_VALUE.oneofs_by_name["value_type"].fields.append(
+ _VALUE.fields_by_name["timestamp_value"]
+)
+_VALUE.fields_by_name["timestamp_value"].containing_oneof = _VALUE.oneofs_by_name[
+ "value_type"
+]
+_VALUE.oneofs_by_name["value_type"].fields.append(_VALUE.fields_by_name["string_value"])
+_VALUE.fields_by_name["string_value"].containing_oneof = _VALUE.oneofs_by_name[
+ "value_type"
+]
+_VALUE.oneofs_by_name["value_type"].fields.append(_VALUE.fields_by_name["bytes_value"])
+_VALUE.fields_by_name["bytes_value"].containing_oneof = _VALUE.oneofs_by_name[
+ "value_type"
+]
+_VALUE.oneofs_by_name["value_type"].fields.append(
+ _VALUE.fields_by_name["reference_value"]
+)
+_VALUE.fields_by_name["reference_value"].containing_oneof = _VALUE.oneofs_by_name[
+ "value_type"
+]
+_VALUE.oneofs_by_name["value_type"].fields.append(
+ _VALUE.fields_by_name["geo_point_value"]
+)
+_VALUE.fields_by_name["geo_point_value"].containing_oneof = _VALUE.oneofs_by_name[
+ "value_type"
+]
+_VALUE.oneofs_by_name["value_type"].fields.append(_VALUE.fields_by_name["array_value"])
+_VALUE.fields_by_name["array_value"].containing_oneof = _VALUE.oneofs_by_name[
+ "value_type"
+]
+_VALUE.oneofs_by_name["value_type"].fields.append(_VALUE.fields_by_name["map_value"])
+_VALUE.fields_by_name["map_value"].containing_oneof = _VALUE.oneofs_by_name[
+ "value_type"
+]
+_ARRAYVALUE.fields_by_name["values"].message_type = _VALUE
+_MAPVALUE_FIELDSENTRY.fields_by_name["value"].message_type = _VALUE
+_MAPVALUE_FIELDSENTRY.containing_type = _MAPVALUE
+_MAPVALUE.fields_by_name["fields"].message_type = _MAPVALUE_FIELDSENTRY
+DESCRIPTOR.message_types_by_name["Document"] = _DOCUMENT
+DESCRIPTOR.message_types_by_name["Value"] = _VALUE
+DESCRIPTOR.message_types_by_name["ArrayValue"] = _ARRAYVALUE
+DESCRIPTOR.message_types_by_name["MapValue"] = _MAPVALUE
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+Document = _reflection.GeneratedProtocolMessageType(
+ "Document",
+ (_message.Message,),
+ {
+ "FieldsEntry": _reflection.GeneratedProtocolMessageType(
+ "FieldsEntry",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _DOCUMENT_FIELDSENTRY,
+ "__module__": "google.cloud.firestore_v1beta1.proto.document_pb2"
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.Document.FieldsEntry)
+ },
+ ),
+ "DESCRIPTOR": _DOCUMENT,
+ "__module__": "google.cloud.firestore_v1beta1.proto.document_pb2",
+ "__doc__": """A Firestore document. Must not exceed 1 MiB - 4 bytes.
+
+ Attributes:
+ name:
+ The resource name of the document, for example ``projects/{pro
+ ject_id}/databases/{database_id}/documents/{document_path}``.
+ fields:
+ The document’s fields. The map keys represent field names. A
+ simple field name contains only characters ``a`` to ``z``,
+ ``A`` to ``Z``, ``0`` to ``9``, or ``_``, and must not start
+ with ``0`` to ``9``. For example, ``foo_bar_17``. Field names
+ matching the regular expression ``__.*__`` are reserved.
+ Reserved field names are forbidden except in certain
+ documented contexts. The map keys, represented as UTF-8, must
+ not exceed 1,500 bytes and cannot be empty. Field paths may
+ be used in other contexts to refer to structured fields
+ defined here. For ``map_value``, the field path is represented
+ by the simple or quoted field names of the containing fields,
+ delimited by ``.``. For example, the structured field ``"foo"
+ : { map_value: { "x&y" : { string_value: "hello" }}}`` would
+ be represented by the field path ``foo.x&y``. Within a field
+ path, a quoted field name starts and ends with :literal:`\``
+ and may contain any character. Some characters, including
+ :literal:`\``, must be escaped using a ``\``. For example,
+ :literal:`\`x&y\`` represents ``x&y`` and
+ :literal:`\`bak\`tik\`` represents :literal:`bak`tik`.
+ create_time:
+ Output only. The time at which the document was created. This
+ value increases monotonically when a document is deleted then
+ recreated. It can also be compared to values from other
+ documents and the ``read_time`` of a query.
+ update_time:
+ Output only. The time at which the document was last changed.
+ This value is initially set to the ``create_time`` then
+ increases monotonically with each change to the document. It
+ can also be compared to values from other documents and the
+ ``read_time`` of a query.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.Document)
+ },
+)
+_sym_db.RegisterMessage(Document)
+_sym_db.RegisterMessage(Document.FieldsEntry)
+
+Value = _reflection.GeneratedProtocolMessageType(
+ "Value",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _VALUE,
+ "__module__": "google.cloud.firestore_v1beta1.proto.document_pb2",
+ "__doc__": """A message that can hold any of the supported value types.
+
+ Attributes:
+ value_type:
+ Must have a value set.
+ null_value:
+ A null value.
+ boolean_value:
+ A boolean value.
+ integer_value:
+ An integer value.
+ double_value:
+ A double value.
+ timestamp_value:
+ A timestamp value. Precise only to microseconds. When stored,
+ any additional precision is rounded down.
+ string_value:
+ A string value. The string, represented as UTF-8, must not
+ exceed 1 MiB - 89 bytes. Only the first 1,500 bytes of the
+ UTF-8 representation are considered by queries.
+ bytes_value:
+ A bytes value. Must not exceed 1 MiB - 89 bytes. Only the
+ first 1,500 bytes are considered by queries.
+ reference_value:
+ A reference to a document. For example: ``projects/{project_id
+ }/databases/{database_id}/documents/{document_path}``.
+ geo_point_value:
+ A geo point value representing a point on the surface of
+ Earth.
+ array_value:
+ An array value. Cannot directly contain another array value,
+        though it can contain a map which contains another array.
+ map_value:
+ A map value.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.Value)
+ },
+)
+_sym_db.RegisterMessage(Value)
+
+ArrayValue = _reflection.GeneratedProtocolMessageType(
+ "ArrayValue",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _ARRAYVALUE,
+ "__module__": "google.cloud.firestore_v1beta1.proto.document_pb2",
+ "__doc__": """An array value.
+
+ Attributes:
+ values:
+ Values in the array.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.ArrayValue)
+ },
+)
+_sym_db.RegisterMessage(ArrayValue)
+
+MapValue = _reflection.GeneratedProtocolMessageType(
+ "MapValue",
+ (_message.Message,),
+ {
+ "FieldsEntry": _reflection.GeneratedProtocolMessageType(
+ "FieldsEntry",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _MAPVALUE_FIELDSENTRY,
+ "__module__": "google.cloud.firestore_v1beta1.proto.document_pb2"
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.MapValue.FieldsEntry)
+ },
+ ),
+ "DESCRIPTOR": _MAPVALUE,
+ "__module__": "google.cloud.firestore_v1beta1.proto.document_pb2",
+ "__doc__": """A map value.
+
+ Attributes:
+ fields:
+ The map’s fields. The map keys represent field names. Field
+ names matching the regular expression ``__.*__`` are reserved.
+ Reserved field names are forbidden except in certain
+ documented contexts. The map keys, represented as UTF-8, must
+ not exceed 1,500 bytes and cannot be empty.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.MapValue)
+ },
+)
+_sym_db.RegisterMessage(MapValue)
+_sym_db.RegisterMessage(MapValue.FieldsEntry)
+
+
+DESCRIPTOR._options = None
+_DOCUMENT_FIELDSENTRY._options = None
+_MAPVALUE_FIELDSENTRY._options = None
+# @@protoc_insertion_point(module_scope)
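+
+# An illustrative usage sketch (not part of the generated file; it assumes
+# this module is importable as document_pb2):
+#
+#   doc = Document(name="projects/p/databases/d/documents/users/alice")
+#   doc.fields["age"].integer_value = 30
+#   doc.fields["nickname"].string_value = "ally"
+#   data = doc.SerializeToString()
+#   assert Document.FromString(data) == doc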
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/document_pb2_grpc.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/document_pb2_grpc.py
new file mode 100644
index 000000000..8a9393943
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/document_pb2_grpc.py
@@ -0,0 +1,3 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/event_flow_document_change_pb2.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/event_flow_document_change_pb2.py
new file mode 100644
index 000000000..957acef26
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/event_flow_document_change_pb2.py
@@ -0,0 +1,62 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/cloud/firestore_v1beta1/proto/event_flow_document_change.proto
+
+import sys
+
+_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
+from google.cloud.firestore_v1beta1.proto import (
+ common_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2,
+)
+from google.cloud.firestore_v1beta1.proto import (
+ document_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2,
+)
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name="google/cloud/firestore_v1beta1/proto/event_flow_document_change.proto",
+ package="google.firestore.v1beta1",
+ syntax="proto3",
+ serialized_pb=_b(
+ "\nEgoogle/cloud/firestore_v1beta1/proto/event_flow_document_change.proto\x12\x18google.firestore.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x31google/cloud/firestore_v1beta1/proto/common.proto\x1a\x33google/cloud/firestore_v1beta1/proto/document.protoB\xa2\x01\n\x1c\x63om.google.firestore.v1beta1B\x1c\x45ventFlowDocumentChangeProtoP\x01ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\xaa\x02\x1eGoogle.Cloud.Firestore.V1Beta1b\x06proto3"
+ ),
+ dependencies=[
+ google_dot_api_dot_annotations__pb2.DESCRIPTOR,
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2.DESCRIPTOR,
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.DESCRIPTOR,
+ ],
+)
+
+
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+
+DESCRIPTOR.has_options = True
+DESCRIPTOR._options = _descriptor._ParseOptions(
+ descriptor_pb2.FileOptions(),
+ _b(
+ "\n\034com.google.firestore.v1beta1B\034EventFlowDocumentChangeProtoP\001ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\252\002\036Google.Cloud.Firestore.V1Beta1"
+ ),
+)
+try:
+ # THESE ELEMENTS WILL BE DEPRECATED.
+ # Please use the generated *_pb2_grpc.py files instead.
+ import grpc
+ from grpc.beta import implementations as beta_implementations
+ from grpc.beta import interfaces as beta_interfaces
+ from grpc.framework.common import cardinality
+ from grpc.framework.interfaces.face import utilities as face_utilities
+except ImportError:
+ pass
+# @@protoc_insertion_point(module_scope)
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/event_flow_document_change_pb2_grpc.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/event_flow_document_change_pb2_grpc.py
new file mode 100644
index 000000000..07cb78fe0
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/event_flow_document_change_pb2_grpc.py
@@ -0,0 +1,2 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+import grpc
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/field.proto b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/field.proto
new file mode 100644
index 000000000..9d1534eb1
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/field.proto
@@ -0,0 +1,95 @@
+// Copyright 2018 Google LLC.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+syntax = "proto3";
+
+package google.firestore.admin.v1beta2;
+
+import "google/api/annotations.proto";
+import "google/firestore/admin/v1beta2/index.proto";
+
+option csharp_namespace = "Google.Cloud.Firestore.Admin.V1Beta2";
+option go_package = "google.golang.org/genproto/googleapis/firestore/admin/v1beta2;admin";
+option java_multiple_files = true;
+option java_outer_classname = "FieldProto";
+option java_package = "com.google.firestore.admin.v1beta2";
+option objc_class_prefix = "GCFS";
+
+
+// Represents a single field in the database.
+//
+// Fields are grouped by their "Collection Group", which represents all
+// collections in the database with the same id.
+message Field {
+ // The index configuration for this field.
+ message IndexConfig {
+ // The indexes supported for this field.
+ repeated Index indexes = 1;
+
+ // Output only.
+ // When true, the `Field`'s index configuration is set from the
+ // configuration specified by the `ancestor_field`.
+ // When false, the `Field`'s index configuration is defined explicitly.
+ bool uses_ancestor_config = 2;
+
+ // Output only.
+ // Specifies the resource name of the `Field` from which this field's
+ // index configuration is set (when `uses_ancestor_config` is true),
+ // or from which it *would* be set if this field had no index configuration
+ // (when `uses_ancestor_config` is false).
+ string ancestor_field = 3;
+
+    // Output only.
+ // When true, the `Field`'s index configuration is in the process of being
+ // reverted. Once complete, the index config will transition to the same
+ // state as the field specified by `ancestor_field`, at which point
+ // `uses_ancestor_config` will be `true` and `reverting` will be `false`.
+ bool reverting = 4;
+ }
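+
+  // An illustrative (non-normative) lifecycle sketch of the flags above: a
+  // field typically starts with `uses_ancestor_config` true, inheriting from
+  // the default `Field` at `.../collectionGroups/__default__/fields/*`;
+  // setting an explicit `IndexConfig` flips `uses_ancestor_config` to false,
+  // and clearing that explicit config sets `reverting` to true until the
+  // inherited state is restored.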
+
+ // A field name of the form
+ // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_path}`
+ //
+ // A field path may be a simple field name, e.g. `address` or a path to fields
+// within map_value, e.g. `address.city`,
+ // or a special field path. The only valid special field is `*`, which
+ // represents any field.
+ //
+ // Field paths may be quoted using ` (backtick). The only character that needs
+ // to be escaped within a quoted field path is the backtick character itself,
+ // escaped using a backslash. Special characters in field paths that
+ // must be quoted include: `*`, `.`,
+ // ``` (backtick), `[`, `]`, as well as any ascii symbolic characters.
+ //
+ // Examples:
+ // (Note: Comments here are written in markdown syntax, so there is an
+ // additional layer of backticks to represent a code block)
+ // `\`address.city\`` represents a field named `address.city`, not the map key
+ // `city` in the field `address`.
+ // `\`*\`` represents a field named `*`, not any field.
+ //
+ // A special `Field` contains the default indexing settings for all fields.
+ // This field's resource name is:
+ // `projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*`
+ // Indexes defined on this `Field` will be applied to all fields which do not
+ // have their own `Field` index configuration.
+ string name = 1;
+
+ // The index configuration for this field. If unset, field indexing will
+ // revert to the configuration defined by the `ancestor_field`. To
+ // explicitly remove all indexes for this field, specify an index config
+ // with an empty list of indexes.
+ IndexConfig index_config = 2;
+}
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/firestore.proto b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/firestore.proto
new file mode 100644
index 000000000..5cdccb7ea
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/firestore.proto
@@ -0,0 +1,766 @@
+// Copyright 2019 Google LLC.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+syntax = "proto3";
+
+package google.firestore.v1beta1;
+
+import "google/api/annotations.proto";
+import "google/api/client.proto";
+import "google/api/field_behavior.proto";
+import "google/firestore/v1beta1/common.proto";
+import "google/firestore/v1beta1/document.proto";
+import "google/firestore/v1beta1/query.proto";
+import "google/firestore/v1beta1/write.proto";
+import "google/protobuf/empty.proto";
+import "google/protobuf/timestamp.proto";
+import "google/rpc/status.proto";
+
+option csharp_namespace = "Google.Cloud.Firestore.V1Beta1";
+option go_package = "google.golang.org/genproto/googleapis/firestore/v1beta1;firestore";
+option java_multiple_files = true;
+option java_outer_classname = "FirestoreProto";
+option java_package = "com.google.firestore.v1beta1";
+option objc_class_prefix = "GCFS";
+option php_namespace = "Google\\Cloud\\Firestore\\V1beta1";
+option ruby_package = "Google::Cloud::Firestore::V1beta1";
+
+// Specification of the Firestore API.
+
+// The Cloud Firestore service.
+//
+// This service exposes several types of comparable timestamps:
+//
+// * `create_time` - The time at which a document was created. Changes only
+// when a document is deleted, then re-created. Increases in a strict
+// monotonic fashion.
+// * `update_time` - The time at which a document was last updated. Changes
+// every time a document is modified. Does not change when a write results
+// in no modifications. Increases in a strict monotonic fashion.
+// * `read_time` - The time at which a particular state was observed. Used
+// to denote a consistent snapshot of the database or the time at which a
+// Document was observed to not exist.
+// * `commit_time` - The time at which the writes in a transaction were
+// committed. Any read with an equal or greater `read_time` is guaranteed
+// to see the effects of the transaction.
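+//
+// An illustrative (non-normative) sketch of how these timestamps compose: if
+// a commit returns `commit_time` = T, a read with `read_time` >= T is
+// guaranteed to observe the committed writes, while a snapshot read at an
+// earlier `read_time` observes the pre-commit state.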
+service Firestore {
+ option (google.api.default_host) = "firestore.googleapis.com";
+ option (google.api.oauth_scopes) =
+ "https://www.googleapis.com/auth/cloud-platform,"
+ "https://www.googleapis.com/auth/datastore";
+
+ // Gets a single document.
+ rpc GetDocument(GetDocumentRequest) returns (Document) {
+ option (google.api.http) = {
+ get: "/v1beta1/{name=projects/*/databases/*/documents/*/**}"
+ };
+ }
+
+ // Lists documents.
+ rpc ListDocuments(ListDocumentsRequest) returns (ListDocumentsResponse) {
+ option (google.api.http) = {
+ get: "/v1beta1/{parent=projects/*/databases/*/documents/*/**}/{collection_id}"
+ };
+ }
+
+ // Creates a new document.
+ rpc CreateDocument(CreateDocumentRequest) returns (Document) {
+ option (google.api.http) = {
+ post: "/v1beta1/{parent=projects/*/databases/*/documents/**}/{collection_id}"
+ body: "document"
+ };
+ }
+
+ // Updates or inserts a document.
+ rpc UpdateDocument(UpdateDocumentRequest) returns (Document) {
+ option (google.api.http) = {
+ patch: "/v1beta1/{document.name=projects/*/databases/*/documents/*/**}"
+ body: "document"
+ };
+ option (google.api.method_signature) = "document,update_mask";
+ }
+
+ // Deletes a document.
+ rpc DeleteDocument(DeleteDocumentRequest) returns (google.protobuf.Empty) {
+ option (google.api.http) = {
+ delete: "/v1beta1/{name=projects/*/databases/*/documents/*/**}"
+ };
+ option (google.api.method_signature) = "name";
+ }
+
+ // Gets multiple documents.
+ //
+ // Documents returned by this method are not guaranteed to be returned in the
+ // same order that they were requested.
+ rpc BatchGetDocuments(BatchGetDocumentsRequest) returns (stream BatchGetDocumentsResponse) {
+ option (google.api.http) = {
+ post: "/v1beta1/{database=projects/*/databases/*}/documents:batchGet"
+ body: "*"
+ };
+ }
+
+ // Starts a new transaction.
+ rpc BeginTransaction(BeginTransactionRequest) returns (BeginTransactionResponse) {
+ option (google.api.http) = {
+ post: "/v1beta1/{database=projects/*/databases/*}/documents:beginTransaction"
+ body: "*"
+ };
+ option (google.api.method_signature) = "database";
+ }
+
+ // Commits a transaction, while optionally updating documents.
+ rpc Commit(CommitRequest) returns (CommitResponse) {
+ option (google.api.http) = {
+ post: "/v1beta1/{database=projects/*/databases/*}/documents:commit"
+ body: "*"
+ };
+ option (google.api.method_signature) = "database,writes";
+ }
+
+ // Rolls back a transaction.
+ rpc Rollback(RollbackRequest) returns (google.protobuf.Empty) {
+ option (google.api.http) = {
+ post: "/v1beta1/{database=projects/*/databases/*}/documents:rollback"
+ body: "*"
+ };
+ option (google.api.method_signature) = "database,transaction";
+ }
+
+ // Runs a query.
+ rpc RunQuery(RunQueryRequest) returns (stream RunQueryResponse) {
+ option (google.api.http) = {
+ post: "/v1beta1/{parent=projects/*/databases/*/documents}:runQuery"
+ body: "*"
+ additional_bindings {
+ post: "/v1beta1/{parent=projects/*/databases/*/documents/*/**}:runQuery"
+ body: "*"
+ }
+ };
+ }
+
+ // Streams batches of document updates and deletes, in order.
+ rpc Write(stream WriteRequest) returns (stream WriteResponse) {
+ option (google.api.http) = {
+ post: "/v1beta1/{database=projects/*/databases/*}/documents:write"
+ body: "*"
+ };
+ }
+
+ // Listens to changes.
+ rpc Listen(stream ListenRequest) returns (stream ListenResponse) {
+ option (google.api.http) = {
+ post: "/v1beta1/{database=projects/*/databases/*}/documents:listen"
+ body: "*"
+ };
+ }
+
+ // Lists all the collection IDs underneath a document.
+ rpc ListCollectionIds(ListCollectionIdsRequest) returns (ListCollectionIdsResponse) {
+ option (google.api.http) = {
+ post: "/v1beta1/{parent=projects/*/databases/*/documents}:listCollectionIds"
+ body: "*"
+ additional_bindings {
+ post: "/v1beta1/{parent=projects/*/databases/*/documents/*/**}:listCollectionIds"
+ body: "*"
+ }
+ };
+ option (google.api.method_signature) = "parent";
+ }
+}
+
+// The request for [Firestore.GetDocument][google.firestore.v1beta1.Firestore.GetDocument].
+message GetDocumentRequest {
+ // Required. The resource name of the Document to get. In the format:
+ // `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
+ string name = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // The fields to return. If not set, returns all fields.
+ //
+ // If the document has a field that is not present in this mask, that field
+ // will not be returned in the response.
+ DocumentMask mask = 2;
+
+ // The consistency mode for this transaction.
+ // If not set, defaults to strong consistency.
+ oneof consistency_selector {
+ // Reads the document in a transaction.
+ bytes transaction = 3;
+
+ // Reads the version of the document at the given time.
+ // This may not be older than 60 seconds.
+ google.protobuf.Timestamp read_time = 5;
+ }
+}
+
+// The request for [Firestore.ListDocuments][google.firestore.v1beta1.Firestore.ListDocuments].
+message ListDocumentsRequest {
+ // Required. The parent resource name. In the format:
+ // `projects/{project_id}/databases/{database_id}/documents` or
+ // `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
+ // For example:
+ // `projects/my-project/databases/my-database/documents` or
+ // `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`
+ string parent = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // Required. The collection ID, relative to `parent`, to list. For example: `chatrooms`
+ // or `messages`.
+ string collection_id = 2 [(google.api.field_behavior) = REQUIRED];
+
+ // The maximum number of documents to return.
+ int32 page_size = 3;
+
+ // The `next_page_token` value returned from a previous List request, if any.
+ string page_token = 4;
+
+ // The order to sort results by. For example: `priority desc, name`.
+ string order_by = 6;
+
+ // The fields to return. If not set, returns all fields.
+ //
+ // If a document has a field that is not present in this mask, that field
+ // will not be returned in the response.
+ DocumentMask mask = 7;
+
+ // The consistency mode for this transaction.
+ // If not set, defaults to strong consistency.
+ oneof consistency_selector {
+ // Reads documents in a transaction.
+ bytes transaction = 8;
+
+ // Reads documents as they were at the given time.
+ // This may not be older than 60 seconds.
+ google.protobuf.Timestamp read_time = 10;
+ }
+
+  // Whether the list should show missing documents. A missing document is a
+ // document that does not exist but has sub-documents. These documents will
+ // be returned with a key but will not have fields, [Document.create_time][google.firestore.v1beta1.Document.create_time],
+ // or [Document.update_time][google.firestore.v1beta1.Document.update_time] set.
+ //
+ // Requests with `show_missing` may not specify `where` or
+ // `order_by`.
+ bool show_missing = 12;
+}
+
+// The response for [Firestore.ListDocuments][google.firestore.v1beta1.Firestore.ListDocuments].
+message ListDocumentsResponse {
+ // The Documents found.
+ repeated Document documents = 1;
+
+ // The next page token.
+ string next_page_token = 2;
+}
+
+// The request for [Firestore.CreateDocument][google.firestore.v1beta1.Firestore.CreateDocument].
+message CreateDocumentRequest {
+ // Required. The parent resource. For example:
+ // `projects/{project_id}/databases/{database_id}/documents` or
+ // `projects/{project_id}/databases/{database_id}/documents/chatrooms/{chatroom_id}`
+ string parent = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // Required. The collection ID, relative to `parent`, to list. For example: `chatrooms`.
+ string collection_id = 2 [(google.api.field_behavior) = REQUIRED];
+
+ // The client-assigned document ID to use for this document.
+ //
+ // Optional. If not specified, an ID will be assigned by the service.
+ string document_id = 3;
+
+ // Required. The document to create. `name` must not be set.
+ Document document = 4 [(google.api.field_behavior) = REQUIRED];
+
+ // The fields to return. If not set, returns all fields.
+ //
+ // If the document has a field that is not present in this mask, that field
+ // will not be returned in the response.
+ DocumentMask mask = 5;
+}
+
+// The request for [Firestore.UpdateDocument][google.firestore.v1beta1.Firestore.UpdateDocument].
+message UpdateDocumentRequest {
+ // Required. The updated document.
+ // Creates the document if it does not already exist.
+ Document document = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // The fields to update.
+ // None of the field paths in the mask may contain a reserved name.
+ //
+ // If the document exists on the server and has fields not referenced in the
+ // mask, they are left unchanged.
+ // Fields referenced in the mask, but not present in the input document, are
+ // deleted from the document on the server.
+ DocumentMask update_mask = 2;
+
+ // The fields to return. If not set, returns all fields.
+ //
+ // If the document has a field that is not present in this mask, that field
+ // will not be returned in the response.
+ DocumentMask mask = 3;
+
+ // An optional precondition on the document.
+ // The request will fail if this is set and not met by the target document.
+ Precondition current_document = 4;
+}
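+
+// An illustrative (non-normative) example of the `update_mask` semantics
+// above: given a stored document { a: 1, b: 2 }, an update with
+// document.fields = { a: 9 } and update_mask = ["a", "b"] yields { a: 9 }:
+// "a" is overwritten, and "b", which is in the mask but absent from the
+// input document, is deleted.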
+
+// The request for [Firestore.DeleteDocument][google.firestore.v1beta1.Firestore.DeleteDocument].
+message DeleteDocumentRequest {
+ // Required. The resource name of the Document to delete. In the format:
+ // `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
+ string name = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // An optional precondition on the document.
+ // The request will fail if this is set and not met by the target document.
+ Precondition current_document = 2;
+}
+
+// The request for [Firestore.BatchGetDocuments][google.firestore.v1beta1.Firestore.BatchGetDocuments].
+message BatchGetDocumentsRequest {
+ // Required. The database name. In the format:
+ // `projects/{project_id}/databases/{database_id}`.
+ string database = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // The names of the documents to retrieve. In the format:
+ // `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
+  // The request will fail if any of the documents is not a child resource of the
+ // given `database`. Duplicate names will be elided.
+ repeated string documents = 2;
+
+ // The fields to return. If not set, returns all fields.
+ //
+ // If a document has a field that is not present in this mask, that field will
+ // not be returned in the response.
+ DocumentMask mask = 3;
+
+ // The consistency mode for this transaction.
+ // If not set, defaults to strong consistency.
+ oneof consistency_selector {
+ // Reads documents in a transaction.
+ bytes transaction = 4;
+
+ // Starts a new transaction and reads the documents.
+ // Defaults to a read-only transaction.
+ // The new transaction ID will be returned as the first response in the
+ // stream.
+ TransactionOptions new_transaction = 5;
+
+ // Reads documents as they were at the given time.
+ // This may not be older than 60 seconds.
+ google.protobuf.Timestamp read_time = 7;
+ }
+}
+
+// The streamed response for [Firestore.BatchGetDocuments][google.firestore.v1beta1.Firestore.BatchGetDocuments].
+message BatchGetDocumentsResponse {
+ // A single result.
+ // This can be empty if the server is just returning a transaction.
+ oneof result {
+ // A document that was requested.
+ Document found = 1;
+
+ // A document name that was requested but does not exist. In the format:
+ // `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
+ string missing = 2;
+ }
+
+ // The transaction that was started as part of this request.
+ // Will only be set in the first response, and only if
+ // [BatchGetDocumentsRequest.new_transaction][google.firestore.v1beta1.BatchGetDocumentsRequest.new_transaction] was set in the request.
+ bytes transaction = 3;
+
+ // The time at which the document was read.
+  // This may be monotonically increasing; in this case, the previous documents
+  // in the result stream are guaranteed not to have changed between their
+  // read_time and this one.
+ google.protobuf.Timestamp read_time = 4;
+}
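+
+// An illustrative (non-normative) flow: a request with `new_transaction` set
+// yields a first response carrying only `transaction`, followed by one
+// response per requested document carrying either `found` or `missing`, in
+// no particular order.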
+
+// The request for [Firestore.BeginTransaction][google.firestore.v1beta1.Firestore.BeginTransaction].
+message BeginTransactionRequest {
+ // Required. The database name. In the format:
+ // `projects/{project_id}/databases/{database_id}`.
+ string database = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // The options for the transaction.
+ // Defaults to a read-write transaction.
+ TransactionOptions options = 2;
+}
+
+// The response for [Firestore.BeginTransaction][google.firestore.v1beta1.Firestore.BeginTransaction].
+message BeginTransactionResponse {
+ // The transaction that was started.
+ bytes transaction = 1;
+}
+
+// The request for [Firestore.Commit][google.firestore.v1beta1.Firestore.Commit].
+message CommitRequest {
+ // Required. The database name. In the format:
+ // `projects/{project_id}/databases/{database_id}`.
+ string database = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // The writes to apply.
+ //
+ // Always executed atomically and in order.
+ repeated Write writes = 2;
+
+ // If set, applies all writes in this transaction, and commits it.
+ bytes transaction = 3;
+}
+
+// The response for [Firestore.Commit][google.firestore.v1beta1.Firestore.Commit].
+message CommitResponse {
+ // The result of applying the writes.
+ //
+  // The i-th write result corresponds to the i-th write in the
+ // request.
+ repeated WriteResult write_results = 1;
+
+ // The time at which the commit occurred.
+ google.protobuf.Timestamp commit_time = 2;
+}
+
+// The request for [Firestore.Rollback][google.firestore.v1beta1.Firestore.Rollback].
+message RollbackRequest {
+ // Required. The database name. In the format:
+ // `projects/{project_id}/databases/{database_id}`.
+ string database = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // Required. The transaction to roll back.
+ bytes transaction = 2 [(google.api.field_behavior) = REQUIRED];
+}
+
+// The request for [Firestore.RunQuery][google.firestore.v1beta1.Firestore.RunQuery].
+message RunQueryRequest {
+ // Required. The parent resource name. In the format:
+ // `projects/{project_id}/databases/{database_id}/documents` or
+ // `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
+ // For example:
+ // `projects/my-project/databases/my-database/documents` or
+ // `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`
+ string parent = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // The query to run.
+ oneof query_type {
+ // A structured query.
+ StructuredQuery structured_query = 2;
+ }
+
+ // The consistency mode for this transaction.
+ // If not set, defaults to strong consistency.
+ oneof consistency_selector {
+ // Reads documents in a transaction.
+ bytes transaction = 5;
+
+ // Starts a new transaction and reads the documents.
+ // Defaults to a read-only transaction.
+ // The new transaction ID will be returned as the first response in the
+ // stream.
+ TransactionOptions new_transaction = 6;
+
+ // Reads documents as they were at the given time.
+ // This may not be older than 60 seconds.
+ google.protobuf.Timestamp read_time = 7;
+ }
+}
+
+// The response for [Firestore.RunQuery][google.firestore.v1beta1.Firestore.RunQuery].
+message RunQueryResponse {
+ // The transaction that was started as part of this request.
+ // Can only be set in the first response, and only if
+ // [RunQueryRequest.new_transaction][google.firestore.v1beta1.RunQueryRequest.new_transaction] was set in the request.
+ // If set, no other fields will be set in this response.
+ bytes transaction = 2;
+
+ // A query result.
+ // Not set when reporting partial progress.
+ Document document = 1;
+
+ // The time at which the document was read. This may be monotonically
+ // increasing; in this case, the previous documents in the result stream are
+ // guaranteed not to have changed between their `read_time` and this one.
+ //
+ // If the query returns no results, a response with `read_time` and no
+ // `document` will be sent, and this represents the time at which the query
+ // was run.
+ google.protobuf.Timestamp read_time = 3;
+
+ // The number of results that have been skipped due to an offset between
+ // the last response and the current response.
+ int32 skipped_results = 4;
+}
+
+// The request for [Firestore.Write][google.firestore.v1beta1.Firestore.Write].
+//
+// The first request creates a stream, or resumes an existing one from a token.
+//
+// When creating a new stream, the server replies with a response containing
+// only an ID and a token, to use in the next request.
+//
+// When resuming a stream, the server first streams any responses later than the
+// given token, then a response containing only an up-to-date token, to use in
+// the next request.
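+//
+// A minimal sketch of the exchange (illustrative values only):
+//
+//   client -> { database: "projects/p/databases/d" }          (no writes)
+//   server -> { stream_id: "S", stream_token: "T0" }
+//   client -> { writes: [...], stream_token: "T0" }
+//   server -> { write_results: [...], stream_token: "T1" }
+//
+// To resume later, the first message sets `database`, `stream_id` ("S"), and
+// `stream_token` ("T1").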
+message WriteRequest {
+ // Required. The database name. In the format:
+ // `projects/{project_id}/databases/{database_id}`.
+ // This is only required in the first message.
+ string database = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // The ID of the write stream to resume.
+ // This may only be set in the first message. When left empty, a new write
+ // stream will be created.
+ string stream_id = 2;
+
+ // The writes to apply.
+ //
+ // Always executed atomically and in order.
+ // This must be empty on the first request.
+ // This may be empty on the last request.
+ // This must not be empty on all other requests.
+ repeated Write writes = 3;
+
+ // A stream token that was previously sent by the server.
+ //
+ // The client should set this field to the token from the most recent
+ // [WriteResponse][google.firestore.v1beta1.WriteResponse] it has received. This acknowledges that the client has
+ // received responses up to this token. After sending this token, earlier
+ // tokens may not be used anymore.
+ //
+ // The server may close the stream if there are too many unacknowledged
+ // responses.
+ //
+ // Leave this field unset when creating a new stream. To resume a stream at
+ // a specific point, set this field and the `stream_id` field.
+ bytes stream_token = 4;
+
+ // Labels associated with this write request.
+  map<string, string> labels = 5;
+}
+
+// The response for [Firestore.Write][google.firestore.v1beta1.Firestore.Write].
+message WriteResponse {
+ // The ID of the stream.
+ // Only set on the first message, when a new stream was created.
+ string stream_id = 1;
+
+ // A token that represents the position of this response in the stream.
+ // This can be used by a client to resume the stream at this point.
+ //
+ // This field is always set.
+ bytes stream_token = 2;
+
+ // The result of applying the writes.
+ //
+  // The i-th write result corresponds to the i-th write in the
+ // request.
+ repeated WriteResult write_results = 3;
+
+ // The time at which the commit occurred.
+ google.protobuf.Timestamp commit_time = 4;
+}
+
+// A request for [Firestore.Listen][google.firestore.v1beta1.Firestore.Listen]
+message ListenRequest {
+ // Required. The database name. In the format:
+ // `projects/{project_id}/databases/{database_id}`.
+ string database = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // The supported target changes.
+ oneof target_change {
+ // A target to add to this stream.
+ Target add_target = 2;
+
+ // The ID of a target to remove from this stream.
+ int32 remove_target = 3;
+ }
+
+ // Labels associated with this target change.
+  map<string, string> labels = 4;
+}
+
+// The response for [Firestore.Listen][google.firestore.v1beta1.Firestore.Listen].
+message ListenResponse {
+ // The supported responses.
+ oneof response_type {
+ // Targets have changed.
+ TargetChange target_change = 2;
+
+ // A [Document][google.firestore.v1beta1.Document] has changed.
+ DocumentChange document_change = 3;
+
+ // A [Document][google.firestore.v1beta1.Document] has been deleted.
+ DocumentDelete document_delete = 4;
+
+ // A [Document][google.firestore.v1beta1.Document] has been removed from a target (because it is no longer
+ // relevant to that target).
+ DocumentRemove document_remove = 6;
+
+ // A filter to apply to the set of documents previously returned for the
+ // given target.
+ //
+ // Returned when documents may have been removed from the given target, but
+ // the exact documents are unknown.
+ ExistenceFilter filter = 5;
+ }
+}
+
+// A specification of a set of documents to listen to.
+message Target {
+  // A target specified by a set of document names.
+ message DocumentsTarget {
+ // The names of the documents to retrieve. In the format:
+ // `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
+    // The request will fail if any of the documents is not a child resource of
+ // the given `database`. Duplicate names will be elided.
+ repeated string documents = 2;
+ }
+
+ // A target specified by a query.
+ message QueryTarget {
+ // The parent resource name. In the format:
+ // `projects/{project_id}/databases/{database_id}/documents` or
+ // `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
+ // For example:
+ // `projects/my-project/databases/my-database/documents` or
+ // `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`
+ string parent = 1;
+
+ // The query to run.
+ oneof query_type {
+ // A structured query.
+ StructuredQuery structured_query = 2;
+ }
+ }
+
+ // The type of target to listen to.
+ oneof target_type {
+ // A target specified by a query.
+ QueryTarget query = 2;
+
+ // A target specified by a set of document names.
+ DocumentsTarget documents = 3;
+ }
+
+ // When to start listening.
+ //
+ // If not specified, all matching Documents are returned before any
+ // subsequent changes.
+ oneof resume_type {
+ // A resume token from a prior [TargetChange][google.firestore.v1beta1.TargetChange] for an identical target.
+ //
+ // Using a resume token with a different target is unsupported and may fail.
+ bytes resume_token = 4;
+
+ // Start listening after a specific `read_time`.
+ //
+ // The client must know the state of matching documents at this time.
+ google.protobuf.Timestamp read_time = 11;
+ }
+
+  // The target ID that identifies the target on the stream. Must be a
+  // positive, non-zero number.
+ int32 target_id = 5;
+
+  // Whether the target should be removed once it is current and consistent.
+ bool once = 6;
+}
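+
+// An illustrative (non-normative) listen sequence for a single query target:
+// the server sends an ADD `TargetChange`, then `DocumentChange` messages for
+// the matching documents, then CURRENT once the initial state is complete;
+// the `resume_token` from the latest `TargetChange` can later be replayed in
+// `Target.resume_token` to continue the same target.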
+
+// Targets being watched have changed.
+message TargetChange {
+ // The type of change.
+ enum TargetChangeType {
+ // No change has occurred. Used only to send an updated `resume_token`.
+ NO_CHANGE = 0;
+
+ // The targets have been added.
+ ADD = 1;
+
+ // The targets have been removed.
+ REMOVE = 2;
+
+ // The targets reflect all changes committed before the targets were added
+ // to the stream.
+ //
+ // This will be sent after or with a `read_time` that is greater than or
+ // equal to the time at which the targets were added.
+ //
+ // Listeners can wait for this change if read-after-write semantics
+ // are desired.
+ CURRENT = 3;
+
+ // The targets have been reset, and a new initial state for the targets
+ // will be returned in subsequent changes.
+ //
+ // After the initial state is complete, `CURRENT` will be returned even
+ // if the target was previously indicated to be `CURRENT`.
+ RESET = 4;
+ }
+
+ // The type of change that occurred.
+ TargetChangeType target_change_type = 1;
+
+ // The target IDs of targets that have changed.
+ //
+ // If empty, the change applies to all targets.
+ //
+ // The order of the target IDs is not defined.
+ repeated int32 target_ids = 2;
+
+ // The error that resulted in this change, if applicable.
+ google.rpc.Status cause = 3;
+
+ // A token that can be used to resume the stream for the given `target_ids`,
+ // or all targets if `target_ids` is empty.
+ //
+ // Not set on every target change.
+ bytes resume_token = 4;
+
+ // The consistent `read_time` for the given `target_ids` (omitted when the
+ // target_ids are not at a consistent snapshot).
+ //
+ // The stream is guaranteed to send a `read_time` with `target_ids` empty
+ // whenever the entire stream reaches a new consistent snapshot. ADD,
+ // CURRENT, and RESET messages are guaranteed to (eventually) result in a
+ // new consistent snapshot (while NO_CHANGE and REMOVE messages are not).
+ //
+ // For a given stream, `read_time` is guaranteed to be monotonically
+ // increasing.
+ google.protobuf.Timestamp read_time = 6;
+}
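
On the receiving side, a watcher inspects each TargetChange, stashes resume tokens as they appear, and can treat CURRENT as its read-after-write point. An illustrative consumer, assuming `responses` is an iterator of ListenResponse messages from the stream:

from google.cloud.firestore_v1beta1.proto import firestore_pb2

def drain_until_current(responses):
    change_type = firestore_pb2.TargetChange.TargetChangeType
    resume_token = None
    for response in responses:
        if response.WhichOneof("response_type") != "target_change":
            continue  # document_change, document_delete, filter, ...
        change = response.target_change
        if change.resume_token:  # not set on every change
            resume_token = change.resume_token
        if change.target_change_type == change_type.CURRENT:
            break  # all changes committed before the target was added are seen
    return resume_token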
+
+// The request for [Firestore.ListCollectionIds][google.firestore.v1beta1.Firestore.ListCollectionIds].
+message ListCollectionIdsRequest {
+ // Required. The parent document. In the format:
+ // `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
+ // For example:
+ // `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`
+ string parent = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // The maximum number of results to return.
+ int32 page_size = 2;
+
+ // A page token. Must be a value from
+ // [ListCollectionIdsResponse][google.firestore.v1beta1.ListCollectionIdsResponse].
+ string page_token = 3;
+}
+
+// The response from [Firestore.ListCollectionIds][google.firestore.v1beta1.Firestore.ListCollectionIds].
+message ListCollectionIdsResponse {
+ // The collection ids.
+ repeated string collection_ids = 1;
+
+ // A page token that may be used to continue the list.
+ string next_page_token = 2;
+}
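
Draining the pages follows the usual List pattern: re-send the request with the previous next_page_token until it comes back empty. A sketch, where `stub` is a placeholder for whatever transport sends ListCollectionIdsRequest and returns ListCollectionIdsResponse:

from google.cloud.firestore_v1beta1.proto import firestore_pb2

def all_collection_ids(stub, parent):
    page_token = ""
    while True:
        response = stub.ListCollectionIds(
            firestore_pb2.ListCollectionIdsRequest(
                parent=parent, page_size=100, page_token=page_token
            )
        )
        yield from response.collection_ids
        page_token = response.next_page_token
        if not page_token:
            return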
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/firestore_admin.proto b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/firestore_admin.proto
new file mode 100644
index 000000000..15ce94da6
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/firestore_admin.proto
@@ -0,0 +1,365 @@
+// Copyright 2018 Google LLC.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+syntax = "proto3";
+
+package google.firestore.admin.v1beta1;
+
+import "google/api/annotations.proto";
+import "google/firestore/admin/v1beta1/index.proto";
+import "google/longrunning/operations.proto";
+import "google/protobuf/empty.proto";
+import "google/protobuf/timestamp.proto";
+
+option csharp_namespace = "Google.Cloud.Firestore.Admin.V1Beta1";
+option go_package = "google.golang.org/genproto/googleapis/firestore/admin/v1beta1;admin";
+option java_multiple_files = true;
+option java_outer_classname = "FirestoreAdminProto";
+option java_package = "com.google.firestore.admin.v1beta1";
+option objc_class_prefix = "GCFS";
+
+
+// The Cloud Firestore Admin API.
+//
+// This API provides several administrative services for Cloud Firestore.
+//
+// # Concepts
+//
+// Project, Database, Namespace, Collection, and Document are used as defined in
+// the Google Cloud Firestore API.
+//
+// Operation: An Operation represents work being performed in the background.
+//
+//
+// # Services
+//
+// ## Index
+//
+// The index service manages Cloud Firestore indexes.
+//
+// Index creation is performed asynchronously.
+// An Operation resource is created for each such asynchronous operation.
+// The state of the operation (including any errors encountered)
+// may be queried via the Operation resource.
+//
+// ## Metadata
+//
+// Provides metadata and statistical information about data in Cloud Firestore.
+// The data provided as part of this API may be stale.
+//
+// ## Operation
+//
+// The Operations collection provides a record of actions performed for the
+// specified Project (including any Operations in progress). Operations are not
+// created directly but through calls on other collections or resources.
+//
+// An Operation that is not yet done may be cancelled. The request to cancel is
+// asynchronous and the Operation may continue to run for some time after the
+// request to cancel is made.
+//
+// An Operation that is done may be deleted so that it is no longer listed as
+// part of the Operation collection.
+//
+// Operations are created by service `FirestoreAdmin`, but are accessed via
+// service `google.longrunning.Operations`.
+service FirestoreAdmin {
+ // Creates the specified index.
+ // A newly created index's initial state is `CREATING`. On completion of the
+ // returned [google.longrunning.Operation][google.longrunning.Operation], the state will be `READY`.
+ // If the index already exists, the call will return an `ALREADY_EXISTS`
+ // status.
+ //
+ // During creation, the process could result in an error, in which case the
+ // index will move to the `ERROR` state. The process can be recovered by
+ // fixing the data that caused the error, removing the index with
+ // [delete][google.firestore.admin.v1beta1.FirestoreAdmin.DeleteIndex], then re-creating the index with
+ // [create][google.firestore.admin.v1beta1.FirestoreAdmin.CreateIndex].
+ //
+ // Indexes with a single field cannot be created.
+ rpc CreateIndex(CreateIndexRequest) returns (google.longrunning.Operation) {
+ option (google.api.http) = {
+ post: "/v1beta1/{parent=projects/*/databases/*}/indexes"
+ body: "index"
+ };
+ }
+
+ // Lists the indexes that match the specified filters.
+ rpc ListIndexes(ListIndexesRequest) returns (ListIndexesResponse) {
+ option (google.api.http) = {
+ get: "/v1beta1/{parent=projects/*/databases/*}/indexes"
+ };
+ }
+
+ // Gets an index.
+ rpc GetIndex(GetIndexRequest) returns (Index) {
+ option (google.api.http) = {
+ get: "/v1beta1/{name=projects/*/databases/*/indexes/*}"
+ };
+ }
+
+ // Deletes an index.
+ rpc DeleteIndex(DeleteIndexRequest) returns (google.protobuf.Empty) {
+ option (google.api.http) = {
+ delete: "/v1beta1/{name=projects/*/databases/*/indexes/*}"
+ };
+ }
+
+ // Exports a copy of all or a subset of documents from Google Cloud Firestore
+ // to another storage system, such as Google Cloud Storage. Recent updates to
+ // documents may not be reflected in the export. The export occurs in the
+ // background and its progress can be monitored and managed via the
+ // Operation resource that is created. The output of an export may only be
+ // used once the associated operation is done. If an export operation is
+ // cancelled before completion it may leave partial data behind in Google
+ // Cloud Storage.
+ rpc ExportDocuments(ExportDocumentsRequest) returns (google.longrunning.Operation) {
+ option (google.api.http) = {
+ post: "/v1beta1/{name=projects/*/databases/*}:exportDocuments"
+ body: "*"
+ };
+ }
+
+ // Imports documents into Google Cloud Firestore. Existing documents with the
+ // same name are overwritten. The import occurs in the background and its
+ // progress can be monitored and managed via the Operation resource that is
+ // created. If an ImportDocuments operation is cancelled, it is possible
+ // that a subset of the data has already been imported to Cloud Firestore.
+ rpc ImportDocuments(ImportDocumentsRequest) returns (google.longrunning.Operation) {
+ option (google.api.http) = {
+ post: "/v1beta1/{name=projects/*/databases/*}:importDocuments"
+ body: "*"
+ };
+ }
+}
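
Both document RPCs above return a google.longrunning.Operation, so a caller starts the job and then polls the operation until done. A sketch of kicking off an export; the module path firestore_admin_pb2 is an assumption (this .proto is vendored alongside the others), and `admin_stub` is a placeholder for a pre-built client:

from google.cloud.firestore_v1beta1.proto import firestore_admin_pb2  # assumed module path

def start_export(admin_stub, database, output_uri_prefix):
    request = firestore_admin_pb2.ExportDocumentsRequest(
        name=database,                        # projects/{p}/databases/{d}
        output_uri_prefix=output_uri_prefix,  # e.g. a gs:// bucket prefix
    )
    # Poll the returned Operation via google.longrunning.Operations until
    # done is true; only then is the export output usable.
    return admin_stub.ExportDocuments(request)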
+
+// Metadata for index operations. This metadata populates
+// the metadata field of [google.longrunning.Operation][google.longrunning.Operation].
+message IndexOperationMetadata {
+ // The type of index operation.
+ enum OperationType {
+ // Unspecified. Never set by server.
+ OPERATION_TYPE_UNSPECIFIED = 0;
+
+ // The operation is creating the index. Initiated by a `CreateIndex` call.
+ CREATING_INDEX = 1;
+ }
+
+ // The time that work began on the operation.
+ google.protobuf.Timestamp start_time = 1;
+
+ // The time the operation ended, either successfully or otherwise. Unset if
+ // the operation is still active.
+ google.protobuf.Timestamp end_time = 2;
+
+ // The index resource that this operation is acting on. For example:
+ // `projects/{project_id}/databases/{database_id}/indexes/{index_id}`
+ string index = 3;
+
+ // The type of index operation.
+ OperationType operation_type = 4;
+
+  // True if the [google.longrunning.Operation][google.longrunning.Operation]
+  // was cancelled. If the cancellation is in progress, cancelled will be true
+  // but [google.longrunning.Operation.done][google.longrunning.Operation.done]
+  // will be false.
+ bool cancelled = 5;
+
+ // Progress of the existing operation, measured in number of documents.
+ Progress document_progress = 6;
+}
+
+// Measures the progress of a particular metric.
+message Progress {
+ // An estimate of how much work has been completed. Note that this may be
+ // greater than `work_estimated`.
+ int64 work_completed = 1;
+
+ // An estimate of how much work needs to be performed. Zero if the
+ // work estimate is unavailable. May change as work progresses.
+ int64 work_estimated = 2;
+}
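
Because work_completed may overshoot work_estimated, and the estimate may be absent entirely, turning Progress into a display percentage needs a guard and a clamp. For instance:

def percent_done(progress):
    if progress.work_estimated == 0:
        return None  # estimate unavailable
    return min(100.0, 100.0 * progress.work_completed / progress.work_estimated)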
+
+// The request for [FirestoreAdmin.CreateIndex][google.firestore.admin.v1beta1.FirestoreAdmin.CreateIndex].
+message CreateIndexRequest {
+ // The name of the database this index will apply to. For example:
+ // `projects/{project_id}/databases/{database_id}`
+ string parent = 1;
+
+ // The index to create. The name and state fields are output only and will be
+ // ignored. Certain single field indexes cannot be created or deleted.
+ Index index = 2;
+}
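
Since single-field indexes cannot be created, a valid request always names at least two fields. An illustrative request, assuming the vendored index_pb2 module exposes Index and IndexField as defined in the imported index.proto (module path and field names are assumptions from that import):

from google.cloud.firestore_v1beta1.proto import firestore_admin_pb2, index_pb2  # assumed paths

request = firestore_admin_pb2.CreateIndexRequest(
    parent="projects/my-project/databases/my-database",
    index=index_pb2.Index(
        collection_id="Users",  # name/state are output only and ignored here
        fields=[
            index_pb2.IndexField(field_path="lastname",
                                 mode=index_pb2.IndexField.ASCENDING),
            index_pb2.IndexField(field_path="firstname",
                                 mode=index_pb2.IndexField.ASCENDING),
        ],
    ),
)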
+
+// The request for [FirestoreAdmin.GetIndex][google.firestore.admin.v1beta1.FirestoreAdmin.GetIndex].
+message GetIndexRequest {
+ // The name of the index. For example:
+ // `projects/{project_id}/databases/{database_id}/indexes/{index_id}`
+ string name = 1;
+}
+
+// The request for [FirestoreAdmin.ListIndexes][google.firestore.admin.v1beta1.FirestoreAdmin.ListIndexes].
+message ListIndexesRequest {
+ // The database name. For example:
+ // `projects/{project_id}/databases/{database_id}`
+ string parent = 1;
+
+  // The filter to apply to list results.
+  string filter = 2;
+
+ // The standard List page size.
+ int32 page_size = 3;
+
+ // The standard List page token.
+ string page_token = 4;
+}
+
+// The request for [FirestoreAdmin.DeleteIndex][google.firestore.admin.v1beta1.FirestoreAdmin.DeleteIndex].
+message DeleteIndexRequest {
+ // The index name. For example:
+ // `projects/{project_id}/databases/{database_id}/indexes/{index_id}`
+ string name = 1;
+}
+
+// The response for [FirestoreAdmin.ListIndexes][google.firestore.admin.v1beta1.FirestoreAdmin.ListIndexes].
+message ListIndexesResponse {
+ // The indexes.
+ repeated Index indexes = 1;
+
+ // The standard List next-page token.
+ string next_page_token = 2;
+}
+
+// The request for [FirestoreAdmin.ExportDocuments][google.firestore.admin.v1beta1.FirestoreAdmin.ExportDocuments].
+message ExportDocumentsRequest {
+ // Database to export. Should be of the form:
+ // `projects/{project_id}/databases/{database_id}`.
+ string name = 1;
+
+ // Which collection ids to export. Unspecified means all collections.
+ repeated string collection_ids = 3;
+
+ // The output URI. Currently only supports Google Cloud Storage URIs of the
+ // form: `gs://BUCKET_NAME[/NAMESPACE_PATH]`, where `BUCKET_NAME` is the name
+ // of the Google Cloud Storage bucket and `NAMESPACE_PATH` is an optional
+ // Google Cloud Storage namespace path. When
+ // choosing a name, be sure to consider Google Cloud Storage naming
+ // guidelines: https://cloud.google.com/storage/docs/naming.
+ // If the URI is a bucket (without a namespace path), a prefix will be
+ // generated based on the start time.
+ string output_uri_prefix = 4;
+}
+
+// The request for [FirestoreAdmin.ImportDocuments][google.firestore.admin.v1beta1.FirestoreAdmin.ImportDocuments].
+message ImportDocumentsRequest {
+ // Database to import into. Should be of the form:
+ // `projects/{project_id}/databases/{database_id}`.
+ string name = 1;
+
+ // Which collection ids to import. Unspecified means all collections included
+ // in the import.
+ repeated string collection_ids = 3;
+
+ // Location of the exported files.
+ // This must match the output_uri_prefix of an ExportDocumentsResponse from
+ // an export that has completed successfully.
+ // See:
+ // [google.firestore.admin.v1beta1.ExportDocumentsResponse.output_uri_prefix][google.firestore.admin.v1beta1.ExportDocumentsResponse.output_uri_prefix].
+ string input_uri_prefix = 4;
+}
+
+// Returned in the [google.longrunning.Operation][google.longrunning.Operation] response field.
+message ExportDocumentsResponse {
+ // Location of the output files. This can be used to begin an import
+ // into Cloud Firestore (this project or another project) after the operation
+ // completes successfully.
+ string output_uri_prefix = 1;
+}
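
This field is what chains an export into a later import: the response's output_uri_prefix is used verbatim as ImportDocumentsRequest.input_uri_prefix. A sketch, with the same assumed module path and placeholder stub as above:

from google.cloud.firestore_v1beta1.proto import firestore_admin_pb2  # assumed module path

def import_from_export(admin_stub, database, export_response):
    request = firestore_admin_pb2.ImportDocumentsRequest(
        name=database,
        input_uri_prefix=export_response.output_uri_prefix,
    )
    return admin_stub.ImportDocuments(request)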
+
+// Metadata for ExportDocuments operations.
+message ExportDocumentsMetadata {
+ // The time that work began on the operation.
+ google.protobuf.Timestamp start_time = 1;
+
+ // The time the operation ended, either successfully or otherwise. Unset if
+ // the operation is still active.
+ google.protobuf.Timestamp end_time = 2;
+
+ // The state of the export operation.
+ OperationState operation_state = 3;
+
+ // An estimate of the number of documents processed.
+ Progress progress_documents = 4;
+
+ // An estimate of the number of bytes processed.
+ Progress progress_bytes = 5;
+
+ // Which collection ids are being exported.
+ repeated string collection_ids = 6;
+
+ // Where the entities are being exported to.
+ string output_uri_prefix = 7;
+}
+
+// Metadata for ImportDocuments operations.
+message ImportDocumentsMetadata {
+ // The time that work began on the operation.
+ google.protobuf.Timestamp start_time = 1;
+
+ // The time the operation ended, either successfully or otherwise. Unset if
+ // the operation is still active.
+ google.protobuf.Timestamp end_time = 2;
+
+ // The state of the import operation.
+ OperationState operation_state = 3;
+
+ // An estimate of the number of documents processed.
+ Progress progress_documents = 4;
+
+ // An estimate of the number of bytes processed.
+ Progress progress_bytes = 5;
+
+ // Which collection ids are being imported.
+ repeated string collection_ids = 6;
+
+ // The location of the documents being imported.
+ string input_uri_prefix = 7;
+}
+
+// The various possible states for an ongoing Operation.
+enum OperationState {
+ // Unspecified.
+ STATE_UNSPECIFIED = 0;
+
+ // Request is being prepared for processing.
+ INITIALIZING = 1;
+
+ // Request is actively being processed.
+ PROCESSING = 2;
+
+ // Request is in the process of being cancelled after user called
+ // google.longrunning.Operations.CancelOperation on the operation.
+ CANCELLING = 3;
+
+ // Request has been processed and is in its finalization stage.
+ FINALIZING = 4;
+
+ // Request has completed successfully.
+ SUCCESSFUL = 5;
+
+ // Request has finished being processed, but encountered an error.
+ FAILED = 6;
+
+ // Request has finished being cancelled after user called
+ // google.longrunning.Operations.CancelOperation.
+ CANCELLED = 7;
+}
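
Only SUCCESSFUL, FAILED, and CANCELLED are terminal; the rest mean the operation is still in flight. A small helper under the same assumed module path (protoc exposes values of a top-level enum as module attributes):

from google.cloud.firestore_v1beta1.proto import firestore_admin_pb2  # assumed module path

_TERMINAL_STATES = {
    firestore_admin_pb2.SUCCESSFUL,
    firestore_admin_pb2.FAILED,
    firestore_admin_pb2.CANCELLED,
}

def is_finished(metadata):
    # Works for both ExportDocumentsMetadata and ImportDocumentsMetadata.
    return metadata.operation_state in _TERMINAL_STATES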
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/firestore_pb2.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/firestore_pb2.py
new file mode 100644
index 000000000..3356808a2
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/firestore_pb2.py
@@ -0,0 +1,3914 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/cloud/firestore_v1beta1/proto/firestore.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
+from google.api import client_pb2 as google_dot_api_dot_client__pb2
+from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2
+from google.cloud.firestore_v1beta1.proto import (
+ common_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2,
+)
+from google.cloud.firestore_v1beta1.proto import (
+ document_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2,
+)
+from google.cloud.firestore_v1beta1.proto import (
+ query_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_query__pb2,
+)
+from google.cloud.firestore_v1beta1.proto import (
+ write_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2,
+)
+from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
+from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
+from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name="google/cloud/firestore_v1beta1/proto/firestore.proto",
+ package="google.firestore.v1beta1",
+ syntax="proto3",
+ serialized_options=b"\n\034com.google.firestore.v1beta1B\016FirestoreProtoP\001ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\242\002\004GCFS\252\002\036Google.Cloud.Firestore.V1Beta1\312\002\036Google\\Cloud\\Firestore\\V1beta1\352\002!Google::Cloud::Firestore::V1beta1",
+ create_key=_descriptor._internal_create_key,
+    serialized_pb=b'\n4google/cloud/firestore_v1beta1/proto/firestore.proto\x12\x18google.firestore.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x31google/cloud/firestore_v1beta1/proto/common.proto\x1a\x33google/cloud/firestore_v1beta1/proto/document.proto\x1a\x30google/cloud/firestore_v1beta1/proto/query.proto\x1a\x30google/cloud/firestore_v1beta1/proto/write.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"\xbd\x01\n\x12GetDocumentRequest\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x34\n\x04mask\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask\x12\x15\n\x0btransaction\x18\x03 \x01(\x0cH\x00\x12/\n\tread_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x42\x16\n\x14\x63onsistency_selector"\xac\x02\n\x14ListDocumentsRequest\x12\x13\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x1a\n\rcollection_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t\x12\x10\n\x08order_by\x18\x06 \x01(\t\x12\x34\n\x04mask\x18\x07 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask\x12\x15\n\x0btransaction\x18\x08 \x01(\x0cH\x00\x12/\n\tread_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x14\n\x0cshow_missing\x18\x0c \x01(\x08\x42\x16\n\x14\x63onsistency_selector"g\n\x15ListDocumentsResponse\x12\x35\n\tdocuments\x18\x01 \x03(\x0b\x32".google.firestore.v1beta1.Document\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\xce\x01\n\x15\x43reateDocumentRequest\x12\x13\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x1a\n\rcollection_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x0b\x64ocument_id\x18\x03 \x01(\t\x12\x39\n\x08\x64ocument\x18\x04 \x01(\x0b\x32".google.firestore.v1beta1.DocumentB\x03\xe0\x41\x02\x12\x34\n\x04mask\x18\x05 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask"\x87\x02\n\x15UpdateDocumentRequest\x12\x39\n\x08\x64ocument\x18\x01 \x01(\x0b\x32".google.firestore.v1beta1.DocumentB\x03\xe0\x41\x02\x12;\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask\x12\x34\n\x04mask\x18\x03 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask\x12@\n\x10\x63urrent_document\x18\x04 \x01(\x0b\x32&.google.firestore.v1beta1.Precondition"l\n\x15\x44\x65leteDocumentRequest\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12@\n\x10\x63urrent_document\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.Precondition"\xa3\x02\n\x18\x42\x61tchGetDocumentsRequest\x12\x15\n\x08\x64\x61tabase\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\tdocuments\x18\x02 \x03(\t\x12\x34\n\x04mask\x18\x03 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask\x12\x15\n\x0btransaction\x18\x04 \x01(\x0cH\x00\x12G\n\x0fnew_transaction\x18\x05 \x01(\x0b\x32,.google.firestore.v1beta1.TransactionOptionsH\x00\x12/\n\tread_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x42\x16\n\x14\x63onsistency_selector"\xb1\x01\n\x19\x42\x61tchGetDocumentsResponse\x12\x33\n\x05\x66ound\x18\x01 \x01(\x0b\x32".google.firestore.v1beta1.DocumentH\x00\x12\x11\n\x07missing\x18\x02 \x01(\tH\x00\x12\x13\n\x0btransaction\x18\x03 \x01(\x0c\x12-\n\tread_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x08\n\x06result"o\n\x17\x42\x65ginTransactionRequest\x12\x15\n\x08\x64\x61tabase\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12=\n\x07options\x18\x02 \x01(\x0b\x32,.google.firestore.v1beta1.TransactionOptions"/\n\x18\x42\x65ginTransactionResponse\x12\x13\n\x0btransaction\x18\x01 \x01(\x0c"l\n\rCommitRequest\x12\x15\n\x08\x64\x61tabase\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12/\n\x06writes\x18\x02 \x03(\x0b\x32\x1f.google.firestore.v1beta1.Write\x12\x13\n\x0btransaction\x18\x03 \x01(\x0c"\x7f\n\x0e\x43ommitResponse\x12<\n\rwrite_results\x18\x01 \x03(\x0b\x32%.google.firestore.v1beta1.WriteResult\x12/\n\x0b\x63ommit_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"B\n\x0fRollbackRequest\x12\x15\n\x08\x64\x61tabase\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x18\n\x0btransaction\x18\x02 \x01(\x0c\x42\x03\xe0\x41\x02"\xa4\x02\n\x0fRunQueryRequest\x12\x13\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x45\n\x10structured_query\x18\x02 \x01(\x0b\x32).google.firestore.v1beta1.StructuredQueryH\x00\x12\x15\n\x0btransaction\x18\x05 \x01(\x0cH\x01\x12G\n\x0fnew_transaction\x18\x06 \x01(\x0b\x32,.google.firestore.v1beta1.TransactionOptionsH\x01\x12/\n\tread_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x01\x42\x0c\n\nquery_typeB\x16\n\x14\x63onsistency_selector"\xa5\x01\n\x10RunQueryResponse\x12\x13\n\x0btransaction\x18\x02 \x01(\x0c\x12\x34\n\x08\x64ocument\x18\x01 \x01(\x0b\x32".google.firestore.v1beta1.Document\x12-\n\tread_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x17\n\x0fskipped_results\x18\x04 \x01(\x05"\xf2\x01\n\x0cWriteRequest\x12\x15\n\x08\x64\x61tabase\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\tstream_id\x18\x02 \x01(\t\x12/\n\x06writes\x18\x03 \x03(\x0b\x32\x1f.google.firestore.v1beta1.Write\x12\x14\n\x0cstream_token\x18\x04 \x01(\x0c\x12\x42\n\x06labels\x18\x05 \x03(\x0b\x32\x32.google.firestore.v1beta1.WriteRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xa7\x01\n\rWriteResponse\x12\x11\n\tstream_id\x18\x01 \x01(\t\x12\x14\n\x0cstream_token\x18\x02 \x01(\x0c\x12<\n\rwrite_results\x18\x03 \x03(\x0b\x32%.google.firestore.v1beta1.WriteResult\x12/\n\x0b\x63ommit_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\xfc\x01\n\rListenRequest\x12\x15\n\x08\x64\x61tabase\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x36\n\nadd_target\x18\x02 \x01(\x0b\x32 .google.firestore.v1beta1.TargetH\x00\x12\x17\n\rremove_target\x18\x03 \x01(\x05H\x00\x12\x43\n\x06labels\x18\x04 \x03(\x0b\x32\x33.google.firestore.v1beta1.ListenRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x0f\n\rtarget_change"\xee\x02\n\x0eListenResponse\x12?\n\rtarget_change\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.TargetChangeH\x00\x12\x43\n\x0f\x64ocument_change\x18\x03 \x01(\x0b\x32(.google.firestore.v1beta1.DocumentChangeH\x00\x12\x43\n\x0f\x64ocument_delete\x18\x04 \x01(\x0b\x32(.google.firestore.v1beta1.DocumentDeleteH\x00\x12\x43\n\x0f\x64ocument_remove\x18\x06 \x01(\x0b\x32(.google.firestore.v1beta1.DocumentRemoveH\x00\x12;\n\x06\x66ilter\x18\x05 \x01(\x0b\x32).google.firestore.v1beta1.ExistenceFilterH\x00\x42\x0f\n\rresponse_type"\xb0\x03\n\x06Target\x12=\n\x05query\x18\x02 \x01(\x0b\x32,.google.firestore.v1beta1.Target.QueryTargetH\x00\x12\x45\n\tdocuments\x18\x03 \x01(\x0b\x32\x30.google.firestore.v1beta1.Target.DocumentsTargetH\x00\x12\x16\n\x0cresume_token\x18\x04 \x01(\x0cH\x01\x12/\n\tread_time\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x01\x12\x11\n\ttarget_id\x18\x05 \x01(\x05\x12\x0c\n\x04once\x18\x06 \x01(\x08\x1a$\n\x0f\x44ocumentsTarget\x12\x11\n\tdocuments\x18\x02 \x03(\t\x1ar\n\x0bQueryTarget\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x45\n\x10structured_query\x18\x02 \x01(\x0b\x32).google.firestore.v1beta1.StructuredQueryH\x00\x42\x0c\n\nquery_typeB\r\n\x0btarget_typeB\r\n\x0bresume_type"\xaf\x02\n\x0cTargetChange\x12S\n\x12target_change_type\x18\x01 \x01(\x0e\x32\x37.google.firestore.v1beta1.TargetChange.TargetChangeType\x12\x12\n\ntarget_ids\x18\x02 \x03(\x05\x12!\n\x05\x63\x61use\x18\x03 \x01(\x0b\x32\x12.google.rpc.Status\x12\x14\n\x0cresume_token\x18\x04 \x01(\x0c\x12-\n\tread_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"N\n\x10TargetChangeType\x12\r\n\tNO_CHANGE\x10\x00\x12\x07\n\x03\x41\x44\x44\x10\x01\x12\n\n\x06REMOVE\x10\x02\x12\x0b\n\x07\x43URRENT\x10\x03\x12\t\n\x05RESET\x10\x04"V\n\x18ListCollectionIdsRequest\x12\x13\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"L\n\x19ListCollectionIdsResponse\x12\x16\n\x0e\x63ollection_ids\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t2\x9b\x15\n\tFirestore\x12\x9e\x01\n\x0bGetDocument\x12,.google.firestore.v1beta1.GetDocumentRequest\x1a".google.firestore.v1beta1.Document"=\x82\xd3\xe4\x93\x02\x37\x12\x35/v1beta1/{name=projects/*/databases/*/documents/*/**}\x12\xc1\x01\n\rListDocuments\x12..google.firestore.v1beta1.ListDocumentsRequest\x1a/.google.firestore.v1beta1.ListDocumentsResponse"O\x82\xd3\xe4\x93\x02I\x12G/v1beta1/{parent=projects/*/databases/*/documents/*/**}/{collection_id}\x12\xbe\x01\n\x0e\x43reateDocument\x12/.google.firestore.v1beta1.CreateDocumentRequest\x1a".google.firestore.v1beta1.Document"W\x82\xd3\xe4\x93\x02Q"E/v1beta1/{parent=projects/*/databases/*/documents/**}/{collection_id}:\x08\x64ocument\x12\xce\x01\n\x0eUpdateDocument\x12/.google.firestore.v1beta1.UpdateDocumentRequest\x1a".google.firestore.v1beta1.Document"g\x82\xd3\xe4\x93\x02J2>/v1beta1/{document.name=projects/*/databases/*/documents/*/**}:\x08\x64ocument\xda\x41\x14\x64ocument,update_mask\x12\x9f\x01\n\x0e\x44\x65leteDocument\x12/.google.firestore.v1beta1.DeleteDocumentRequest\x1a\x16.google.protobuf.Empty"D\x82\xd3\xe4\x93\x02\x37*5/v1beta1/{name=projects/*/databases/*/documents/*/**}\xda\x41\x04name\x12\xc8\x01\n\x11\x42\x61tchGetDocuments\x12\x32.google.firestore.v1beta1.BatchGetDocumentsRequest\x1a\x33.google.firestore.v1beta1.BatchGetDocumentsResponse"H\x82\xd3\xe4\x93\x02\x42"=/v1beta1/{database=projects/*/databases/*}/documents:batchGet:\x01*0\x01\x12\xd6\x01\n\x10\x42\x65ginTransaction\x12\x31.google.firestore.v1beta1.BeginTransactionRequest\x1a\x32.google.firestore.v1beta1.BeginTransactionResponse"[\x82\xd3\xe4\x93\x02J"E/v1beta1/{database=projects/*/databases/*}/documents:beginTransaction:\x01*\xda\x41\x08\x64\x61tabase\x12\xb5\x01\n\x06\x43ommit\x12\'.google.firestore.v1beta1.CommitRequest\x1a(.google.firestore.v1beta1.CommitResponse"X\x82\xd3\xe4\x93\x02@";/v1beta1/{database=projects/*/databases/*}/documents:commit:\x01*\xda\x41\x0f\x64\x61tabase,writes\x12\xae\x01\n\x08Rollback\x12).google.firestore.v1beta1.RollbackRequest\x1a\x16.google.protobuf.Empty"_\x82\xd3\xe4\x93\x02\x42"=/v1beta1/{database=projects/*/databases/*}/documents:rollback:\x01*\xda\x41\x14\x64\x61tabase,transaction\x12\xf4\x01\n\x08RunQuery\x12).google.firestore.v1beta1.RunQueryRequest\x1a*.google.firestore.v1beta1.RunQueryResponse"\x8e\x01\x82\xd3\xe4\x93\x02\x87\x01";/v1beta1/{parent=projects/*/databases/*/documents}:runQuery:\x01*ZE"@/v1beta1/{parent=projects/*/databases/*/documents/*/**}:runQuery:\x01*0\x01\x12\xa3\x01\n\x05Write\x12&.google.firestore.v1beta1.WriteRequest\x1a\'.google.firestore.v1beta1.WriteResponse"E\x82\xd3\xe4\x93\x02?":/v1beta1/{database=projects/*/databases/*}/documents:write:\x01*(\x01\x30\x01\x12\xa7\x01\n\x06Listen\x12\'.google.firestore.v1beta1.ListenRequest\x1a(.google.firestore.v1beta1.ListenResponse"F\x82\xd3\xe4\x93\x02@";/v1beta1/{database=projects/*/databases/*}/documents:listen:\x01*(\x01\x30\x01\x12\xa8\x02\n\x11ListCollectionIds\x12\x32.google.firestore.v1beta1.ListCollectionIdsRequest\x1a\x33.google.firestore.v1beta1.ListCollectionIdsResponse"\xa9\x01\x82\xd3\xe4\x93\x02\x99\x01"D/v1beta1/{parent=projects/*/databases/*/documents}:listCollectionIds:\x01*ZN"I/v1beta1/{parent=projects/*/databases/*/documents/*/**}:listCollectionIds:\x01*\xda\x41\x06parent\x1av\xca\x41\x18\x66irestore.googleapis.com\xd2\x41Xhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/datastoreB\xe0\x01\n\x1c\x63om.google.firestore.v1beta1B\x0e\x46irestoreProtoP\x01ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\xa2\x02\x04GCFS\xaa\x02\x1eGoogle.Cloud.Firestore.V1Beta1\xca\x02\x1eGoogle\\Cloud\\Firestore\\V1beta1\xea\x02!Google::Cloud::Firestore::V1beta1b\x06proto3',
+ dependencies=[
+ google_dot_api_dot_annotations__pb2.DESCRIPTOR,
+ google_dot_api_dot_client__pb2.DESCRIPTOR,
+ google_dot_api_dot_field__behavior__pb2.DESCRIPTOR,
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2.DESCRIPTOR,
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.DESCRIPTOR,
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_query__pb2.DESCRIPTOR,
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2.DESCRIPTOR,
+ google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,
+ google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,
+ google_dot_rpc_dot_status__pb2.DESCRIPTOR,
+ ],
+)
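
The file descriptor above only registers the raw schema; the usable message classes are synthesized from it further down in this module via the protobuf reflection machinery. Once built, they behave like any generated protobuf class; for example (the document name below is a placeholder):

from google.cloud.firestore_v1beta1.proto import firestore_pb2

request = firestore_pb2.GetDocumentRequest(
    name="projects/my-project/databases/my-database/documents/chatrooms/my-chatroom"
)
wire_bytes = request.SerializeToString()  # wire-format encoding
decoded = firestore_pb2.GetDocumentRequest.FromString(wire_bytes)
assert decoded.name == request.name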
+
+
+_TARGETCHANGE_TARGETCHANGETYPE = _descriptor.EnumDescriptor(
+ name="TargetChangeType",
+ full_name="google.firestore.v1beta1.TargetChange.TargetChangeType",
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name="NO_CHANGE",
+ index=0,
+ number=0,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="ADD",
+ index=1,
+ number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="REMOVE",
+ index=2,
+ number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="CURRENT",
+ index=3,
+ number=3,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="RESET",
+ index=4,
+ number=4,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ serialized_start=4752,
+ serialized_end=4830,
+)
+_sym_db.RegisterEnumDescriptor(_TARGETCHANGE_TARGETCHANGETYPE)
+
+
+_GETDOCUMENTREQUEST = _descriptor.Descriptor(
+ name="GetDocumentRequest",
+ full_name="google.firestore.v1beta1.GetDocumentRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="name",
+ full_name="google.firestore.v1beta1.GetDocumentRequest.name",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=b"\340A\002",
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="mask",
+ full_name="google.firestore.v1beta1.GetDocumentRequest.mask",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="transaction",
+ full_name="google.firestore.v1beta1.GetDocumentRequest.transaction",
+ index=2,
+ number=3,
+ type=12,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"",
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="read_time",
+ full_name="google.firestore.v1beta1.GetDocumentRequest.read_time",
+ index=3,
+ number=5,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name="consistency_selector",
+ full_name="google.firestore.v1beta1.GetDocumentRequest.consistency_selector",
+ index=0,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[],
+ ),
+ ],
+ serialized_start=462,
+ serialized_end=651,
+)
+
+
+_LISTDOCUMENTSREQUEST = _descriptor.Descriptor(
+ name="ListDocumentsRequest",
+ full_name="google.firestore.v1beta1.ListDocumentsRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="parent",
+ full_name="google.firestore.v1beta1.ListDocumentsRequest.parent",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=b"\340A\002",
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="collection_id",
+ full_name="google.firestore.v1beta1.ListDocumentsRequest.collection_id",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=b"\340A\002",
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="page_size",
+ full_name="google.firestore.v1beta1.ListDocumentsRequest.page_size",
+ index=2,
+ number=3,
+ type=5,
+ cpp_type=1,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="page_token",
+ full_name="google.firestore.v1beta1.ListDocumentsRequest.page_token",
+ index=3,
+ number=4,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="order_by",
+ full_name="google.firestore.v1beta1.ListDocumentsRequest.order_by",
+ index=4,
+ number=6,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="mask",
+ full_name="google.firestore.v1beta1.ListDocumentsRequest.mask",
+ index=5,
+ number=7,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="transaction",
+ full_name="google.firestore.v1beta1.ListDocumentsRequest.transaction",
+ index=6,
+ number=8,
+ type=12,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"",
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="read_time",
+ full_name="google.firestore.v1beta1.ListDocumentsRequest.read_time",
+ index=7,
+ number=10,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="show_missing",
+ full_name="google.firestore.v1beta1.ListDocumentsRequest.show_missing",
+ index=8,
+ number=12,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name="consistency_selector",
+ full_name="google.firestore.v1beta1.ListDocumentsRequest.consistency_selector",
+ index=0,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[],
+ ),
+ ],
+ serialized_start=654,
+ serialized_end=954,
+)
+
+
+_LISTDOCUMENTSRESPONSE = _descriptor.Descriptor(
+ name="ListDocumentsResponse",
+ full_name="google.firestore.v1beta1.ListDocumentsResponse",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="documents",
+ full_name="google.firestore.v1beta1.ListDocumentsResponse.documents",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="next_page_token",
+ full_name="google.firestore.v1beta1.ListDocumentsResponse.next_page_token",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=956,
+ serialized_end=1059,
+)
+
+
+_CREATEDOCUMENTREQUEST = _descriptor.Descriptor(
+ name="CreateDocumentRequest",
+ full_name="google.firestore.v1beta1.CreateDocumentRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="parent",
+ full_name="google.firestore.v1beta1.CreateDocumentRequest.parent",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=b"\340A\002",
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="collection_id",
+ full_name="google.firestore.v1beta1.CreateDocumentRequest.collection_id",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=b"\340A\002",
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="document_id",
+ full_name="google.firestore.v1beta1.CreateDocumentRequest.document_id",
+ index=2,
+ number=3,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="document",
+ full_name="google.firestore.v1beta1.CreateDocumentRequest.document",
+ index=3,
+ number=4,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=b"\340A\002",
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="mask",
+ full_name="google.firestore.v1beta1.CreateDocumentRequest.mask",
+ index=4,
+ number=5,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=1062,
+ serialized_end=1268,
+)
+
+
+_UPDATEDOCUMENTREQUEST = _descriptor.Descriptor(
+ name="UpdateDocumentRequest",
+ full_name="google.firestore.v1beta1.UpdateDocumentRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="document",
+ full_name="google.firestore.v1beta1.UpdateDocumentRequest.document",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=b"\340A\002",
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="update_mask",
+ full_name="google.firestore.v1beta1.UpdateDocumentRequest.update_mask",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="mask",
+ full_name="google.firestore.v1beta1.UpdateDocumentRequest.mask",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="current_document",
+ full_name="google.firestore.v1beta1.UpdateDocumentRequest.current_document",
+ index=3,
+ number=4,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=1271,
+ serialized_end=1534,
+)
+
+
+_DELETEDOCUMENTREQUEST = _descriptor.Descriptor(
+ name="DeleteDocumentRequest",
+ full_name="google.firestore.v1beta1.DeleteDocumentRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="name",
+ full_name="google.firestore.v1beta1.DeleteDocumentRequest.name",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=b"\340A\002",
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="current_document",
+ full_name="google.firestore.v1beta1.DeleteDocumentRequest.current_document",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=1536,
+ serialized_end=1644,
+)
+
+
+_BATCHGETDOCUMENTSREQUEST = _descriptor.Descriptor(
+ name="BatchGetDocumentsRequest",
+ full_name="google.firestore.v1beta1.BatchGetDocumentsRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="database",
+ full_name="google.firestore.v1beta1.BatchGetDocumentsRequest.database",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=b"\340A\002",
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="documents",
+ full_name="google.firestore.v1beta1.BatchGetDocumentsRequest.documents",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="mask",
+ full_name="google.firestore.v1beta1.BatchGetDocumentsRequest.mask",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="transaction",
+ full_name="google.firestore.v1beta1.BatchGetDocumentsRequest.transaction",
+ index=3,
+ number=4,
+ type=12,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"",
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="new_transaction",
+ full_name="google.firestore.v1beta1.BatchGetDocumentsRequest.new_transaction",
+ index=4,
+ number=5,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="read_time",
+ full_name="google.firestore.v1beta1.BatchGetDocumentsRequest.read_time",
+ index=5,
+ number=7,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name="consistency_selector",
+ full_name="google.firestore.v1beta1.BatchGetDocumentsRequest.consistency_selector",
+ index=0,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[],
+ ),
+ ],
+ serialized_start=1647,
+ serialized_end=1938,
+)
+
+
+_BATCHGETDOCUMENTSRESPONSE = _descriptor.Descriptor(
+ name="BatchGetDocumentsResponse",
+ full_name="google.firestore.v1beta1.BatchGetDocumentsResponse",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="found",
+ full_name="google.firestore.v1beta1.BatchGetDocumentsResponse.found",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="missing",
+ full_name="google.firestore.v1beta1.BatchGetDocumentsResponse.missing",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="transaction",
+ full_name="google.firestore.v1beta1.BatchGetDocumentsResponse.transaction",
+ index=2,
+ number=3,
+ type=12,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"",
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="read_time",
+ full_name="google.firestore.v1beta1.BatchGetDocumentsResponse.read_time",
+ index=3,
+ number=4,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name="result",
+ full_name="google.firestore.v1beta1.BatchGetDocumentsResponse.result",
+ index=0,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[],
+ ),
+ ],
+ serialized_start=1941,
+ serialized_end=2118,
+)
+
+
+_BEGINTRANSACTIONREQUEST = _descriptor.Descriptor(
+ name="BeginTransactionRequest",
+ full_name="google.firestore.v1beta1.BeginTransactionRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="database",
+ full_name="google.firestore.v1beta1.BeginTransactionRequest.database",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=b"\340A\002",
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="options",
+ full_name="google.firestore.v1beta1.BeginTransactionRequest.options",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=2120,
+ serialized_end=2231,
+)
+
+
+_BEGINTRANSACTIONRESPONSE = _descriptor.Descriptor(
+ name="BeginTransactionResponse",
+ full_name="google.firestore.v1beta1.BeginTransactionResponse",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="transaction",
+ full_name="google.firestore.v1beta1.BeginTransactionResponse.transaction",
+ index=0,
+ number=1,
+ type=12,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"",
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=2233,
+ serialized_end=2280,
+)
+
+
+_COMMITREQUEST = _descriptor.Descriptor(
+ name="CommitRequest",
+ full_name="google.firestore.v1beta1.CommitRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="database",
+ full_name="google.firestore.v1beta1.CommitRequest.database",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=b"\340A\002",
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="writes",
+ full_name="google.firestore.v1beta1.CommitRequest.writes",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="transaction",
+ full_name="google.firestore.v1beta1.CommitRequest.transaction",
+ index=2,
+ number=3,
+ type=12,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"",
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=2282,
+ serialized_end=2390,
+)
+
+
+_COMMITRESPONSE = _descriptor.Descriptor(
+ name="CommitResponse",
+ full_name="google.firestore.v1beta1.CommitResponse",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="write_results",
+ full_name="google.firestore.v1beta1.CommitResponse.write_results",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="commit_time",
+ full_name="google.firestore.v1beta1.CommitResponse.commit_time",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=2392,
+ serialized_end=2519,
+)
+
+
+_ROLLBACKREQUEST = _descriptor.Descriptor(
+ name="RollbackRequest",
+ full_name="google.firestore.v1beta1.RollbackRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="database",
+ full_name="google.firestore.v1beta1.RollbackRequest.database",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=b"\340A\002",
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="transaction",
+ full_name="google.firestore.v1beta1.RollbackRequest.transaction",
+ index=1,
+ number=2,
+ type=12,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"",
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=b"\340A\002",
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=2521,
+ serialized_end=2587,
+)
+
+
+_RUNQUERYREQUEST = _descriptor.Descriptor(
+ name="RunQueryRequest",
+ full_name="google.firestore.v1beta1.RunQueryRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="parent",
+ full_name="google.firestore.v1beta1.RunQueryRequest.parent",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=b"\340A\002",
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="structured_query",
+ full_name="google.firestore.v1beta1.RunQueryRequest.structured_query",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="transaction",
+ full_name="google.firestore.v1beta1.RunQueryRequest.transaction",
+ index=2,
+ number=5,
+ type=12,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"",
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="new_transaction",
+ full_name="google.firestore.v1beta1.RunQueryRequest.new_transaction",
+ index=3,
+ number=6,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="read_time",
+ full_name="google.firestore.v1beta1.RunQueryRequest.read_time",
+ index=4,
+ number=7,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name="query_type",
+ full_name="google.firestore.v1beta1.RunQueryRequest.query_type",
+ index=0,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[],
+ ),
+ _descriptor.OneofDescriptor(
+ name="consistency_selector",
+ full_name="google.firestore.v1beta1.RunQueryRequest.consistency_selector",
+ index=1,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[],
+ ),
+ ],
+ serialized_start=2590,
+ serialized_end=2882,
+)
+
+
+_RUNQUERYRESPONSE = _descriptor.Descriptor(
+ name="RunQueryResponse",
+ full_name="google.firestore.v1beta1.RunQueryResponse",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="transaction",
+ full_name="google.firestore.v1beta1.RunQueryResponse.transaction",
+ index=0,
+ number=2,
+ type=12,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"",
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="document",
+ full_name="google.firestore.v1beta1.RunQueryResponse.document",
+ index=1,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="read_time",
+ full_name="google.firestore.v1beta1.RunQueryResponse.read_time",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="skipped_results",
+ full_name="google.firestore.v1beta1.RunQueryResponse.skipped_results",
+ index=3,
+ number=4,
+ type=5,
+ cpp_type=1,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=2885,
+ serialized_end=3050,
+)
+
+
+_WRITEREQUEST_LABELSENTRY = _descriptor.Descriptor(
+ name="LabelsEntry",
+ full_name="google.firestore.v1beta1.WriteRequest.LabelsEntry",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="key",
+ full_name="google.firestore.v1beta1.WriteRequest.LabelsEntry.key",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="value",
+ full_name="google.firestore.v1beta1.WriteRequest.LabelsEntry.value",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=b"8\001",
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=3250,
+ serialized_end=3295,
+)
+
+_WRITEREQUEST = _descriptor.Descriptor(
+ name="WriteRequest",
+ full_name="google.firestore.v1beta1.WriteRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="database",
+ full_name="google.firestore.v1beta1.WriteRequest.database",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=b"\340A\002",
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="stream_id",
+ full_name="google.firestore.v1beta1.WriteRequest.stream_id",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="writes",
+ full_name="google.firestore.v1beta1.WriteRequest.writes",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="stream_token",
+ full_name="google.firestore.v1beta1.WriteRequest.stream_token",
+ index=3,
+ number=4,
+ type=12,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"",
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="labels",
+ full_name="google.firestore.v1beta1.WriteRequest.labels",
+ index=4,
+ number=5,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[_WRITEREQUEST_LABELSENTRY,],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=3053,
+ serialized_end=3295,
+)
+
+
+_WRITERESPONSE = _descriptor.Descriptor(
+ name="WriteResponse",
+ full_name="google.firestore.v1beta1.WriteResponse",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="stream_id",
+ full_name="google.firestore.v1beta1.WriteResponse.stream_id",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="stream_token",
+ full_name="google.firestore.v1beta1.WriteResponse.stream_token",
+ index=1,
+ number=2,
+ type=12,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"",
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="write_results",
+ full_name="google.firestore.v1beta1.WriteResponse.write_results",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="commit_time",
+ full_name="google.firestore.v1beta1.WriteResponse.commit_time",
+ index=3,
+ number=4,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=3298,
+ serialized_end=3465,
+)
+
+
+_LISTENREQUEST_LABELSENTRY = _descriptor.Descriptor(
+ name="LabelsEntry",
+ full_name="google.firestore.v1beta1.ListenRequest.LabelsEntry",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="key",
+ full_name="google.firestore.v1beta1.ListenRequest.LabelsEntry.key",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="value",
+ full_name="google.firestore.v1beta1.ListenRequest.LabelsEntry.value",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=b"8\001",
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=3250,
+ serialized_end=3295,
+)
+
+_LISTENREQUEST = _descriptor.Descriptor(
+ name="ListenRequest",
+ full_name="google.firestore.v1beta1.ListenRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="database",
+ full_name="google.firestore.v1beta1.ListenRequest.database",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=b"\340A\002",
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="add_target",
+ full_name="google.firestore.v1beta1.ListenRequest.add_target",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="remove_target",
+ full_name="google.firestore.v1beta1.ListenRequest.remove_target",
+ index=2,
+ number=3,
+ type=5,
+ cpp_type=1,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="labels",
+ full_name="google.firestore.v1beta1.ListenRequest.labels",
+ index=3,
+ number=4,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[_LISTENREQUEST_LABELSENTRY,],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name="target_change",
+ full_name="google.firestore.v1beta1.ListenRequest.target_change",
+ index=0,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[],
+ ),
+ ],
+ serialized_start=3468,
+ serialized_end=3720,
+)
+
+
+_LISTENRESPONSE = _descriptor.Descriptor(
+ name="ListenResponse",
+ full_name="google.firestore.v1beta1.ListenResponse",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="target_change",
+ full_name="google.firestore.v1beta1.ListenResponse.target_change",
+ index=0,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="document_change",
+ full_name="google.firestore.v1beta1.ListenResponse.document_change",
+ index=1,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="document_delete",
+ full_name="google.firestore.v1beta1.ListenResponse.document_delete",
+ index=2,
+ number=4,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="document_remove",
+ full_name="google.firestore.v1beta1.ListenResponse.document_remove",
+ index=3,
+ number=6,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="filter",
+ full_name="google.firestore.v1beta1.ListenResponse.filter",
+ index=4,
+ number=5,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name="response_type",
+ full_name="google.firestore.v1beta1.ListenResponse.response_type",
+ index=0,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[],
+ ),
+ ],
+ serialized_start=3723,
+ serialized_end=4089,
+)
+
+
+_TARGET_DOCUMENTSTARGET = _descriptor.Descriptor(
+ name="DocumentsTarget",
+ full_name="google.firestore.v1beta1.Target.DocumentsTarget",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="documents",
+ full_name="google.firestore.v1beta1.Target.DocumentsTarget.documents",
+ index=0,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=4342,
+ serialized_end=4378,
+)
+
+_TARGET_QUERYTARGET = _descriptor.Descriptor(
+ name="QueryTarget",
+ full_name="google.firestore.v1beta1.Target.QueryTarget",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="parent",
+ full_name="google.firestore.v1beta1.Target.QueryTarget.parent",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="structured_query",
+ full_name="google.firestore.v1beta1.Target.QueryTarget.structured_query",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name="query_type",
+ full_name="google.firestore.v1beta1.Target.QueryTarget.query_type",
+ index=0,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[],
+ ),
+ ],
+ serialized_start=4380,
+ serialized_end=4494,
+)
+
+_TARGET = _descriptor.Descriptor(
+ name="Target",
+ full_name="google.firestore.v1beta1.Target",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="query",
+ full_name="google.firestore.v1beta1.Target.query",
+ index=0,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="documents",
+ full_name="google.firestore.v1beta1.Target.documents",
+ index=1,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="resume_token",
+ full_name="google.firestore.v1beta1.Target.resume_token",
+ index=2,
+ number=4,
+ type=12,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"",
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="read_time",
+ full_name="google.firestore.v1beta1.Target.read_time",
+ index=3,
+ number=11,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="target_id",
+ full_name="google.firestore.v1beta1.Target.target_id",
+ index=4,
+ number=5,
+ type=5,
+ cpp_type=1,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="once",
+ full_name="google.firestore.v1beta1.Target.once",
+ index=5,
+ number=6,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[_TARGET_DOCUMENTSTARGET, _TARGET_QUERYTARGET,],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name="target_type",
+ full_name="google.firestore.v1beta1.Target.target_type",
+ index=0,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[],
+ ),
+ _descriptor.OneofDescriptor(
+ name="resume_type",
+ full_name="google.firestore.v1beta1.Target.resume_type",
+ index=1,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[],
+ ),
+ ],
+ serialized_start=4092,
+ serialized_end=4524,
+)
+
+
+_TARGETCHANGE = _descriptor.Descriptor(
+ name="TargetChange",
+ full_name="google.firestore.v1beta1.TargetChange",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="target_change_type",
+ full_name="google.firestore.v1beta1.TargetChange.target_change_type",
+ index=0,
+ number=1,
+ type=14,
+ cpp_type=8,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="target_ids",
+ full_name="google.firestore.v1beta1.TargetChange.target_ids",
+ index=1,
+ number=2,
+ type=5,
+ cpp_type=1,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="cause",
+ full_name="google.firestore.v1beta1.TargetChange.cause",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="resume_token",
+ full_name="google.firestore.v1beta1.TargetChange.resume_token",
+ index=3,
+ number=4,
+ type=12,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"",
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="read_time",
+ full_name="google.firestore.v1beta1.TargetChange.read_time",
+ index=4,
+ number=6,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[_TARGETCHANGE_TARGETCHANGETYPE,],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=4527,
+ serialized_end=4830,
+)
+
+
+_LISTCOLLECTIONIDSREQUEST = _descriptor.Descriptor(
+ name="ListCollectionIdsRequest",
+ full_name="google.firestore.v1beta1.ListCollectionIdsRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="parent",
+ full_name="google.firestore.v1beta1.ListCollectionIdsRequest.parent",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=b"\340A\002",
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="page_size",
+ full_name="google.firestore.v1beta1.ListCollectionIdsRequest.page_size",
+ index=1,
+ number=2,
+ type=5,
+ cpp_type=1,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="page_token",
+ full_name="google.firestore.v1beta1.ListCollectionIdsRequest.page_token",
+ index=2,
+ number=3,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=4832,
+ serialized_end=4918,
+)
+
+
+_LISTCOLLECTIONIDSRESPONSE = _descriptor.Descriptor(
+ name="ListCollectionIdsResponse",
+ full_name="google.firestore.v1beta1.ListCollectionIdsResponse",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="collection_ids",
+ full_name="google.firestore.v1beta1.ListCollectionIdsResponse.collection_ids",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="next_page_token",
+ full_name="google.firestore.v1beta1.ListCollectionIdsResponse.next_page_token",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=4920,
+ serialized_end=4996,
+)
+
+_GETDOCUMENTREQUEST.fields_by_name[
+ "mask"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._DOCUMENTMASK
+)
+_GETDOCUMENTREQUEST.fields_by_name[
+ "read_time"
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_GETDOCUMENTREQUEST.oneofs_by_name["consistency_selector"].fields.append(
+ _GETDOCUMENTREQUEST.fields_by_name["transaction"]
+)
+_GETDOCUMENTREQUEST.fields_by_name[
+ "transaction"
+].containing_oneof = _GETDOCUMENTREQUEST.oneofs_by_name["consistency_selector"]
+_GETDOCUMENTREQUEST.oneofs_by_name["consistency_selector"].fields.append(
+ _GETDOCUMENTREQUEST.fields_by_name["read_time"]
+)
+_GETDOCUMENTREQUEST.fields_by_name[
+ "read_time"
+].containing_oneof = _GETDOCUMENTREQUEST.oneofs_by_name["consistency_selector"]
+_LISTDOCUMENTSREQUEST.fields_by_name[
+ "mask"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._DOCUMENTMASK
+)
+_LISTDOCUMENTSREQUEST.fields_by_name[
+ "read_time"
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_LISTDOCUMENTSREQUEST.oneofs_by_name["consistency_selector"].fields.append(
+ _LISTDOCUMENTSREQUEST.fields_by_name["transaction"]
+)
+_LISTDOCUMENTSREQUEST.fields_by_name[
+ "transaction"
+].containing_oneof = _LISTDOCUMENTSREQUEST.oneofs_by_name["consistency_selector"]
+_LISTDOCUMENTSREQUEST.oneofs_by_name["consistency_selector"].fields.append(
+ _LISTDOCUMENTSREQUEST.fields_by_name["read_time"]
+)
+_LISTDOCUMENTSREQUEST.fields_by_name[
+ "read_time"
+].containing_oneof = _LISTDOCUMENTSREQUEST.oneofs_by_name["consistency_selector"]
+_LISTDOCUMENTSRESPONSE.fields_by_name[
+ "documents"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT
+)
+_CREATEDOCUMENTREQUEST.fields_by_name[
+ "document"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT
+)
+_CREATEDOCUMENTREQUEST.fields_by_name[
+ "mask"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._DOCUMENTMASK
+)
+_UPDATEDOCUMENTREQUEST.fields_by_name[
+ "document"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT
+)
+_UPDATEDOCUMENTREQUEST.fields_by_name[
+ "update_mask"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._DOCUMENTMASK
+)
+_UPDATEDOCUMENTREQUEST.fields_by_name[
+ "mask"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._DOCUMENTMASK
+)
+_UPDATEDOCUMENTREQUEST.fields_by_name[
+ "current_document"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._PRECONDITION
+)
+_DELETEDOCUMENTREQUEST.fields_by_name[
+ "current_document"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._PRECONDITION
+)
+_BATCHGETDOCUMENTSREQUEST.fields_by_name[
+ "mask"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._DOCUMENTMASK
+)
+_BATCHGETDOCUMENTSREQUEST.fields_by_name[
+ "new_transaction"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._TRANSACTIONOPTIONS
+)
+_BATCHGETDOCUMENTSREQUEST.fields_by_name[
+ "read_time"
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_BATCHGETDOCUMENTSREQUEST.oneofs_by_name["consistency_selector"].fields.append(
+ _BATCHGETDOCUMENTSREQUEST.fields_by_name["transaction"]
+)
+_BATCHGETDOCUMENTSREQUEST.fields_by_name[
+ "transaction"
+].containing_oneof = _BATCHGETDOCUMENTSREQUEST.oneofs_by_name["consistency_selector"]
+_BATCHGETDOCUMENTSREQUEST.oneofs_by_name["consistency_selector"].fields.append(
+ _BATCHGETDOCUMENTSREQUEST.fields_by_name["new_transaction"]
+)
+_BATCHGETDOCUMENTSREQUEST.fields_by_name[
+ "new_transaction"
+].containing_oneof = _BATCHGETDOCUMENTSREQUEST.oneofs_by_name["consistency_selector"]
+_BATCHGETDOCUMENTSREQUEST.oneofs_by_name["consistency_selector"].fields.append(
+ _BATCHGETDOCUMENTSREQUEST.fields_by_name["read_time"]
+)
+_BATCHGETDOCUMENTSREQUEST.fields_by_name[
+ "read_time"
+].containing_oneof = _BATCHGETDOCUMENTSREQUEST.oneofs_by_name["consistency_selector"]
+_BATCHGETDOCUMENTSRESPONSE.fields_by_name[
+ "found"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT
+)
+_BATCHGETDOCUMENTSRESPONSE.fields_by_name[
+ "read_time"
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_BATCHGETDOCUMENTSRESPONSE.oneofs_by_name["result"].fields.append(
+ _BATCHGETDOCUMENTSRESPONSE.fields_by_name["found"]
+)
+_BATCHGETDOCUMENTSRESPONSE.fields_by_name[
+ "found"
+].containing_oneof = _BATCHGETDOCUMENTSRESPONSE.oneofs_by_name["result"]
+_BATCHGETDOCUMENTSRESPONSE.oneofs_by_name["result"].fields.append(
+ _BATCHGETDOCUMENTSRESPONSE.fields_by_name["missing"]
+)
+_BATCHGETDOCUMENTSRESPONSE.fields_by_name[
+ "missing"
+].containing_oneof = _BATCHGETDOCUMENTSRESPONSE.oneofs_by_name["result"]
+_BEGINTRANSACTIONREQUEST.fields_by_name[
+ "options"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._TRANSACTIONOPTIONS
+)
+_COMMITREQUEST.fields_by_name[
+ "writes"
+].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2._WRITE
+_COMMITRESPONSE.fields_by_name[
+ "write_results"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2._WRITERESULT
+)
+_COMMITRESPONSE.fields_by_name[
+ "commit_time"
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_RUNQUERYREQUEST.fields_by_name[
+ "structured_query"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_query__pb2._STRUCTUREDQUERY
+)
+_RUNQUERYREQUEST.fields_by_name[
+ "new_transaction"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._TRANSACTIONOPTIONS
+)
+_RUNQUERYREQUEST.fields_by_name[
+ "read_time"
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_RUNQUERYREQUEST.oneofs_by_name["query_type"].fields.append(
+ _RUNQUERYREQUEST.fields_by_name["structured_query"]
+)
+_RUNQUERYREQUEST.fields_by_name[
+ "structured_query"
+].containing_oneof = _RUNQUERYREQUEST.oneofs_by_name["query_type"]
+_RUNQUERYREQUEST.oneofs_by_name["consistency_selector"].fields.append(
+ _RUNQUERYREQUEST.fields_by_name["transaction"]
+)
+_RUNQUERYREQUEST.fields_by_name[
+ "transaction"
+].containing_oneof = _RUNQUERYREQUEST.oneofs_by_name["consistency_selector"]
+_RUNQUERYREQUEST.oneofs_by_name["consistency_selector"].fields.append(
+ _RUNQUERYREQUEST.fields_by_name["new_transaction"]
+)
+_RUNQUERYREQUEST.fields_by_name[
+ "new_transaction"
+].containing_oneof = _RUNQUERYREQUEST.oneofs_by_name["consistency_selector"]
+_RUNQUERYREQUEST.oneofs_by_name["consistency_selector"].fields.append(
+ _RUNQUERYREQUEST.fields_by_name["read_time"]
+)
+_RUNQUERYREQUEST.fields_by_name[
+ "read_time"
+].containing_oneof = _RUNQUERYREQUEST.oneofs_by_name["consistency_selector"]
+_RUNQUERYRESPONSE.fields_by_name[
+ "document"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT
+)
+_RUNQUERYRESPONSE.fields_by_name[
+ "read_time"
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_WRITEREQUEST_LABELSENTRY.containing_type = _WRITEREQUEST
+_WRITEREQUEST.fields_by_name[
+ "writes"
+].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2._WRITE
+_WRITEREQUEST.fields_by_name["labels"].message_type = _WRITEREQUEST_LABELSENTRY
+_WRITERESPONSE.fields_by_name[
+ "write_results"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2._WRITERESULT
+)
+_WRITERESPONSE.fields_by_name[
+ "commit_time"
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_LISTENREQUEST_LABELSENTRY.containing_type = _LISTENREQUEST
+_LISTENREQUEST.fields_by_name["add_target"].message_type = _TARGET
+_LISTENREQUEST.fields_by_name["labels"].message_type = _LISTENREQUEST_LABELSENTRY
+_LISTENREQUEST.oneofs_by_name["target_change"].fields.append(
+ _LISTENREQUEST.fields_by_name["add_target"]
+)
+_LISTENREQUEST.fields_by_name[
+ "add_target"
+].containing_oneof = _LISTENREQUEST.oneofs_by_name["target_change"]
+_LISTENREQUEST.oneofs_by_name["target_change"].fields.append(
+ _LISTENREQUEST.fields_by_name["remove_target"]
+)
+_LISTENREQUEST.fields_by_name[
+ "remove_target"
+].containing_oneof = _LISTENREQUEST.oneofs_by_name["target_change"]
+_LISTENRESPONSE.fields_by_name["target_change"].message_type = _TARGETCHANGE
+_LISTENRESPONSE.fields_by_name[
+ "document_change"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2._DOCUMENTCHANGE
+)
+_LISTENRESPONSE.fields_by_name[
+ "document_delete"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2._DOCUMENTDELETE
+)
+_LISTENRESPONSE.fields_by_name[
+ "document_remove"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2._DOCUMENTREMOVE
+)
+_LISTENRESPONSE.fields_by_name[
+ "filter"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2._EXISTENCEFILTER
+)
+_LISTENRESPONSE.oneofs_by_name["response_type"].fields.append(
+ _LISTENRESPONSE.fields_by_name["target_change"]
+)
+_LISTENRESPONSE.fields_by_name[
+ "target_change"
+].containing_oneof = _LISTENRESPONSE.oneofs_by_name["response_type"]
+_LISTENRESPONSE.oneofs_by_name["response_type"].fields.append(
+ _LISTENRESPONSE.fields_by_name["document_change"]
+)
+_LISTENRESPONSE.fields_by_name[
+ "document_change"
+].containing_oneof = _LISTENRESPONSE.oneofs_by_name["response_type"]
+_LISTENRESPONSE.oneofs_by_name["response_type"].fields.append(
+ _LISTENRESPONSE.fields_by_name["document_delete"]
+)
+_LISTENRESPONSE.fields_by_name[
+ "document_delete"
+].containing_oneof = _LISTENRESPONSE.oneofs_by_name["response_type"]
+_LISTENRESPONSE.oneofs_by_name["response_type"].fields.append(
+ _LISTENRESPONSE.fields_by_name["document_remove"]
+)
+_LISTENRESPONSE.fields_by_name[
+ "document_remove"
+].containing_oneof = _LISTENRESPONSE.oneofs_by_name["response_type"]
+_LISTENRESPONSE.oneofs_by_name["response_type"].fields.append(
+ _LISTENRESPONSE.fields_by_name["filter"]
+)
+_LISTENRESPONSE.fields_by_name[
+ "filter"
+].containing_oneof = _LISTENRESPONSE.oneofs_by_name["response_type"]
+_TARGET_DOCUMENTSTARGET.containing_type = _TARGET
+_TARGET_QUERYTARGET.fields_by_name[
+ "structured_query"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_query__pb2._STRUCTUREDQUERY
+)
+_TARGET_QUERYTARGET.containing_type = _TARGET
+_TARGET_QUERYTARGET.oneofs_by_name["query_type"].fields.append(
+ _TARGET_QUERYTARGET.fields_by_name["structured_query"]
+)
+_TARGET_QUERYTARGET.fields_by_name[
+ "structured_query"
+].containing_oneof = _TARGET_QUERYTARGET.oneofs_by_name["query_type"]
+_TARGET.fields_by_name["query"].message_type = _TARGET_QUERYTARGET
+_TARGET.fields_by_name["documents"].message_type = _TARGET_DOCUMENTSTARGET
+_TARGET.fields_by_name[
+ "read_time"
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_TARGET.oneofs_by_name["target_type"].fields.append(_TARGET.fields_by_name["query"])
+_TARGET.fields_by_name["query"].containing_oneof = _TARGET.oneofs_by_name["target_type"]
+_TARGET.oneofs_by_name["target_type"].fields.append(_TARGET.fields_by_name["documents"])
+_TARGET.fields_by_name["documents"].containing_oneof = _TARGET.oneofs_by_name[
+ "target_type"
+]
+_TARGET.oneofs_by_name["resume_type"].fields.append(
+ _TARGET.fields_by_name["resume_token"]
+)
+_TARGET.fields_by_name["resume_token"].containing_oneof = _TARGET.oneofs_by_name[
+ "resume_type"
+]
+_TARGET.oneofs_by_name["resume_type"].fields.append(_TARGET.fields_by_name["read_time"])
+_TARGET.fields_by_name["read_time"].containing_oneof = _TARGET.oneofs_by_name[
+ "resume_type"
+]
+_TARGETCHANGE.fields_by_name[
+ "target_change_type"
+].enum_type = _TARGETCHANGE_TARGETCHANGETYPE
+_TARGETCHANGE.fields_by_name[
+ "cause"
+].message_type = google_dot_rpc_dot_status__pb2._STATUS
+_TARGETCHANGE.fields_by_name[
+ "read_time"
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_TARGETCHANGE_TARGETCHANGETYPE.containing_type = _TARGETCHANGE
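+
+# Editor's note: each ``oneofs_by_name[...].fields.append`` /
+# ``containing_oneof = ...`` pair above wires a oneof membership in both
+# directions. At runtime this is what drives ``WhichOneof`` and the
+# set-one-clears-the-rest behavior, e.g. for Target's "resume_type" oneof
+# (illustrative sketch only; the Target class is generated further below):
+#
+#   target = Target(resume_token=b"tok")
+#   target.read_time.seconds = 1  # same "resume_type" oneof, clears resume_token
+#   assert target.WhichOneof("resume_type") == "read_time"
+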
+DESCRIPTOR.message_types_by_name["GetDocumentRequest"] = _GETDOCUMENTREQUEST
+DESCRIPTOR.message_types_by_name["ListDocumentsRequest"] = _LISTDOCUMENTSREQUEST
+DESCRIPTOR.message_types_by_name["ListDocumentsResponse"] = _LISTDOCUMENTSRESPONSE
+DESCRIPTOR.message_types_by_name["CreateDocumentRequest"] = _CREATEDOCUMENTREQUEST
+DESCRIPTOR.message_types_by_name["UpdateDocumentRequest"] = _UPDATEDOCUMENTREQUEST
+DESCRIPTOR.message_types_by_name["DeleteDocumentRequest"] = _DELETEDOCUMENTREQUEST
+DESCRIPTOR.message_types_by_name["BatchGetDocumentsRequest"] = _BATCHGETDOCUMENTSREQUEST
+DESCRIPTOR.message_types_by_name[
+ "BatchGetDocumentsResponse"
+] = _BATCHGETDOCUMENTSRESPONSE
+DESCRIPTOR.message_types_by_name["BeginTransactionRequest"] = _BEGINTRANSACTIONREQUEST
+DESCRIPTOR.message_types_by_name["BeginTransactionResponse"] = _BEGINTRANSACTIONRESPONSE
+DESCRIPTOR.message_types_by_name["CommitRequest"] = _COMMITREQUEST
+DESCRIPTOR.message_types_by_name["CommitResponse"] = _COMMITRESPONSE
+DESCRIPTOR.message_types_by_name["RollbackRequest"] = _ROLLBACKREQUEST
+DESCRIPTOR.message_types_by_name["RunQueryRequest"] = _RUNQUERYREQUEST
+DESCRIPTOR.message_types_by_name["RunQueryResponse"] = _RUNQUERYRESPONSE
+DESCRIPTOR.message_types_by_name["WriteRequest"] = _WRITEREQUEST
+DESCRIPTOR.message_types_by_name["WriteResponse"] = _WRITERESPONSE
+DESCRIPTOR.message_types_by_name["ListenRequest"] = _LISTENREQUEST
+DESCRIPTOR.message_types_by_name["ListenResponse"] = _LISTENRESPONSE
+DESCRIPTOR.message_types_by_name["Target"] = _TARGET
+DESCRIPTOR.message_types_by_name["TargetChange"] = _TARGETCHANGE
+DESCRIPTOR.message_types_by_name["ListCollectionIdsRequest"] = _LISTCOLLECTIONIDSREQUEST
+DESCRIPTOR.message_types_by_name[
+ "ListCollectionIdsResponse"
+] = _LISTCOLLECTIONIDSRESPONSE
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
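+
+# Editor's note: once registered, the descriptors above are introspectable
+# through the standard protobuf descriptor API. A minimal sketch (illustration
+# only, not part of the generated output):
+#
+#   writes_field = DESCRIPTOR.message_types_by_name["CommitRequest"].fields_by_name["writes"]
+#   print(writes_field.number)  # -> 2, the wire tag declared in firestore.proto
+#   print(writes_field.label)   # -> 3 (LABEL_REPEATED), matching label=3 above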
+
+GetDocumentRequest = _reflection.GeneratedProtocolMessageType(
+ "GetDocumentRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _GETDOCUMENTREQUEST,
+ "__module__": "google.cloud.firestore_v1beta1.proto.firestore_pb2",
+ "__doc__": """The request for [Firestore.GetDocument][google.firestore.v1beta1.Fires
+ tore.GetDocument].
+
+ Attributes:
+ name:
+ Required. The resource name of the Document to get. In the
+ format: ``projects/{project_id}/databases/{database_id}/docume
+ nts/{document_path}``.
+ mask:
+ The fields to return. If not set, returns all fields. If the
+ document has a field that is not present in this mask, that
+ field will not be returned in the response.
+ consistency_selector:
+ The consistency mode for this transaction. If not set,
+ defaults to strong consistency.
+ transaction:
+ Reads the document in a transaction.
+ read_time:
+ Reads the version of the document at the given time. This may
+ not be older than 60 seconds.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.GetDocumentRequest)
+ },
+)
+_sym_db.RegisterMessage(GetDocumentRequest)
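+
+# Usage sketch for the message registered above (editor's addition, not
+# generated output; the resource name is made up). ``transaction`` and
+# ``read_time`` share the ``consistency_selector`` oneof, so assigning one
+# clears the other:
+#
+#   from google.protobuf import timestamp_pb2
+#
+#   req = GetDocumentRequest(
+#       name="projects/demo/databases/(default)/documents/users/alice",
+#       transaction=b"\x01\x02",
+#   )
+#   req.read_time.CopyFrom(timestamp_pb2.Timestamp(seconds=1600000000))
+#   assert req.WhichOneof("consistency_selector") == "read_time"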
+
+ListDocumentsRequest = _reflection.GeneratedProtocolMessageType(
+ "ListDocumentsRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _LISTDOCUMENTSREQUEST,
+ "__module__": "google.cloud.firestore_v1beta1.proto.firestore_pb2",
+ "__doc__": """The request for [Firestore.ListDocuments][google.firestore.v1beta1.Fir
+ estore.ListDocuments].
+
+ Attributes:
+ parent:
+ Required. The parent resource name. In the format:
+ ``projects/{project_id}/databases/{database_id}/documents`` or
+ ``projects/{project_id}/databases/{database_id}/documents/{doc
+ ument_path}``. For example: ``projects/my-
+ project/databases/my-database/documents`` or ``projects/my-
+ project/databases/my-database/documents/chatrooms/my-
+ chatroom``
+ collection_id:
+ Required. The collection ID, relative to ``parent``, to list.
+ For example: ``chatrooms`` or ``messages``.
+ page_size:
+ The maximum number of documents to return.
+ page_token:
+ The ``next_page_token`` value returned from a previous List
+ request, if any.
+ order_by:
+ The order to sort results by. For example: ``priority desc,
+ name``.
+ mask:
+ The fields to return. If not set, returns all fields. If a
+ document has a field that is not present in this mask, that
+ field will not be returned in the response.
+ consistency_selector:
+ The consistency mode for this transaction. If not set,
+ defaults to strong consistency.
+ transaction:
+ Reads documents in a transaction.
+ read_time:
+ Reads documents as they were at the given time. This may not
+ be older than 60 seconds.
+ show_missing:
+ If the list should show missing documents. A missing document
+ is a document that does not exist but has sub-documents. These
+ documents will be returned with a key but will not have
+ fields, [Document.create_time][google.firestore.v1beta1.Docume
+ nt.create_time], or [Document.update_time][google.firestore.v1
+ beta1.Document.update_time] set. Requests with
+ ``show_missing`` may not specify ``where`` or ``order_by``.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.ListDocumentsRequest)
+ },
+)
+_sym_db.RegisterMessage(ListDocumentsRequest)
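+
+# Paging sketch (editor's addition; ``stub`` stands in for a hypothetical
+# Firestore gRPC stub and is not defined in this module):
+#
+#   token = ""
+#   while True:
+#       resp = stub.ListDocuments(ListDocumentsRequest(
+#           parent="projects/demo/databases/(default)/documents",
+#           collection_id="chatrooms",
+#           page_size=100,
+#           page_token=token,
+#       ))
+#       for doc in resp.documents:
+#           ...  # consume each Document
+#       token = resp.next_page_token
+#       if not token:
+#           break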
+
+ListDocumentsResponse = _reflection.GeneratedProtocolMessageType(
+ "ListDocumentsResponse",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _LISTDOCUMENTSRESPONSE,
+ "__module__": "google.cloud.firestore_v1beta1.proto.firestore_pb2",
+ "__doc__": """The response for [Firestore.ListDocuments][google.firestore.v1beta1.Fi
+ restore.ListDocuments].
+
+ Attributes:
+ documents:
+ The Documents found.
+ next_page_token:
+ The next page token.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.ListDocumentsResponse)
+ },
+)
+_sym_db.RegisterMessage(ListDocumentsResponse)
+
+CreateDocumentRequest = _reflection.GeneratedProtocolMessageType(
+ "CreateDocumentRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _CREATEDOCUMENTREQUEST,
+ "__module__": "google.cloud.firestore_v1beta1.proto.firestore_pb2",
+ "__doc__": """The request for [Firestore.CreateDocument][google.firestore.v1beta1.Fi
+ restore.CreateDocument].
+
+ Attributes:
+ parent:
+ Required. The parent resource. For example:
+ ``projects/{project_id}/databases/{database_id}/documents`` or
+ ``projects/{project_id}/databases/{database_id}/documents/chat
+ rooms/{chatroom_id}``
+ collection_id:
+ Required. The collection ID, relative to ``parent``, to list.
+ For example: ``chatrooms``.
+ document_id:
+ The client-assigned document ID to use for this document.
+ Optional. If not specified, an ID will be assigned by the
+ service.
+ document:
+ Required. The document to create. ``name`` must not be set.
+ mask:
+ The fields to return. If not set, returns all fields. If the
+ document has a field that is not present in this mask, that
+ field will not be returned in the response.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.CreateDocumentRequest)
+ },
+)
+_sym_db.RegisterMessage(CreateDocumentRequest)
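+
+# Construction sketch (editor's addition; the import path mirrors the
+# ``google.cloud.firestore_v1beta1.proto`` package used throughout this file):
+#
+#   from google.cloud.firestore_v1beta1.proto import document_pb2
+#
+#   req = CreateDocumentRequest(
+#       parent="projects/demo/databases/(default)/documents",
+#       collection_id="chatrooms",
+#       document=document_pb2.Document(
+#           fields={"topic": document_pb2.Value(string_value="general")},
+#       ),  # ``name`` is left unset, as required above
+#   )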
+
+UpdateDocumentRequest = _reflection.GeneratedProtocolMessageType(
+ "UpdateDocumentRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _UPDATEDOCUMENTREQUEST,
+ "__module__": "google.cloud.firestore_v1beta1.proto.firestore_pb2",
+ "__doc__": """The request for [Firestore.UpdateDocument][google.firestore.v1beta1.Fi
+ restore.UpdateDocument].
+
+ Attributes:
+ document:
+ Required. The updated document. Creates the document if it
+ does not already exist.
+ update_mask:
+ The fields to update. None of the field paths in the mask may
+ contain a reserved name. If the document exists on the server
+ and has fields not referenced in the mask, they are left
+ unchanged. Fields referenced in the mask, but not present in
+ the input document, are deleted from the document on the
+ server.
+ mask:
+ The fields to return. If not set, returns all fields. If the
+ document has a field that is not present in this mask, that
+ field will not be returned in the response.
+ current_document:
+ An optional precondition on the document. The request will
+ fail if this is set and not met by the target document.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.UpdateDocumentRequest)
+ },
+)
+_sym_db.RegisterMessage(UpdateDocumentRequest)
+
+DeleteDocumentRequest = _reflection.GeneratedProtocolMessageType(
+ "DeleteDocumentRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _DELETEDOCUMENTREQUEST,
+ "__module__": "google.cloud.firestore_v1beta1.proto.firestore_pb2",
+ "__doc__": """The request for [Firestore.DeleteDocument][google.firestore.v1beta1.Fi
+ restore.DeleteDocument].
+
+ Attributes:
+ name:
+ Required. The resource name of the Document to delete. In the
+ format: ``projects/{project_id}/databases/{database_id}/docume
+ nts/{document_path}``.
+ current_document:
+ An optional precondition on the document. The request will
+ fail if this is set and not met by the target document.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.DeleteDocumentRequest)
+ },
+)
+_sym_db.RegisterMessage(DeleteDocumentRequest)
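+
+# Precondition sketch (editor's addition; ``common_pb2`` is the module behind
+# the ``..._common__pb2`` alias wired to ``current_document`` earlier):
+#
+#   from google.cloud.firestore_v1beta1.proto import common_pb2
+#
+#   req = DeleteDocumentRequest(
+#       name="projects/demo/databases/(default)/documents/users/alice",
+#       current_document=common_pb2.Precondition(exists=True),
+#   )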
+
+BatchGetDocumentsRequest = _reflection.GeneratedProtocolMessageType(
+ "BatchGetDocumentsRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _BATCHGETDOCUMENTSREQUEST,
+ "__module__": "google.cloud.firestore_v1beta1.proto.firestore_pb2",
+ "__doc__": """The request for [Firestore.BatchGetDocuments][google.firestore.v1beta1
+ .Firestore.BatchGetDocuments].
+
+ Attributes:
+ database:
+ Required. The database name. In the format:
+ ``projects/{project_id}/databases/{database_id}``.
+ documents:
+ The names of the documents to retrieve. In the format: ``proje
+ cts/{project_id}/databases/{database_id}/documents/{document_p
+ ath}``. The request will fail if any of the documents is not a

+ child resource of the given ``database``. Duplicate names will
+ be elided.
+ mask:
+ The fields to return. If not set, returns all fields. If a
+ document has a field that is not present in this mask, that
+ field will not be returned in the response.
+ consistency_selector:
+ The consistency mode for this transaction. If not set,
+ defaults to strong consistency.
+ transaction:
+ Reads documents in a transaction.
+ new_transaction:
+ Starts a new transaction and reads the documents. Defaults to
+ a read-only transaction. The new transaction ID will be
+ returned as the first response in the stream.
+ read_time:
+ Reads documents as they were at the given time. This may not
+ be older than 60 seconds.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.BatchGetDocumentsRequest)
+ },
+)
+_sym_db.RegisterMessage(BatchGetDocumentsRequest)
+
+BatchGetDocumentsResponse = _reflection.GeneratedProtocolMessageType(
+ "BatchGetDocumentsResponse",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _BATCHGETDOCUMENTSRESPONSE,
+ "__module__": "google.cloud.firestore_v1beta1.proto.firestore_pb2",
+ "__doc__": """The streamed response for [Firestore.BatchGetDocuments][google.firesto
+ re.v1beta1.Firestore.BatchGetDocuments].
+
+ Attributes:
+ result:
+ A single result. This can be empty if the server is just
+ returning a transaction.
+ found:
+ A document that was requested.
+ missing:
+ A document name that was requested but does not exist. In the
+ format: ``projects/{project_id}/databases/{database_id}/docume
+ nts/{document_path}``.
+ transaction:
+ The transaction that was started as part of this request. Will
+ only be set in the first response, and only if [BatchGetDocume
+ ntsRequest.new_transaction][google.firestore.v1beta1.BatchGetD
+ ocumentsRequest.new_transaction] was set in the request.
+ read_time:
+ The time at which the document was read. This may be
+ monotonically increasing; in this case, the previous documents in
+ the result stream are guaranteed not to have changed between
+ their read_time and this one.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.BatchGetDocumentsResponse)
+ },
+)
+_sym_db.RegisterMessage(BatchGetDocumentsResponse)
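+
+# Streaming sketch (editor's addition): each streamed response carries at most
+# one branch of the ``result`` oneof, so a consumer typically switches on
+# ``WhichOneof``. ``responses`` stands in for a hypothetical server stream:
+#
+#   for resp in responses:
+#       kind = resp.WhichOneof("result")
+#       if kind == "found":
+#           handle(resp.found)            # a Document message
+#       elif kind == "missing":
+#           handle_missing(resp.missing)  # a resource name string
+#       elif resp.transaction:
+#           txn = resp.transaction        # first response of a new_transaction run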
+
+BeginTransactionRequest = _reflection.GeneratedProtocolMessageType(
+ "BeginTransactionRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _BEGINTRANSACTIONREQUEST,
+ "__module__": "google.cloud.firestore_v1beta1.proto.firestore_pb2",
+ "__doc__": """The request for [Firestore.BeginTransaction][google.firestore.v1beta1.
+ Firestore.BeginTransaction].
+
+ Attributes:
+ database:
+ Required. The database name. In the format:
+ ``projects/{project_id}/databases/{database_id}``.
+ options:
+ The options for the transaction. Defaults to a read-write
+ transaction.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.BeginTransactionRequest)
+ },
+)
+_sym_db.RegisterMessage(BeginTransactionRequest)
+
+BeginTransactionResponse = _reflection.GeneratedProtocolMessageType(
+ "BeginTransactionResponse",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _BEGINTRANSACTIONRESPONSE,
+ "__module__": "google.cloud.firestore_v1beta1.proto.firestore_pb2",
+ "__doc__": """The response for [Firestore.BeginTransaction][google.firestore.v1beta1
+ .Firestore.BeginTransaction].
+
+ Attributes:
+ transaction:
+ The transaction that was started.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.BeginTransactionResponse)
+ },
+)
+_sym_db.RegisterMessage(BeginTransactionResponse)
+
+CommitRequest = _reflection.GeneratedProtocolMessageType(
+ "CommitRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _COMMITREQUEST,
+ "__module__": "google.cloud.firestore_v1beta1.proto.firestore_pb2",
+ "__doc__": """The request for
+ [Firestore.Commit][google.firestore.v1beta1.Firestore.Commit].
+
+ Attributes:
+ database:
+ Required. The database name. In the format:
+ ``projects/{project_id}/databases/{database_id}``.
+ writes:
+ The writes to apply. Always executed atomically and in order.
+ transaction:
+ If set, applies all writes in this transaction, and commits
+ it.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.CommitRequest)
+ },
+)
+_sym_db.RegisterMessage(CommitRequest)
+
+CommitResponse = _reflection.GeneratedProtocolMessageType(
+ "CommitResponse",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _COMMITRESPONSE,
+ "__module__": "google.cloud.firestore_v1beta1.proto.firestore_pb2",
+ "__doc__": """The response for
+ [Firestore.Commit][google.firestore.v1beta1.Firestore.Commit].
+
+ Attributes:
+ write_results:
+        The result of applying the writes. The i-th write result
+        corresponds to the i-th write in the request.
+ commit_time:
+ The time at which the commit occurred.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.CommitResponse)
+ },
+)
+_sym_db.RegisterMessage(CommitResponse)
+
+RollbackRequest = _reflection.GeneratedProtocolMessageType(
+ "RollbackRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _ROLLBACKREQUEST,
+ "__module__": "google.cloud.firestore_v1beta1.proto.firestore_pb2",
+ "__doc__": """The request for
+ [Firestore.Rollback][google.firestore.v1beta1.Firestore.Rollback].
+
+ Attributes:
+ database:
+ Required. The database name. In the format:
+ ``projects/{project_id}/databases/{database_id}``.
+ transaction:
+ Required. The transaction to roll back.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.RollbackRequest)
+ },
+)
+_sym_db.RegisterMessage(RollbackRequest)
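# --- Example: a minimal sketch (editor's illustration, not part of the
# generated module) of the BeginTransaction/Rollback round trip documented
# above. Commit takes the same ``transaction`` bytes plus ``writes``; here the
# transaction is rolled back to keep the sketch self-contained. Endpoint and
# names are placeholders.
import grpc
from google.cloud.firestore_v1beta1.proto import firestore_pb2, firestore_pb2_grpc

def begin_and_rollback(target="localhost:8080",
                       database="projects/my-project/databases/(default)"):
    stub = firestore_pb2_grpc.FirestoreStub(grpc.insecure_channel(target))
    txn = stub.BeginTransaction(
        firestore_pb2.BeginTransactionRequest(database=database)).transaction
    # A real client would issue reads with this transaction as the
    # consistency selector, then either commit it, e.g.
    # CommitRequest(database=database, transaction=txn, writes=[...]),
    # or, as here, discard it:
    stub.Rollback(firestore_pb2.RollbackRequest(database=database,
                                                transaction=txn))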
+
+RunQueryRequest = _reflection.GeneratedProtocolMessageType(
+ "RunQueryRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _RUNQUERYREQUEST,
+ "__module__": "google.cloud.firestore_v1beta1.proto.firestore_pb2",
+ "__doc__": """The request for
+ [Firestore.RunQuery][google.firestore.v1beta1.Firestore.RunQuery].
+
+ Attributes:
+ parent:
+ Required. The parent resource name. In the format:
+ ``projects/{project_id}/databases/{database_id}/documents`` or
+ ``projects/{project_id}/databases/{database_id}/documents/{doc
+ ument_path}``. For example: ``projects/my-
+ project/databases/my-database/documents`` or ``projects/my-
+ project/databases/my-database/documents/chatrooms/my-
+ chatroom``
+ query_type:
+ The query to run.
+ structured_query:
+ A structured query.
+ consistency_selector:
+ The consistency mode for this transaction. If not set,
+ defaults to strong consistency.
+ transaction:
+ Reads documents in a transaction.
+ new_transaction:
+ Starts a new transaction and reads the documents. Defaults to
+ a read-only transaction. The new transaction ID will be
+ returned as the first response in the stream.
+ read_time:
+ Reads documents as they were at the given time. This may not
+ be older than 60 seconds.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.RunQueryRequest)
+ },
+)
+_sym_db.RegisterMessage(RunQueryRequest)
+
+RunQueryResponse = _reflection.GeneratedProtocolMessageType(
+ "RunQueryResponse",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _RUNQUERYRESPONSE,
+ "__module__": "google.cloud.firestore_v1beta1.proto.firestore_pb2",
+ "__doc__": """The response for
+ [Firestore.RunQuery][google.firestore.v1beta1.Firestore.RunQuery].
+
+ Attributes:
+ transaction:
+ The transaction that was started as part of this request. Can
+ only be set in the first response, and only if [RunQueryReques
+ t.new_transaction][google.firestore.v1beta1.RunQueryRequest.ne
+ w_transaction] was set in the request. If set, no other fields
+ will be set in this response.
+ document:
+ A query result. Not set when reporting partial progress.
+ read_time:
+ The time at which the document was read. This may be
+ monotonically increasing; in this case, the previous documents
+ in the result stream are guaranteed not to have changed
+ between their ``read_time`` and this one. If the query
+ returns no results, a response with ``read_time`` and no
+ ``document`` will be sent, and this represents the time at
+ which the query was run.
+ skipped_results:
+ The number of results that have been skipped due to an offset
+ between the last response and the current response.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.RunQueryResponse)
+ },
+)
+_sym_db.RegisterMessage(RunQueryResponse)
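# --- Example: a minimal sketch (editor's illustration, not part of the
# generated module) of consuming the RunQuery stream documented above.
# Responses without a ``document`` report progress/read_time only. The query
# message is assumed to come from the sibling query_pb2 module, and ``from``
# being a Python keyword forces the **{"from": ...} spelling.
import grpc
from google.cloud.firestore_v1beta1.proto import firestore_pb2, firestore_pb2_grpc
from google.cloud.firestore_v1beta1.proto import query_pb2

def run_query(target="localhost:8080",
              parent="projects/my-project/databases/(default)/documents"):
    stub = firestore_pb2_grpc.FirestoreStub(grpc.insecure_channel(target))
    query = query_pb2.StructuredQuery(**{
        "from": [query_pb2.StructuredQuery.CollectionSelector(
            collection_id="chatrooms")],
    })
    request = firestore_pb2.RunQueryRequest(parent=parent,
                                            structured_query=query)
    for response in stub.RunQuery(request):
        if response.HasField("document"):
            print(response.document.name)
        else:
            print("progress only, read_time:", response.read_time)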
+
+WriteRequest = _reflection.GeneratedProtocolMessageType(
+ "WriteRequest",
+ (_message.Message,),
+ {
+ "LabelsEntry": _reflection.GeneratedProtocolMessageType(
+ "LabelsEntry",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _WRITEREQUEST_LABELSENTRY,
+ "__module__": "google.cloud.firestore_v1beta1.proto.firestore_pb2"
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.WriteRequest.LabelsEntry)
+ },
+ ),
+ "DESCRIPTOR": _WRITEREQUEST,
+ "__module__": "google.cloud.firestore_v1beta1.proto.firestore_pb2",
+ "__doc__": """The request for
+ [Firestore.Write][google.firestore.v1beta1.Firestore.Write]. The
+ first request creates a stream, or resumes an existing one from a
+ token. When creating a new stream, the server replies with a response
+ containing only an ID and a token, to use in the next request. When
+ resuming a stream, the server first streams any responses later than
+ the given token, then a response containing only an up-to-date token,
+ to use in the next request.
+
+ Attributes:
+ database:
+ Required. The database name. In the format:
+ ``projects/{project_id}/databases/{database_id}``. This is
+ only required in the first message.
+ stream_id:
+ The ID of the write stream to resume. This may only be set in
+ the first message. When left empty, a new write stream will be
+ created.
+ writes:
+ The writes to apply. Always executed atomically and in order.
+ This must be empty on the first request. This may be empty on
+ the last request. This must not be empty on all other
+ requests.
+ stream_token:
+ A stream token that was previously sent by the server. The
+ client should set this field to the token from the most recent
+ [WriteResponse][google.firestore.v1beta1.WriteResponse] it has
+ received. This acknowledges that the client has received
+ responses up to this token. After sending this token, earlier
+ tokens may not be used anymore. The server may close the
+        stream if there are too many unacknowledged responses. Leave
+        this field unset when creating a new stream. To resume a
+        stream at a specific point, set this field and the
+        ``stream_id`` field.
+ labels:
+ Labels associated with this write request.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.WriteRequest)
+ },
+)
+_sym_db.RegisterMessage(WriteRequest)
+_sym_db.RegisterMessage(WriteRequest.LabelsEntry)
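# --- Example: a minimal sketch (editor's illustration, not part of the
# generated module) of the Write-stream handshake described above. The first
# request names only the database; the server answers with a stream_id and
# stream_token, and each later request echoes the freshest token. A queue-fed
# generator is one common way to drive a gRPC bidirectional stream.
import queue
import grpc
from google.cloud.firestore_v1beta1.proto import firestore_pb2, firestore_pb2_grpc

def write_handshake(target="localhost:8080",
                    database="projects/my-project/databases/(default)"):
    stub = firestore_pb2_grpc.FirestoreStub(grpc.insecure_channel(target))
    outgoing = queue.SimpleQueue()

    def request_iter():
        while True:
            item = outgoing.get()
            if item is None:
                return  # exhausting the iterator half-closes the stream
            yield item

    outgoing.put(firestore_pb2.WriteRequest(database=database))  # handshake only
    responses = stub.Write(request_iter())
    first = next(responses)  # carries stream_id and the initial stream_token
    # Later requests acknowledge the newest token; real writes would populate
    # ``writes`` (write_pb2.Write messages). Empty is allowed on the final one.
    outgoing.put(firestore_pb2.WriteRequest(stream_token=first.stream_token))
    outgoing.put(None)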
+
+WriteResponse = _reflection.GeneratedProtocolMessageType(
+ "WriteResponse",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _WRITERESPONSE,
+ "__module__": "google.cloud.firestore_v1beta1.proto.firestore_pb2",
+ "__doc__": """The response for
+ [Firestore.Write][google.firestore.v1beta1.Firestore.Write].
+
+ Attributes:
+ stream_id:
+ The ID of the stream. Only set on the first message, when a
+ new stream was created.
+ stream_token:
+ A token that represents the position of this response in the
+ stream. This can be used by a client to resume the stream at
+ this point. This field is always set.
+ write_results:
+        The result of applying the writes. The i-th write result
+        corresponds to the i-th write in the request.
+ commit_time:
+ The time at which the commit occurred.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.WriteResponse)
+ },
+)
+_sym_db.RegisterMessage(WriteResponse)
+
+ListenRequest = _reflection.GeneratedProtocolMessageType(
+ "ListenRequest",
+ (_message.Message,),
+ {
+ "LabelsEntry": _reflection.GeneratedProtocolMessageType(
+ "LabelsEntry",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _LISTENREQUEST_LABELSENTRY,
+ "__module__": "google.cloud.firestore_v1beta1.proto.firestore_pb2"
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.ListenRequest.LabelsEntry)
+ },
+ ),
+ "DESCRIPTOR": _LISTENREQUEST,
+ "__module__": "google.cloud.firestore_v1beta1.proto.firestore_pb2",
+ "__doc__": """A request for
+ [Firestore.Listen][google.firestore.v1beta1.Firestore.Listen]
+
+ Attributes:
+ database:
+ Required. The database name. In the format:
+ ``projects/{project_id}/databases/{database_id}``.
+ target_change:
+ The supported target changes.
+ add_target:
+ A target to add to this stream.
+ remove_target:
+ The ID of a target to remove from this stream.
+ labels:
+ Labels associated with this target change.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.ListenRequest)
+ },
+)
+_sym_db.RegisterMessage(ListenRequest)
+_sym_db.RegisterMessage(ListenRequest.LabelsEntry)
+
+ListenResponse = _reflection.GeneratedProtocolMessageType(
+ "ListenResponse",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _LISTENRESPONSE,
+ "__module__": "google.cloud.firestore_v1beta1.proto.firestore_pb2",
+ "__doc__": """The response for
+ [Firestore.Listen][google.firestore.v1beta1.Firestore.Listen].
+
+ Attributes:
+ response_type:
+ The supported responses.
+ target_change:
+ Targets have changed.
+ document_change:
+ A [Document][google.firestore.v1beta1.Document] has changed.
+ document_delete:
+ A [Document][google.firestore.v1beta1.Document] has been
+ deleted.
+ document_remove:
+ A [Document][google.firestore.v1beta1.Document] has been
+ removed from a target (because it is no longer relevant to
+ that target).
+ filter:
+ A filter to apply to the set of documents previously returned
+ for the given target. Returned when documents may have been
+ removed from the given target, but the exact documents are
+ unknown.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.ListenResponse)
+ },
+)
+_sym_db.RegisterMessage(ListenResponse)
+
+Target = _reflection.GeneratedProtocolMessageType(
+ "Target",
+ (_message.Message,),
+ {
+ "DocumentsTarget": _reflection.GeneratedProtocolMessageType(
+ "DocumentsTarget",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _TARGET_DOCUMENTSTARGET,
+ "__module__": "google.cloud.firestore_v1beta1.proto.firestore_pb2",
+            "__doc__": """A target specified by a set of document names.
+
+ Attributes:
+ documents:
+ The names of the documents to retrieve. In the format: ``proje
+ cts/{project_id}/databases/{database_id}/documents/{document_p
+            ath}``. The request will fail if any of the documents is not a
+ child resource of the given ``database``. Duplicate names will
+ be elided.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.Target.DocumentsTarget)
+ },
+ ),
+ "QueryTarget": _reflection.GeneratedProtocolMessageType(
+ "QueryTarget",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _TARGET_QUERYTARGET,
+ "__module__": "google.cloud.firestore_v1beta1.proto.firestore_pb2",
+ "__doc__": """A target specified by a query.
+
+ Attributes:
+ parent:
+ The parent resource name. In the format:
+ ``projects/{project_id}/databases/{database_id}/documents`` or
+ ``projects/{project_id}/databases/{database_id}/documents/{doc
+ ument_path}``. For example: ``projects/my-
+ project/databases/my-database/documents`` or ``projects/my-
+ project/databases/my-database/documents/chatrooms/my-
+ chatroom``
+ query_type:
+ The query to run.
+ structured_query:
+ A structured query.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.Target.QueryTarget)
+ },
+ ),
+ "DESCRIPTOR": _TARGET,
+ "__module__": "google.cloud.firestore_v1beta1.proto.firestore_pb2",
+ "__doc__": """A specification of a set of documents to listen to.
+
+ Attributes:
+ target_type:
+ The type of target to listen to.
+ query:
+ A target specified by a query.
+ documents:
+ A target specified by a set of document names.
+ resume_type:
+ When to start listening. If not specified, all matching
+ Documents are returned before any subsequent changes.
+ resume_token:
+ A resume token from a prior
+ [TargetChange][google.firestore.v1beta1.TargetChange] for an
+ identical target. Using a resume token with a different
+ target is unsupported and may fail.
+ read_time:
+ Start listening after a specific ``read_time``. The client
+ must know the state of matching documents at this time.
+ target_id:
+        The target ID that identifies the target on the stream. Must
+        be a positive, non-zero number.
+ once:
+ If the target should be removed once it is current and
+ consistent.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.Target)
+ },
+)
+_sym_db.RegisterMessage(Target)
+_sym_db.RegisterMessage(Target.DocumentsTarget)
+_sym_db.RegisterMessage(Target.QueryTarget)
+
+TargetChange = _reflection.GeneratedProtocolMessageType(
+ "TargetChange",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _TARGETCHANGE,
+ "__module__": "google.cloud.firestore_v1beta1.proto.firestore_pb2",
+ "__doc__": """Targets being watched have changed.
+
+ Attributes:
+ target_change_type:
+ The type of change that occurred.
+ target_ids:
+ The target IDs of targets that have changed. If empty, the
+ change applies to all targets. The order of the target IDs is
+ not defined.
+ cause:
+ The error that resulted in this change, if applicable.
+ resume_token:
+ A token that can be used to resume the stream for the given
+ ``target_ids``, or all targets if ``target_ids`` is empty.
+ Not set on every target change.
+ read_time:
+ The consistent ``read_time`` for the given ``target_ids``
+ (omitted when the target_ids are not at a consistent
+ snapshot). The stream is guaranteed to send a ``read_time``
+ with ``target_ids`` empty whenever the entire stream reaches a
+ new consistent snapshot. ADD, CURRENT, and RESET messages are
+ guaranteed to (eventually) result in a new consistent snapshot
+ (while NO_CHANGE and REMOVE messages are not). For a given
+ stream, ``read_time`` is guaranteed to be monotonically
+ increasing.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.TargetChange)
+ },
+)
+_sym_db.RegisterMessage(TargetChange)
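# --- Example: a minimal sketch (editor's illustration, not part of the
# generated module) of adding a documents target and dispatching on the
# ``response_type`` oneof documented above, while remembering resume_token so
# an identical target could later be resumed. Names are placeholders; a real
# listener keeps the request iterator open instead of exhausting it.
import grpc
from google.cloud.firestore_v1beta1.proto import firestore_pb2, firestore_pb2_grpc

def listen_once(target="localhost:8080",
                database="projects/my-project/databases/(default)"):
    stub = firestore_pb2_grpc.FirestoreStub(grpc.insecure_channel(target))
    add = firestore_pb2.Target(
        documents=firestore_pb2.Target.DocumentsTarget(
            documents=[database + "/documents/Users/user1"]),
        target_id=1)
    resume_token = b""
    requests = iter([firestore_pb2.ListenRequest(database=database,
                                                 add_target=add)])
    for response in stub.Listen(requests):
        kind = response.WhichOneof("response_type")
        if kind == "target_change":
            resume_token = response.target_change.resume_token or resume_token
        elif kind == "document_change":
            print("changed:", response.document_change.document.name)
        elif kind in ("document_delete", "document_remove"):
            print("gone:", getattr(response, kind).document)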
+
+ListCollectionIdsRequest = _reflection.GeneratedProtocolMessageType(
+ "ListCollectionIdsRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _LISTCOLLECTIONIDSREQUEST,
+ "__module__": "google.cloud.firestore_v1beta1.proto.firestore_pb2",
+ "__doc__": """The request for [Firestore.ListCollectionIds][google.firestore.v1beta1
+ .Firestore.ListCollectionIds].
+
+ Attributes:
+ parent:
+ Required. The parent document. In the format: ``projects/{proj
+ ect_id}/databases/{database_id}/documents/{document_path}``.
+ For example: ``projects/my-project/databases/my-
+ database/documents/chatrooms/my-chatroom``
+ page_size:
+ The maximum number of results to return.
+ page_token:
+ A page token. Must be a value from [ListCollectionIdsResponse]
+ [google.firestore.v1beta1.ListCollectionIdsResponse].
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.ListCollectionIdsRequest)
+ },
+)
+_sym_db.RegisterMessage(ListCollectionIdsRequest)
+
+ListCollectionIdsResponse = _reflection.GeneratedProtocolMessageType(
+ "ListCollectionIdsResponse",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _LISTCOLLECTIONIDSRESPONSE,
+ "__module__": "google.cloud.firestore_v1beta1.proto.firestore_pb2",
+ "__doc__": """The response from [Firestore.ListCollectionIds][google.firestore.v1bet
+ a1.Firestore.ListCollectionIds].
+
+ Attributes:
+ collection_ids:
+ The collection ids.
+ next_page_token:
+ A page token that may be used to continue the list.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.ListCollectionIdsResponse)
+ },
+)
+_sym_db.RegisterMessage(ListCollectionIdsResponse)
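# --- Example: a minimal sketch (editor's illustration, not part of the
# generated module) of paging through ListCollectionIds with next_page_token,
# per the request/response docstrings above. Endpoint and parent are
# placeholders.
import grpc
from google.cloud.firestore_v1beta1.proto import firestore_pb2, firestore_pb2_grpc

def all_collection_ids(
        target="localhost:8080",
        parent="projects/my-project/databases/(default)/documents/Users/user1"):
    stub = firestore_pb2_grpc.FirestoreStub(grpc.insecure_channel(target))
    ids, token = [], ""
    while True:
        response = stub.ListCollectionIds(
            firestore_pb2.ListCollectionIdsRequest(
                parent=parent, page_size=100, page_token=token))
        ids.extend(response.collection_ids)
        token = response.next_page_token
        if not token:  # an empty token means the listing is complete
            return ids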
+
+
+DESCRIPTOR._options = None
+_GETDOCUMENTREQUEST.fields_by_name["name"]._options = None
+_LISTDOCUMENTSREQUEST.fields_by_name["parent"]._options = None
+_LISTDOCUMENTSREQUEST.fields_by_name["collection_id"]._options = None
+_CREATEDOCUMENTREQUEST.fields_by_name["parent"]._options = None
+_CREATEDOCUMENTREQUEST.fields_by_name["collection_id"]._options = None
+_CREATEDOCUMENTREQUEST.fields_by_name["document"]._options = None
+_UPDATEDOCUMENTREQUEST.fields_by_name["document"]._options = None
+_DELETEDOCUMENTREQUEST.fields_by_name["name"]._options = None
+_BATCHGETDOCUMENTSREQUEST.fields_by_name["database"]._options = None
+_BEGINTRANSACTIONREQUEST.fields_by_name["database"]._options = None
+_COMMITREQUEST.fields_by_name["database"]._options = None
+_ROLLBACKREQUEST.fields_by_name["database"]._options = None
+_ROLLBACKREQUEST.fields_by_name["transaction"]._options = None
+_RUNQUERYREQUEST.fields_by_name["parent"]._options = None
+_WRITEREQUEST_LABELSENTRY._options = None
+_WRITEREQUEST.fields_by_name["database"]._options = None
+_LISTENREQUEST_LABELSENTRY._options = None
+_LISTENREQUEST.fields_by_name["database"]._options = None
+_LISTCOLLECTIONIDSREQUEST.fields_by_name["parent"]._options = None
+
+_FIRESTORE = _descriptor.ServiceDescriptor(
+ name="Firestore",
+ full_name="google.firestore.v1beta1.Firestore",
+ file=DESCRIPTOR,
+ index=0,
+ serialized_options=b"\312A\030firestore.googleapis.com\322AXhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/datastore",
+ create_key=_descriptor._internal_create_key,
+ serialized_start=4999,
+ serialized_end=7714,
+ methods=[
+ _descriptor.MethodDescriptor(
+ name="GetDocument",
+ full_name="google.firestore.v1beta1.Firestore.GetDocument",
+ index=0,
+ containing_service=None,
+ input_type=_GETDOCUMENTREQUEST,
+ output_type=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT,
+ serialized_options=b"\202\323\344\223\0027\0225/v1beta1/{name=projects/*/databases/*/documents/*/**}",
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.MethodDescriptor(
+ name="ListDocuments",
+ full_name="google.firestore.v1beta1.Firestore.ListDocuments",
+ index=1,
+ containing_service=None,
+ input_type=_LISTDOCUMENTSREQUEST,
+ output_type=_LISTDOCUMENTSRESPONSE,
+ serialized_options=b"\202\323\344\223\002I\022G/v1beta1/{parent=projects/*/databases/*/documents/*/**}/{collection_id}",
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.MethodDescriptor(
+ name="CreateDocument",
+ full_name="google.firestore.v1beta1.Firestore.CreateDocument",
+ index=2,
+ containing_service=None,
+ input_type=_CREATEDOCUMENTREQUEST,
+ output_type=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT,
+ serialized_options=b'\202\323\344\223\002Q"E/v1beta1/{parent=projects/*/databases/*/documents/**}/{collection_id}:\010document',
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.MethodDescriptor(
+ name="UpdateDocument",
+ full_name="google.firestore.v1beta1.Firestore.UpdateDocument",
+ index=3,
+ containing_service=None,
+ input_type=_UPDATEDOCUMENTREQUEST,
+ output_type=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT,
+ serialized_options=b"\202\323\344\223\002J2>/v1beta1/{document.name=projects/*/databases/*/documents/*/**}:\010document\332A\024document,update_mask",
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.MethodDescriptor(
+ name="DeleteDocument",
+ full_name="google.firestore.v1beta1.Firestore.DeleteDocument",
+ index=4,
+ containing_service=None,
+ input_type=_DELETEDOCUMENTREQUEST,
+ output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
+ serialized_options=b"\202\323\344\223\0027*5/v1beta1/{name=projects/*/databases/*/documents/*/**}\332A\004name",
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.MethodDescriptor(
+ name="BatchGetDocuments",
+ full_name="google.firestore.v1beta1.Firestore.BatchGetDocuments",
+ index=5,
+ containing_service=None,
+ input_type=_BATCHGETDOCUMENTSREQUEST,
+ output_type=_BATCHGETDOCUMENTSRESPONSE,
+ serialized_options=b'\202\323\344\223\002B"=/v1beta1/{database=projects/*/databases/*}/documents:batchGet:\001*',
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.MethodDescriptor(
+ name="BeginTransaction",
+ full_name="google.firestore.v1beta1.Firestore.BeginTransaction",
+ index=6,
+ containing_service=None,
+ input_type=_BEGINTRANSACTIONREQUEST,
+ output_type=_BEGINTRANSACTIONRESPONSE,
+ serialized_options=b'\202\323\344\223\002J"E/v1beta1/{database=projects/*/databases/*}/documents:beginTransaction:\001*\332A\010database',
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.MethodDescriptor(
+ name="Commit",
+ full_name="google.firestore.v1beta1.Firestore.Commit",
+ index=7,
+ containing_service=None,
+ input_type=_COMMITREQUEST,
+ output_type=_COMMITRESPONSE,
+ serialized_options=b'\202\323\344\223\002@";/v1beta1/{database=projects/*/databases/*}/documents:commit:\001*\332A\017database,writes',
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.MethodDescriptor(
+ name="Rollback",
+ full_name="google.firestore.v1beta1.Firestore.Rollback",
+ index=8,
+ containing_service=None,
+ input_type=_ROLLBACKREQUEST,
+ output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
+ serialized_options=b'\202\323\344\223\002B"=/v1beta1/{database=projects/*/databases/*}/documents:rollback:\001*\332A\024database,transaction',
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.MethodDescriptor(
+ name="RunQuery",
+ full_name="google.firestore.v1beta1.Firestore.RunQuery",
+ index=9,
+ containing_service=None,
+ input_type=_RUNQUERYREQUEST,
+ output_type=_RUNQUERYRESPONSE,
+ serialized_options=b'\202\323\344\223\002\207\001";/v1beta1/{parent=projects/*/databases/*/documents}:runQuery:\001*ZE"@/v1beta1/{parent=projects/*/databases/*/documents/*/**}:runQuery:\001*',
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.MethodDescriptor(
+ name="Write",
+ full_name="google.firestore.v1beta1.Firestore.Write",
+ index=10,
+ containing_service=None,
+ input_type=_WRITEREQUEST,
+ output_type=_WRITERESPONSE,
+ serialized_options=b'\202\323\344\223\002?":/v1beta1/{database=projects/*/databases/*}/documents:write:\001*',
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.MethodDescriptor(
+ name="Listen",
+ full_name="google.firestore.v1beta1.Firestore.Listen",
+ index=11,
+ containing_service=None,
+ input_type=_LISTENREQUEST,
+ output_type=_LISTENRESPONSE,
+ serialized_options=b'\202\323\344\223\002@";/v1beta1/{database=projects/*/databases/*}/documents:listen:\001*',
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.MethodDescriptor(
+ name="ListCollectionIds",
+ full_name="google.firestore.v1beta1.Firestore.ListCollectionIds",
+ index=12,
+ containing_service=None,
+ input_type=_LISTCOLLECTIONIDSREQUEST,
+ output_type=_LISTCOLLECTIONIDSRESPONSE,
+ serialized_options=b'\202\323\344\223\002\231\001"D/v1beta1/{parent=projects/*/databases/*/documents}:listCollectionIds:\001*ZN"I/v1beta1/{parent=projects/*/databases/*/documents/*/**}:listCollectionIds:\001*\332A\006parent',
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+)
+_sym_db.RegisterServiceDescriptor(_FIRESTORE)
+
+DESCRIPTOR.services_by_name["Firestore"] = _FIRESTORE
+
+# @@protoc_insertion_point(module_scope)
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/firestore_pb2_grpc.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/firestore_pb2_grpc.py
new file mode 100644
index 000000000..88d0a3cf3
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/firestore_pb2_grpc.py
@@ -0,0 +1,669 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
+
+from google.cloud.firestore_v1beta1.proto import (
+ document_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2,
+)
+from google.cloud.firestore_v1beta1.proto import (
+ firestore_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2,
+)
+from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
+
+
+class FirestoreStub(object):
+ """Specification of the Firestore API.
+
+ The Cloud Firestore service.
+
+ This service exposes several types of comparable timestamps:
+
+ * `create_time` - The time at which a document was created. Changes only
+ when a document is deleted, then re-created. Increases in a strict
+ monotonic fashion.
+ * `update_time` - The time at which a document was last updated. Changes
+ every time a document is modified. Does not change when a write results
+ in no modifications. Increases in a strict monotonic fashion.
+ * `read_time` - The time at which a particular state was observed. Used
+ to denote a consistent snapshot of the database or the time at which a
+ Document was observed to not exist.
+ * `commit_time` - The time at which the writes in a transaction were
+ committed. Any read with an equal or greater `read_time` is guaranteed
+ to see the effects of the transaction.
+ """
+
+ def __init__(self, channel):
+ """Constructor.
+
+ Args:
+ channel: A grpc.Channel.
+ """
+ self.GetDocument = channel.unary_unary(
+ "/google.firestore.v1beta1.Firestore/GetDocument",
+ request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.GetDocumentRequest.SerializeToString,
+ response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.Document.FromString,
+ )
+ self.ListDocuments = channel.unary_unary(
+ "/google.firestore.v1beta1.Firestore/ListDocuments",
+ request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListDocumentsRequest.SerializeToString,
+ response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListDocumentsResponse.FromString,
+ )
+ self.CreateDocument = channel.unary_unary(
+ "/google.firestore.v1beta1.Firestore/CreateDocument",
+ request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.CreateDocumentRequest.SerializeToString,
+ response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.Document.FromString,
+ )
+ self.UpdateDocument = channel.unary_unary(
+ "/google.firestore.v1beta1.Firestore/UpdateDocument",
+ request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.UpdateDocumentRequest.SerializeToString,
+ response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.Document.FromString,
+ )
+ self.DeleteDocument = channel.unary_unary(
+ "/google.firestore.v1beta1.Firestore/DeleteDocument",
+ request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.DeleteDocumentRequest.SerializeToString,
+ response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
+ )
+ self.BatchGetDocuments = channel.unary_stream(
+ "/google.firestore.v1beta1.Firestore/BatchGetDocuments",
+ request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.BatchGetDocumentsRequest.SerializeToString,
+ response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.BatchGetDocumentsResponse.FromString,
+ )
+ self.BeginTransaction = channel.unary_unary(
+ "/google.firestore.v1beta1.Firestore/BeginTransaction",
+ request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.BeginTransactionRequest.SerializeToString,
+ response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.BeginTransactionResponse.FromString,
+ )
+ self.Commit = channel.unary_unary(
+ "/google.firestore.v1beta1.Firestore/Commit",
+ request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.CommitRequest.SerializeToString,
+ response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.CommitResponse.FromString,
+ )
+ self.Rollback = channel.unary_unary(
+ "/google.firestore.v1beta1.Firestore/Rollback",
+ request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.RollbackRequest.SerializeToString,
+ response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
+ )
+ self.RunQuery = channel.unary_stream(
+ "/google.firestore.v1beta1.Firestore/RunQuery",
+ request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.RunQueryRequest.SerializeToString,
+ response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.RunQueryResponse.FromString,
+ )
+ self.Write = channel.stream_stream(
+ "/google.firestore.v1beta1.Firestore/Write",
+ request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.WriteRequest.SerializeToString,
+ response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.WriteResponse.FromString,
+ )
+ self.Listen = channel.stream_stream(
+ "/google.firestore.v1beta1.Firestore/Listen",
+ request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListenRequest.SerializeToString,
+ response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListenResponse.FromString,
+ )
+ self.ListCollectionIds = channel.unary_unary(
+ "/google.firestore.v1beta1.Firestore/ListCollectionIds",
+ request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListCollectionIdsRequest.SerializeToString,
+ response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListCollectionIdsResponse.FromString,
+ )
+
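# --- Example: a minimal sketch (editor's illustration, not part of the
# generated module) of constructing the stub above over a channel and issuing
# a unary call. An insecure channel suits a local emulator; production
# traffic needs grpc.secure_channel with real credentials. The document name
# is a placeholder.
import grpc
from google.cloud.firestore_v1beta1.proto import firestore_pb2, firestore_pb2_grpc

def get_document(name, target="localhost:8080"):
    stub = firestore_pb2_grpc.FirestoreStub(grpc.insecure_channel(target))
    return stub.GetDocument(firestore_pb2.GetDocumentRequest(name=name))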
+
+class FirestoreServicer(object):
+ """Specification of the Firestore API.
+
+ The Cloud Firestore service.
+
+ This service exposes several types of comparable timestamps:
+
+ * `create_time` - The time at which a document was created. Changes only
+ when a document is deleted, then re-created. Increases in a strict
+ monotonic fashion.
+ * `update_time` - The time at which a document was last updated. Changes
+ every time a document is modified. Does not change when a write results
+ in no modifications. Increases in a strict monotonic fashion.
+ * `read_time` - The time at which a particular state was observed. Used
+ to denote a consistent snapshot of the database or the time at which a
+ Document was observed to not exist.
+ * `commit_time` - The time at which the writes in a transaction were
+ committed. Any read with an equal or greater `read_time` is guaranteed
+ to see the effects of the transaction.
+ """
+
+ def GetDocument(self, request, context):
+ """Gets a single document.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def ListDocuments(self, request, context):
+ """Lists documents.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def CreateDocument(self, request, context):
+ """Creates a new document.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def UpdateDocument(self, request, context):
+ """Updates or inserts a document.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def DeleteDocument(self, request, context):
+ """Deletes a document.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def BatchGetDocuments(self, request, context):
+ """Gets multiple documents.
+
+ Documents returned by this method are not guaranteed to be returned in the
+ same order that they were requested.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def BeginTransaction(self, request, context):
+ """Starts a new transaction.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def Commit(self, request, context):
+ """Commits a transaction, while optionally updating documents.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def Rollback(self, request, context):
+ """Rolls back a transaction.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def RunQuery(self, request, context):
+ """Runs a query.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def Write(self, request_iterator, context):
+ """Streams batches of document updates and deletes, in order.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def Listen(self, request_iterator, context):
+ """Listens to changes.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def ListCollectionIds(self, request, context):
+ """Lists all the collection IDs underneath a document.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+
+def add_FirestoreServicer_to_server(servicer, server):
+ rpc_method_handlers = {
+ "GetDocument": grpc.unary_unary_rpc_method_handler(
+ servicer.GetDocument,
+ request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.GetDocumentRequest.FromString,
+ response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.Document.SerializeToString,
+ ),
+ "ListDocuments": grpc.unary_unary_rpc_method_handler(
+ servicer.ListDocuments,
+ request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListDocumentsRequest.FromString,
+ response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListDocumentsResponse.SerializeToString,
+ ),
+ "CreateDocument": grpc.unary_unary_rpc_method_handler(
+ servicer.CreateDocument,
+ request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.CreateDocumentRequest.FromString,
+ response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.Document.SerializeToString,
+ ),
+ "UpdateDocument": grpc.unary_unary_rpc_method_handler(
+ servicer.UpdateDocument,
+ request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.UpdateDocumentRequest.FromString,
+ response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.Document.SerializeToString,
+ ),
+ "DeleteDocument": grpc.unary_unary_rpc_method_handler(
+ servicer.DeleteDocument,
+ request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.DeleteDocumentRequest.FromString,
+ response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
+ ),
+ "BatchGetDocuments": grpc.unary_stream_rpc_method_handler(
+ servicer.BatchGetDocuments,
+ request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.BatchGetDocumentsRequest.FromString,
+ response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.BatchGetDocumentsResponse.SerializeToString,
+ ),
+ "BeginTransaction": grpc.unary_unary_rpc_method_handler(
+ servicer.BeginTransaction,
+ request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.BeginTransactionRequest.FromString,
+ response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.BeginTransactionResponse.SerializeToString,
+ ),
+ "Commit": grpc.unary_unary_rpc_method_handler(
+ servicer.Commit,
+ request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.CommitRequest.FromString,
+ response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.CommitResponse.SerializeToString,
+ ),
+ "Rollback": grpc.unary_unary_rpc_method_handler(
+ servicer.Rollback,
+ request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.RollbackRequest.FromString,
+ response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
+ ),
+ "RunQuery": grpc.unary_stream_rpc_method_handler(
+ servicer.RunQuery,
+ request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.RunQueryRequest.FromString,
+ response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.RunQueryResponse.SerializeToString,
+ ),
+ "Write": grpc.stream_stream_rpc_method_handler(
+ servicer.Write,
+ request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.WriteRequest.FromString,
+ response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.WriteResponse.SerializeToString,
+ ),
+ "Listen": grpc.stream_stream_rpc_method_handler(
+ servicer.Listen,
+ request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListenRequest.FromString,
+ response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListenResponse.SerializeToString,
+ ),
+ "ListCollectionIds": grpc.unary_unary_rpc_method_handler(
+ servicer.ListCollectionIds,
+ request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListCollectionIdsRequest.FromString,
+ response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListCollectionIdsResponse.SerializeToString,
+ ),
+ }
+ generic_handler = grpc.method_handlers_generic_handler(
+ "google.firestore.v1beta1.Firestore", rpc_method_handlers
+ )
+ server.add_generic_rpc_handlers((generic_handler,))
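# --- Example: a minimal sketch (editor's illustration, not part of the
# generated module) of wiring a FirestoreServicer subclass into a server,
# e.g. as an in-process fake for local tests. Methods left unoverridden keep
# the UNIMPLEMENTED behavior defined above; the canned response here is
# purely hypothetical.
from concurrent import futures
import grpc
from google.cloud.firestore_v1beta1.proto import firestore_pb2, firestore_pb2_grpc

class FakeFirestore(firestore_pb2_grpc.FirestoreServicer):
    def ListCollectionIds(self, request, context):
        return firestore_pb2.ListCollectionIdsResponse(collection_ids=["Users"])

def serve(address="[::]:8080"):
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
    firestore_pb2_grpc.add_FirestoreServicer_to_server(FakeFirestore(), server)
    server.add_insecure_port(address)
    server.start()
    return server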
+
+
+# This class is part of an EXPERIMENTAL API.
+class Firestore(object):
+ """Specification of the Firestore API.
+
+ The Cloud Firestore service.
+
+ This service exposes several types of comparable timestamps:
+
+ * `create_time` - The time at which a document was created. Changes only
+ when a document is deleted, then re-created. Increases in a strict
+ monotonic fashion.
+ * `update_time` - The time at which a document was last updated. Changes
+ every time a document is modified. Does not change when a write results
+ in no modifications. Increases in a strict monotonic fashion.
+ * `read_time` - The time at which a particular state was observed. Used
+ to denote a consistent snapshot of the database or the time at which a
+ Document was observed to not exist.
+ * `commit_time` - The time at which the writes in a transaction were
+ committed. Any read with an equal or greater `read_time` is guaranteed
+ to see the effects of the transaction.
+ """
+
+ @staticmethod
+ def GetDocument(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.firestore.v1beta1.Firestore/GetDocument",
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.GetDocumentRequest.SerializeToString,
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.Document.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def ListDocuments(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.firestore.v1beta1.Firestore/ListDocuments",
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListDocumentsRequest.SerializeToString,
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListDocumentsResponse.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def CreateDocument(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.firestore.v1beta1.Firestore/CreateDocument",
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.CreateDocumentRequest.SerializeToString,
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.Document.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def UpdateDocument(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.firestore.v1beta1.Firestore/UpdateDocument",
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.UpdateDocumentRequest.SerializeToString,
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.Document.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def DeleteDocument(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.firestore.v1beta1.Firestore/DeleteDocument",
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.DeleteDocumentRequest.SerializeToString,
+ google_dot_protobuf_dot_empty__pb2.Empty.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def BatchGetDocuments(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_stream(
+ request,
+ target,
+ "/google.firestore.v1beta1.Firestore/BatchGetDocuments",
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.BatchGetDocumentsRequest.SerializeToString,
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.BatchGetDocumentsResponse.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def BeginTransaction(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.firestore.v1beta1.Firestore/BeginTransaction",
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.BeginTransactionRequest.SerializeToString,
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.BeginTransactionResponse.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def Commit(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.firestore.v1beta1.Firestore/Commit",
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.CommitRequest.SerializeToString,
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.CommitResponse.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def Rollback(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.firestore.v1beta1.Firestore/Rollback",
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.RollbackRequest.SerializeToString,
+ google_dot_protobuf_dot_empty__pb2.Empty.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def RunQuery(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_stream(
+ request,
+ target,
+ "/google.firestore.v1beta1.Firestore/RunQuery",
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.RunQueryRequest.SerializeToString,
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.RunQueryResponse.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def Write(
+ request_iterator,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.stream_stream(
+ request_iterator,
+ target,
+ "/google.firestore.v1beta1.Firestore/Write",
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.WriteRequest.SerializeToString,
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.WriteResponse.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def Listen(
+ request_iterator,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.stream_stream(
+ request_iterator,
+ target,
+ "/google.firestore.v1beta1.Firestore/Listen",
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListenRequest.SerializeToString,
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListenResponse.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def ListCollectionIds(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.firestore.v1beta1.Firestore/ListCollectionIds",
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListCollectionIdsRequest.SerializeToString,
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListCollectionIdsResponse.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/index.proto b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/index.proto
new file mode 100644
index 000000000..c5784e0ea
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/index.proto
@@ -0,0 +1,102 @@
+// Copyright 2018 Google LLC.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+syntax = "proto3";
+
+package google.firestore.admin.v1beta1;
+
+import "google/api/annotations.proto";
+
+option csharp_namespace = "Google.Cloud.Firestore.Admin.V1Beta1";
+option go_package = "google.golang.org/genproto/googleapis/firestore/admin/v1beta1;admin";
+option java_multiple_files = true;
+option java_outer_classname = "IndexProto";
+option java_package = "com.google.firestore.admin.v1beta1";
+option objc_class_prefix = "GCFS";
+
+
+// A field of an index.
+message IndexField {
+ // The mode determines how a field is indexed.
+ enum Mode {
+ // The mode is unspecified.
+ MODE_UNSPECIFIED = 0;
+
+ // The field's values are indexed so as to support sequencing in
+ // ascending order and also query by <, >, <=, >=, and =.
+ ASCENDING = 2;
+
+ // The field's values are indexed so as to support sequencing in
+ // descending order and also query by <, >, <=, >=, and =.
+ DESCENDING = 3;
+
+ // The field's array values are indexed so as to support membership using
+ // ARRAY_CONTAINS queries.
+ ARRAY_CONTAINS = 4;
+ }
+
+ // The path of the field. Must match the field path specification described
+ // by [google.firestore.v1beta1.Document.fields][fields].
+ // Special field path `__name__` may be used by itself or at the end of a
+ // path. `__type__` may be used only at the end of path.
+ string field_path = 1;
+
+ // The field's mode.
+ Mode mode = 2;
+}
+
+// An index definition.
+message Index {
+ // The state of an index. During index creation, an index will be in the
+ // `CREATING` state. If the index is created successfully, it will transition
+ // to the `READY` state. If the index is not able to be created, it will
+ // transition to the `ERROR` state.
+ enum State {
+ // The state is unspecified.
+ STATE_UNSPECIFIED = 0;
+
+ // The index is being created.
+ // There is an active long-running operation for the index.
+ // The index is updated when writing a document.
+ // Some index data may exist.
+ CREATING = 3;
+
+ // The index is ready to be used.
+ // The index is updated when writing a document.
+ // The index is fully populated from all stored documents it applies to.
+ READY = 2;
+
+ // The index was being created, but something went wrong.
+ // There is no active long-running operation for the index,
+ // and the most recently finished long-running operation failed.
+ // The index is not updated when writing a document.
+ // Some index data may exist.
+ ERROR = 5;
+ }
+
+ // The resource name of the index.
+ // Output only.
+ string name = 1;
+
+ // The collection ID to which this index applies. Required.
+ string collection_id = 2;
+
+ // The fields to index.
+ repeated IndexField fields = 3;
+
+ // The state of the index.
+ // Output only.
+ State state = 6;
+}
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/location.proto b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/location.proto
new file mode 100644
index 000000000..db7e8544b
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/location.proto
@@ -0,0 +1,34 @@
+// Copyright 2018 Google LLC.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+syntax = "proto3";
+
+package google.firestore.admin.v1beta1;
+
+import "google/api/annotations.proto";
+import "google/type/latlng.proto";
+
+option csharp_namespace = "Google.Cloud.Firestore.Admin.V1Beta1";
+option go_package = "google.golang.org/genproto/googleapis/firestore/admin/v1beta1;admin";
+option java_multiple_files = true;
+option java_outer_classname = "LocationProto";
+option java_package = "com.google.firestore.admin.v1beta1";
+option objc_class_prefix = "GCFS";
+
+
+// The metadata message for [google.cloud.location.Location.metadata][google.cloud.location.Location.metadata].
+message LocationMetadata {
+
+}
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/operation.proto b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/operation.proto
new file mode 100644
index 000000000..c2a1b001e
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/operation.proto
@@ -0,0 +1,203 @@
+// Copyright 2018 Google LLC.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+syntax = "proto3";
+
+package google.firestore.admin.v1beta2;
+
+import "google/api/annotations.proto";
+import "google/firestore/admin/v1beta2/index.proto";
+import "google/protobuf/timestamp.proto";
+
+option csharp_namespace = "Google.Cloud.Firestore.Admin.V1Beta2";
+option go_package = "google.golang.org/genproto/googleapis/firestore/admin/v1beta2;admin";
+option java_multiple_files = true;
+option java_outer_classname = "OperationProto";
+option java_package = "com.google.firestore.admin.v1beta2";
+option objc_class_prefix = "GCFS";
+
+
+// Metadata for [google.longrunning.Operation][google.longrunning.Operation] results from
+// [FirestoreAdmin.CreateIndex][google.firestore.admin.v1beta2.FirestoreAdmin.CreateIndex].
+message IndexOperationMetadata {
+ // The time this operation started.
+ google.protobuf.Timestamp start_time = 1;
+
+  // The time this operation completed. Will be unset if the operation is
+  // still in progress.
+ google.protobuf.Timestamp end_time = 2;
+
+ // The index resource that this operation is acting on. For example:
+ // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}`
+ string index = 3;
+
+ // The state of the operation.
+ OperationState state = 4;
+
+ // The progress, in documents, of this operation.
+ Progress progress_documents = 5;
+
+ // The progress, in bytes, of this operation.
+ Progress progress_bytes = 6;
+}
+
+// Metadata for [google.longrunning.Operation][google.longrunning.Operation] results from
+// [FirestoreAdmin.UpdateField][google.firestore.admin.v1beta2.FirestoreAdmin.UpdateField].
+message FieldOperationMetadata {
+ // Information about an index configuration change.
+ message IndexConfigDelta {
+ // Specifies how the index is changing.
+ enum ChangeType {
+ // The type of change is not specified or known.
+ CHANGE_TYPE_UNSPECIFIED = 0;
+
+ // The single field index is being added.
+ ADD = 1;
+
+ // The single field index is being removed.
+ REMOVE = 2;
+ }
+
+ // Specifies how the index is changing.
+ ChangeType change_type = 1;
+
+ // The index being changed.
+ Index index = 2;
+ }
+
+ // The time this operation started.
+ google.protobuf.Timestamp start_time = 1;
+
+  // The time this operation completed. Will be unset if the operation is
+  // still in progress.
+ google.protobuf.Timestamp end_time = 2;
+
+ // The field resource that this operation is acting on. For example:
+ // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_path}`
+ string field = 3;
+
+ // A list of [IndexConfigDelta][google.firestore.admin.v1beta2.FieldOperationMetadata.IndexConfigDelta], which describe the intent of this
+ // operation.
+ repeated IndexConfigDelta index_config_deltas = 4;
+
+ // The state of the operation.
+ OperationState state = 5;
+
+ // The progress, in documents, of this operation.
+ Progress document_progress = 6;
+
+ // The progress, in bytes, of this operation.
+ Progress bytes_progress = 7;
+}
+
+// Metadata for [google.longrunning.Operation][google.longrunning.Operation] results from
+// [FirestoreAdmin.ExportDocuments][google.firestore.admin.v1beta2.FirestoreAdmin.ExportDocuments].
+message ExportDocumentsMetadata {
+ // The time this operation started.
+ google.protobuf.Timestamp start_time = 1;
+
+ // The time this operation completed. Will be unset if the operation is
+ // still in progress.
+ google.protobuf.Timestamp end_time = 2;
+
+ // The state of the export operation.
+ OperationState operation_state = 3;
+
+ // The progress, in documents, of this operation.
+ Progress progress_documents = 4;
+
+ // The progress, in bytes, of this operation.
+ Progress progress_bytes = 5;
+
+ // Which collection ids are being exported.
+ repeated string collection_ids = 6;
+
+ // Where the entities are being exported to.
+ string output_uri_prefix = 7;
+}
+
+// Metadata for [google.longrunning.Operation][google.longrunning.Operation] results from
+// [FirestoreAdmin.ImportDocuments][google.firestore.admin.v1beta2.FirestoreAdmin.ImportDocuments].
+message ImportDocumentsMetadata {
+ // The time this operation started.
+ google.protobuf.Timestamp start_time = 1;
+
+ // The time this operation completed. Will be unset if the operation is
+ // still in progress.
+ google.protobuf.Timestamp end_time = 2;
+
+ // The state of the import operation.
+ OperationState operation_state = 3;
+
+ // The progress, in documents, of this operation.
+ Progress progress_documents = 4;
+
+ // The progress, in bytes, of this operation.
+ Progress progress_bytes = 5;
+
+ // Which collection ids are being imported.
+ repeated string collection_ids = 6;
+
+ // The location of the documents being imported.
+ string input_uri_prefix = 7;
+}
+
+// Returned in the [google.longrunning.Operation][google.longrunning.Operation] response field.
+message ExportDocumentsResponse {
+ // Location of the output files. This can be used to begin an import
+ // into Cloud Firestore (this project or another project) after the operation
+ // completes successfully.
+ string output_uri_prefix = 1;
+}
+
+// Describes the progress of the operation.
+// Unit of work is generic and must be interpreted based on where [Progress][google.firestore.admin.v1beta2.Progress]
+// is used.
+message Progress {
+ // The amount of work estimated.
+ int64 estimated_work = 1;
+
+ // The amount of work completed.
+ int64 completed_work = 2;
+}
+
+// Describes the state of the operation.
+enum OperationState {
+ // Unspecified.
+ OPERATION_STATE_UNSPECIFIED = 0;
+
+ // Request is being prepared for processing.
+ INITIALIZING = 1;
+
+ // Request is actively being processed.
+ PROCESSING = 2;
+
+ // Request is in the process of being cancelled after the user called
+ // google.longrunning.Operations.CancelOperation on the operation.
+ CANCELLING = 3;
+
+ // Request has been processed and is in its finalization stage.
+ FINALIZING = 4;
+
+ // Request has completed successfully.
+ SUCCESSFUL = 5;
+
+ // Request has finished being processed, but encountered an error.
+ FAILED = 6;
+
+ // Request has finished being cancelled after the user called
+ // google.longrunning.Operations.CancelOperation.
+ CANCELLED = 7;
+}
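The Progress and OperationState messages above are what a long-running admin operation reports while it runs. Below is a minimal sketch of reading that metadata, assuming a matching operation_pb2 module generated from this proto (the module name and import path are assumptions; this diff only adds the .proto source):

    from google.cloud.firestore_v1beta1.proto import operation_pb2  # assumed generated module

    # States after which the operation will make no further progress.
    TERMINAL_STATES = {
        operation_pb2.SUCCESSFUL,
        operation_pb2.FAILED,
        operation_pb2.CANCELLED,
    }

    def describe_export(md: "operation_pb2.ExportDocumentsMetadata") -> str:
        # Progress units are generic; ExportDocumentsMetadata reports documents.
        done = md.progress_documents.completed_work
        total = md.progress_documents.estimated_work
        pct = 100.0 * done / total if total else 0.0
        state = operation_pb2.OperationState.Name(md.operation_state)
        suffix = "terminal" if md.operation_state in TERMINAL_STATES else "running"
        return "%s: %.1f%% (%s)" % (state, pct, suffix)

Because Progress is unit-agnostic, the same helper works for byte progress by reading progress_bytes instead of progress_documents.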
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/query.proto b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/query.proto
new file mode 100644
index 000000000..5f9c3ab93
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/query.proto
@@ -0,0 +1,244 @@
+// Copyright 2019 Google LLC.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+syntax = "proto3";
+
+package google.firestore.v1beta1;
+
+import "google/firestore/v1beta1/document.proto";
+import "google/protobuf/wrappers.proto";
+import "google/api/annotations.proto";
+
+option csharp_namespace = "Google.Cloud.Firestore.V1Beta1";
+option go_package = "google.golang.org/genproto/googleapis/firestore/v1beta1;firestore";
+option java_multiple_files = true;
+option java_outer_classname = "QueryProto";
+option java_package = "com.google.firestore.v1beta1";
+option objc_class_prefix = "GCFS";
+option php_namespace = "Google\\Cloud\\Firestore\\V1beta1";
+option ruby_package = "Google::Cloud::Firestore::V1beta1";
+
+// A Firestore query.
+message StructuredQuery {
+ // A selection of a collection, such as `messages as m1`.
+ message CollectionSelector {
+ // The collection ID.
+ // When set, selects only collections with this ID.
+ string collection_id = 2;
+
+ // When false, selects only collections that are immediate children of
+ // the `parent` specified in the containing `RunQueryRequest`.
+ // When true, selects all descendant collections.
+ bool all_descendants = 3;
+ }
+
+ // A filter.
+ message Filter {
+ // The type of filter.
+ oneof filter_type {
+ // A composite filter.
+ CompositeFilter composite_filter = 1;
+
+ // A filter on a document field.
+ FieldFilter field_filter = 2;
+
+ // A filter that takes exactly one argument.
+ UnaryFilter unary_filter = 3;
+ }
+ }
+
+ // A filter that merges multiple other filters using the given operator.
+ message CompositeFilter {
+ // A composite filter operator.
+ enum Operator {
+ // Unspecified. This value must not be used.
+ OPERATOR_UNSPECIFIED = 0;
+
+ // The results are required to satisfy each of the combined filters.
+ AND = 1;
+ }
+
+ // The operator for combining multiple filters.
+ Operator op = 1;
+
+ // The list of filters to combine.
+ // Must contain at least one filter.
+ repeated Filter filters = 2;
+ }
+
+ // A filter on a specific field.
+ message FieldFilter {
+ // A field filter operator.
+ enum Operator {
+ // Unspecified. This value must not be used.
+ OPERATOR_UNSPECIFIED = 0;
+
+ // Less than. Requires that the field come first in `order_by`.
+ LESS_THAN = 1;
+
+ // Less than or equal. Requires that the field come first in `order_by`.
+ LESS_THAN_OR_EQUAL = 2;
+
+ // Greater than. Requires that the field come first in `order_by`.
+ GREATER_THAN = 3;
+
+ // Greater than or equal. Requires that the field come first in
+ // `order_by`.
+ GREATER_THAN_OR_EQUAL = 4;
+
+ // Equal.
+ EQUAL = 5;
+
+ // Contains. Requires that the field is an array.
+ ARRAY_CONTAINS = 7;
+
+ // In. Requires that `value` is a non-empty ArrayValue with at most 10
+ // values.
+ IN = 8;
+
+ // Contains any. Requires that the field is an array and
+ // `value` is a non-empty ArrayValue with at most 10 values.
+ ARRAY_CONTAINS_ANY = 9;
+ }
+
+ // The field to filter by.
+ FieldReference field = 1;
+
+ // The operator to filter by.
+ Operator op = 2;
+
+ // The value to compare to.
+ Value value = 3;
+ }
+
+ // A filter with a single operand.
+ message UnaryFilter {
+ // A unary operator.
+ enum Operator {
+ // Unspecified. This value must not be used.
+ OPERATOR_UNSPECIFIED = 0;
+
+ // Test if a field is equal to NaN.
+ IS_NAN = 2;
+
+ // Test if an expression evaluates to Null.
+ IS_NULL = 3;
+ }
+
+ // The unary operator to apply.
+ Operator op = 1;
+
+ // The argument to the filter.
+ oneof operand_type {
+ // The field to which to apply the operator.
+ FieldReference field = 2;
+ }
+ }
+
+ // An order on a field.
+ message Order {
+ // The field to order by.
+ FieldReference field = 1;
+
+ // The direction to order by. Defaults to `ASCENDING`.
+ Direction direction = 2;
+ }
+
+ // A reference to a field, such as `max(messages.time) as max_time`.
+ message FieldReference {
+ string field_path = 2;
+ }
+
+ // The projection of the document's fields to return.
+ message Projection {
+ // The fields to return.
+ //
+ // If empty, all fields are returned. To only return the name
+ // of the document, use `['__name__']`.
+ repeated FieldReference fields = 2;
+ }
+
+ // A sort direction.
+ enum Direction {
+ // Unspecified.
+ DIRECTION_UNSPECIFIED = 0;
+
+ // Ascending.
+ ASCENDING = 1;
+
+ // Descending.
+ DESCENDING = 2;
+ }
+
+ // The projection to return.
+ Projection select = 1;
+
+ // The collections to query.
+ repeated CollectionSelector from = 2;
+
+ // The filter to apply.
+ Filter where = 3;
+
+ // The order to apply to the query results.
+ //
+ // Firestore guarantees a stable ordering through the following rules:
+ //
+ // * Any field required to appear in `order_by`, that is not already
+ // specified in `order_by`, is appended to the order in field name order
+ // by default.
+ // * If an order on `__name__` is not specified, it is appended by default.
+ //
+ // Fields are appended with the same sort direction as the last order
+ // specified, or 'ASCENDING' if no order was specified. For example:
+ //
+ // * `SELECT * FROM Foo ORDER BY A` becomes
+ // `SELECT * FROM Foo ORDER BY A, __name__`
+ // * `SELECT * FROM Foo ORDER BY A DESC` becomes
+ // `SELECT * FROM Foo ORDER BY A DESC, __name__ DESC`
+ // * `SELECT * FROM Foo WHERE A > 1` becomes
+ // `SELECT * FROM Foo WHERE A > 1 ORDER BY A, __name__`
+ repeated Order order_by = 4;
+
+ // A starting point for the query results.
+ Cursor start_at = 7;
+
+ // An end point for the query results.
+ Cursor end_at = 8;
+
+ // The number of results to skip.
+ //
+ // Applies before limit, but after all other constraints. Must be >= 0 if
+ // specified.
+ int32 offset = 6;
+
+ // The maximum number of results to return.
+ //
+ // Applies after all other constraints.
+ // Must be >= 0 if specified.
+ google.protobuf.Int32Value limit = 5;
+}
+
+// A position in a query result set.
+message Cursor {
+ // The values that represent a position, in the order they appear in
+ // the order by clause of a query.
+ //
+ // Can contain fewer values than specified in the order by clause.
+ repeated Value values = 1;
+
+ // If the position is just before or just after the given values, relative
+ // to the sort order defined by the query.
+ bool before = 2;
+}
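The Python bindings generated from this file are added as query_pb2.py in the next hunk. As a sketch of how these messages compose, the following builds the equivalent of SELECT __name__ FROM messages WHERE a > 1 ORDER BY a, __name__ LIMIT 10, following the stable-ordering rules described above (the collection and field names are illustrative):

    from google.cloud.firestore_v1beta1.proto import query_pb2

    query = query_pb2.StructuredQuery()

    # FROM messages ("from" is a Python keyword, so use getattr).
    getattr(query, "from").add(collection_id="messages")

    # SELECT __name__ only.
    query.select.fields.add(field_path="__name__")

    # WHERE a > 1 (GREATER_THAN requires `a` to come first in order_by).
    flt = query.where.field_filter
    flt.field.field_path = "a"
    flt.op = query_pb2.StructuredQuery.FieldFilter.GREATER_THAN
    flt.value.integer_value = 1

    # ORDER BY a ASC, __name__ ASC, mirroring the normalization rules above.
    for path in ("a", "__name__"):
        order = query.order_by.add()
        order.field.field_path = path
        order.direction = query_pb2.StructuredQuery.ASCENDING

    query.limit.value = 10  # limit is an Int32Value wrapper, so set .value.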
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/query_pb2.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/query_pb2.py
new file mode 100644
index 000000000..67f1a7283
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/query_pb2.py
@@ -0,0 +1,1284 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/cloud/firestore_v1beta1/proto/query.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.cloud.firestore_v1beta1.proto import (
+ document_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2,
+)
+from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2
+from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name="google/cloud/firestore_v1beta1/proto/query.proto",
+ package="google.firestore.v1beta1",
+ syntax="proto3",
+ serialized_options=b"\n\034com.google.firestore.v1beta1B\nQueryProtoP\001ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\242\002\004GCFS\252\002\036Google.Cloud.Firestore.V1Beta1\312\002\036Google\\Cloud\\Firestore\\V1beta1\352\002!Google::Cloud::Firestore::V1beta1",
+ create_key=_descriptor._internal_create_key,
+ serialized_pb=b'\n0google/cloud/firestore_v1beta1/proto/query.proto\x12\x18google.firestore.v1beta1\x1a\x33google/cloud/firestore_v1beta1/proto/document.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1cgoogle/api/annotations.proto"\xd9\x0f\n\x0fStructuredQuery\x12\x44\n\x06select\x18\x01 \x01(\x0b\x32\x34.google.firestore.v1beta1.StructuredQuery.Projection\x12J\n\x04\x66rom\x18\x02 \x03(\x0b\x32<.google.firestore.v1beta1.StructuredQuery.CollectionSelector\x12?\n\x05where\x18\x03 \x01(\x0b\x32\x30.google.firestore.v1beta1.StructuredQuery.Filter\x12\x41\n\x08order_by\x18\x04 \x03(\x0b\x32/.google.firestore.v1beta1.StructuredQuery.Order\x12\x32\n\x08start_at\x18\x07 \x01(\x0b\x32 .google.firestore.v1beta1.Cursor\x12\x30\n\x06\x65nd_at\x18\x08 \x01(\x0b\x32 .google.firestore.v1beta1.Cursor\x12\x0e\n\x06offset\x18\x06 \x01(\x05\x12*\n\x05limit\x18\x05 \x01(\x0b\x32\x1b.google.protobuf.Int32Value\x1a\x44\n\x12\x43ollectionSelector\x12\x15\n\rcollection_id\x18\x02 \x01(\t\x12\x17\n\x0f\x61ll_descendants\x18\x03 \x01(\x08\x1a\x8c\x02\n\x06\x46ilter\x12U\n\x10\x63omposite_filter\x18\x01 \x01(\x0b\x32\x39.google.firestore.v1beta1.StructuredQuery.CompositeFilterH\x00\x12M\n\x0c\x66ield_filter\x18\x02 \x01(\x0b\x32\x35.google.firestore.v1beta1.StructuredQuery.FieldFilterH\x00\x12M\n\x0cunary_filter\x18\x03 \x01(\x0b\x32\x35.google.firestore.v1beta1.StructuredQuery.UnaryFilterH\x00\x42\r\n\x0b\x66ilter_type\x1a\xd3\x01\n\x0f\x43ompositeFilter\x12N\n\x02op\x18\x01 \x01(\x0e\x32\x42.google.firestore.v1beta1.StructuredQuery.CompositeFilter.Operator\x12\x41\n\x07\x66ilters\x18\x02 \x03(\x0b\x32\x30.google.firestore.v1beta1.StructuredQuery.Filter"-\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41ND\x10\x01\x1a\x8c\x03\n\x0b\x46ieldFilter\x12G\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReference\x12J\n\x02op\x18\x02 \x01(\x0e\x32>.google.firestore.v1beta1.StructuredQuery.FieldFilter.Operator\x12.\n\x05value\x18\x03 \x01(\x0b\x32\x1f.google.firestore.v1beta1.Value"\xb7\x01\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\r\n\tLESS_THAN\x10\x01\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x02\x12\x10\n\x0cGREATER_THAN\x10\x03\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x04\x12\t\n\x05\x45QUAL\x10\x05\x12\x12\n\x0e\x41RRAY_CONTAINS\x10\x07\x12\x06\n\x02IN\x10\x08\x12\x16\n\x12\x41RRAY_CONTAINS_ANY\x10\t\x1a\xf3\x01\n\x0bUnaryFilter\x12J\n\x02op\x18\x01 \x01(\x0e\x32>.google.firestore.v1beta1.StructuredQuery.UnaryFilter.Operator\x12I\n\x05\x66ield\x18\x02 \x01(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReferenceH\x00"=\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\n\n\x06IS_NAN\x10\x02\x12\x0b\n\x07IS_NULL\x10\x03\x42\x0e\n\x0coperand_type\x1a\x98\x01\n\x05Order\x12G\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReference\x12\x46\n\tdirection\x18\x02 \x01(\x0e\x32\x33.google.firestore.v1beta1.StructuredQuery.Direction\x1a$\n\x0e\x46ieldReference\x12\x12\n\nfield_path\x18\x02 \x01(\t\x1aV\n\nProjection\x12H\n\x06\x66ields\x18\x02 \x03(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReference"E\n\tDirection\x12\x19\n\x15\x44IRECTION_UNSPECIFIED\x10\x00\x12\r\n\tASCENDING\x10\x01\x12\x0e\n\nDESCENDING\x10\x02"I\n\x06\x43ursor\x12/\n\x06values\x18\x01 \x03(\x0b\x32\x1f.google.firestore.v1beta1.Value\x12\x0e\n\x06\x62\x65\x66ore\x18\x02 \x01(\x08\x42\xdc\x01\n\x1c\x63om.google.firestore.v1beta1B\nQueryProtoP\x01ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\xa2\x02\x04GCFS\xaa\x02\x1eGoogle.Cloud.Firestore.V1Beta1\xca\x02\x1eGoogle\\Cloud\\Firestore\\V1beta1\xea\x02!Google::Cloud::Firestore::V1beta1b\x06proto3',
+ dependencies=[
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.DESCRIPTOR,
+ google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR,
+ google_dot_api_dot_annotations__pb2.DESCRIPTOR,
+ ],
+)
+
+
+_STRUCTUREDQUERY_COMPOSITEFILTER_OPERATOR = _descriptor.EnumDescriptor(
+ name="Operator",
+ full_name="google.firestore.v1beta1.StructuredQuery.CompositeFilter.Operator",
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name="OPERATOR_UNSPECIFIED",
+ index=0,
+ number=0,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="AND",
+ index=1,
+ number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ serialized_start=1161,
+ serialized_end=1206,
+)
+_sym_db.RegisterEnumDescriptor(_STRUCTUREDQUERY_COMPOSITEFILTER_OPERATOR)
+
+_STRUCTUREDQUERY_FIELDFILTER_OPERATOR = _descriptor.EnumDescriptor(
+ name="Operator",
+ full_name="google.firestore.v1beta1.StructuredQuery.FieldFilter.Operator",
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name="OPERATOR_UNSPECIFIED",
+ index=0,
+ number=0,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="LESS_THAN",
+ index=1,
+ number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="LESS_THAN_OR_EQUAL",
+ index=2,
+ number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="GREATER_THAN",
+ index=3,
+ number=3,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="GREATER_THAN_OR_EQUAL",
+ index=4,
+ number=4,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="EQUAL",
+ index=5,
+ number=5,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="ARRAY_CONTAINS",
+ index=6,
+ number=7,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="IN",
+ index=7,
+ number=8,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="ARRAY_CONTAINS_ANY",
+ index=8,
+ number=9,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ serialized_start=1422,
+ serialized_end=1605,
+)
+_sym_db.RegisterEnumDescriptor(_STRUCTUREDQUERY_FIELDFILTER_OPERATOR)
+
+_STRUCTUREDQUERY_UNARYFILTER_OPERATOR = _descriptor.EnumDescriptor(
+ name="Operator",
+ full_name="google.firestore.v1beta1.StructuredQuery.UnaryFilter.Operator",
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name="OPERATOR_UNSPECIFIED",
+ index=0,
+ number=0,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="IS_NAN",
+ index=1,
+ number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="IS_NULL",
+ index=2,
+ number=3,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ serialized_start=1774,
+ serialized_end=1835,
+)
+_sym_db.RegisterEnumDescriptor(_STRUCTUREDQUERY_UNARYFILTER_OPERATOR)
+
+_STRUCTUREDQUERY_DIRECTION = _descriptor.EnumDescriptor(
+ name="Direction",
+ full_name="google.firestore.v1beta1.StructuredQuery.Direction",
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name="DIRECTION_UNSPECIFIED",
+ index=0,
+ number=0,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="ASCENDING",
+ index=1,
+ number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="DESCENDING",
+ index=2,
+ number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ serialized_start=2134,
+ serialized_end=2203,
+)
+_sym_db.RegisterEnumDescriptor(_STRUCTUREDQUERY_DIRECTION)
+
+
+_STRUCTUREDQUERY_COLLECTIONSELECTOR = _descriptor.Descriptor(
+ name="CollectionSelector",
+ full_name="google.firestore.v1beta1.StructuredQuery.CollectionSelector",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="collection_id",
+ full_name="google.firestore.v1beta1.StructuredQuery.CollectionSelector.collection_id",
+ index=0,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="all_descendants",
+ full_name="google.firestore.v1beta1.StructuredQuery.CollectionSelector.all_descendants",
+ index=1,
+ number=3,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=653,
+ serialized_end=721,
+)
+
+_STRUCTUREDQUERY_FILTER = _descriptor.Descriptor(
+ name="Filter",
+ full_name="google.firestore.v1beta1.StructuredQuery.Filter",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="composite_filter",
+ full_name="google.firestore.v1beta1.StructuredQuery.Filter.composite_filter",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="field_filter",
+ full_name="google.firestore.v1beta1.StructuredQuery.Filter.field_filter",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="unary_filter",
+ full_name="google.firestore.v1beta1.StructuredQuery.Filter.unary_filter",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name="filter_type",
+ full_name="google.firestore.v1beta1.StructuredQuery.Filter.filter_type",
+ index=0,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[],
+ ),
+ ],
+ serialized_start=724,
+ serialized_end=992,
+)
+
+_STRUCTUREDQUERY_COMPOSITEFILTER = _descriptor.Descriptor(
+ name="CompositeFilter",
+ full_name="google.firestore.v1beta1.StructuredQuery.CompositeFilter",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="op",
+ full_name="google.firestore.v1beta1.StructuredQuery.CompositeFilter.op",
+ index=0,
+ number=1,
+ type=14,
+ cpp_type=8,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="filters",
+ full_name="google.firestore.v1beta1.StructuredQuery.CompositeFilter.filters",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[_STRUCTUREDQUERY_COMPOSITEFILTER_OPERATOR,],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=995,
+ serialized_end=1206,
+)
+
+_STRUCTUREDQUERY_FIELDFILTER = _descriptor.Descriptor(
+ name="FieldFilter",
+ full_name="google.firestore.v1beta1.StructuredQuery.FieldFilter",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="field",
+ full_name="google.firestore.v1beta1.StructuredQuery.FieldFilter.field",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="op",
+ full_name="google.firestore.v1beta1.StructuredQuery.FieldFilter.op",
+ index=1,
+ number=2,
+ type=14,
+ cpp_type=8,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="value",
+ full_name="google.firestore.v1beta1.StructuredQuery.FieldFilter.value",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[_STRUCTUREDQUERY_FIELDFILTER_OPERATOR,],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=1209,
+ serialized_end=1605,
+)
+
+_STRUCTUREDQUERY_UNARYFILTER = _descriptor.Descriptor(
+ name="UnaryFilter",
+ full_name="google.firestore.v1beta1.StructuredQuery.UnaryFilter",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="op",
+ full_name="google.firestore.v1beta1.StructuredQuery.UnaryFilter.op",
+ index=0,
+ number=1,
+ type=14,
+ cpp_type=8,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="field",
+ full_name="google.firestore.v1beta1.StructuredQuery.UnaryFilter.field",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[_STRUCTUREDQUERY_UNARYFILTER_OPERATOR,],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name="operand_type",
+ full_name="google.firestore.v1beta1.StructuredQuery.UnaryFilter.operand_type",
+ index=0,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[],
+ ),
+ ],
+ serialized_start=1608,
+ serialized_end=1851,
+)
+
+_STRUCTUREDQUERY_ORDER = _descriptor.Descriptor(
+ name="Order",
+ full_name="google.firestore.v1beta1.StructuredQuery.Order",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="field",
+ full_name="google.firestore.v1beta1.StructuredQuery.Order.field",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="direction",
+ full_name="google.firestore.v1beta1.StructuredQuery.Order.direction",
+ index=1,
+ number=2,
+ type=14,
+ cpp_type=8,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=1854,
+ serialized_end=2006,
+)
+
+_STRUCTUREDQUERY_FIELDREFERENCE = _descriptor.Descriptor(
+ name="FieldReference",
+ full_name="google.firestore.v1beta1.StructuredQuery.FieldReference",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="field_path",
+ full_name="google.firestore.v1beta1.StructuredQuery.FieldReference.field_path",
+ index=0,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=2008,
+ serialized_end=2044,
+)
+
+_STRUCTUREDQUERY_PROJECTION = _descriptor.Descriptor(
+ name="Projection",
+ full_name="google.firestore.v1beta1.StructuredQuery.Projection",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="fields",
+ full_name="google.firestore.v1beta1.StructuredQuery.Projection.fields",
+ index=0,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=2046,
+ serialized_end=2132,
+)
+
+_STRUCTUREDQUERY = _descriptor.Descriptor(
+ name="StructuredQuery",
+ full_name="google.firestore.v1beta1.StructuredQuery",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="select",
+ full_name="google.firestore.v1beta1.StructuredQuery.select",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="from",
+ full_name="google.firestore.v1beta1.StructuredQuery.from",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="where",
+ full_name="google.firestore.v1beta1.StructuredQuery.where",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="order_by",
+ full_name="google.firestore.v1beta1.StructuredQuery.order_by",
+ index=3,
+ number=4,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="start_at",
+ full_name="google.firestore.v1beta1.StructuredQuery.start_at",
+ index=4,
+ number=7,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="end_at",
+ full_name="google.firestore.v1beta1.StructuredQuery.end_at",
+ index=5,
+ number=8,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="offset",
+ full_name="google.firestore.v1beta1.StructuredQuery.offset",
+ index=6,
+ number=6,
+ type=5,
+ cpp_type=1,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="limit",
+ full_name="google.firestore.v1beta1.StructuredQuery.limit",
+ index=7,
+ number=5,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[
+ _STRUCTUREDQUERY_COLLECTIONSELECTOR,
+ _STRUCTUREDQUERY_FILTER,
+ _STRUCTUREDQUERY_COMPOSITEFILTER,
+ _STRUCTUREDQUERY_FIELDFILTER,
+ _STRUCTUREDQUERY_UNARYFILTER,
+ _STRUCTUREDQUERY_ORDER,
+ _STRUCTUREDQUERY_FIELDREFERENCE,
+ _STRUCTUREDQUERY_PROJECTION,
+ ],
+ enum_types=[_STRUCTUREDQUERY_DIRECTION,],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=194,
+ serialized_end=2203,
+)
+
+
+_CURSOR = _descriptor.Descriptor(
+ name="Cursor",
+ full_name="google.firestore.v1beta1.Cursor",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="values",
+ full_name="google.firestore.v1beta1.Cursor.values",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="before",
+ full_name="google.firestore.v1beta1.Cursor.before",
+ index=1,
+ number=2,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=2205,
+ serialized_end=2278,
+)
+
+_STRUCTUREDQUERY_COLLECTIONSELECTOR.containing_type = _STRUCTUREDQUERY
+_STRUCTUREDQUERY_FILTER.fields_by_name[
+ "composite_filter"
+].message_type = _STRUCTUREDQUERY_COMPOSITEFILTER
+_STRUCTUREDQUERY_FILTER.fields_by_name[
+ "field_filter"
+].message_type = _STRUCTUREDQUERY_FIELDFILTER
+_STRUCTUREDQUERY_FILTER.fields_by_name[
+ "unary_filter"
+].message_type = _STRUCTUREDQUERY_UNARYFILTER
+_STRUCTUREDQUERY_FILTER.containing_type = _STRUCTUREDQUERY
+_STRUCTUREDQUERY_FILTER.oneofs_by_name["filter_type"].fields.append(
+ _STRUCTUREDQUERY_FILTER.fields_by_name["composite_filter"]
+)
+_STRUCTUREDQUERY_FILTER.fields_by_name[
+ "composite_filter"
+].containing_oneof = _STRUCTUREDQUERY_FILTER.oneofs_by_name["filter_type"]
+_STRUCTUREDQUERY_FILTER.oneofs_by_name["filter_type"].fields.append(
+ _STRUCTUREDQUERY_FILTER.fields_by_name["field_filter"]
+)
+_STRUCTUREDQUERY_FILTER.fields_by_name[
+ "field_filter"
+].containing_oneof = _STRUCTUREDQUERY_FILTER.oneofs_by_name["filter_type"]
+_STRUCTUREDQUERY_FILTER.oneofs_by_name["filter_type"].fields.append(
+ _STRUCTUREDQUERY_FILTER.fields_by_name["unary_filter"]
+)
+_STRUCTUREDQUERY_FILTER.fields_by_name[
+ "unary_filter"
+].containing_oneof = _STRUCTUREDQUERY_FILTER.oneofs_by_name["filter_type"]
+_STRUCTUREDQUERY_COMPOSITEFILTER.fields_by_name[
+ "op"
+].enum_type = _STRUCTUREDQUERY_COMPOSITEFILTER_OPERATOR
+_STRUCTUREDQUERY_COMPOSITEFILTER.fields_by_name[
+ "filters"
+].message_type = _STRUCTUREDQUERY_FILTER
+_STRUCTUREDQUERY_COMPOSITEFILTER.containing_type = _STRUCTUREDQUERY
+_STRUCTUREDQUERY_COMPOSITEFILTER_OPERATOR.containing_type = (
+ _STRUCTUREDQUERY_COMPOSITEFILTER
+)
+_STRUCTUREDQUERY_FIELDFILTER.fields_by_name[
+ "field"
+].message_type = _STRUCTUREDQUERY_FIELDREFERENCE
+_STRUCTUREDQUERY_FIELDFILTER.fields_by_name[
+ "op"
+].enum_type = _STRUCTUREDQUERY_FIELDFILTER_OPERATOR
+_STRUCTUREDQUERY_FIELDFILTER.fields_by_name[
+ "value"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._VALUE
+)
+_STRUCTUREDQUERY_FIELDFILTER.containing_type = _STRUCTUREDQUERY
+_STRUCTUREDQUERY_FIELDFILTER_OPERATOR.containing_type = _STRUCTUREDQUERY_FIELDFILTER
+_STRUCTUREDQUERY_UNARYFILTER.fields_by_name[
+ "op"
+].enum_type = _STRUCTUREDQUERY_UNARYFILTER_OPERATOR
+_STRUCTUREDQUERY_UNARYFILTER.fields_by_name[
+ "field"
+].message_type = _STRUCTUREDQUERY_FIELDREFERENCE
+_STRUCTUREDQUERY_UNARYFILTER.containing_type = _STRUCTUREDQUERY
+_STRUCTUREDQUERY_UNARYFILTER_OPERATOR.containing_type = _STRUCTUREDQUERY_UNARYFILTER
+_STRUCTUREDQUERY_UNARYFILTER.oneofs_by_name["operand_type"].fields.append(
+ _STRUCTUREDQUERY_UNARYFILTER.fields_by_name["field"]
+)
+_STRUCTUREDQUERY_UNARYFILTER.fields_by_name[
+ "field"
+].containing_oneof = _STRUCTUREDQUERY_UNARYFILTER.oneofs_by_name["operand_type"]
+_STRUCTUREDQUERY_ORDER.fields_by_name[
+ "field"
+].message_type = _STRUCTUREDQUERY_FIELDREFERENCE
+_STRUCTUREDQUERY_ORDER.fields_by_name[
+ "direction"
+].enum_type = _STRUCTUREDQUERY_DIRECTION
+_STRUCTUREDQUERY_ORDER.containing_type = _STRUCTUREDQUERY
+_STRUCTUREDQUERY_FIELDREFERENCE.containing_type = _STRUCTUREDQUERY
+_STRUCTUREDQUERY_PROJECTION.fields_by_name[
+ "fields"
+].message_type = _STRUCTUREDQUERY_FIELDREFERENCE
+_STRUCTUREDQUERY_PROJECTION.containing_type = _STRUCTUREDQUERY
+_STRUCTUREDQUERY.fields_by_name["select"].message_type = _STRUCTUREDQUERY_PROJECTION
+_STRUCTUREDQUERY.fields_by_name[
+ "from"
+].message_type = _STRUCTUREDQUERY_COLLECTIONSELECTOR
+_STRUCTUREDQUERY.fields_by_name["where"].message_type = _STRUCTUREDQUERY_FILTER
+_STRUCTUREDQUERY.fields_by_name["order_by"].message_type = _STRUCTUREDQUERY_ORDER
+_STRUCTUREDQUERY.fields_by_name["start_at"].message_type = _CURSOR
+_STRUCTUREDQUERY.fields_by_name["end_at"].message_type = _CURSOR
+_STRUCTUREDQUERY.fields_by_name[
+ "limit"
+].message_type = google_dot_protobuf_dot_wrappers__pb2._INT32VALUE
+_STRUCTUREDQUERY_DIRECTION.containing_type = _STRUCTUREDQUERY
+_CURSOR.fields_by_name[
+ "values"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._VALUE
+)
+DESCRIPTOR.message_types_by_name["StructuredQuery"] = _STRUCTUREDQUERY
+DESCRIPTOR.message_types_by_name["Cursor"] = _CURSOR
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+StructuredQuery = _reflection.GeneratedProtocolMessageType(
+ "StructuredQuery",
+ (_message.Message,),
+ {
+ "CollectionSelector": _reflection.GeneratedProtocolMessageType(
+ "CollectionSelector",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _STRUCTUREDQUERY_COLLECTIONSELECTOR,
+ "__module__": "google.cloud.firestore_v1beta1.proto.query_pb2",
+ "__doc__": """A selection of a collection, such as ``messages as m1``.
+
+ Attributes:
+ collection_id:
+ The collection ID. When set, selects only collections with
+ this ID.
+ all_descendants:
+ When false, selects only collections that are immediate
+ children of the ``parent`` specified in the containing
+ ``RunQueryRequest``. When true, selects all descendant
+ collections.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.CollectionSelector)
+ },
+ ),
+ "Filter": _reflection.GeneratedProtocolMessageType(
+ "Filter",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _STRUCTUREDQUERY_FILTER,
+ "__module__": "google.cloud.firestore_v1beta1.proto.query_pb2",
+ "__doc__": """A filter.
+
+ Attributes:
+ filter_type:
+ The type of filter.
+ composite_filter:
+ A composite filter.
+ field_filter:
+ A filter on a document field.
+ unary_filter:
+ A filter that takes exactly one argument.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.Filter)
+ },
+ ),
+ "CompositeFilter": _reflection.GeneratedProtocolMessageType(
+ "CompositeFilter",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _STRUCTUREDQUERY_COMPOSITEFILTER,
+ "__module__": "google.cloud.firestore_v1beta1.proto.query_pb2",
+ "__doc__": """A filter that merges multiple other filters using the given operator.
+
+ Attributes:
+ op:
+ The operator for combining multiple filters.
+ filters:
+ The list of filters to combine. Must contain at least one
+ filter.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.CompositeFilter)
+ },
+ ),
+ "FieldFilter": _reflection.GeneratedProtocolMessageType(
+ "FieldFilter",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _STRUCTUREDQUERY_FIELDFILTER,
+ "__module__": "google.cloud.firestore_v1beta1.proto.query_pb2",
+ "__doc__": """A filter on a specific field.
+
+ Attributes:
+ field:
+ The field to filter by.
+ op:
+ The operator to filter by.
+ value:
+ The value to compare to.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.FieldFilter)
+ },
+ ),
+ "UnaryFilter": _reflection.GeneratedProtocolMessageType(
+ "UnaryFilter",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _STRUCTUREDQUERY_UNARYFILTER,
+ "__module__": "google.cloud.firestore_v1beta1.proto.query_pb2",
+ "__doc__": """A filter with a single operand.
+
+ Attributes:
+ op:
+ The unary operator to apply.
+ operand_type:
+ The argument to the filter.
+ field:
+ The field to which to apply the operator.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.UnaryFilter)
+ },
+ ),
+ "Order": _reflection.GeneratedProtocolMessageType(
+ "Order",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _STRUCTUREDQUERY_ORDER,
+ "__module__": "google.cloud.firestore_v1beta1.proto.query_pb2",
+ "__doc__": """An order on a field.
+
+ Attributes:
+ field:
+ The field to order by.
+ direction:
+ The direction to order by. Defaults to ``ASCENDING``.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.Order)
+ },
+ ),
+ "FieldReference": _reflection.GeneratedProtocolMessageType(
+ "FieldReference",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _STRUCTUREDQUERY_FIELDREFERENCE,
+ "__module__": "google.cloud.firestore_v1beta1.proto.query_pb2",
+ "__doc__": """A reference to a field, such as ``max(messages.time) as max_time``.""",
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.FieldReference)
+ },
+ ),
+ "Projection": _reflection.GeneratedProtocolMessageType(
+ "Projection",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _STRUCTUREDQUERY_PROJECTION,
+ "__module__": "google.cloud.firestore_v1beta1.proto.query_pb2",
+ "__doc__": """The projection of document’s fields to return.
+
+ Attributes:
+ fields:
+ The fields to return. If empty, all fields are returned. To
+ only return the name of the document, use ``['__name__']``.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.Projection)
+ },
+ ),
+ "DESCRIPTOR": _STRUCTUREDQUERY,
+ "__module__": "google.cloud.firestore_v1beta1.proto.query_pb2",
+ "__doc__": """A Firestore query.
+
+ Attributes:
+ select:
+ The projection to return.
+ from:
+ The collections to query.
+ where:
+ The filter to apply.
+ order_by:
+ The order to apply to the query results. Firestore guarantees
+ a stable ordering through the following rules: - Any field
+ required to appear in ``order_by``, that is not already
+ specified in ``order_by``, is appended to the order in field
+ name order by default. - If an order on ``__name__`` is
+ not specified, it is appended by default. Fields are
+ appended with the same sort direction as the last order
+ specified, or ‘ASCENDING’ if no order was specified. For
+ example: - ``SELECT * FROM Foo ORDER BY A`` becomes
+ ``SELECT * FROM Foo ORDER BY A, __name__`` - ``SELECT * FROM
+ Foo ORDER BY A DESC`` becomes ``SELECT * FROM Foo ORDER BY
+ A DESC, __name__ DESC`` - ``SELECT * FROM Foo WHERE A > 1``
+ becomes ``SELECT * FROM Foo WHERE A > 1 ORDER BY A,
+ __name__``
+ start_at:
+ A starting point for the query results.
+ end_at:
+ An end point for the query results.
+ offset:
+ The number of results to skip. Applies before limit, but
+ after all other constraints. Must be >= 0 if specified.
+ limit:
+ The maximum number of results to return. Applies after all
+ other constraints. Must be >= 0 if specified.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery)
+ },
+)
+_sym_db.RegisterMessage(StructuredQuery)
+_sym_db.RegisterMessage(StructuredQuery.CollectionSelector)
+_sym_db.RegisterMessage(StructuredQuery.Filter)
+_sym_db.RegisterMessage(StructuredQuery.CompositeFilter)
+_sym_db.RegisterMessage(StructuredQuery.FieldFilter)
+_sym_db.RegisterMessage(StructuredQuery.UnaryFilter)
+_sym_db.RegisterMessage(StructuredQuery.Order)
+_sym_db.RegisterMessage(StructuredQuery.FieldReference)
+_sym_db.RegisterMessage(StructuredQuery.Projection)
+
+Cursor = _reflection.GeneratedProtocolMessageType(
+ "Cursor",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _CURSOR,
+ "__module__": "google.cloud.firestore_v1beta1.proto.query_pb2",
+ "__doc__": """A position in a query result set.
+
+ Attributes:
+ values:
+ The values that represent a position, in the order they appear
+ in the order by clause of a query. Can contain fewer values
+ than specified in the order by clause.
+ before:
+ If the position is just before or just after the given values,
+ relative to the sort order defined by the query.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.Cursor)
+ },
+)
+_sym_db.RegisterMessage(Cursor)
+
+
+DESCRIPTOR._options = None
+# @@protoc_insertion_point(module_scope)
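The classes registered above behave like ordinary protobuf messages, so instances round-trip through the standard wire format. A small sketch using the Cursor message (values are illustrative):

    from google.cloud.firestore_v1beta1.proto import query_pb2

    cursor = query_pb2.Cursor(before=True)
    cursor.values.add().string_value = "m1"

    # Standard protobuf round-trip: serialize, then parse into a fresh message.
    data = cursor.SerializeToString()
    parsed = query_pb2.Cursor.FromString(data)
    assert parsed.before and parsed.values[0].string_value == "m1"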
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/query_pb2_grpc.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/query_pb2_grpc.py
new file mode 100644
index 000000000..8a9393943
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/query_pb2_grpc.py
@@ -0,0 +1,3 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/test_v1beta1_pb2.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/test_v1beta1_pb2.py
new file mode 100644
index 000000000..18dc58706
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/test_v1beta1_pb2.py
@@ -0,0 +1,2190 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: test_v1beta1.proto
+
+import sys
+
+_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.cloud.firestore_v1beta1.proto import (
+ common_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2,
+)
+from google.cloud.firestore_v1beta1.proto import (
+ document_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2,
+)
+from google.cloud.firestore_v1beta1.proto import (
+ firestore_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2,
+)
+from google.cloud.firestore_v1beta1.proto import (
+ query_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_query__pb2,
+)
+from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name="test_v1beta1.proto",
+ package="tests.v1beta1",
+ syntax="proto3",
+ serialized_pb=_b(
+ '\n\x12test_v1beta1.proto\x12\rtests.v1beta1\x1a\x31google/cloud/firestore_v1beta1/proto/common.proto\x1a\x33google/cloud/firestore_v1beta1/proto/document.proto\x1a\x34google/cloud/firestore_v1beta1/proto/firestore.proto\x1a\x30google/cloud/firestore_v1beta1/proto/query.proto\x1a\x1fgoogle/protobuf/timestamp.proto"/\n\tTestSuite\x12"\n\x05tests\x18\x01 \x03(\x0b\x32\x13.tests.v1beta1.Test"\x88\x03\n\x04Test\x12\x13\n\x0b\x64\x65scription\x18\x01 \x01(\t\x12%\n\x03get\x18\x02 \x01(\x0b\x32\x16.tests.v1beta1.GetTestH\x00\x12+\n\x06\x63reate\x18\x03 \x01(\x0b\x32\x19.tests.v1beta1.CreateTestH\x00\x12%\n\x03set\x18\x04 \x01(\x0b\x32\x16.tests.v1beta1.SetTestH\x00\x12+\n\x06update\x18\x05 \x01(\x0b\x32\x19.tests.v1beta1.UpdateTestH\x00\x12\x36\n\x0cupdate_paths\x18\x06 \x01(\x0b\x32\x1e.tests.v1beta1.UpdatePathsTestH\x00\x12+\n\x06\x64\x65lete\x18\x07 \x01(\x0b\x32\x19.tests.v1beta1.DeleteTestH\x00\x12)\n\x05query\x18\x08 \x01(\x0b\x32\x18.tests.v1beta1.QueryTestH\x00\x12+\n\x06listen\x18\t \x01(\x0b\x32\x19.tests.v1beta1.ListenTestH\x00\x42\x06\n\x04test"^\n\x07GetTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12=\n\x07request\x18\x02 \x01(\x0b\x32,.google.firestore.v1beta1.GetDocumentRequest"\x81\x01\n\nCreateTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12\x11\n\tjson_data\x18\x02 \x01(\t\x12\x38\n\x07request\x18\x03 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x04 \x01(\x08"\xa8\x01\n\x07SetTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12(\n\x06option\x18\x02 \x01(\x0b\x32\x18.tests.v1beta1.SetOption\x12\x11\n\tjson_data\x18\x03 \x01(\t\x12\x38\n\x07request\x18\x04 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x05 \x01(\x08"\xbf\x01\n\nUpdateTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12<\n\x0cprecondition\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.Precondition\x12\x11\n\tjson_data\x18\x03 \x01(\t\x12\x38\n\x07request\x18\x04 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x05 \x01(\x08"\xf5\x01\n\x0fUpdatePathsTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12<\n\x0cprecondition\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.Precondition\x12-\n\x0b\x66ield_paths\x18\x03 \x03(\x0b\x32\x18.tests.v1beta1.FieldPath\x12\x13\n\x0bjson_values\x18\x04 \x03(\t\x12\x38\n\x07request\x18\x05 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x06 \x01(\x08"\xac\x01\n\nDeleteTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12<\n\x0cprecondition\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.Precondition\x12\x38\n\x07request\x18\x03 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x04 \x01(\x08"B\n\tSetOption\x12\x0b\n\x03\x61ll\x18\x01 \x01(\x08\x12(\n\x06\x66ields\x18\x02 \x03(\x0b\x32\x18.tests.v1beta1.FieldPath"\x92\x01\n\tQueryTest\x12\x11\n\tcoll_path\x18\x01 \x01(\t\x12&\n\x07\x63lauses\x18\x02 \x03(\x0b\x32\x15.tests.v1beta1.Clause\x12\x38\n\x05query\x18\x03 \x01(\x0b\x32).google.firestore.v1beta1.StructuredQuery\x12\x10\n\x08is_error\x18\x04 \x01(\x08"\xe0\x02\n\x06\x43lause\x12\'\n\x06select\x18\x01 \x01(\x0b\x32\x15.tests.v1beta1.SelectH\x00\x12%\n\x05where\x18\x02 \x01(\x0b\x32\x14.tests.v1beta1.WhereH\x00\x12*\n\x08order_by\x18\x03 \x01(\x0b\x32\x16.tests.v1beta1.OrderByH\x00\x12\x10\n\x06offset\x18\x04 \x01(\x05H\x00\x12\x0f\n\x05limit\x18\x05 \x01(\x05H\x00\x12)\n\x08start_at\x18\x06 \x01(\x0b\x32\x15.tests.v1beta1.CursorH\x00\x12,\n\x0bstart_after\x18\x07 \x01(\x0b\x32\x15.tests.v1beta1.CursorH\x00\x12\'\n\x06\x65nd_at\x18\x08 \x01(\x0b\x32\x15.tests.v1beta1.CursorH\x00\x12+\n\nend_before\x18\t \x01(\x0b\x32\x15.tests.v1beta1.CursorH\x00\x42\x08\n\x06\x63lause"2\n\x06Select\x12(\n\x06\x66ields\x18\x01 \x03(\x0b\x32\x18.tests.v1beta1.FieldPath"O\n\x05Where\x12&\n\x04path\x18\x01 \x01(\x0b\x32\x18.tests.v1beta1.FieldPath\x12\n\n\x02op\x18\x02 \x01(\t\x12\x12\n\njson_value\x18\x03 \x01(\t"D\n\x07OrderBy\x12&\n\x04path\x18\x01 \x01(\x0b\x32\x18.tests.v1beta1.FieldPath\x12\x11\n\tdirection\x18\x02 \x01(\t"O\n\x06\x43ursor\x12\x30\n\x0c\x64oc_snapshot\x18\x01 \x01(\x0b\x32\x1a.tests.v1beta1.DocSnapshot\x12\x13\n\x0bjson_values\x18\x02 \x03(\t".\n\x0b\x44ocSnapshot\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x11\n\tjson_data\x18\x02 \x01(\t"\x1a\n\tFieldPath\x12\r\n\x05\x66ield\x18\x01 \x03(\t"\x87\x01\n\nListenTest\x12;\n\tresponses\x18\x01 \x03(\x0b\x32(.google.firestore.v1beta1.ListenResponse\x12*\n\tsnapshots\x18\x02 \x03(\x0b\x32\x17.tests.v1beta1.Snapshot\x12\x10\n\x08is_error\x18\x03 \x01(\x08"\x96\x01\n\x08Snapshot\x12\x30\n\x04\x64ocs\x18\x01 \x03(\x0b\x32".google.firestore.v1beta1.Document\x12)\n\x07\x63hanges\x18\x02 \x03(\x0b\x32\x18.tests.v1beta1.DocChange\x12-\n\tread_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\xd3\x01\n\tDocChange\x12+\n\x04kind\x18\x01 \x01(\x0e\x32\x1d.tests.v1beta1.DocChange.Kind\x12/\n\x03\x64oc\x18\x02 \x01(\x0b\x32".google.firestore.v1beta1.Document\x12\x11\n\told_index\x18\x03 \x01(\x05\x12\x11\n\tnew_index\x18\x04 \x01(\x05"B\n\x04Kind\x12\x14\n\x10KIND_UNSPECIFIED\x10\x00\x12\t\n\x05\x41\x44\x44\x45\x44\x10\x01\x12\x0b\n\x07REMOVED\x10\x02\x12\x0c\n\x08MODIFIED\x10\x03\x42x\n&com.google.cloud.firestore.conformance\xaa\x02"Google.Cloud.Firestore.Tests.Proto\xca\x02(Google\\Cloud\\Firestore\\Tests\\Conformanceb\x06proto3'
+ ),
+ dependencies=[
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2.DESCRIPTOR,
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.DESCRIPTOR,
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.DESCRIPTOR,
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_query__pb2.DESCRIPTOR,
+ google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,
+ ],
+)
+
+
+_DOCCHANGE_KIND = _descriptor.EnumDescriptor(
+ name="Kind",
+ full_name="tests.v1beta1.DocChange.Kind",
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name="KIND_UNSPECIFIED", index=0, number=0, options=None, type=None
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="ADDED", index=1, number=1, options=None, type=None
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="REMOVED", index=2, number=2, options=None, type=None
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="MODIFIED", index=3, number=3, options=None, type=None
+ ),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=3107,
+ serialized_end=3173,
+)
+_sym_db.RegisterEnumDescriptor(_DOCCHANGE_KIND)
+
+
+_TESTSUITE = _descriptor.Descriptor(
+ name="TestSuite",
+ full_name="tests.v1beta1.TestSuite",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="tests",
+ full_name="tests.v1beta1.TestSuite.tests",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ )
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=278,
+ serialized_end=325,
+)
+
+
+_TEST = _descriptor.Descriptor(
+ name="Test",
+ full_name="tests.v1beta1.Test",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="description",
+ full_name="tests.v1beta1.Test.description",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="get",
+ full_name="tests.v1beta1.Test.get",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="create",
+ full_name="tests.v1beta1.Test.create",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="set",
+ full_name="tests.v1beta1.Test.set",
+ index=3,
+ number=4,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="update",
+ full_name="tests.v1beta1.Test.update",
+ index=4,
+ number=5,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="update_paths",
+ full_name="tests.v1beta1.Test.update_paths",
+ index=5,
+ number=6,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="delete",
+ full_name="tests.v1beta1.Test.delete",
+ index=6,
+ number=7,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="query",
+ full_name="tests.v1beta1.Test.query",
+ index=7,
+ number=8,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="listen",
+ full_name="tests.v1beta1.Test.listen",
+ index=8,
+ number=9,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name="test",
+ full_name="tests.v1beta1.Test.test",
+ index=0,
+ containing_type=None,
+ fields=[],
+ )
+ ],
+ serialized_start=328,
+ serialized_end=720,
+)
+
+
+_GETTEST = _descriptor.Descriptor(
+ name="GetTest",
+ full_name="tests.v1beta1.GetTest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="doc_ref_path",
+ full_name="tests.v1beta1.GetTest.doc_ref_path",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="request",
+ full_name="tests.v1beta1.GetTest.request",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=722,
+ serialized_end=816,
+)
+
+
+_CREATETEST = _descriptor.Descriptor(
+ name="CreateTest",
+ full_name="tests.v1beta1.CreateTest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="doc_ref_path",
+ full_name="tests.v1beta1.CreateTest.doc_ref_path",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="json_data",
+ full_name="tests.v1beta1.CreateTest.json_data",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="request",
+ full_name="tests.v1beta1.CreateTest.request",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="is_error",
+ full_name="tests.v1beta1.CreateTest.is_error",
+ index=3,
+ number=4,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=819,
+ serialized_end=948,
+)
+
+
+_SETTEST = _descriptor.Descriptor(
+ name="SetTest",
+ full_name="tests.v1beta1.SetTest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="doc_ref_path",
+ full_name="tests.v1beta1.SetTest.doc_ref_path",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="option",
+ full_name="tests.v1beta1.SetTest.option",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="json_data",
+ full_name="tests.v1beta1.SetTest.json_data",
+ index=2,
+ number=3,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="request",
+ full_name="tests.v1beta1.SetTest.request",
+ index=3,
+ number=4,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="is_error",
+ full_name="tests.v1beta1.SetTest.is_error",
+ index=4,
+ number=5,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=951,
+ serialized_end=1119,
+)
+
+
+_UPDATETEST = _descriptor.Descriptor(
+ name="UpdateTest",
+ full_name="tests.v1beta1.UpdateTest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="doc_ref_path",
+ full_name="tests.v1beta1.UpdateTest.doc_ref_path",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="precondition",
+ full_name="tests.v1beta1.UpdateTest.precondition",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="json_data",
+ full_name="tests.v1beta1.UpdateTest.json_data",
+ index=2,
+ number=3,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="request",
+ full_name="tests.v1beta1.UpdateTest.request",
+ index=3,
+ number=4,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="is_error",
+ full_name="tests.v1beta1.UpdateTest.is_error",
+ index=4,
+ number=5,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=1122,
+ serialized_end=1313,
+)
+
+
+_UPDATEPATHSTEST = _descriptor.Descriptor(
+ name="UpdatePathsTest",
+ full_name="tests.v1beta1.UpdatePathsTest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="doc_ref_path",
+ full_name="tests.v1beta1.UpdatePathsTest.doc_ref_path",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="precondition",
+ full_name="tests.v1beta1.UpdatePathsTest.precondition",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="field_paths",
+ full_name="tests.v1beta1.UpdatePathsTest.field_paths",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="json_values",
+ full_name="tests.v1beta1.UpdatePathsTest.json_values",
+ index=3,
+ number=4,
+ type=9,
+ cpp_type=9,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="request",
+ full_name="tests.v1beta1.UpdatePathsTest.request",
+ index=4,
+ number=5,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="is_error",
+ full_name="tests.v1beta1.UpdatePathsTest.is_error",
+ index=5,
+ number=6,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=1316,
+ serialized_end=1561,
+)
+
+
+_DELETETEST = _descriptor.Descriptor(
+ name="DeleteTest",
+ full_name="tests.v1beta1.DeleteTest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="doc_ref_path",
+ full_name="tests.v1beta1.DeleteTest.doc_ref_path",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="precondition",
+ full_name="tests.v1beta1.DeleteTest.precondition",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="request",
+ full_name="tests.v1beta1.DeleteTest.request",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="is_error",
+ full_name="tests.v1beta1.DeleteTest.is_error",
+ index=3,
+ number=4,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=1564,
+ serialized_end=1736,
+)
+
+
+_SETOPTION = _descriptor.Descriptor(
+ name="SetOption",
+ full_name="tests.v1beta1.SetOption",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="all",
+ full_name="tests.v1beta1.SetOption.all",
+ index=0,
+ number=1,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="fields",
+ full_name="tests.v1beta1.SetOption.fields",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=1738,
+ serialized_end=1804,
+)
+
+
+_QUERYTEST = _descriptor.Descriptor(
+ name="QueryTest",
+ full_name="tests.v1beta1.QueryTest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="coll_path",
+ full_name="tests.v1beta1.QueryTest.coll_path",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="clauses",
+ full_name="tests.v1beta1.QueryTest.clauses",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="query",
+ full_name="tests.v1beta1.QueryTest.query",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="is_error",
+ full_name="tests.v1beta1.QueryTest.is_error",
+ index=3,
+ number=4,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=1807,
+ serialized_end=1953,
+)
+
+
+_CLAUSE = _descriptor.Descriptor(
+ name="Clause",
+ full_name="tests.v1beta1.Clause",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="select",
+ full_name="tests.v1beta1.Clause.select",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="where",
+ full_name="tests.v1beta1.Clause.where",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="order_by",
+ full_name="tests.v1beta1.Clause.order_by",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="offset",
+ full_name="tests.v1beta1.Clause.offset",
+ index=3,
+ number=4,
+ type=5,
+ cpp_type=1,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="limit",
+ full_name="tests.v1beta1.Clause.limit",
+ index=4,
+ number=5,
+ type=5,
+ cpp_type=1,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="start_at",
+ full_name="tests.v1beta1.Clause.start_at",
+ index=5,
+ number=6,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="start_after",
+ full_name="tests.v1beta1.Clause.start_after",
+ index=6,
+ number=7,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="end_at",
+ full_name="tests.v1beta1.Clause.end_at",
+ index=7,
+ number=8,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="end_before",
+ full_name="tests.v1beta1.Clause.end_before",
+ index=8,
+ number=9,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name="clause",
+ full_name="tests.v1beta1.Clause.clause",
+ index=0,
+ containing_type=None,
+ fields=[],
+ )
+ ],
+ serialized_start=1956,
+ serialized_end=2308,
+)
+
+
+_SELECT = _descriptor.Descriptor(
+ name="Select",
+ full_name="tests.v1beta1.Select",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="fields",
+ full_name="tests.v1beta1.Select.fields",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ )
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=2310,
+ serialized_end=2360,
+)
+
+
+_WHERE = _descriptor.Descriptor(
+ name="Where",
+ full_name="tests.v1beta1.Where",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="path",
+ full_name="tests.v1beta1.Where.path",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="op",
+ full_name="tests.v1beta1.Where.op",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="json_value",
+ full_name="tests.v1beta1.Where.json_value",
+ index=2,
+ number=3,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=2362,
+ serialized_end=2441,
+)
+
+
+_ORDERBY = _descriptor.Descriptor(
+ name="OrderBy",
+ full_name="tests.v1beta1.OrderBy",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="path",
+ full_name="tests.v1beta1.OrderBy.path",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="direction",
+ full_name="tests.v1beta1.OrderBy.direction",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=2443,
+ serialized_end=2511,
+)
+
+
+_CURSOR = _descriptor.Descriptor(
+ name="Cursor",
+ full_name="tests.v1beta1.Cursor",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="doc_snapshot",
+ full_name="tests.v1beta1.Cursor.doc_snapshot",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="json_values",
+ full_name="tests.v1beta1.Cursor.json_values",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=2513,
+ serialized_end=2592,
+)
+
+
+_DOCSNAPSHOT = _descriptor.Descriptor(
+ name="DocSnapshot",
+ full_name="tests.v1beta1.DocSnapshot",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="path",
+ full_name="tests.v1beta1.DocSnapshot.path",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="json_data",
+ full_name="tests.v1beta1.DocSnapshot.json_data",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=2594,
+ serialized_end=2640,
+)
+
+
+_FIELDPATH = _descriptor.Descriptor(
+ name="FieldPath",
+ full_name="tests.v1beta1.FieldPath",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="field",
+ full_name="tests.v1beta1.FieldPath.field",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ )
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=2642,
+ serialized_end=2668,
+)
+
+
+_LISTENTEST = _descriptor.Descriptor(
+ name="ListenTest",
+ full_name="tests.v1beta1.ListenTest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="responses",
+ full_name="tests.v1beta1.ListenTest.responses",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="snapshots",
+ full_name="tests.v1beta1.ListenTest.snapshots",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="is_error",
+ full_name="tests.v1beta1.ListenTest.is_error",
+ index=2,
+ number=3,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=2671,
+ serialized_end=2806,
+)
+
+
+_SNAPSHOT = _descriptor.Descriptor(
+ name="Snapshot",
+ full_name="tests.v1beta1.Snapshot",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="docs",
+ full_name="tests.v1beta1.Snapshot.docs",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="changes",
+ full_name="tests.v1beta1.Snapshot.changes",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="read_time",
+ full_name="tests.v1beta1.Snapshot.read_time",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=2809,
+ serialized_end=2959,
+)
+
+
+_DOCCHANGE = _descriptor.Descriptor(
+ name="DocChange",
+ full_name="tests.v1beta1.DocChange",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="kind",
+ full_name="tests.v1beta1.DocChange.kind",
+ index=0,
+ number=1,
+ type=14,
+ cpp_type=8,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="doc",
+ full_name="tests.v1beta1.DocChange.doc",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="old_index",
+ full_name="tests.v1beta1.DocChange.old_index",
+ index=2,
+ number=3,
+ type=5,
+ cpp_type=1,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="new_index",
+ full_name="tests.v1beta1.DocChange.new_index",
+ index=3,
+ number=4,
+ type=5,
+ cpp_type=1,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[_DOCCHANGE_KIND],
+ options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=2962,
+ serialized_end=3173,
+)
+
+_TESTSUITE.fields_by_name["tests"].message_type = _TEST
+_TEST.fields_by_name["get"].message_type = _GETTEST
+_TEST.fields_by_name["create"].message_type = _CREATETEST
+_TEST.fields_by_name["set"].message_type = _SETTEST
+_TEST.fields_by_name["update"].message_type = _UPDATETEST
+_TEST.fields_by_name["update_paths"].message_type = _UPDATEPATHSTEST
+_TEST.fields_by_name["delete"].message_type = _DELETETEST
+_TEST.fields_by_name["query"].message_type = _QUERYTEST
+_TEST.fields_by_name["listen"].message_type = _LISTENTEST
+_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["get"])
+_TEST.fields_by_name["get"].containing_oneof = _TEST.oneofs_by_name["test"]
+_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["create"])
+_TEST.fields_by_name["create"].containing_oneof = _TEST.oneofs_by_name["test"]
+_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["set"])
+_TEST.fields_by_name["set"].containing_oneof = _TEST.oneofs_by_name["test"]
+_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["update"])
+_TEST.fields_by_name["update"].containing_oneof = _TEST.oneofs_by_name["test"]
+_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["update_paths"])
+_TEST.fields_by_name["update_paths"].containing_oneof = _TEST.oneofs_by_name["test"]
+_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["delete"])
+_TEST.fields_by_name["delete"].containing_oneof = _TEST.oneofs_by_name["test"]
+_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["query"])
+_TEST.fields_by_name["query"].containing_oneof = _TEST.oneofs_by_name["test"]
+_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["listen"])
+_TEST.fields_by_name["listen"].containing_oneof = _TEST.oneofs_by_name["test"]
+_GETTEST.fields_by_name[
+ "request"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2._GETDOCUMENTREQUEST
+)
+_CREATETEST.fields_by_name[
+ "request"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2._COMMITREQUEST
+)
+_SETTEST.fields_by_name["option"].message_type = _SETOPTION
+_SETTEST.fields_by_name[
+ "request"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2._COMMITREQUEST
+)
+_UPDATETEST.fields_by_name[
+ "precondition"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._PRECONDITION
+)
+_UPDATETEST.fields_by_name[
+ "request"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2._COMMITREQUEST
+)
+_UPDATEPATHSTEST.fields_by_name[
+ "precondition"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._PRECONDITION
+)
+_UPDATEPATHSTEST.fields_by_name["field_paths"].message_type = _FIELDPATH
+_UPDATEPATHSTEST.fields_by_name[
+ "request"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2._COMMITREQUEST
+)
+_DELETETEST.fields_by_name[
+ "precondition"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._PRECONDITION
+)
+_DELETETEST.fields_by_name[
+ "request"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2._COMMITREQUEST
+)
+_SETOPTION.fields_by_name["fields"].message_type = _FIELDPATH
+_QUERYTEST.fields_by_name["clauses"].message_type = _CLAUSE
+_QUERYTEST.fields_by_name[
+ "query"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_query__pb2._STRUCTUREDQUERY
+)
+_CLAUSE.fields_by_name["select"].message_type = _SELECT
+_CLAUSE.fields_by_name["where"].message_type = _WHERE
+_CLAUSE.fields_by_name["order_by"].message_type = _ORDERBY
+_CLAUSE.fields_by_name["start_at"].message_type = _CURSOR
+_CLAUSE.fields_by_name["start_after"].message_type = _CURSOR
+_CLAUSE.fields_by_name["end_at"].message_type = _CURSOR
+_CLAUSE.fields_by_name["end_before"].message_type = _CURSOR
+_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["select"])
+_CLAUSE.fields_by_name["select"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
+_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["where"])
+_CLAUSE.fields_by_name["where"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
+_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["order_by"])
+_CLAUSE.fields_by_name["order_by"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
+_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["offset"])
+_CLAUSE.fields_by_name["offset"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
+_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["limit"])
+_CLAUSE.fields_by_name["limit"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
+_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["start_at"])
+_CLAUSE.fields_by_name["start_at"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
+_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["start_after"])
+_CLAUSE.fields_by_name["start_after"].containing_oneof = _CLAUSE.oneofs_by_name[
+ "clause"
+]
+_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["end_at"])
+_CLAUSE.fields_by_name["end_at"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
+_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["end_before"])
+_CLAUSE.fields_by_name["end_before"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
+_SELECT.fields_by_name["fields"].message_type = _FIELDPATH
+_WHERE.fields_by_name["path"].message_type = _FIELDPATH
+_ORDERBY.fields_by_name["path"].message_type = _FIELDPATH
+_CURSOR.fields_by_name["doc_snapshot"].message_type = _DOCSNAPSHOT
+_LISTENTEST.fields_by_name[
+ "responses"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2._LISTENRESPONSE
+)
+_LISTENTEST.fields_by_name["snapshots"].message_type = _SNAPSHOT
+_SNAPSHOT.fields_by_name[
+ "docs"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT
+)
+_SNAPSHOT.fields_by_name["changes"].message_type = _DOCCHANGE
+_SNAPSHOT.fields_by_name[
+ "read_time"
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_DOCCHANGE.fields_by_name["kind"].enum_type = _DOCCHANGE_KIND
+_DOCCHANGE.fields_by_name[
+ "doc"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT
+)
+_DOCCHANGE_KIND.containing_type = _DOCCHANGE
+DESCRIPTOR.message_types_by_name["TestSuite"] = _TESTSUITE
+DESCRIPTOR.message_types_by_name["Test"] = _TEST
+DESCRIPTOR.message_types_by_name["GetTest"] = _GETTEST
+DESCRIPTOR.message_types_by_name["CreateTest"] = _CREATETEST
+DESCRIPTOR.message_types_by_name["SetTest"] = _SETTEST
+DESCRIPTOR.message_types_by_name["UpdateTest"] = _UPDATETEST
+DESCRIPTOR.message_types_by_name["UpdatePathsTest"] = _UPDATEPATHSTEST
+DESCRIPTOR.message_types_by_name["DeleteTest"] = _DELETETEST
+DESCRIPTOR.message_types_by_name["SetOption"] = _SETOPTION
+DESCRIPTOR.message_types_by_name["QueryTest"] = _QUERYTEST
+DESCRIPTOR.message_types_by_name["Clause"] = _CLAUSE
+DESCRIPTOR.message_types_by_name["Select"] = _SELECT
+DESCRIPTOR.message_types_by_name["Where"] = _WHERE
+DESCRIPTOR.message_types_by_name["OrderBy"] = _ORDERBY
+DESCRIPTOR.message_types_by_name["Cursor"] = _CURSOR
+DESCRIPTOR.message_types_by_name["DocSnapshot"] = _DOCSNAPSHOT
+DESCRIPTOR.message_types_by_name["FieldPath"] = _FIELDPATH
+DESCRIPTOR.message_types_by_name["ListenTest"] = _LISTENTEST
+DESCRIPTOR.message_types_by_name["Snapshot"] = _SNAPSHOT
+DESCRIPTOR.message_types_by_name["DocChange"] = _DOCCHANGE
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+TestSuite = _reflection.GeneratedProtocolMessageType(
+ "TestSuite",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_TESTSUITE,
+ __module__="test_v1beta1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1beta1.TestSuite)
+ ),
+)
+_sym_db.RegisterMessage(TestSuite)
+
+Test = _reflection.GeneratedProtocolMessageType(
+ "Test",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_TEST,
+ __module__="test_v1beta1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1beta1.Test)
+ ),
+)
+_sym_db.RegisterMessage(Test)
+
+GetTest = _reflection.GeneratedProtocolMessageType(
+ "GetTest",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_GETTEST,
+ __module__="test_v1beta1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1beta1.GetTest)
+ ),
+)
+_sym_db.RegisterMessage(GetTest)
+
+CreateTest = _reflection.GeneratedProtocolMessageType(
+ "CreateTest",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_CREATETEST,
+ __module__="test_v1beta1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1beta1.CreateTest)
+ ),
+)
+_sym_db.RegisterMessage(CreateTest)
+
+SetTest = _reflection.GeneratedProtocolMessageType(
+ "SetTest",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_SETTEST,
+ __module__="test_v1beta1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1beta1.SetTest)
+ ),
+)
+_sym_db.RegisterMessage(SetTest)
+
+UpdateTest = _reflection.GeneratedProtocolMessageType(
+ "UpdateTest",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_UPDATETEST,
+ __module__="test_v1beta1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1beta1.UpdateTest)
+ ),
+)
+_sym_db.RegisterMessage(UpdateTest)
+
+UpdatePathsTest = _reflection.GeneratedProtocolMessageType(
+ "UpdatePathsTest",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_UPDATEPATHSTEST,
+ __module__="test_v1beta1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1beta1.UpdatePathsTest)
+ ),
+)
+_sym_db.RegisterMessage(UpdatePathsTest)
+
+DeleteTest = _reflection.GeneratedProtocolMessageType(
+ "DeleteTest",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_DELETETEST,
+ __module__="test_v1beta1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1beta1.DeleteTest)
+ ),
+)
+_sym_db.RegisterMessage(DeleteTest)
+
+SetOption = _reflection.GeneratedProtocolMessageType(
+ "SetOption",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_SETOPTION,
+ __module__="test_v1beta1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1beta1.SetOption)
+ ),
+)
+_sym_db.RegisterMessage(SetOption)
+
+QueryTest = _reflection.GeneratedProtocolMessageType(
+ "QueryTest",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_QUERYTEST,
+ __module__="test_v1beta1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1beta1.QueryTest)
+ ),
+)
+_sym_db.RegisterMessage(QueryTest)
+
+Clause = _reflection.GeneratedProtocolMessageType(
+ "Clause",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_CLAUSE,
+ __module__="test_v1beta1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1beta1.Clause)
+ ),
+)
+_sym_db.RegisterMessage(Clause)
+
+Select = _reflection.GeneratedProtocolMessageType(
+ "Select",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_SELECT,
+ __module__="test_v1beta1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1beta1.Select)
+ ),
+)
+_sym_db.RegisterMessage(Select)
+
+Where = _reflection.GeneratedProtocolMessageType(
+ "Where",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_WHERE,
+ __module__="test_v1beta1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1beta1.Where)
+ ),
+)
+_sym_db.RegisterMessage(Where)
+
+OrderBy = _reflection.GeneratedProtocolMessageType(
+ "OrderBy",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_ORDERBY,
+ __module__="test_v1beta1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1beta1.OrderBy)
+ ),
+)
+_sym_db.RegisterMessage(OrderBy)
+
+Cursor = _reflection.GeneratedProtocolMessageType(
+ "Cursor",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_CURSOR,
+ __module__="test_v1beta1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1beta1.Cursor)
+ ),
+)
+_sym_db.RegisterMessage(Cursor)
+
+DocSnapshot = _reflection.GeneratedProtocolMessageType(
+ "DocSnapshot",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_DOCSNAPSHOT,
+ __module__="test_v1beta1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1beta1.DocSnapshot)
+ ),
+)
+_sym_db.RegisterMessage(DocSnapshot)
+
+FieldPath = _reflection.GeneratedProtocolMessageType(
+ "FieldPath",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_FIELDPATH,
+ __module__="test_v1beta1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1beta1.FieldPath)
+ ),
+)
+_sym_db.RegisterMessage(FieldPath)
+
+ListenTest = _reflection.GeneratedProtocolMessageType(
+ "ListenTest",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_LISTENTEST,
+ __module__="test_v1beta1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1beta1.ListenTest)
+ ),
+)
+_sym_db.RegisterMessage(ListenTest)
+
+Snapshot = _reflection.GeneratedProtocolMessageType(
+ "Snapshot",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_SNAPSHOT,
+ __module__="test_v1beta1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1beta1.Snapshot)
+ ),
+)
+_sym_db.RegisterMessage(Snapshot)
+
+DocChange = _reflection.GeneratedProtocolMessageType(
+ "DocChange",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_DOCCHANGE,
+ __module__="test_v1beta1_pb2"
+ # @@protoc_insertion_point(class_scope:tests.v1beta1.DocChange)
+ ),
+)
+_sym_db.RegisterMessage(DocChange)
+
+
+DESCRIPTOR.has_options = True
+DESCRIPTOR._options = _descriptor._ParseOptions(
+ descriptor_pb2.FileOptions(),
+ _b(
+ '\n&com.google.cloud.firestore.conformance\252\002"Google.Cloud.Firestore.Tests.Proto\312\002(Google\\Cloud\\Firestore\\Tests\\Conformance'
+ ),
+)
+# @@protoc_insertion_point(module_scope)
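+
+# Illustrative usage only (not part of the generated code): a minimal sketch of
+# loading one of the cross-language conformance fixtures with the messages
+# registered above; the fixture filename is hypothetical.
+#
+#     suite = TestSuite()
+#     with open("create-basic.binproto", "rb") as fp:
+#         suite.ParseFromString(fp.read())
+#     for test in suite.tests:
+#         # Each Test carries exactly one case, discriminated by the `test` oneof.
+#         print(test.description, test.WhichOneof("test"))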
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/write.proto b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/write.proto
new file mode 100644
index 000000000..ba75b42a0
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/write.proto
@@ -0,0 +1,255 @@
+// Copyright 2019 Google LLC.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+syntax = "proto3";
+
+package google.firestore.v1beta1;
+
+import "google/firestore/v1beta1/common.proto";
+import "google/firestore/v1beta1/document.proto";
+import "google/protobuf/timestamp.proto";
+import "google/api/annotations.proto";
+
+option csharp_namespace = "Google.Cloud.Firestore.V1Beta1";
+option go_package = "google.golang.org/genproto/googleapis/firestore/v1beta1;firestore";
+option java_multiple_files = true;
+option java_outer_classname = "WriteProto";
+option java_package = "com.google.firestore.v1beta1";
+option objc_class_prefix = "GCFS";
+option php_namespace = "Google\\Cloud\\Firestore\\V1beta1";
+option ruby_package = "Google::Cloud::Firestore::V1beta1";
+
+// A write on a document.
+message Write {
+ // The operation to execute.
+ oneof operation {
+ // A document to write.
+ Document update = 1;
+
+ // A document name to delete. In the format:
+ // `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
+ string delete = 2;
+
+ // Applies a transformation to a document.
+ // At most one `transform` per document is allowed in a given request.
+ // An `update` cannot follow a `transform` on the same document in a given
+ // request.
+ DocumentTransform transform = 6;
+ }
+
+ // The fields to update in this write.
+ //
+ // This field can be set only when the operation is `update`.
+ // If the mask is not set for an `update` and the document exists, any
+ // existing data will be overwritten.
+ // If the mask is set and the document on the server has fields not covered by
+ // the mask, they are left unchanged.
+ // Fields referenced in the mask, but not present in the input document, are
+ // deleted from the document on the server.
+ // The field paths in this mask must not contain a reserved field name.
+ DocumentMask update_mask = 3;
+
+ // An optional precondition on the document.
+ //
+ // The write will fail if this is set and not met by the target document.
+ Precondition current_document = 4;
+}
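+
+// Illustrative only (not part of the upstream file): in text format, a Write
+// that updates a single field while leaving the rest of a hypothetical
+// document untouched could look like:
+//
+//   update {
+//     name: "projects/p/databases/(default)/documents/users/alovelace"
+//     fields { key: "year" value { integer_value: 1815 } }
+//   }
+//   update_mask { field_paths: "year" }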
+
+// A transformation of a document.
+message DocumentTransform {
+ // A transformation of a field of the document.
+ message FieldTransform {
+ // A value that is calculated by the server.
+ enum ServerValue {
+ // Unspecified. This value must not be used.
+ SERVER_VALUE_UNSPECIFIED = 0;
+
+ // The time at which the server processed the request, with millisecond
+ // precision.
+ REQUEST_TIME = 1;
+ }
+
+ // The path of the field. See [Document.fields][google.firestore.v1beta1.Document.fields] for the field path syntax
+ // reference.
+ string field_path = 1;
+
+ // The transformation to apply on the field.
+ oneof transform_type {
+ // Sets the field to the given server value.
+ ServerValue set_to_server_value = 2;
+
+ // Adds the given value to the field's current value.
+ //
+ // This must be an integer or a double value.
+ // If the field is not an integer or double, or if the field does not yet
+ // exist, the transformation will set the field to the given value.
+      // If either the given value or the current field value is a double,
+ // both values will be interpreted as doubles. Double arithmetic and
+ // representation of double values follow IEEE 754 semantics.
+ // If there is positive/negative integer overflow, the field is resolved
+ // to the largest magnitude positive/negative integer.
+ Value increment = 3;
+
+ // Sets the field to the maximum of its current value and the given value.
+ //
+ // This must be an integer or a double value.
+ // If the field is not an integer or double, or if the field does not yet
+ // exist, the transformation will set the field to the given value.
+ // If a maximum operation is applied where the field and the input value
+ // are of mixed types (that is - one is an integer and one is a double)
+ // the field takes on the type of the larger operand. If the operands are
+ // equivalent (e.g. 3 and 3.0), the field does not change.
+ // 0, 0.0, and -0.0 are all zero. The maximum of a zero stored value and
+ // zero input value is always the stored value.
+ // The maximum of any numeric value x and NaN is NaN.
+ Value maximum = 4;
+
+ // Sets the field to the minimum of its current value and the given value.
+ //
+ // This must be an integer or a double value.
+ // If the field is not an integer or double, or if the field does not yet
+ // exist, the transformation will set the field to the input value.
+ // If a minimum operation is applied where the field and the input value
+ // are of mixed types (that is - one is an integer and one is a double)
+ // the field takes on the type of the smaller operand. If the operands are
+ // equivalent (e.g. 3 and 3.0), the field does not change.
+ // 0, 0.0, and -0.0 are all zero. The minimum of a zero stored value and
+ // zero input value is always the stored value.
+ // The minimum of any numeric value x and NaN is NaN.
+ Value minimum = 5;
+
+ // Append the given elements in order if they are not already present in
+ // the current field value.
+ // If the field is not an array, or if the field does not yet exist, it is
+ // first set to the empty array.
+ //
+ // Equivalent numbers of different types (e.g. 3L and 3.0) are
+ // considered equal when checking if a value is missing.
+ // NaN is equal to NaN, and Null is equal to Null.
+ // If the input contains multiple equivalent values, only the first will
+ // be considered.
+ //
+ // The corresponding transform_result will be the null value.
+ ArrayValue append_missing_elements = 6;
+
+ // Remove all of the given elements from the array in the field.
+ // If the field is not an array, or if the field does not yet exist, it is
+ // set to the empty array.
+ //
+      // Equivalent numbers of different types (e.g. 3L and 3.0) are
+ // considered equal when deciding whether an element should be removed.
+ // NaN is equal to NaN, and Null is equal to Null.
+ // This will remove all equivalent values if there are duplicates.
+ //
+ // The corresponding transform_result will be the null value.
+ ArrayValue remove_all_from_array = 7;
+ }
+ }
+
+ // The name of the document to transform.
+ string document = 1;
+
+ // The list of transformations to apply to the fields of the document, in
+ // order.
+ // This must not be empty.
+ repeated FieldTransform field_transforms = 2;
+}
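+
+// Illustrative only: a DocumentTransform that atomically increments a counter
+// field on a hypothetical document, in text format:
+//
+//   document: "projects/p/databases/(default)/documents/stats/global"
+//   field_transforms {
+//     field_path: "visits"
+//     increment { integer_value: 1 }
+//   }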
+
+// The result of applying a write.
+message WriteResult {
+ // The last update time of the document after applying the write. Not set
+ // after a `delete`.
+ //
+ // If the write did not actually change the document, this will be the
+ // previous update_time.
+ google.protobuf.Timestamp update_time = 1;
+
+ // The results of applying each [DocumentTransform.FieldTransform][google.firestore.v1beta1.DocumentTransform.FieldTransform], in the
+ // same order.
+ repeated Value transform_results = 2;
+}
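+
+// Illustrative only: the WriteResult for the increment sketched above would
+// carry the post-transform value, e.g. (timestamp and value are made up):
+//
+//   update_time { seconds: 1560000000 }
+//   transform_results { integer_value: 42 }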
+
+// A [Document][google.firestore.v1beta1.Document] has changed.
+//
+// May be the result of multiple [writes][google.firestore.v1beta1.Write], including deletes, that
+// ultimately resulted in a new value for the [Document][google.firestore.v1beta1.Document].
+//
+// Multiple [DocumentChange][google.firestore.v1beta1.DocumentChange] messages may be returned for the same logical
+// change, if multiple targets are affected.
+message DocumentChange {
+ // The new state of the [Document][google.firestore.v1beta1.Document].
+ //
+ // If `mask` is set, contains only fields that were updated or added.
+ Document document = 1;
+
+ // A set of target IDs of targets that match this document.
+ repeated int32 target_ids = 5;
+
+ // A set of target IDs for targets that no longer match this document.
+ repeated int32 removed_target_ids = 6;
+}
+
+// A [Document][google.firestore.v1beta1.Document] has been deleted.
+//
+// May be the result of multiple [writes][google.firestore.v1beta1.Write], including updates, the
+// last of which deleted the [Document][google.firestore.v1beta1.Document].
+//
+// Multiple [DocumentDelete][google.firestore.v1beta1.DocumentDelete] messages may be returned for the same logical
+// delete, if multiple targets are affected.
+message DocumentDelete {
+ // The resource name of the [Document][google.firestore.v1beta1.Document] that was deleted.
+ string document = 1;
+
+ // A set of target IDs for targets that previously matched this entity.
+ repeated int32 removed_target_ids = 6;
+
+ // The read timestamp at which the delete was observed.
+ //
+  // Greater than or equal to the `commit_time` of the delete.
+ google.protobuf.Timestamp read_time = 4;
+}
+
+// A [Document][google.firestore.v1beta1.Document] has been removed from the view of the targets.
+//
+// Sent if the document is no longer relevant to a target and is out of view.
+// Can be sent instead of a DocumentDelete or a DocumentChange if the server
+// cannot send the new value of the document.
+//
+// Multiple [DocumentRemove][google.firestore.v1beta1.DocumentRemove] messages may be returned for the same logical
+// write or delete, if multiple targets are affected.
+message DocumentRemove {
+ // The resource name of the [Document][google.firestore.v1beta1.Document] that has gone out of view.
+ string document = 1;
+
+ // A set of target IDs for targets that previously matched this document.
+ repeated int32 removed_target_ids = 2;
+
+ // The read timestamp at which the remove was observed.
+ //
+  // Greater than or equal to the `commit_time` of the change/delete/remove.
+ google.protobuf.Timestamp read_time = 4;
+}
+
+// A digest of all the documents that match a given target.
+message ExistenceFilter {
+ // The target ID to which this filter applies.
+ int32 target_id = 1;
+
+ // The total count of documents that match [target_id][google.firestore.v1beta1.ExistenceFilter.target_id].
+ //
+ // If different from the count of documents in the client that match, the
+ // client must manually determine which documents no longer match the target.
+ int32 count = 2;
+}
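+
+// Illustrative example (hand-written comment): if a client has three cached
+// documents matching target 5 and receives an ExistenceFilter with
+// target_id: 5 and count: 2, one of its cached documents no longer matches,
+// and the client must re-query to determine which one.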
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/write_pb2.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/write_pb2.py
new file mode 100644
index 000000000..c58dd47d5
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/write_pb2.py
@@ -0,0 +1,1178 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/cloud/firestore_v1beta1/proto/write.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.cloud.firestore_v1beta1.proto import (
+ common_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2,
+)
+from google.cloud.firestore_v1beta1.proto import (
+ document_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2,
+)
+from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
+from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name="google/cloud/firestore_v1beta1/proto/write.proto",
+ package="google.firestore.v1beta1",
+ syntax="proto3",
+ serialized_options=b"\n\034com.google.firestore.v1beta1B\nWriteProtoP\001ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\242\002\004GCFS\252\002\036Google.Cloud.Firestore.V1Beta1\312\002\036Google\\Cloud\\Firestore\\V1beta1\352\002!Google::Cloud::Firestore::V1beta1",
+ create_key=_descriptor._internal_create_key,
+ serialized_pb=b'\n0google/cloud/firestore_v1beta1/proto/write.proto\x12\x18google.firestore.v1beta1\x1a\x31google/cloud/firestore_v1beta1/proto/common.proto\x1a\x33google/cloud/firestore_v1beta1/proto/document.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/api/annotations.proto"\x9d\x02\n\x05Write\x12\x34\n\x06update\x18\x01 \x01(\x0b\x32".google.firestore.v1beta1.DocumentH\x00\x12\x10\n\x06\x64\x65lete\x18\x02 \x01(\tH\x00\x12@\n\ttransform\x18\x06 \x01(\x0b\x32+.google.firestore.v1beta1.DocumentTransformH\x00\x12;\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask\x12@\n\x10\x63urrent_document\x18\x04 \x01(\x0b\x32&.google.firestore.v1beta1.PreconditionB\x0b\n\toperation"\x88\x05\n\x11\x44ocumentTransform\x12\x10\n\x08\x64ocument\x18\x01 \x01(\t\x12T\n\x10\x66ield_transforms\x18\x02 \x03(\x0b\x32:.google.firestore.v1beta1.DocumentTransform.FieldTransform\x1a\x8a\x04\n\x0e\x46ieldTransform\x12\x12\n\nfield_path\x18\x01 \x01(\t\x12\x65\n\x13set_to_server_value\x18\x02 \x01(\x0e\x32\x46.google.firestore.v1beta1.DocumentTransform.FieldTransform.ServerValueH\x00\x12\x34\n\tincrement\x18\x03 \x01(\x0b\x32\x1f.google.firestore.v1beta1.ValueH\x00\x12\x32\n\x07maximum\x18\x04 \x01(\x0b\x32\x1f.google.firestore.v1beta1.ValueH\x00\x12\x32\n\x07minimum\x18\x05 \x01(\x0b\x32\x1f.google.firestore.v1beta1.ValueH\x00\x12G\n\x17\x61ppend_missing_elements\x18\x06 \x01(\x0b\x32$.google.firestore.v1beta1.ArrayValueH\x00\x12\x45\n\x15remove_all_from_array\x18\x07 \x01(\x0b\x32$.google.firestore.v1beta1.ArrayValueH\x00"=\n\x0bServerValue\x12\x1c\n\x18SERVER_VALUE_UNSPECIFIED\x10\x00\x12\x10\n\x0cREQUEST_TIME\x10\x01\x42\x10\n\x0etransform_type"z\n\x0bWriteResult\x12/\n\x0bupdate_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12:\n\x11transform_results\x18\x02 \x03(\x0b\x32\x1f.google.firestore.v1beta1.Value"v\n\x0e\x44ocumentChange\x12\x34\n\x08\x64ocument\x18\x01 \x01(\x0b\x32".google.firestore.v1beta1.Document\x12\x12\n\ntarget_ids\x18\x05 \x03(\x05\x12\x1a\n\x12removed_target_ids\x18\x06 \x03(\x05"m\n\x0e\x44ocumentDelete\x12\x10\n\x08\x64ocument\x18\x01 \x01(\t\x12\x1a\n\x12removed_target_ids\x18\x06 \x03(\x05\x12-\n\tread_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"m\n\x0e\x44ocumentRemove\x12\x10\n\x08\x64ocument\x18\x01 \x01(\t\x12\x1a\n\x12removed_target_ids\x18\x02 \x03(\x05\x12-\n\tread_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"3\n\x0f\x45xistenceFilter\x12\x11\n\ttarget_id\x18\x01 \x01(\x05\x12\r\n\x05\x63ount\x18\x02 \x01(\x05\x42\xdc\x01\n\x1c\x63om.google.firestore.v1beta1B\nWriteProtoP\x01ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\xa2\x02\x04GCFS\xaa\x02\x1eGoogle.Cloud.Firestore.V1Beta1\xca\x02\x1eGoogle\\Cloud\\Firestore\\V1beta1\xea\x02!Google::Cloud::Firestore::V1beta1b\x06proto3',
+ dependencies=[
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2.DESCRIPTOR,
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.DESCRIPTOR,
+ google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,
+ google_dot_api_dot_annotations__pb2.DESCRIPTOR,
+ ],
+)
+
+
+_DOCUMENTTRANSFORM_FIELDTRANSFORM_SERVERVALUE = _descriptor.EnumDescriptor(
+ name="ServerValue",
+ full_name="google.firestore.v1beta1.DocumentTransform.FieldTransform.ServerValue",
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name="SERVER_VALUE_UNSPECIFIED",
+ index=0,
+ number=0,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="REQUEST_TIME",
+ index=1,
+ number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ serialized_start=1103,
+ serialized_end=1164,
+)
+_sym_db.RegisterEnumDescriptor(_DOCUMENTTRANSFORM_FIELDTRANSFORM_SERVERVALUE)
+
+
+_WRITE = _descriptor.Descriptor(
+ name="Write",
+ full_name="google.firestore.v1beta1.Write",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="update",
+ full_name="google.firestore.v1beta1.Write.update",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="delete",
+ full_name="google.firestore.v1beta1.Write.delete",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="transform",
+ full_name="google.firestore.v1beta1.Write.transform",
+ index=2,
+ number=6,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="update_mask",
+ full_name="google.firestore.v1beta1.Write.update_mask",
+ index=3,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="current_document",
+ full_name="google.firestore.v1beta1.Write.current_document",
+ index=4,
+ number=4,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name="operation",
+ full_name="google.firestore.v1beta1.Write.operation",
+ index=0,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[],
+ ),
+ ],
+ serialized_start=246,
+ serialized_end=531,
+)
+
+
+_DOCUMENTTRANSFORM_FIELDTRANSFORM = _descriptor.Descriptor(
+ name="FieldTransform",
+ full_name="google.firestore.v1beta1.DocumentTransform.FieldTransform",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="field_path",
+ full_name="google.firestore.v1beta1.DocumentTransform.FieldTransform.field_path",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="set_to_server_value",
+ full_name="google.firestore.v1beta1.DocumentTransform.FieldTransform.set_to_server_value",
+ index=1,
+ number=2,
+ type=14,
+ cpp_type=8,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="increment",
+ full_name="google.firestore.v1beta1.DocumentTransform.FieldTransform.increment",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="maximum",
+ full_name="google.firestore.v1beta1.DocumentTransform.FieldTransform.maximum",
+ index=3,
+ number=4,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="minimum",
+ full_name="google.firestore.v1beta1.DocumentTransform.FieldTransform.minimum",
+ index=4,
+ number=5,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="append_missing_elements",
+ full_name="google.firestore.v1beta1.DocumentTransform.FieldTransform.append_missing_elements",
+ index=5,
+ number=6,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="remove_all_from_array",
+ full_name="google.firestore.v1beta1.DocumentTransform.FieldTransform.remove_all_from_array",
+ index=6,
+ number=7,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[_DOCUMENTTRANSFORM_FIELDTRANSFORM_SERVERVALUE,],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name="transform_type",
+ full_name="google.firestore.v1beta1.DocumentTransform.FieldTransform.transform_type",
+ index=0,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[],
+ ),
+ ],
+ serialized_start=660,
+ serialized_end=1182,
+)
+
+_DOCUMENTTRANSFORM = _descriptor.Descriptor(
+ name="DocumentTransform",
+ full_name="google.firestore.v1beta1.DocumentTransform",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="document",
+ full_name="google.firestore.v1beta1.DocumentTransform.document",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="field_transforms",
+ full_name="google.firestore.v1beta1.DocumentTransform.field_transforms",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[_DOCUMENTTRANSFORM_FIELDTRANSFORM,],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=534,
+ serialized_end=1182,
+)
+
+
+_WRITERESULT = _descriptor.Descriptor(
+ name="WriteResult",
+ full_name="google.firestore.v1beta1.WriteResult",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="update_time",
+ full_name="google.firestore.v1beta1.WriteResult.update_time",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="transform_results",
+ full_name="google.firestore.v1beta1.WriteResult.transform_results",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=1184,
+ serialized_end=1306,
+)
+
+
+_DOCUMENTCHANGE = _descriptor.Descriptor(
+ name="DocumentChange",
+ full_name="google.firestore.v1beta1.DocumentChange",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="document",
+ full_name="google.firestore.v1beta1.DocumentChange.document",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="target_ids",
+ full_name="google.firestore.v1beta1.DocumentChange.target_ids",
+ index=1,
+ number=5,
+ type=5,
+ cpp_type=1,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="removed_target_ids",
+ full_name="google.firestore.v1beta1.DocumentChange.removed_target_ids",
+ index=2,
+ number=6,
+ type=5,
+ cpp_type=1,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=1308,
+ serialized_end=1426,
+)
+
+
+_DOCUMENTDELETE = _descriptor.Descriptor(
+ name="DocumentDelete",
+ full_name="google.firestore.v1beta1.DocumentDelete",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="document",
+ full_name="google.firestore.v1beta1.DocumentDelete.document",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="removed_target_ids",
+ full_name="google.firestore.v1beta1.DocumentDelete.removed_target_ids",
+ index=1,
+ number=6,
+ type=5,
+ cpp_type=1,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="read_time",
+ full_name="google.firestore.v1beta1.DocumentDelete.read_time",
+ index=2,
+ number=4,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=1428,
+ serialized_end=1537,
+)
+
+
+_DOCUMENTREMOVE = _descriptor.Descriptor(
+ name="DocumentRemove",
+ full_name="google.firestore.v1beta1.DocumentRemove",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="document",
+ full_name="google.firestore.v1beta1.DocumentRemove.document",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=b"".decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="removed_target_ids",
+ full_name="google.firestore.v1beta1.DocumentRemove.removed_target_ids",
+ index=1,
+ number=2,
+ type=5,
+ cpp_type=1,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="read_time",
+ full_name="google.firestore.v1beta1.DocumentRemove.read_time",
+ index=2,
+ number=4,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=1539,
+ serialized_end=1648,
+)
+
+
+_EXISTENCEFILTER = _descriptor.Descriptor(
+ name="ExistenceFilter",
+ full_name="google.firestore.v1beta1.ExistenceFilter",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="target_id",
+ full_name="google.firestore.v1beta1.ExistenceFilter.target_id",
+ index=0,
+ number=1,
+ type=5,
+ cpp_type=1,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.FieldDescriptor(
+ name="count",
+ full_name="google.firestore.v1beta1.ExistenceFilter.count",
+ index=1,
+ number=2,
+ type=5,
+ cpp_type=1,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=1650,
+ serialized_end=1701,
+)
+
+_WRITE.fields_by_name[
+ "update"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT
+)
+_WRITE.fields_by_name["transform"].message_type = _DOCUMENTTRANSFORM
+_WRITE.fields_by_name[
+ "update_mask"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._DOCUMENTMASK
+)
+_WRITE.fields_by_name[
+ "current_document"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._PRECONDITION
+)
+_WRITE.oneofs_by_name["operation"].fields.append(_WRITE.fields_by_name["update"])
+_WRITE.fields_by_name["update"].containing_oneof = _WRITE.oneofs_by_name["operation"]
+_WRITE.oneofs_by_name["operation"].fields.append(_WRITE.fields_by_name["delete"])
+_WRITE.fields_by_name["delete"].containing_oneof = _WRITE.oneofs_by_name["operation"]
+_WRITE.oneofs_by_name["operation"].fields.append(_WRITE.fields_by_name["transform"])
+_WRITE.fields_by_name["transform"].containing_oneof = _WRITE.oneofs_by_name["operation"]
+_DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name[
+ "set_to_server_value"
+].enum_type = _DOCUMENTTRANSFORM_FIELDTRANSFORM_SERVERVALUE
+_DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name[
+ "increment"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._VALUE
+)
+_DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name[
+ "maximum"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._VALUE
+)
+_DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name[
+ "minimum"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._VALUE
+)
+_DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name[
+ "append_missing_elements"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._ARRAYVALUE
+)
+_DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name[
+ "remove_all_from_array"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._ARRAYVALUE
+)
+_DOCUMENTTRANSFORM_FIELDTRANSFORM.containing_type = _DOCUMENTTRANSFORM
+_DOCUMENTTRANSFORM_FIELDTRANSFORM_SERVERVALUE.containing_type = (
+ _DOCUMENTTRANSFORM_FIELDTRANSFORM
+)
+_DOCUMENTTRANSFORM_FIELDTRANSFORM.oneofs_by_name["transform_type"].fields.append(
+ _DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name["set_to_server_value"]
+)
+_DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name[
+ "set_to_server_value"
+].containing_oneof = _DOCUMENTTRANSFORM_FIELDTRANSFORM.oneofs_by_name["transform_type"]
+_DOCUMENTTRANSFORM_FIELDTRANSFORM.oneofs_by_name["transform_type"].fields.append(
+ _DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name["increment"]
+)
+_DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name[
+ "increment"
+].containing_oneof = _DOCUMENTTRANSFORM_FIELDTRANSFORM.oneofs_by_name["transform_type"]
+_DOCUMENTTRANSFORM_FIELDTRANSFORM.oneofs_by_name["transform_type"].fields.append(
+ _DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name["maximum"]
+)
+_DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name[
+ "maximum"
+].containing_oneof = _DOCUMENTTRANSFORM_FIELDTRANSFORM.oneofs_by_name["transform_type"]
+_DOCUMENTTRANSFORM_FIELDTRANSFORM.oneofs_by_name["transform_type"].fields.append(
+ _DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name["minimum"]
+)
+_DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name[
+ "minimum"
+].containing_oneof = _DOCUMENTTRANSFORM_FIELDTRANSFORM.oneofs_by_name["transform_type"]
+_DOCUMENTTRANSFORM_FIELDTRANSFORM.oneofs_by_name["transform_type"].fields.append(
+ _DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name["append_missing_elements"]
+)
+_DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name[
+ "append_missing_elements"
+].containing_oneof = _DOCUMENTTRANSFORM_FIELDTRANSFORM.oneofs_by_name["transform_type"]
+_DOCUMENTTRANSFORM_FIELDTRANSFORM.oneofs_by_name["transform_type"].fields.append(
+ _DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name["remove_all_from_array"]
+)
+_DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name[
+ "remove_all_from_array"
+].containing_oneof = _DOCUMENTTRANSFORM_FIELDTRANSFORM.oneofs_by_name["transform_type"]
+_DOCUMENTTRANSFORM.fields_by_name[
+ "field_transforms"
+].message_type = _DOCUMENTTRANSFORM_FIELDTRANSFORM
+_WRITERESULT.fields_by_name[
+ "update_time"
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_WRITERESULT.fields_by_name[
+ "transform_results"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._VALUE
+)
+_DOCUMENTCHANGE.fields_by_name[
+ "document"
+].message_type = (
+ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT
+)
+_DOCUMENTDELETE.fields_by_name[
+ "read_time"
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_DOCUMENTREMOVE.fields_by_name[
+ "read_time"
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+DESCRIPTOR.message_types_by_name["Write"] = _WRITE
+DESCRIPTOR.message_types_by_name["DocumentTransform"] = _DOCUMENTTRANSFORM
+DESCRIPTOR.message_types_by_name["WriteResult"] = _WRITERESULT
+DESCRIPTOR.message_types_by_name["DocumentChange"] = _DOCUMENTCHANGE
+DESCRIPTOR.message_types_by_name["DocumentDelete"] = _DOCUMENTDELETE
+DESCRIPTOR.message_types_by_name["DocumentRemove"] = _DOCUMENTREMOVE
+DESCRIPTOR.message_types_by_name["ExistenceFilter"] = _EXISTENCEFILTER
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+Write = _reflection.GeneratedProtocolMessageType(
+ "Write",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _WRITE,
+ "__module__": "google.cloud.firestore_v1beta1.proto.write_pb2",
+ "__doc__": """A write on a document.
+
+ Attributes:
+ operation:
+ The operation to execute.
+ update:
+ A document to write.
+ delete:
+          A document name to delete. In the format:
+          ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
+ transform:
+ Applies a transformation to a document. At most one
+ ``transform`` per document is allowed in a given request. An
+ ``update`` cannot follow a ``transform`` on the same document
+ in a given request.
+ update_mask:
+ The fields to update in this write. This field can be set
+ only when the operation is ``update``. If the mask is not set
+ for an ``update`` and the document exists, any existing data
+ will be overwritten. If the mask is set and the document on
+ the server has fields not covered by the mask, they are left
+ unchanged. Fields referenced in the mask, but not present in
+ the input document, are deleted from the document on the
+ server. The field paths in this mask must not contain a
+ reserved field name.
+ current_document:
+ An optional precondition on the document. The write will fail
+ if this is set and not met by the target document.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.Write)
+ },
+)
+_sym_db.RegisterMessage(Write)
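+
+# Hand-written illustrative sketch (not generated code; the resource name is
+# hypothetical): a Write message that deletes a single document via the
+# ``delete`` arm of the ``operation`` oneof.
+#
+#     write = Write(
+#         delete="projects/my-project/databases/(default)/documents/users/alice"
+#     )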
+
+DocumentTransform = _reflection.GeneratedProtocolMessageType(
+ "DocumentTransform",
+ (_message.Message,),
+ {
+ "FieldTransform": _reflection.GeneratedProtocolMessageType(
+ "FieldTransform",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _DOCUMENTTRANSFORM_FIELDTRANSFORM,
+ "__module__": "google.cloud.firestore_v1beta1.proto.write_pb2",
+ "__doc__": """A transformation of a field of the document.
+
+ Attributes:
+ field_path:
+ The path of the field. See
+ [Document.fields][google.firestore.v1beta1.Document.fields]
+ for the field path syntax reference.
+ transform_type:
+ The transformation to apply on the field.
+ set_to_server_value:
+ Sets the field to the given server value.
+ increment:
+ Adds the given value to the field’s current value. This must
+ be an integer or a double value. If the field is not an
+ integer or double, or if the field does not yet exist, the
+ transformation will set the field to the given value. If
+          either the given value or the current field value is a double,
+          both values will be interpreted as doubles. Double
+ arithmetic and representation of double values follow IEEE 754
+ semantics. If there is positive/negative integer overflow, the
+ field is resolved to the largest magnitude positive/negative
+ integer.
+ maximum:
+ Sets the field to the maximum of its current value and the
+ given value. This must be an integer or a double value. If
+ the field is not an integer or double, or if the field does
+ not yet exist, the transformation will set the field to the
+ given value. If a maximum operation is applied where the field
+ and the input value are of mixed types (that is - one is an
+ integer and one is a double) the field takes on the type of
+ the larger operand. If the operands are equivalent (e.g. 3 and
+ 3.0), the field does not change. 0, 0.0, and -0.0 are all
+ zero. The maximum of a zero stored value and zero input value
+ is always the stored value. The maximum of any numeric value x
+ and NaN is NaN.
+ minimum:
+ Sets the field to the minimum of its current value and the
+ given value. This must be an integer or a double value. If
+ the field is not an integer or double, or if the field does
+ not yet exist, the transformation will set the field to the
+ input value. If a minimum operation is applied where the field
+ and the input value are of mixed types (that is - one is an
+ integer and one is a double) the field takes on the type of
+ the smaller operand. If the operands are equivalent (e.g. 3
+ and 3.0), the field does not change. 0, 0.0, and -0.0 are all
+ zero. The minimum of a zero stored value and zero input value
+ is always the stored value. The minimum of any numeric value x
+ and NaN is NaN.
+ append_missing_elements:
+ Append the given elements in order if they are not already
+ present in the current field value. If the field is not an
+ array, or if the field does not yet exist, it is first set to
+ the empty array. Equivalent numbers of different types
+ (e.g. 3L and 3.0) are considered equal when checking if a
+ value is missing. NaN is equal to NaN, and Null is equal to
+ Null. If the input contains multiple equivalent values, only
+ the first will be considered. The corresponding
+ transform_result will be the null value.
+ remove_all_from_array:
+ Remove all of the given elements from the array in the field.
+ If the field is not an array, or if the field does not yet
+ exist, it is set to the empty array. Equivalent numbers of
+          different types (e.g. 3L and 3.0) are considered equal
+ when deciding whether an element should be removed. NaN is
+ equal to NaN, and Null is equal to Null. This will remove all
+ equivalent values if there are duplicates. The corresponding
+ transform_result will be the null value.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.DocumentTransform.FieldTransform)
+ },
+ ),
+ "DESCRIPTOR": _DOCUMENTTRANSFORM,
+ "__module__": "google.cloud.firestore_v1beta1.proto.write_pb2",
+ "__doc__": """A transformation of a document.
+
+ Attributes:
+ document:
+ The name of the document to transform.
+ field_transforms:
+ The list of transformations to apply to the fields of the
+ document, in order. This must not be empty.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.DocumentTransform)
+ },
+)
+_sym_db.RegisterMessage(DocumentTransform)
+_sym_db.RegisterMessage(DocumentTransform.FieldTransform)
+
+WriteResult = _reflection.GeneratedProtocolMessageType(
+ "WriteResult",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _WRITERESULT,
+ "__module__": "google.cloud.firestore_v1beta1.proto.write_pb2",
+ "__doc__": """The result of applying a write.
+
+ Attributes:
+ update_time:
+ The last update time of the document after applying the write.
+ Not set after a ``delete``. If the write did not actually
+ change the document, this will be the previous update_time.
+ transform_results:
+ The results of applying each [DocumentTransform.FieldTransform
+ ][google.firestore.v1beta1.DocumentTransform.FieldTransform],
+ in the same order.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.WriteResult)
+ },
+)
+_sym_db.RegisterMessage(WriteResult)
+
+DocumentChange = _reflection.GeneratedProtocolMessageType(
+ "DocumentChange",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _DOCUMENTCHANGE,
+ "__module__": "google.cloud.firestore_v1beta1.proto.write_pb2",
+ "__doc__": """A [Document][google.firestore.v1beta1.Document] has changed. May be
+ the result of multiple [writes][google.firestore.v1beta1.Write],
+ including deletes, that ultimately resulted in a new value for the
+ [Document][google.firestore.v1beta1.Document]. Multiple
+ [DocumentChange][google.firestore.v1beta1.DocumentChange] messages may
+ be returned for the same logical change, if multiple targets are
+ affected.
+
+ Attributes:
+ document:
+ The new state of the
+ [Document][google.firestore.v1beta1.Document]. If ``mask`` is
+ set, contains only fields that were updated or added.
+ target_ids:
+ A set of target IDs of targets that match this document.
+ removed_target_ids:
+ A set of target IDs for targets that no longer match this
+ document.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.DocumentChange)
+ },
+)
+_sym_db.RegisterMessage(DocumentChange)
+
+DocumentDelete = _reflection.GeneratedProtocolMessageType(
+ "DocumentDelete",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _DOCUMENTDELETE,
+ "__module__": "google.cloud.firestore_v1beta1.proto.write_pb2",
+ "__doc__": """A [Document][google.firestore.v1beta1.Document] has been deleted. May
+ be the result of multiple [writes][google.firestore.v1beta1.Write],
+ including updates, the last of which deleted the
+ [Document][google.firestore.v1beta1.Document]. Multiple
+ [DocumentDelete][google.firestore.v1beta1.DocumentDelete] messages may
+ be returned for the same logical delete, if multiple targets are
+ affected.
+
+ Attributes:
+ document:
+ The resource name of the
+ [Document][google.firestore.v1beta1.Document] that was
+ deleted.
+ removed_target_ids:
+ A set of target IDs for targets that previously matched this
+ entity.
+ read_time:
+          The read timestamp at which the delete was observed. Greater
+          than or equal to the ``commit_time`` of the delete.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.DocumentDelete)
+ },
+)
+_sym_db.RegisterMessage(DocumentDelete)
+
+DocumentRemove = _reflection.GeneratedProtocolMessageType(
+ "DocumentRemove",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _DOCUMENTREMOVE,
+ "__module__": "google.cloud.firestore_v1beta1.proto.write_pb2",
+ "__doc__": """A [Document][google.firestore.v1beta1.Document] has been removed from
+ the view of the targets. Sent if the document is no longer relevant
+ to a target and is out of view. Can be sent instead of a
+  DocumentDelete or a DocumentChange if the server cannot send the new
+ value of the document. Multiple
+ [DocumentRemove][google.firestore.v1beta1.DocumentRemove] messages may
+ be returned for the same logical write or delete, if multiple targets
+ are affected.
+
+ Attributes:
+ document:
+ The resource name of the
+ [Document][google.firestore.v1beta1.Document] that has gone
+ out of view.
+ removed_target_ids:
+ A set of target IDs for targets that previously matched this
+ document.
+ read_time:
+          The read timestamp at which the remove was observed. Greater
+          than or equal to the ``commit_time`` of the change/delete/remove.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.DocumentRemove)
+ },
+)
+_sym_db.RegisterMessage(DocumentRemove)
+
+ExistenceFilter = _reflection.GeneratedProtocolMessageType(
+ "ExistenceFilter",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _EXISTENCEFILTER,
+ "__module__": "google.cloud.firestore_v1beta1.proto.write_pb2",
+ "__doc__": """A digest of all the documents that match a given target.
+
+ Attributes:
+ target_id:
+ The target ID to which this filter applies.
+ count:
+ The total count of documents that match [target_id][google.fir
+ estore.v1beta1.ExistenceFilter.target_id]. If different from
+ the count of documents in the client that match, the client
+ must manually determine which documents no longer match the
+ target.
+ """,
+ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.ExistenceFilter)
+ },
+)
+_sym_db.RegisterMessage(ExistenceFilter)
+
+
+DESCRIPTOR._options = None
+# @@protoc_insertion_point(module_scope)
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/write_pb2_grpc.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/write_pb2_grpc.py
new file mode 100644
index 000000000..8a9393943
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/proto/write_pb2_grpc.py
@@ -0,0 +1,3 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/query.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/query.py
new file mode 100644
index 000000000..70dafb055
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/query.py
@@ -0,0 +1,971 @@
+# Copyright 2017 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Classes for representing queries for the Google Cloud Firestore API.
+
+A :class:`~google.cloud.firestore_v1beta1.query.Query` can be created directly
+from a :class:`~google.cloud.firestore_v1beta1.collection.Collection`,
+which is usually a more convenient way to create a query than direct use
+of the constructor.
+"""
+import copy
+import math
+import warnings
+
+from google.protobuf import wrappers_pb2
+import six
+
+from google.cloud.firestore_v1beta1 import _helpers
+from google.cloud.firestore_v1beta1 import document
+from google.cloud.firestore_v1beta1 import field_path as field_path_module
+from google.cloud.firestore_v1beta1 import transforms
+from google.cloud.firestore_v1beta1.gapic import enums
+from google.cloud.firestore_v1beta1.proto import query_pb2
+from google.cloud.firestore_v1beta1.order import Order
+from google.cloud.firestore_v1beta1.watch import Watch
+
+_EQ_OP = "=="
+_operator_enum = enums.StructuredQuery.FieldFilter.Operator
+_COMPARISON_OPERATORS = {
+ "<": _operator_enum.LESS_THAN,
+ "<=": _operator_enum.LESS_THAN_OR_EQUAL,
+ _EQ_OP: _operator_enum.EQUAL,
+ ">=": _operator_enum.GREATER_THAN_OR_EQUAL,
+ ">": _operator_enum.GREATER_THAN,
+ "array_contains": _operator_enum.ARRAY_CONTAINS,
+}
+_BAD_OP_STRING = "Operator string {!r} is invalid. Valid choices are: {}."
+_BAD_OP_NAN_NULL = 'Only an equality filter ("==") can be used with None or NaN values'
+_INVALID_WHERE_TRANSFORM = "Transforms cannot be used as where values."
+_BAD_DIR_STRING = "Invalid direction {!r}. Must be one of {!r} or {!r}."
+_INVALID_CURSOR_TRANSFORM = "Transforms cannot be used as cursor values."
+_MISSING_ORDER_BY = (
+ 'The "order by" field path {!r} is not present in the cursor data {!r}. '
+    "All fields sent to ``order_by()`` must be present in the fields "
+    "passed to one of ``start_at()`` / ``start_after()`` / "
+ "``end_before()`` / ``end_at()`` to define a cursor."
+)
+_NO_ORDERS_FOR_CURSOR = (
+ "Attempting to create a cursor with no fields to order on. "
+ "When defining a cursor with one of ``start_at()`` / ``start_after()`` / "
+ "``end_before()`` / ``end_at()``, all fields in the cursor must "
+ "come from fields set in ``order_by()``."
+)
+_MISMATCH_CURSOR_W_ORDER_BY = "The cursor {!r} does not match the order fields {!r}."
+
+
+class Query(object):
+ """Represents a query to the Firestore API.
+
+ Instances of this class are considered immutable: all methods that
+ would modify an instance instead return a new instance.
+
+ Args:
+ parent (~.firestore_v1beta1.collection.Collection): The collection
+ that this query applies to.
+ projection (Optional[google.cloud.proto.firestore.v1beta1.\
+ query_pb2.StructuredQuery.Projection]): A projection of document
+ fields to limit the query results to.
+ field_filters (Optional[Tuple[google.cloud.proto.firestore.v1beta1.\
+ query_pb2.StructuredQuery.FieldFilter, ...]]): The filters to be
+ applied in the query.
+ orders (Optional[Tuple[google.cloud.proto.firestore.v1beta1.\
+ query_pb2.StructuredQuery.Order, ...]]): The "order by" entries
+ to use in the query.
+ limit (Optional[int]): The maximum number of documents the
+ query is allowed to return.
+ offset (Optional[int]): The number of results to skip.
+ start_at (Optional[Tuple[dict, bool]]): Two-tuple of
+
+ * a mapping of fields. Any field that is present in this mapping
+ must also be present in ``orders``
+ * an ``after`` flag
+
+ The fields and the flag combine to form a cursor used as
+ a starting point in a query result set. If the ``after``
+ flag is :data:`True`, the results will start just after any
+ documents which have fields matching the cursor, otherwise
+ any matching documents will be included in the result set.
+ When the query is formed, the document values
+ will be used in the order given by ``orders``.
+ end_at (Optional[Tuple[dict, bool]]): Two-tuple of
+
+ * a mapping of fields. Any field that is present in this mapping
+ must also be present in ``orders``
+ * a ``before`` flag
+
+ The fields and the flag combine to form a cursor used as
+ an ending point in a query result set. If the ``before``
+ flag is :data:`True`, the results will end just before any
+ documents which have fields matching the cursor, otherwise
+ any matching documents will be included in the result set.
+ When the query is formed, the document values
+ will be used in the order given by ``orders``.
+ """
+
+ ASCENDING = "ASCENDING"
+ """str: Sort query results in ascending order on a field."""
+ DESCENDING = "DESCENDING"
+ """str: Sort query results in descending order on a field."""
+
+ def __init__(
+ self,
+ parent,
+ projection=None,
+ field_filters=(),
+ orders=(),
+ limit=None,
+ offset=None,
+ start_at=None,
+ end_at=None,
+ ):
+ self._parent = parent
+ self._projection = projection
+ self._field_filters = field_filters
+ self._orders = orders
+ self._limit = limit
+ self._offset = offset
+ self._start_at = start_at
+ self._end_at = end_at
+
+ def __eq__(self, other):
+ if not isinstance(other, self.__class__):
+ return NotImplemented
+ return (
+ self._parent == other._parent
+ and self._projection == other._projection
+ and self._field_filters == other._field_filters
+ and self._orders == other._orders
+ and self._limit == other._limit
+ and self._offset == other._offset
+ and self._start_at == other._start_at
+ and self._end_at == other._end_at
+ )
+
+ @property
+ def _client(self):
+ """The client of the parent collection.
+
+ Returns:
+ ~.firestore_v1beta1.client.Client: The client that owns
+ this query.
+ """
+ return self._parent._client
+
+ def select(self, field_paths):
+ """Project documents matching query to a limited set of fields.
+
+ See :meth:`~google.cloud.firestore_v1beta1.client.Client.field_path`
+ for more information on **field paths**.
+
+ If the current query already has a projection set (i.e. has already
+ called :meth:`~google.cloud.firestore_v1beta1.query.Query.select`),
+ this will overwrite it.
+
+ Args:
+ field_paths (Iterable[str, ...]): An iterable of field paths
+ (``.``-delimited list of field names) to use as a projection
+ of document fields in the query results.
+
+ Returns:
+ ~.firestore_v1beta1.query.Query: A "projected" query. Acts as
+ a copy of the current query, modified with the newly added
+ projection.
+ Raises:
+ ValueError: If any ``field_path`` is invalid.
+ """
+ field_paths = list(field_paths)
+ for field_path in field_paths:
+ field_path_module.split_field_path(field_path) # raises
+
+ new_projection = query_pb2.StructuredQuery.Projection(
+ fields=[
+ query_pb2.StructuredQuery.FieldReference(field_path=field_path)
+ for field_path in field_paths
+ ]
+ )
+ return self.__class__(
+ self._parent,
+ projection=new_projection,
+ field_filters=self._field_filters,
+ orders=self._orders,
+ limit=self._limit,
+ offset=self._offset,
+ start_at=self._start_at,
+ end_at=self._end_at,
+ )
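+
+    # Illustrative example (hand-written; the field names are hypothetical):
+    #
+    #     projected = query.select(["name", "address.city"])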
+
+ def where(self, field_path, op_string, value):
+ """Filter the query on a field.
+
+ See :meth:`~google.cloud.firestore_v1beta1.client.Client.field_path`
+ for more information on **field paths**.
+
+ Returns a new :class:`~google.cloud.firestore_v1beta1.query.Query`
+ that filters on a specific field path, according to an operation
+ (e.g. ``==`` or "equals") and a particular value to be paired with
+ that operation.
+
+ Args:
+ field_path (str): A field path (``.``-delimited list of
+ field names) for the field to filter on.
+ op_string (str): A comparison operation in the form of a string.
+                Acceptable values are ``<``, ``<=``, ``==``, ``>=``,
+                ``>`` and ``array_contains``.
+ value (Any): The value to compare the field against in the filter.
+ If ``value`` is :data:`None` or a NaN, then ``==`` is the only
+ allowed operation.
+
+ Returns:
+ ~.firestore_v1beta1.query.Query: A filtered query. Acts as a
+ copy of the current query, modified with the newly added filter.
+
+ Raises:
+ ValueError: If ``field_path`` is invalid.
+ ValueError: If ``value`` is a NaN or :data:`None` and
+ ``op_string`` is not ``==``.
+ """
+ field_path_module.split_field_path(field_path) # raises
+
+ if value is None:
+ if op_string != _EQ_OP:
+ raise ValueError(_BAD_OP_NAN_NULL)
+ filter_pb = query_pb2.StructuredQuery.UnaryFilter(
+ field=query_pb2.StructuredQuery.FieldReference(field_path=field_path),
+ op=enums.StructuredQuery.UnaryFilter.Operator.IS_NULL,
+ )
+ elif _isnan(value):
+ if op_string != _EQ_OP:
+ raise ValueError(_BAD_OP_NAN_NULL)
+ filter_pb = query_pb2.StructuredQuery.UnaryFilter(
+ field=query_pb2.StructuredQuery.FieldReference(field_path=field_path),
+ op=enums.StructuredQuery.UnaryFilter.Operator.IS_NAN,
+ )
+ elif isinstance(value, (transforms.Sentinel, transforms._ValueList)):
+ raise ValueError(_INVALID_WHERE_TRANSFORM)
+ else:
+ filter_pb = query_pb2.StructuredQuery.FieldFilter(
+ field=query_pb2.StructuredQuery.FieldReference(field_path=field_path),
+ op=_enum_from_op_string(op_string),
+ value=_helpers.encode_value(value),
+ )
+
+ new_filters = self._field_filters + (filter_pb,)
+ return self.__class__(
+ self._parent,
+ projection=self._projection,
+ field_filters=new_filters,
+ orders=self._orders,
+ limit=self._limit,
+ offset=self._offset,
+ start_at=self._start_at,
+ end_at=self._end_at,
+ )
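+
+    # Illustrative example (hand-written; field names are hypothetical).
+    # Filters accumulate and are AND-ed together; only ``==`` may be paired
+    # with ``None`` or NaN:
+    #
+    #     adults = query.where("age", ">=", 21).where("city", "==", "NYC")
+    #     no_phone = query.where("phone", "==", None)  # becomes an IS_NULL filter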
+
+ @staticmethod
+ def _make_order(field_path, direction):
+ """Helper for :meth:`order_by`."""
+ return query_pb2.StructuredQuery.Order(
+ field=query_pb2.StructuredQuery.FieldReference(field_path=field_path),
+ direction=_enum_from_direction(direction),
+ )
+
+ def order_by(self, field_path, direction=ASCENDING):
+ """Modify the query to add an order clause on a specific field.
+
+ See :meth:`~google.cloud.firestore_v1beta1.client.Client.field_path`
+ for more information on **field paths**.
+
+ Successive :meth:`~google.cloud.firestore_v1beta1.query.Query.order_by` calls
+ will further refine the ordering of results returned by the query
+ (i.e. the new "order by" fields will be added to existing ones).
+
+ Args:
+ field_path (str): A field path (``.``-delimited list of
+ field names) on which to order the query results.
+ direction (Optional[str]): The direction to order by. Must be one
+ of :attr:`ASCENDING` or :attr:`DESCENDING`, defaults to
+ :attr:`ASCENDING`.
+
+ Returns:
+ ~.firestore_v1beta1.query.Query: An ordered query. Acts as a
+ copy of the current query, modified with the newly added
+ "order by" constraint.
+
+ Raises:
+ ValueError: If ``field_path`` is invalid.
+ ValueError: If ``direction`` is not one of :attr:`ASCENDING` or
+ :attr:`DESCENDING`.
+ """
+ field_path_module.split_field_path(field_path) # raises
+
+ order_pb = self._make_order(field_path, direction)
+
+ new_orders = self._orders + (order_pb,)
+ return self.__class__(
+ self._parent,
+ projection=self._projection,
+ field_filters=self._field_filters,
+ orders=new_orders,
+ limit=self._limit,
+ offset=self._offset,
+ start_at=self._start_at,
+ end_at=self._end_at,
+ )
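+
+    # Illustrative example (hand-written; field names are hypothetical).
+    # Successive calls append "order by" clauses rather than replacing them:
+    #
+    #     ordered = query.order_by("age", direction=Query.DESCENDING).order_by("name")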
+
+ def limit(self, count):
+ """Limit a query to return a fixed number of results.
+
+ If the current query already has a limit set, this will overwrite it.
+
+ Args:
+ count (int): Maximum number of documents to return that match
+ the query.
+
+ Returns:
+ ~.firestore_v1beta1.query.Query: A limited query. Acts as a
+ copy of the current query, modified with the newly added
+ "limit" filter.
+ """
+ return self.__class__(
+ self._parent,
+ projection=self._projection,
+ field_filters=self._field_filters,
+ orders=self._orders,
+ limit=count,
+ offset=self._offset,
+ start_at=self._start_at,
+ end_at=self._end_at,
+ )
+
+ def offset(self, num_to_skip):
+ """Skip to an offset in a query.
+
+ If the current query already has specified an offset, this will
+ overwrite it.
+
+ Args:
+ num_to_skip (int): The number of results to skip at the beginning
+ of query results. (Must be non-negative.)
+
+ Returns:
+ ~.firestore_v1beta1.query.Query: An offset query. Acts as a
+ copy of the current query, modified with the newly added
+ "offset" field.
+ """
+ return self.__class__(
+ self._parent,
+ projection=self._projection,
+ field_filters=self._field_filters,
+ orders=self._orders,
+ limit=self._limit,
+ offset=num_to_skip,
+ start_at=self._start_at,
+ end_at=self._end_at,
+ )
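+
+    # Illustrative example (hand-written): ``limit`` and ``offset`` each
+    # overwrite any previously set value, so the third page of ten results
+    # per page would be:
+    #
+    #     page_three = query.offset(20).limit(10)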
+
+ def _cursor_helper(self, document_fields, before, start):
+ """Set values to be used for a ``start_at`` or ``end_at`` cursor.
+
+ The values will later be used in a query protobuf.
+
+ When the query is sent to the server, the ``document_fields`` will
+ be used in the order given by fields set by
+ :meth:`~google.cloud.firestore_v1beta1.query.Query.order_by`.
+
+ Args:
+ document_fields (Union[~.firestore_v1beta1.\
+ document.DocumentSnapshot, dict, list, tuple]): a document
+ snapshot or a dictionary/list/tuple of fields representing a
+ query results cursor. A cursor is a collection of values that
+ represent a position in a query result set.
+ before (bool): Flag indicating if the document in
+ ``document_fields`` should (:data:`False`) or
+ shouldn't (:data:`True`) be included in the result set.
+ start (Optional[bool]): determines if the cursor is a ``start_at``
+ cursor (:data:`True`) or an ``end_at`` cursor (:data:`False`).
+
+ Returns:
+ ~.firestore_v1beta1.query.Query: A query with cursor. Acts as
+ a copy of the current query, modified with the newly added
+ "start at" cursor.
+ """
+ if isinstance(document_fields, tuple):
+ document_fields = list(document_fields)
+ elif isinstance(document_fields, document.DocumentSnapshot):
+ if document_fields.reference._path[:-1] != self._parent._path:
+ raise ValueError(
+ "Cannot use snapshot from another collection as a cursor."
+ )
+ else:
+ # NOTE: We copy so that the caller can't modify after calling.
+ document_fields = copy.deepcopy(document_fields)
+
+ cursor_pair = document_fields, before
+ query_kwargs = {
+ "projection": self._projection,
+ "field_filters": self._field_filters,
+ "orders": self._orders,
+ "limit": self._limit,
+ "offset": self._offset,
+ }
+ if start:
+ query_kwargs["start_at"] = cursor_pair
+ query_kwargs["end_at"] = self._end_at
+ else:
+ query_kwargs["start_at"] = self._start_at
+ query_kwargs["end_at"] = cursor_pair
+
+ return self.__class__(self._parent, **query_kwargs)
+
+ def start_at(self, document_fields):
+ """Start query results at a particular document value.
+
+ The result set will **include** the document specified by
+ ``document_fields``.
+
+ If the current query already has specified a start cursor -- either
+ via this method or
+ :meth:`~google.cloud.firestore_v1beta1.query.Query.start_after` -- this will
+ overwrite it.
+
+ When the query is sent to the server, the ``document_fields`` will
+ be used in the order given by fields set by
+ :meth:`~google.cloud.firestore_v1beta1.query.Query.order_by`.
+
+ Args:
+ document_fields (Union[~.firestore_v1beta1.\
+ document.DocumentSnapshot, dict, list, tuple]): a document
+ snapshot or a dictionary/list/tuple of fields representing a
+ query results cursor. A cursor is a collection of values that
+ represent a position in a query result set.
+
+ Returns:
+ ~.firestore_v1beta1.query.Query: A query with cursor. Acts as
+ a copy of the current query, modified with the newly added
+ "start at" cursor.
+ """
+ return self._cursor_helper(document_fields, before=True, start=True)
+
+ def start_after(self, document_fields):
+ """Start query results after a particular document value.
+
+ The result set will **exclude** the document specified by
+ ``document_fields``.
+
+ If the current query already has specified a start cursor -- either
+ via this method or
+ :meth:`~google.cloud.firestore_v1beta1.query.Query.start_at` -- this will
+ overwrite it.
+
+ When the query is sent to the server, the ``document_fields`` will
+ be used in the order given by fields set by
+ :meth:`~google.cloud.firestore_v1beta1.query.Query.order_by`.
+
+ Args:
+ document_fields (Union[~.firestore_v1beta1.\
+ document.DocumentSnapshot, dict, list, tuple]): a document
+ snapshot or a dictionary/list/tuple of fields representing a
+ query results cursor. A cursor is a collection of values that
+ represent a position in a query result set.
+
+ Returns:
+ ~.firestore_v1beta1.query.Query: A query with cursor. Acts as
+ a copy of the current query, modified with the newly added
+ "start after" cursor.
+ """
+ return self._cursor_helper(document_fields, before=False, start=True)
+
+ def end_before(self, document_fields):
+ """End query results before a particular document value.
+
+ The result set will **exclude** the document specified by
+ ``document_fields``.
+
+ If the current query already has specified an end cursor -- either
+ via this method or
+ :meth:`~google.cloud.firestore_v1beta1.query.Query.end_at` -- this will
+ overwrite it.
+
+ When the query is sent to the server, the ``document_fields`` will
+ be used in the order given by fields set by
+ :meth:`~google.cloud.firestore_v1beta1.query.Query.order_by`.
+
+ Args:
+ document_fields (Union[~.firestore_v1beta1.\
+ document.DocumentSnapshot, dict, list, tuple]): a document
+ snapshot or a dictionary/list/tuple of fields representing a
+ query results cursor. A cursor is a collection of values that
+ represent a position in a query result set.
+
+ Returns:
+ ~.firestore_v1beta1.query.Query: A query with cursor. Acts as
+ a copy of the current query, modified with the newly added
+ "end before" cursor.
+ """
+ return self._cursor_helper(document_fields, before=True, start=False)
+
+ def end_at(self, document_fields):
+ """End query results at a particular document value.
+
+ The result set will **include** the document specified by
+ ``document_fields``.
+
+ If the current query already has specified an end cursor -- either
+ via this method or
+ :meth:`~google.cloud.firestore_v1beta1.query.Query.end_before` -- this will
+ overwrite it.
+
+ When the query is sent to the server, the ``document_fields`` will
+ be used in the order given by fields set by
+ :meth:`~google.cloud.firestore_v1beta1.query.Query.order_by`.
+
+ Args:
+ document_fields (Union[~.firestore_v1beta1.\
+ document.DocumentSnapshot, dict, list, tuple]): a document
+ snapshot or a dictionary/list/tuple of fields representing a
+ query results cursor. A cursor is a collection of values that
+ represent a position in a query result set.
+
+ Returns:
+ ~.firestore_v1beta1.query.Query: A query with cursor. Acts as
+ a copy of the current query, modified with the newly added
+ "end at" cursor.
+ """
+ return self._cursor_helper(document_fields, before=False, start=False)
+
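Taken together, start_at, start_after, end_before, and end_at support simple pagination when combined with ``order_by``. A minimal sketch, assuming a ``users`` collection with a ``birth_year`` field (collection and field names are illustrative):

    from google.cloud import firestore_v1beta1

    client = firestore_v1beta1.Client()

    # First page: three users born in or after 1900, oldest first.
    query = (
        client.collection(u"users")
        .order_by(u"birth_year")
        .start_at({u"birth_year": 1900})
        .limit(3)
    )
    first_page = list(query.stream())

    # Next page: a DocumentSnapshot cursor resumes after the last document,
    # matching cursor values to the fields set by order_by.
    if first_page:
        next_query = (
            client.collection(u"users")
            .order_by(u"birth_year")
            .start_after(first_page[-1])
            .limit(3)
        )

Passing a dict positions the cursor by raw field values, while passing a snapshot lets the server pin the position to a concrete document.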
+ def _filters_pb(self):
+ """Convert all the filters into a single generic Filter protobuf.
+
+ This may be a lone field filter or unary filter, may be a composite
+ filter or may be :data:`None`.
+
+ Returns:
+ google.cloud.firestore_v1beta1.types.\
+ StructuredQuery.Filter: A "generic" filter representing the
+ current query's filters.
+ """
+ num_filters = len(self._field_filters)
+ if num_filters == 0:
+ return None
+ elif num_filters == 1:
+ return _filter_pb(self._field_filters[0])
+ else:
+ composite_filter = query_pb2.StructuredQuery.CompositeFilter(
+ op=enums.StructuredQuery.CompositeFilter.Operator.AND,
+ filters=[_filter_pb(filter_) for filter_ in self._field_filters],
+ )
+ return query_pb2.StructuredQuery.Filter(composite_filter=composite_filter)
+
+ @staticmethod
+ def _normalize_projection(projection):
+ """Helper: convert field paths to message."""
+ if projection is not None:
+
+ fields = list(projection.fields)
+
+ if not fields:
+ field_ref = query_pb2.StructuredQuery.FieldReference(
+ field_path="__name__"
+ )
+ return query_pb2.StructuredQuery.Projection(fields=[field_ref])
+
+ return projection
+
+ def _normalize_orders(self):
+ """Helper: adjust orders based on cursors, where clauses."""
+ orders = list(self._orders)
+ _has_snapshot_cursor = False
+
+ if self._start_at:
+ if isinstance(self._start_at[0], document.DocumentSnapshot):
+ _has_snapshot_cursor = True
+
+ if self._end_at:
+ if isinstance(self._end_at[0], document.DocumentSnapshot):
+ _has_snapshot_cursor = True
+
+ if _has_snapshot_cursor:
+ should_order = [
+ _enum_from_op_string(key)
+ for key in _COMPARISON_OPERATORS
+ if key not in (_EQ_OP, "array_contains")
+ ]
+ order_keys = [order.field.field_path for order in orders]
+ for filter_ in self._field_filters:
+ field = filter_.field.field_path
+ if filter_.op in should_order and field not in order_keys:
+ orders.append(self._make_order(field, "ASCENDING"))
+ if not orders:
+ orders.append(self._make_order("__name__", "ASCENDING"))
+ else:
+ order_keys = [order.field.field_path for order in orders]
+ if "__name__" not in order_keys:
+ direction = orders[-1].direction # enum?
+ orders.append(self._make_order("__name__", direction))
+
+ return orders
+
+ def _normalize_cursor(self, cursor, orders):
+ """Helper: convert cursor to a list of values based on orders."""
+ if cursor is None:
+ return
+
+ if not orders:
+ raise ValueError(_NO_ORDERS_FOR_CURSOR)
+
+ document_fields, before = cursor
+
+ order_keys = [order.field.field_path for order in orders]
+
+ if isinstance(document_fields, document.DocumentSnapshot):
+ snapshot = document_fields
+ document_fields = snapshot.to_dict()
+ document_fields["__name__"] = snapshot.reference
+
+ if isinstance(document_fields, dict):
+ # Transform to list using orders
+ values = []
+ data = document_fields
+ for order_key in order_keys:
+ try:
+ values.append(field_path_module.get_nested_value(order_key, data))
+ except KeyError:
+ msg = _MISSING_ORDER_BY.format(order_key, data)
+ raise ValueError(msg)
+ document_fields = values
+
+ if len(document_fields) != len(orders):
+ msg = _MISMATCH_CURSOR_W_ORDER_BY.format(document_fields, order_keys)
+ raise ValueError(msg)
+
+ _transform_bases = (transforms.Sentinel, transforms._ValueList)
+
+ for index, key_field in enumerate(zip(order_keys, document_fields)):
+ key, field = key_field
+
+ if isinstance(field, _transform_bases):
+ msg = _INVALID_CURSOR_TRANSFORM
+ raise ValueError(msg)
+
+ if key == "__name__" and isinstance(field, six.string_types):
+ document_fields[index] = self._parent.document(field)
+
+ return document_fields, before
+
+ def _to_protobuf(self):
+ """Convert the current query into the equivalent protobuf.
+
+ Returns:
+ google.cloud.firestore_v1beta1.types.StructuredQuery: The
+ query protobuf.
+ """
+ projection = self._normalize_projection(self._projection)
+ orders = self._normalize_orders()
+ start_at = self._normalize_cursor(self._start_at, orders)
+ end_at = self._normalize_cursor(self._end_at, orders)
+
+ query_kwargs = {
+ "select": projection,
+ "from": [
+ query_pb2.StructuredQuery.CollectionSelector(
+ collection_id=self._parent.id
+ )
+ ],
+ "where": self._filters_pb(),
+ "order_by": orders,
+ "start_at": _cursor_pb(start_at),
+ "end_at": _cursor_pb(end_at),
+ }
+ if self._offset is not None:
+ query_kwargs["offset"] = self._offset
+ if self._limit is not None:
+ query_kwargs["limit"] = wrappers_pb2.Int32Value(value=self._limit)
+
+ return query_pb2.StructuredQuery(**query_kwargs)
+
+ def get(self, transaction=None):
+ """Deprecated alias for :meth:`stream`."""
+ warnings.warn(
+ "'Query.get' is deprecated: please use 'Query.stream' instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return self.stream(transaction=transaction)
+
+ def stream(self, transaction=None):
+ """Read the documents in the collection that match this query.
+
+ This sends a ``RunQuery`` RPC and then returns an iterator which
+ consumes each document returned in the stream of ``RunQueryResponse``
+ messages.
+
+ .. note::
+
+ The underlying stream of responses will time out after
+ the ``max_rpc_timeout_millis`` value set in the GAPIC
+ client configuration for the ``RunQuery`` API. Snapshots
+ not consumed from the iterator before that point will be lost.
+
+ If a ``transaction`` is used and it already has write operations
+ added, this method cannot be used (i.e. read-after-write is not
+ allowed).
+
+ Args:
+ transaction (Optional[~.firestore_v1beta1.transaction.\
+ Transaction]): An existing transaction that this query will
+ run in.
+
+ Yields:
+ ~.firestore_v1beta1.document.DocumentSnapshot: The next
+ document that fulfills the query.
+ """
+ parent_path, expected_prefix = self._parent._parent_info()
+ response_iterator = self._client._firestore_api.run_query(
+ parent_path,
+ self._to_protobuf(),
+ transaction=_helpers.get_transaction_id(transaction),
+ metadata=self._client._rpc_metadata,
+ )
+
+ for response in response_iterator:
+ snapshot = _query_response_to_snapshot(
+ response, self._parent, expected_prefix
+ )
+ if snapshot is not None:
+ yield snapshot
+
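A short usage sketch for ``stream`` (collection, field, and values are illustrative):

    from google.cloud import firestore_v1beta1

    client = firestore_v1beta1.Client()
    query = client.collection(u"cities").where(u"population", u">", 1000000)

    # Snapshots are yielded lazily as RunQueryResponse messages arrive.
    for snapshot in query.stream():
        print(u"{} => {}".format(snapshot.id, snapshot.to_dict()))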
+ def on_snapshot(self, callback):
+ """Monitor the documents in this collection that match this query.
+
+ This starts a watch on this query using a background thread. The
+ provided callback is run on the snapshot of the documents.
+
+ Args:
+ callback(~.firestore.query.QuerySnapshot): a callback to run when
+ a change occurs.
+
+ Example:
+ from google.cloud import firestore_v1beta1
+
+ db = firestore_v1beta1.Client()
+ query_ref = db.collection(u'users').where("user", "==", u'Ada')
+
+ def on_snapshot(docs, changes, read_time):
+ for doc in docs:
+ print(u'{} => {}'.format(doc.id, doc.to_dict()))
+
+ # Watch this query
+ query_watch = query_ref.on_snapshot(on_snapshot)
+
+ # Terminate this watch
+ query_watch.unsubscribe()
+ """
+ return Watch.for_query(
+ self, callback, document.DocumentSnapshot, document.DocumentReference
+ )
+
+ def _comparator(self, doc1, doc2):
+ _orders = self._orders
+
+ # Add implicit sorting by name, using the last specified direction.
+ if len(_orders) == 0:
+ lastDirection = Query.ASCENDING
+ else:
+ if _orders[-1].direction == 1:
+ lastDirection = Query.ASCENDING
+ else:
+ lastDirection = Query.DESCENDING
+
+ orderBys = list(_orders)
+
+ order_pb = query_pb2.StructuredQuery.Order(
+ field=query_pb2.StructuredQuery.FieldReference(field_path="id"),
+ direction=_enum_from_direction(lastDirection),
+ )
+ orderBys.append(order_pb)
+
+ for orderBy in orderBys:
+ if orderBy.field.field_path == "id":
+ # If ordering by document ID, compare resource paths.
+ comp = Order()._compare_to(doc1.reference._path, doc2.reference._path)
+ else:
+ if (
+ orderBy.field.field_path not in doc1._data
+ or orderBy.field.field_path not in doc2._data
+ ):
+ raise ValueError(
+ "Can only compare fields that exist in the "
+ "DocumentSnapshot. Please include the fields you are "
+ "ordering on in your select() call."
+ )
+ v1 = doc1._data[orderBy.field.field_path]
+ v2 = doc2._data[orderBy.field.field_path]
+ encoded_v1 = _helpers.encode_value(v1)
+ encoded_v2 = _helpers.encode_value(v2)
+ comp = Order().compare(encoded_v1, encoded_v2)
+
+ if comp != 0:
+ # 1 == Ascending, -1 == Descending
+ return orderBy.direction * comp
+
+ return 0
+
+
+def _enum_from_op_string(op_string):
+ """Convert a string representation of a binary operator to an enum.
+
+ These enums come from the protobuf message definition
+ ``StructuredQuery.FieldFilter.Operator``.
+
+ Args:
+ op_string (str): A comparison operation in the form of a string.
+ Acceptable values are ``<``, ``<=``, ``==``, ``>=``
+ and ``>``.
+
+ Returns:
+ int: The enum corresponding to ``op_string``.
+
+ Raises:
+ ValueError: If ``op_string`` is not a valid operator.
+ """
+ try:
+ return _COMPARISON_OPERATORS[op_string]
+ except KeyError:
+ choices = ", ".join(sorted(_COMPARISON_OPERATORS.keys()))
+ msg = _BAD_OP_STRING.format(op_string, choices)
+ raise ValueError(msg)
+
+
+def _isnan(value):
+ """Check if a value is NaN.
+
+ This differs from ``math.isnan`` in that **any** input type is
+ allowed.
+
+ Args:
+ value (Any): A value to check for NaN-ness.
+
+ Returns:
+ bool: Indicates if the value is the NaN float.
+ """
+ if isinstance(value, float):
+ return math.isnan(value)
+ else:
+ return False
+
+
+def _enum_from_direction(direction):
+ """Convert a string representation of a direction to an enum.
+
+ Args:
+ direction (str): A direction to order by. Must be one of
+ :attr:`~google.cloud.firestore.Query.ASCENDING` or
+ :attr:`~google.cloud.firestore.Query.DESCENDING`.
+
+ Returns:
+ int: The enum corresponding to ``direction``.
+
+ Raises:
+ ValueError: If ``direction`` is not a valid direction.
+ """
+ if isinstance(direction, int):
+ return direction
+
+ if direction == Query.ASCENDING:
+ return enums.StructuredQuery.Direction.ASCENDING
+ elif direction == Query.DESCENDING:
+ return enums.StructuredQuery.Direction.DESCENDING
+ else:
+ msg = _BAD_DIR_STRING.format(direction, Query.ASCENDING, Query.DESCENDING)
+ raise ValueError(msg)
+
+
+def _filter_pb(field_or_unary):
+ """Convert a specific protobuf filter to the generic filter type.
+
+ Args:
+ field_or_unary (Union[google.cloud.proto.firestore.v1beta1.\
+ query_pb2.StructuredQuery.FieldFilter, google.cloud.proto.\
+ firestore.v1beta1.query_pb2.StructuredQuery.UnaryFilter]): A
+ field or unary filter to convert to a generic filter.
+
+ Returns:
+ google.cloud.firestore_v1beta1.types.\
+ StructuredQuery.Filter: A "generic" filter.
+
+ Raises:
+ ValueError: If ``field_or_unary`` is not a field or unary filter.
+ """
+ if isinstance(field_or_unary, query_pb2.StructuredQuery.FieldFilter):
+ return query_pb2.StructuredQuery.Filter(field_filter=field_or_unary)
+ elif isinstance(field_or_unary, query_pb2.StructuredQuery.UnaryFilter):
+ return query_pb2.StructuredQuery.Filter(unary_filter=field_or_unary)
+ else:
+ raise ValueError("Unexpected filter type", type(field_or_unary), field_or_unary)
+
+
+def _cursor_pb(cursor_pair):
+ """Convert a cursor pair to a protobuf.
+
+ If ``cursor_pair`` is :data:`None`, just returns :data:`None`.
+
+ Args:
+ cursor_pair (Optional[Tuple[list, bool]]): Two-tuple of
+
+ * a list of field values.
+ * a ``before`` flag
+
+ Returns:
+ Optional[google.cloud.firestore_v1beta1.types.Cursor]: A
+ protobuf cursor corresponding to the values.
+ """
+ if cursor_pair is not None:
+ data, before = cursor_pair
+ value_pbs = [_helpers.encode_value(value) for value in data]
+ return query_pb2.Cursor(values=value_pbs, before=before)
+
+
+def _query_response_to_snapshot(response_pb, collection, expected_prefix):
+ """Parse a query response protobuf to a document snapshot.
+
+ Args:
+ response_pb (google.cloud.proto.firestore.v1beta1.\
+ firestore_pb2.RunQueryResponse): A response message
+ from a ``RunQuery`` streaming RPC.
+ collection (~.firestore_v1beta1.collection.CollectionReference): A
+ reference to the collection that initiated the query.
+ expected_prefix (str): The expected prefix for fully-qualified
+ document names returned in the query results. This can be computed
+ directly from ``collection`` via :meth:`_parent_info`.
+
+ Returns:
+ Optional[~.firestore.document.DocumentSnapshot]: A
+ snapshot of the data returned in the query. If ``response_pb.document``
+ is not set, the snapshot will be :data:`None`.
+ """
+ if not response_pb.HasField("document"):
+ return None
+
+ document_id = _helpers.get_doc_id(response_pb.document, expected_prefix)
+ reference = collection.document(document_id)
+ data = _helpers.decode_dict(response_pb.document.fields, collection._client)
+ snapshot = document.DocumentSnapshot(
+ reference,
+ data,
+ exists=True,
+ read_time=response_pb.read_time,
+ create_time=response_pb.document.create_time,
+ update_time=response_pb.document.update_time,
+ )
+ return snapshot
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/transaction.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/transaction.py
new file mode 100644
index 000000000..9a37f18d8
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/transaction.py
@@ -0,0 +1,409 @@
+# Copyright 2017 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for applying Google Cloud Firestore changes in a transaction."""
+
+
+import random
+import time
+
+import six
+
+from google.api_core import exceptions
+from google.cloud.firestore_v1beta1 import batch
+from google.cloud.firestore_v1beta1 import types
+
+
+MAX_ATTEMPTS = 5
+"""int: Default number of transaction attempts (with retries)."""
+_CANT_BEGIN = "The transaction has already begun. Current transaction ID: {!r}."
+_MISSING_ID_TEMPLATE = "The transaction has no transaction ID, so it cannot be {}."
+_CANT_ROLLBACK = _MISSING_ID_TEMPLATE.format("rolled back")
+_CANT_COMMIT = _MISSING_ID_TEMPLATE.format("committed")
+_WRITE_READ_ONLY = "Cannot perform write operation in read-only transaction."
+_INITIAL_SLEEP = 1.0
+"""float: Initial "max" for sleep interval. To be used in :func:`_sleep`."""
+_MAX_SLEEP = 30.0
+"""float: Eventual "max" sleep time. To be used in :func:`_sleep`."""
+_MULTIPLIER = 2.0
+"""float: Multiplier for exponential backoff. To be used in :func:`_sleep`."""
+_EXCEED_ATTEMPTS_TEMPLATE = "Failed to commit transaction in {:d} attempts."
+_CANT_RETRY_READ_ONLY = "Only read-write transactions can be retried."
+
+
+class Transaction(batch.WriteBatch):
+ """Accumulate read-and-write operations to be sent in a transaction.
+
+ Args:
+ client (~.firestore_v1beta1.client.Client): The client that
+ created this transaction.
+ max_attempts (Optional[int]): The maximum number of attempts for
+ the transaction (i.e. allowing retries). Defaults to
+ :attr:`~google.cloud.firestore_v1beta1.transaction.MAX_ATTEMPTS`.
+ read_only (Optional[bool]): Flag indicating if the transaction
+ should be read-only or should allow writes. Defaults to
+ :data:`False`.
+ """
+
+ def __init__(self, client, max_attempts=MAX_ATTEMPTS, read_only=False):
+ super(Transaction, self).__init__(client)
+ self._max_attempts = max_attempts
+ self._read_only = read_only
+ self._id = None
+
+ def _add_write_pbs(self, write_pbs):
+ """Add `Write`` protobufs to this transaction.
+
+ Args:
+ write_pbs (List[google.cloud.proto.firestore.v1beta1.\
+ write_pb2.Write]): A list of write protobufs to be added.
+
+ Raises:
+ ValueError: If this transaction is read-only.
+ """
+ if self._read_only:
+ raise ValueError(_WRITE_READ_ONLY)
+
+ super(Transaction, self)._add_write_pbs(write_pbs)
+
+ def _options_protobuf(self, retry_id):
+ """Convert the current object to protobuf.
+
+ The ``retry_id`` value is used when retrying a transaction that
+ failed (e.g. due to contention). It is intended to be the "first"
+ transaction that failed (i.e. if multiple retries are needed).
+
+ Args:
+ retry_id (Union[bytes, NoneType]): Transaction ID of a transaction
+ to be retried.
+
+ Returns:
+ Optional[google.cloud.firestore_v1beta1.types.TransactionOptions]:
+ The protobuf ``TransactionOptions`` if ``read_only==True`` or if
+ there is a transaction ID to be retried, else :data:`None`.
+
+ Raises:
+ ValueError: If ``retry_id`` is not :data:`None` but the
+ transaction is read-only.
+ """
+ if retry_id is not None:
+ if self._read_only:
+ raise ValueError(_CANT_RETRY_READ_ONLY)
+
+ return types.TransactionOptions(
+ read_write=types.TransactionOptions.ReadWrite(
+ retry_transaction=retry_id
+ )
+ )
+ elif self._read_only:
+ return types.TransactionOptions(
+ read_only=types.TransactionOptions.ReadOnly()
+ )
+ else:
+ return None
+
+ @property
+ def in_progress(self):
+ """Determine if this transaction has already begun.
+
+ Returns:
+ bool: Indicates if the transaction has started.
+ """
+ return self._id is not None
+
+ @property
+ def id(self):
+ """Get the current transaction ID.
+
+ Returns:
+ Optional[bytes]: The transaction ID (or :data:`None` if the
+ current transaction is not in progress).
+ """
+ return self._id
+
+ def _begin(self, retry_id=None):
+ """Begin the transaction.
+
+ Args:
+ retry_id (Optional[bytes]): Transaction ID of a transaction to be
+ retried.
+
+ Raises:
+ ValueError: If the current transaction has already begun.
+ """
+ if self.in_progress:
+ msg = _CANT_BEGIN.format(self._id)
+ raise ValueError(msg)
+
+ transaction_response = self._client._firestore_api.begin_transaction(
+ self._client._database_string,
+ options_=self._options_protobuf(retry_id),
+ metadata=self._client._rpc_metadata,
+ )
+ self._id = transaction_response.transaction
+
+ def _clean_up(self):
+ """Clean up the instance after :meth:`_rollback`` or :meth:`_commit``.
+
+ This intended to occur on success or failure of the associated RPCs.
+ """
+ self._write_pbs = []
+ self._id = None
+
+ def _rollback(self):
+ """Roll back the transaction.
+
+ Raises:
+ ValueError: If no transaction is in progress.
+ """
+ if not self.in_progress:
+ raise ValueError(_CANT_ROLLBACK)
+
+ try:
+ # NOTE: The response is just ``google.protobuf.Empty``.
+ self._client._firestore_api.rollback(
+ self._client._database_string,
+ self._id,
+ metadata=self._client._rpc_metadata,
+ )
+ finally:
+ self._clean_up()
+
+ def _commit(self):
+ """Transactionally commit the changes accumulated.
+
+ Returns:
+ List[google.cloud.proto.firestore.v1beta1.\
+ write_pb2.WriteResult, ...]: The write results corresponding
+ to the changes committed, returned in the same order as the
+ changes were applied to this transaction. A write result contains
+ an ``update_time`` field.
+
+ Raises:
+ ValueError: If no transaction is in progress.
+ """
+ if not self.in_progress:
+ raise ValueError(_CANT_COMMIT)
+
+ commit_response = _commit_with_retry(self._client, self._write_pbs, self._id)
+
+ self._clean_up()
+ return list(commit_response.write_results)
+
+
+class _Transactional(object):
+ """Provide a callable object to use as a transactional decorater.
+
+ This is surfaced via
+ :func:`~google.cloud.firestore_v1beta1.transaction.transactional`.
+
+ Args:
+ to_wrap (Callable[~.firestore_v1beta1.transaction.Transaction, \
+ Any]): A callable that should be run (and retried) in a
+ transaction.
+ """
+
+ def __init__(self, to_wrap):
+ self.to_wrap = to_wrap
+ self.current_id = None
+ """Optional[bytes]: The current transaction ID."""
+ self.retry_id = None
+ """Optional[bytes]: The ID of the first attempted transaction."""
+
+ def _reset(self):
+ """Unset the transaction IDs."""
+ self.current_id = None
+ self.retry_id = None
+
+ def _pre_commit(self, transaction, *args, **kwargs):
+ """Begin transaction and call the wrapped callable.
+
+ If the callable raises an exception, the transaction will be rolled
+ back. If not, the transaction will be "ready" for ``Commit`` (i.e.
+ it will have staged writes).
+
+ Args:
+ transaction (~.firestore_v1beta1.transaction.Transaction): A
+ transaction to execute the callable within.
+ args (Tuple[Any, ...]): The extra positional arguments to pass
+ along to the wrapped callable.
+ kwargs (Dict[str, Any]): The extra keyword arguments to pass
+ along to the wrapped callable.
+
+ Returns:
+ Any: result of the wrapped callable.
+
+ Raises:
+ Exception: Any failure caused by ``to_wrap``.
+ """
+ # Force the ``transaction`` to be not "in progress".
+ transaction._clean_up()
+ transaction._begin(retry_id=self.retry_id)
+
+ # Update the stored transaction IDs.
+ self.current_id = transaction._id
+ if self.retry_id is None:
+ self.retry_id = self.current_id
+ try:
+ return self.to_wrap(transaction, *args, **kwargs)
+ except: # noqa
+ # NOTE: If ``rollback`` fails this will lose the information
+ # from the original failure.
+ transaction._rollback()
+ raise
+
+ def _maybe_commit(self, transaction):
+ """Try to commit the transaction.
+
+ If the transaction is read-write and the ``Commit`` fails with the
+ ``ABORTED`` status code, it will be retried. Any other failure will
+ not be caught.
+
+ Args:
+ transaction (~.firestore_v1beta1.transaction.Transaction): The
+ transaction to be ``Commit``-ed.
+
+ Returns:
+ bool: Indicating if the commit succeeded.
+ """
+ try:
+ transaction._commit()
+ return True
+ except exceptions.GoogleAPICallError as exc:
+ if transaction._read_only:
+ raise
+
+ if isinstance(exc, exceptions.Aborted):
+ # If a read-write transaction returns ABORTED, retry.
+ return False
+ else:
+ raise
+
+ def __call__(self, transaction, *args, **kwargs):
+ """Execute the wrapped callable within a transaction.
+
+ Args:
+ transaction (~.firestore_v1beta1.transaction.Transaction): A
+ transaction to execute the callable within.
+ args (Tuple[Any, ...]): The extra positional arguments to pass
+ along to the wrapped callable.
+ kwargs (Dict[str, Any]): The extra keyword arguments to pass
+ along to the wrapped callable.
+
+ Returns:
+ Any: The result of the wrapped callable.
+
+ Raises:
+ ValueError: If the transaction does not succeed in
+ ``max_attempts``.
+ """
+ self._reset()
+
+ for attempt in six.moves.xrange(transaction._max_attempts):
+ result = self._pre_commit(transaction, *args, **kwargs)
+ succeeded = self._maybe_commit(transaction)
+ if succeeded:
+ return result
+
+ # Subsequent requests will use the failed transaction ID as part of
+ # the ``BeginTransactionRequest`` when restarting this transaction
+ # (via ``options.retry_transaction``). This preserves the "spot in
+ # line" of the transaction, so exponential backoff is not required
+ # in this case.
+
+ transaction._rollback()
+ msg = _EXCEED_ATTEMPTS_TEMPLATE.format(transaction._max_attempts)
+ raise ValueError(msg)
+
+
+def transactional(to_wrap):
+ """Decorate a callable so that it runs in a transaction.
+
+ Args:
+ to_wrap (Callable[~.firestore_v1beta1.transaction.Transaction, \
+ Any]): A callable that should be run (and retried) in a
+ transaction.
+
+ Returns:
+ Callable[~.firestore_v1beta1.transaction.Transaction, Any]: the
+ wrapped callable.
+ """
+ return _Transactional(to_wrap)
+
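A usage sketch for the decorator (collection, document, and field names are illustrative):

    from google.cloud import firestore_v1beta1
    from google.cloud.firestore_v1beta1.transaction import transactional

    client = firestore_v1beta1.Client()

    @transactional
    def transfer(transaction, from_ref, to_ref, amount):
        # All reads must happen before any writes in a transaction.
        from_balance = from_ref.get(transaction=transaction).get(u"balance")
        to_balance = to_ref.get(transaction=transaction).get(u"balance")
        transaction.update(from_ref, {u"balance": from_balance - amount})
        transaction.update(to_ref, {u"balance": to_balance + amount})

    accounts = client.collection(u"accounts")
    transfer(
        client.transaction(),
        accounts.document(u"alice"),
        accounts.document(u"bob"),
        10,
    )

If the commit is aborted due to contention, ``_Transactional`` re-runs the wrapped callable, up to the transaction's ``max_attempts``.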
+
+def _commit_with_retry(client, write_pbs, transaction_id):
+ """Call ``Commit`` on the GAPIC client with retry / sleep.
+
+ Retries the ``Commit`` RPC on Unavailable. Usually this RPC-level
+ retry is handled by the underlying GAPIC client, but in this case it
+ isn't, because ``Commit`` is not always idempotent. Here we know it
+ is "idempotent"-like because it has a transaction ID. We also need to
+ do our own retry to special-case the ``INVALID_ARGUMENT`` error.
+
+ Args:
+ client (~.firestore_v1beta1.client.Client): A client with
+ GAPIC client and configuration details.
+ write_pbs (List[google.cloud.proto.firestore.v1beta1.\
+ write_pb2.Write, ...]): A list of ``Write`` protobuf
+ instances to be committed.
+ transaction_id (bytes): ID of an existing transaction that
+ this commit will run in.
+
+ Returns:
+ google.cloud.firestore_v1beta1.types.CommitResponse:
+ The protobuf response from ``Commit``.
+
+ Raises:
+ ~google.api_core.exceptions.GoogleAPICallError: If a non-retryable
+ exception is encountered.
+ """
+ current_sleep = _INITIAL_SLEEP
+ while True:
+ try:
+ return client._firestore_api.commit(
+ client._database_string,
+ write_pbs,
+ transaction=transaction_id,
+ metadata=client._rpc_metadata,
+ )
+ except exceptions.ServiceUnavailable:
+ # Retry
+ pass
+
+ current_sleep = _sleep(current_sleep)
+
+
+def _sleep(current_sleep, max_sleep=_MAX_SLEEP, multiplier=_MULTIPLIER):
+ """Sleep and produce a new sleep time.
+
+ .. _Exponential Backoff And Jitter: https://www.awsarchitectureblog.com/\
+ 2015/03/backoff.html
+
+ Select a duration between zero and ``current_sleep``. It might seem
+ counterintuitive to have so much jitter, but
+ `Exponential Backoff And Jitter`_ argues that "full jitter" is
+ the best strategy.
+
+ Args:
+ current_sleep (float): The current "max" for sleep interval.
+ max_sleep (Optional[float]): Eventual "max" sleep time
+ multiplier (Optional[float]): Multiplier for exponential backoff.
+
+ Returns:
+ float: ``current_sleep`` multiplied by ``multiplier``, or
+ ``max_sleep`` (whichever is smaller).
+ """
+ actual_sleep = random.uniform(0.0, current_sleep)
+ time.sleep(actual_sleep)
+ return min(multiplier * current_sleep, max_sleep)
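To see the schedule this produces, the arithmetic of ``_sleep`` can be replayed without actually sleeping (a standalone sketch, not part of the module):

    import random

    def replay_backoff(initial=1.0, max_sleep=30.0, multiplier=2.0, attempts=7):
        # Mirrors _sleep: pick uniformly in [0, cap], then grow the cap.
        cap = initial
        for attempt in range(attempts):
            actual = random.uniform(0.0, cap)
            print("attempt %d: sleep %.2fs (cap %.2fs)" % (attempt, actual, cap))
            cap = min(multiplier * cap, max_sleep)

    replay_backoff()

The caps grow 1, 2, 4, 8, 16, 30, 30, ...; each actual sleep is "full jitter", i.e. uniform between zero and the current cap.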
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/transforms.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/transforms.py
new file mode 100644
index 000000000..4a64cf9ec
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/transforms.py
@@ -0,0 +1,90 @@
+# Copyright 2017 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpful constants to use for Google Cloud Firestore."""
+
+
+class Sentinel(object):
+ """Sentinel objects used to signal special handling."""
+
+ __slots__ = ("description",)
+
+ def __init__(self, description):
+ self.description = description
+
+ def __repr__(self):
+ return "Sentinel: {}".format(self.description)
+
+
+DELETE_FIELD = Sentinel("Value used to delete a field in a document.")
+
+
+SERVER_TIMESTAMP = Sentinel(
+ "Value used to set a document field to the server timestamp."
+)
+
+
+class _ValueList(object):
+ """Read-only list of values.
+
+ Args:
+ values (List | Tuple): values held in the helper.
+ """
+
+ slots = ("_values",)
+
+ def __init__(self, values):
+ if not isinstance(values, (list, tuple)):
+ raise ValueError("'values' must be a list or tuple.")
+
+ if len(values) == 0:
+ raise ValueError("'values' must be non-empty.")
+
+ self._values = list(values)
+
+ def __eq__(self, other):
+ if not isinstance(other, self.__class__):
+ return NotImplemented
+ return self._values == other._values
+
+ @property
+ def values(self):
+ """Values to append.
+
+ Returns (List):
+ values to be appended by the transform.
+ """
+ return self._values
+
+
+class ArrayUnion(_ValueList):
+ """Field transform: appends missing values to an array field.
+
+ See:
+ https://cloud.google.com/firestore/docs/reference/rpc/google.firestore.v1beta1#google.firestore.v1beta1.DocumentTransform.FieldTransform.FIELDS.google.firestore.v1beta1.ArrayValue.google.firestore.v1beta1.DocumentTransform.FieldTransform.append_missing_elements
+
+ Args:
+ values (List | Tuple): values to append.
+ """
+
+
+class ArrayRemove(_ValueList):
+ """Field transform: remove values from an array field.
+
+ See:
+ https://cloud.google.com/firestore/docs/reference/rpc/google.firestore.v1beta1#google.firestore.v1beta1.DocumentTransform.FieldTransform.FIELDS.google.firestore.v1beta1.ArrayValue.google.firestore.v1beta1.DocumentTransform.FieldTransform.remove_all_from_array
+
+ Args:
+ values (List | Tuple): values to remove.
+ """
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/types.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/types.py
new file mode 100644
index 000000000..90c03b8ab
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/types.py
@@ -0,0 +1,63 @@
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import absolute_import
+import sys
+
+from google.api import http_pb2
+from google.protobuf import any_pb2
+from google.protobuf import descriptor_pb2
+from google.protobuf import empty_pb2
+from google.protobuf import struct_pb2
+from google.protobuf import timestamp_pb2
+from google.protobuf import wrappers_pb2
+from google.rpc import status_pb2
+from google.type import latlng_pb2
+
+from google.api_core.protobuf_helpers import get_messages
+from google.cloud.firestore_v1beta1.proto import common_pb2
+from google.cloud.firestore_v1beta1.proto import document_pb2
+from google.cloud.firestore_v1beta1.proto import firestore_pb2
+from google.cloud.firestore_v1beta1.proto import query_pb2
+from google.cloud.firestore_v1beta1.proto import write_pb2
+
+
+_shared_modules = [
+ http_pb2,
+ any_pb2,
+ descriptor_pb2,
+ empty_pb2,
+ struct_pb2,
+ timestamp_pb2,
+ wrappers_pb2,
+ status_pb2,
+ latlng_pb2,
+]
+
+_local_modules = [common_pb2, document_pb2, firestore_pb2, query_pb2, write_pb2]
+
+names = []
+
+for module in _shared_modules:
+ for name, message in get_messages(module).items():
+ setattr(sys.modules[__name__], name, message)
+ names.append(name)
+
+for module in _local_modules:
+ for name, message in get_messages(module).items():
+ message.__module__ = "google.cloud.firestore_v1beta1.types"
+ setattr(sys.modules[__name__], name, message)
+ names.append(name)
+
+__all__ = tuple(sorted(names))
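The net effect is that every protobuf message from the shared and Firestore-specific modules is importable from a single namespace; for example:

    from google.cloud.firestore_v1beta1 import types

    # Cursor is defined in query_pb2 and Precondition in common_pb2, but
    # both are re-exported here.
    cursor = types.Cursor(before=True)
    precondition = types.Precondition(exists=True)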
diff --git a/venv/Lib/site-packages/google/cloud/firestore_v1beta1/watch.py b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/watch.py
new file mode 100644
index 000000000..63ded0d2d
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/firestore_v1beta1/watch.py
@@ -0,0 +1,722 @@
+# Copyright 2017 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import collections
+import threading
+import datetime
+from enum import Enum
+import functools
+
+import pytz
+
+from google.api_core.bidi import ResumableBidiRpc
+from google.api_core.bidi import BackgroundConsumer
+from google.cloud.firestore_v1beta1.proto import firestore_pb2
+from google.cloud.firestore_v1beta1 import _helpers
+
+from google.api_core import exceptions
+
+import grpc
+
+"""Python client for Google Cloud Firestore Watch."""
+
+_LOGGER = logging.getLogger(__name__)
+
+WATCH_TARGET_ID = 0x5079 # "Py"
+
+GRPC_STATUS_CODE = {
+ "OK": 0,
+ "CANCELLED": 1,
+ "UNKNOWN": 2,
+ "INVALID_ARGUMENT": 3,
+ "DEADLINE_EXCEEDED": 4,
+ "NOT_FOUND": 5,
+ "ALREADY_EXISTS": 6,
+ "PERMISSION_DENIED": 7,
+ "UNAUTHENTICATED": 16,
+ "RESOURCE_EXHAUSTED": 8,
+ "FAILED_PRECONDITION": 9,
+ "ABORTED": 10,
+ "OUT_OF_RANGE": 11,
+ "UNIMPLEMENTED": 12,
+ "INTERNAL": 13,
+ "UNAVAILABLE": 14,
+ "DATA_LOSS": 15,
+ "DO_NOT_USE": -1,
+}
+_RPC_ERROR_THREAD_NAME = "Thread-OnRpcTerminated"
+_RETRYABLE_STREAM_ERRORS = (
+ exceptions.DeadlineExceeded,
+ exceptions.ServiceUnavailable,
+ exceptions.InternalServerError,
+ exceptions.Unknown,
+ exceptions.GatewayTimeout,
+)
+
+DocTreeEntry = collections.namedtuple("DocTreeEntry", ["value", "index"])
+
+
+class WatchDocTree(object):
+ # TODO: Currently this uses a dict. Other implementations use an
+ # rbtree. The performance of this implementation should be
+ # investigated and may require changing the underlying data structure
+ # to an rbtree.
+ def __init__(self):
+ self._dict = {}
+ self._index = 0
+
+ def keys(self):
+ return list(self._dict.keys())
+
+ def _copy(self):
+ wdt = WatchDocTree()
+ wdt._dict = self._dict.copy()
+ wdt._index = self._index
+ return wdt
+
+ def insert(self, key, value):
+ wdt = self._copy()
+ wdt._dict[key] = DocTreeEntry(value, wdt._index)
+ wdt._index += 1
+ return wdt
+
+ def find(self, key):
+ return self._dict[key]
+
+ def remove(self, key):
+ wdt = self._copy()
+ del wdt._dict[key]
+ return wdt
+
+ def __iter__(self):
+ for k in self._dict:
+ yield k
+
+ def __len__(self):
+ return len(self._dict)
+
+ def __contains__(self, k):
+ return k in self._dict
+
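Because each mutating call returns a modified copy, callers can keep the previous tree around; a small demonstration:

    from google.cloud.firestore_v1beta1.watch import WatchDocTree

    tree = WatchDocTree()
    tree2 = tree.insert("docs/a", "snapshot-a").insert("docs/b", "snapshot-b")

    assert len(tree) == 0                   # the original is untouched
    assert len(tree2) == 2
    assert tree2.find("docs/a").index == 0  # insertion order is recorded

    tree3 = tree2.remove("docs/a")
    assert "docs/a" not in tree3 and "docs/a" in tree2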
+
+class ChangeType(Enum):
+ ADDED = 1
+ REMOVED = 2
+ MODIFIED = 3
+
+
+class DocumentChange(object):
+ def __init__(self, type, document, old_index, new_index):
+ """DocumentChange
+
+ Args:
+ type (ChangeType):
+ document (document.DocumentSnapshot):
+ old_index (int):
+ new_index (int):
+ """
+ # TODO: spec indicated an isEqual param also
+ self.type = type
+ self.document = document
+ self.old_index = old_index
+ self.new_index = new_index
+
+
+class WatchResult(object):
+ def __init__(self, snapshot, name, change_type):
+ self.snapshot = snapshot
+ self.name = name
+ self.change_type = change_type
+
+
+def _maybe_wrap_exception(exception):
+ """Wraps a gRPC exception class, if needed."""
+ if isinstance(exception, grpc.RpcError):
+ return exceptions.from_grpc_error(exception)
+ return exception
+
+
+def document_watch_comparator(doc1, doc2):
+ assert doc1 == doc2, "Document watches only support one document."
+ return 0
+
+
+class Watch(object):
+
+ BackgroundConsumer = BackgroundConsumer # FBO unit tests
+ ResumableBidiRpc = ResumableBidiRpc # FBO unit tests
+
+ def __init__(
+ self,
+ document_reference,
+ firestore,
+ target,
+ comparator,
+ snapshot_callback,
+ document_snapshot_cls,
+ document_reference_cls,
+ BackgroundConsumer=None, # FBO unit testing
+ ResumableBidiRpc=None, # FBO unit testing
+ ):
+ """
+ Args:
+ firestore:
+ target:
+ comparator:
+ snapshot_callback: Callback method to process snapshots.
+ Args:
+ docs (List(DocumentSnapshot)): The ordered list of
+ documents stored in this snapshot.
+ changes (List(str)): The list of changed documents
+ since the last snapshot delivered for this watch.
+ read_time (string): The ISO 8601 time at which this
+ snapshot was obtained.
+
+ document_snapshot_cls: the DocumentSnapshot class
+ document_reference_cls: the DocumentReference class
+ """
+ self._document_reference = document_reference
+ self._firestore = firestore
+ self._api = firestore._firestore_api
+ self._targets = target
+ self._comparator = comparator
+ self.DocumentSnapshot = document_snapshot_cls
+ self.DocumentReference = document_reference_cls
+ self._snapshot_callback = snapshot_callback
+ self._closing = threading.Lock()
+ self._closed = False
+
+ def should_recover(exc): # pragma: NO COVER
+ return (
+ isinstance(exc, grpc.RpcError)
+ and exc.code() == grpc.StatusCode.UNAVAILABLE
+ )
+
+ initial_request = firestore_pb2.ListenRequest(
+ database=self._firestore._database_string, add_target=self._targets
+ )
+
+ if ResumableBidiRpc is None:
+ ResumableBidiRpc = self.ResumableBidiRpc # FBO unit tests
+
+ self._rpc = ResumableBidiRpc(
+ self._api.transport.listen,
+ initial_request=initial_request,
+ should_recover=should_recover,
+ metadata=self._firestore._rpc_metadata,
+ )
+
+ self._rpc.add_done_callback(self._on_rpc_done)
+
+ # Initialize state for on_snapshot
+ # The sorted tree of QueryDocumentSnapshots as sent in the last
+ # snapshot. We only look at the keys.
+ self.doc_tree = WatchDocTree()
+
+ # A map of document names to QueryDocumentSnapshots for the last sent
+ # snapshot.
+ self.doc_map = {}
+
+ # The accumulated map of document changes (keyed by document name) for
+ # the current snapshot.
+ self.change_map = {}
+
+ # The current state of the query results.
+ self.current = False
+
+ # We need this to track whether we've pushed an initial set of changes,
+ # since we should push those even when there are no changes (e.g. when
+ # the query matches no documents).
+ self.has_pushed = False
+
+ # The server assigns and updates the resume token.
+ self.resume_token = None
+ if BackgroundConsumer is None: # FBO unit tests
+ BackgroundConsumer = self.BackgroundConsumer
+
+ self._consumer = BackgroundConsumer(self._rpc, self.on_snapshot)
+ self._consumer.start()
+
+ @property
+ def is_active(self):
+ """bool: True if this manager is actively streaming.
+
+ Note that ``False`` does not indicate that this is completely shut
+ down, just that it has stopped getting new messages.
+ """
+ return self._consumer is not None and self._consumer.is_active
+
+ def close(self, reason=None):
+ """Stop consuming messages and shutdown all helper threads.
+
+ This method is idempotent. Additional calls will have no effect.
+
+ Args:
+ reason (Any): The reason to close this. If None, this is considered
+ an "intentional" shutdown.
+ """
+ with self._closing:
+ if self._closed:
+ return
+
+ # Stop consuming messages.
+ if self.is_active:
+ _LOGGER.debug("Stopping consumer.")
+ self._consumer.stop()
+ self._consumer = None
+
+ self._rpc.close()
+ self._rpc = None
+ self._closed = True
+ _LOGGER.debug("Finished stopping manager.")
+
+ if reason:
+ # Raise an exception if a reason is provided
+ _LOGGER.debug("reason for closing: %s" % reason)
+ if isinstance(reason, Exception):
+ raise reason
+ raise RuntimeError(reason)
+
+ def _on_rpc_done(self, future):
+ """Triggered whenever the underlying RPC terminates without recovery.
+
+ This is typically triggered from one of two threads: the background
+ consumer thread (when calling ``recv()`` produces a non-recoverable
+ error) or the grpc management thread (when cancelling the RPC).
+
+ This method is *non-blocking*. It will start another thread to deal
+ with shutting everything down. This is to prevent blocking in the
+ background consumer and preventing it from being ``joined()``.
+ """
+ _LOGGER.info("RPC termination has signaled manager shutdown.")
+ future = _maybe_wrap_exception(future)
+ thread = threading.Thread(
+ name=_RPC_ERROR_THREAD_NAME, target=self.close, kwargs={"reason": future}
+ )
+ thread.daemon = True
+ thread.start()
+
+ def unsubscribe(self):
+ self.close()
+
+ @classmethod
+ def for_document(
+ cls,
+ document_ref,
+ snapshot_callback,
+ snapshot_class_instance,
+ reference_class_instance,
+ ):
+ """
+ Creates a watch snapshot listener for a document. snapshot_callback
+ receives a DocumentChange object; it may also start to receive
+ target changes in the future.
+
+ Args:
+ document_ref: Reference to Document
+ snapshot_callback: callback to be called on snapshot
+ snapshot_class_instance: the DocumentSnapshot class used to make
+ snapshots to pass to snapshot_callback
+ reference_class_instance: the DocumentReference class used to make
+ references
+
+ """
+ return cls(
+ document_ref,
+ document_ref._client,
+ {
+ "documents": {"documents": [document_ref._document_path]},
+ "target_id": WATCH_TARGET_ID,
+ },
+ document_watch_comparator,
+ snapshot_callback,
+ snapshot_class_instance,
+ reference_class_instance,
+ )
+
+ @classmethod
+ def for_query(
+ cls, query, snapshot_callback, snapshot_class_instance, reference_class_instance
+ ):
+ query_target = firestore_pb2.Target.QueryTarget(
+ parent=query._client._database_string, structured_query=query._to_protobuf()
+ )
+
+ return cls(
+ query,
+ query._client,
+ {"query": query_target, "target_id": WATCH_TARGET_ID},
+ query._comparator,
+ snapshot_callback,
+ snapshot_class_instance,
+ reference_class_instance,
+ )
+
+ def _on_snapshot_target_change_no_change(self, proto):
+ _LOGGER.debug("on_snapshot: target change: NO_CHANGE")
+ change = proto.target_change
+
+ no_target_ids = change.target_ids is None or len(change.target_ids) == 0
+ if no_target_ids and change.read_time and self.current:
+ # TargetChange.CURRENT followed by TargetChange.NO_CHANGE
+ # signals a consistent state. Invoke the onSnapshot
+ # callback as specified by the user.
+ self.push(change.read_time, change.resume_token)
+
+ def _on_snapshot_target_change_add(self, proto):
+ _LOGGER.debug("on_snapshot: target change: ADD")
+ target_id = proto.target_change.target_ids[0]
+ if target_id != WATCH_TARGET_ID:
+ raise RuntimeError("Unexpected target ID %s sent by server" % target_id)
+
+ def _on_snapshot_target_change_remove(self, proto):
+ _LOGGER.debug("on_snapshot: target change: REMOVE")
+ change = proto.target_change
+
+ code = 13
+ message = "internal error"
+ if change.cause:
+ code = change.cause.code
+ message = change.cause.message
+
+ message = "Error %s: %s" % (code, message)
+
+ raise RuntimeError(message)
+
+ def _on_snapshot_target_change_reset(self, proto):
+ # Whatever changes have happened so far no longer matter.
+ _LOGGER.debug("on_snapshot: target change: RESET")
+ self._reset_docs()
+
+ def _on_snapshot_target_change_current(self, proto):
+ _LOGGER.debug("on_snapshot: target change: CURRENT")
+ self.current = True
+
+ def on_snapshot(self, proto):
+ """
+ Called every time there is a response from listen. Collects changes
+ and 'pushes' them in a batch to the user's callback once we receive
+ 'current' from the listen response.
+
+ Args:
+ proto(`google.cloud.firestore_v1beta1.types.ListenResponse`):
+ The response message received from the ``Listen`` stream.
+ """
+ TargetChange = firestore_pb2.TargetChange
+
+ target_changetype_dispatch = {
+ TargetChange.NO_CHANGE: self._on_snapshot_target_change_no_change,
+ TargetChange.ADD: self._on_snapshot_target_change_add,
+ TargetChange.REMOVE: self._on_snapshot_target_change_remove,
+ TargetChange.RESET: self._on_snapshot_target_change_reset,
+ TargetChange.CURRENT: self._on_snapshot_target_change_current,
+ }
+
+ target_change = proto.target_change
+ if str(target_change):
+ target_change_type = target_change.target_change_type
+ _LOGGER.debug("on_snapshot: target change: " + str(target_change_type))
+ meth = target_changetype_dispatch.get(target_change_type)
+ if meth is None:
+ _LOGGER.info(
+ "on_snapshot: Unknown target change " + str(target_change_type)
+ )
+ self.close(
+ reason="Unknown target change type: %s " % str(target_change_type)
+ )
+ else:
+ try:
+ meth(proto)
+ except Exception as exc2:
+ _LOGGER.debug("meth(proto) exc: " + str(exc2))
+ raise
+
+ # NOTE:
+ # in other implementations, such as node, the backoff is reset here
+ # in this version bidi rpc is just used and will control this.
+
+ elif str(proto.document_change):
+ _LOGGER.debug("on_snapshot: document change")
+
+ # No other target_ids can show up here, but we still need to see
+ # if the targetId was in the added list or removed list.
+ target_ids = proto.document_change.target_ids or []
+ removed_target_ids = proto.document_change.removed_target_ids or []
+ changed = False
+ removed = False
+
+ if WATCH_TARGET_ID in target_ids:
+ changed = True
+
+ if WATCH_TARGET_ID in removed_target_ids:
+ removed = True
+
+ if changed:
+ _LOGGER.debug("on_snapshot: document change: CHANGED")
+
+ # google.cloud.firestore_v1beta1.types.DocumentChange
+ document_change = proto.document_change
+ # google.cloud.firestore_v1beta1.types.Document
+ document = document_change.document
+
+ data = _helpers.decode_dict(document.fields, self._firestore)
+
+ # Create a snapshot. As Document and Query objects can be
+ # passed we need to get a Document Reference in a more manual
+ # fashion than self._document_reference
+ document_name = document.name
+ db_str = self._firestore._database_string
+ db_str_documents = db_str + "/documents/"
+ if document_name.startswith(db_str_documents):
+ document_name = document_name[len(db_str_documents) :]
+
+ document_ref = self._firestore.document(document_name)
+
+ snapshot = self.DocumentSnapshot(
+ reference=document_ref,
+ data=data,
+ exists=True,
+ read_time=None,
+ create_time=document.create_time,
+ update_time=document.update_time,
+ )
+ self.change_map[document.name] = snapshot
+
+ elif removed:
+ _LOGGER.debug("on_snapshot: document change: REMOVED")
+ document = proto.document_change.document
+ self.change_map[document.name] = ChangeType.REMOVED
+
+ # NB: document_delete and document_remove (as far as we, the client,
+ # are concerned) are functionally equivalent
+
+ elif str(proto.document_delete):
+ _LOGGER.debug("on_snapshot: document change: DELETE")
+ name = proto.document_delete.document
+ self.change_map[name] = ChangeType.REMOVED
+
+ elif str(proto.document_remove):
+ _LOGGER.debug("on_snapshot: document change: REMOVE")
+ name = proto.document_remove.document
+ self.change_map[name] = ChangeType.REMOVED
+
+ elif proto.filter:
+ _LOGGER.debug("on_snapshot: filter update")
+ if proto.filter.count != self._current_size():
+ # We need to remove all the current results.
+ self._reset_docs()
+ # The filter didn't match, so re-issue the query.
+ # TODO: reset stream method?
+ # self._reset_stream();
+
+ else:
+ _LOGGER.debug("UNKNOWN TYPE. UHOH")
+ self.close(reason=ValueError("Unknown listen response type: %s" % proto))
+
+ def push(self, read_time, next_resume_token):
+ """
+ Assembles a new snapshot from the current set of changes and invokes
+ the user's callback. Clears the current changes on completion.
+ """
+ deletes, adds, updates = Watch._extract_changes(
+ self.doc_map, self.change_map, read_time
+ )
+
+ updated_tree, updated_map, appliedChanges = self._compute_snapshot(
+ self.doc_tree, self.doc_map, deletes, adds, updates
+ )
+
+ if not self.has_pushed or len(appliedChanges):
+ # TODO: It is possible in the future we will have the tree order
+ # on insert. For now, we sort here.
+ key = functools.cmp_to_key(self._comparator)
+ keys = sorted(updated_tree.keys(), key=key)
+
+ self._snapshot_callback(
+ keys,
+ appliedChanges,
+ datetime.datetime.fromtimestamp(read_time.seconds, pytz.utc),
+ )
+ self.has_pushed = True
+
+ self.doc_tree = updated_tree
+ self.doc_map = updated_map
+ self.change_map.clear()
+ self.resume_token = next_resume_token
+
+ @staticmethod
+ def _extract_changes(doc_map, changes, read_time):
+ deletes = []
+ adds = []
+ updates = []
+
+ for name, value in changes.items():
+ if value == ChangeType.REMOVED:
+ if name in doc_map:
+ deletes.append(name)
+ elif name in doc_map:
+ if read_time is not None:
+ value.read_time = read_time
+ updates.append(value)
+ else:
+ if read_time is not None:
+ value.read_time = read_time
+ adds.append(value)
+
+ return (deletes, adds, updates)
+
+ def _compute_snapshot(
+ self, doc_tree, doc_map, delete_changes, add_changes, update_changes
+ ):
+ updated_tree = doc_tree
+ updated_map = doc_map
+
+ assert len(doc_tree) == len(doc_map), (
+ "The document tree and document map should have the same "
+ + "number of entries."
+ )
+
+ def delete_doc(name, updated_tree, updated_map):
+ """
+ Applies a document delete to the document tree and document map.
+ Returns the corresponding DocumentChange event.
+ """
+ assert name in updated_map, "Document to delete does not exist"
+ old_document = updated_map.get(name)
+ # TODO: If a document doesn't exist this raises KeyError. Handle?
+ existing = updated_tree.find(old_document)
+ old_index = existing.index
+ updated_tree = updated_tree.remove(old_document)
+ del updated_map[name]
+ return (
+ DocumentChange(ChangeType.REMOVED, old_document, old_index, -1),
+ updated_tree,
+ updated_map,
+ )
+
+ def add_doc(new_document, updated_tree, updated_map):
+ """
+ Applies a document add to the document tree and the document map.
+ Returns the corresponding DocumentChange event.
+ """
+ name = new_document.reference._document_path
+ assert name not in updated_map, "Document to add already exists"
+ updated_tree = updated_tree.insert(new_document, None)
+ new_index = updated_tree.find(new_document).index
+ updated_map[name] = new_document
+ return (
+ DocumentChange(ChangeType.ADDED, new_document, -1, new_index),
+ updated_tree,
+ updated_map,
+ )
+
+ def modify_doc(new_document, updated_tree, updated_map):
+ """
+ Applies a document modification to the document tree and the
+ document map.
+ Returns the DocumentChange event for successful modifications.
+ """
+ name = new_document.reference._document_path
+ assert name in updated_map, "Document to modify does not exist"
+ old_document = updated_map.get(name)
+ if old_document.update_time != new_document.update_time:
+ remove_change, updated_tree, updated_map = delete_doc(
+ name, updated_tree, updated_map
+ )
+ add_change, updated_tree, updated_map = add_doc(
+ new_document, updated_tree, updated_map
+ )
+ return (
+ DocumentChange(
+ ChangeType.MODIFIED,
+ new_document,
+ remove_change.old_index,
+ add_change.new_index,
+ ),
+ updated_tree,
+ updated_map,
+ )
+
+ return None, updated_tree, updated_map
+
+ # Process the sorted changes in the order that is expected by our
+ # clients (removals, additions, and then modifications). We also need
+ # to sort the individual changes to assure that old_index/new_index
+ # keep incrementing.
+ appliedChanges = []
+
+ key = functools.cmp_to_key(self._comparator)
+
+ # Deletes are sorted based on the order of the existing document.
+ delete_changes = sorted(delete_changes, key=key)
+ for name in delete_changes:
+ change, updated_tree, updated_map = delete_doc(
+ name, updated_tree, updated_map
+ )
+ appliedChanges.append(change)
+
+ add_changes = sorted(add_changes, key=key)
+ _LOGGER.debug("walk over add_changes")
+ for snapshot in add_changes:
+ _LOGGER.debug("in add_changes")
+ change, updated_tree, updated_map = add_doc(
+ snapshot, updated_tree, updated_map
+ )
+ appliedChanges.append(change)
+
+ update_changes = sorted(update_changes, key=key)
+ for snapshot in update_changes:
+ change, updated_tree, updated_map = modify_doc(
+ snapshot, updated_tree, updated_map
+ )
+ if change is not None:
+ appliedChanges.append(change)
+
+ assert len(updated_tree) == len(updated_map), (
+ "The update document "
+ + "tree and document map should have the same number of entries."
+ )
+ return (updated_tree, updated_map, appliedChanges)
+
+ def _affects_target(self, target_ids, current_id):
+ if target_ids is None:
+ return True
+
+ return current_id in target_ids
+
+ def _current_size(self):
+ """
+ Returns the current count of all documents, including the changes from
+ the current changeMap.
+ """
+ deletes, adds, _ = Watch._extract_changes(self.doc_map, self.change_map, None)
+ return len(self.doc_map) + len(adds) - len(deletes)
+
+ def _reset_docs(self):
+ """
+ Helper to clear the docs on RESET or filter mismatch.
+ """
+ _LOGGER.debug("resetting documents")
+ self.change_map.clear()
+ self.resume_token = None
+
+ # Mark each document as deleted. If documents are not deleted
+ # they will be sent again by the server.
+ for snapshot in self.doc_tree.keys():
+ name = snapshot.reference._document_path
+ self.change_map[name] = ChangeType.REMOVED
+
+ self.current = False
diff --git a/venv/Lib/site-packages/google/cloud/obsolete.py b/venv/Lib/site-packages/google/cloud/obsolete.py
new file mode 100644
index 000000000..99a83f9ce
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/obsolete.py
@@ -0,0 +1,42 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for deprecated code and modules."""
+
+import warnings
+
+import pkg_resources
+
+
+def complain(distribution_name):
+ """Issue a warning if `distribution_name` is installed.
+
+ In a future release, this method will be updated to raise ImportError
+ rather than just send a warning.
+
+ Args:
+ distribution_name (str): The name of the obsolete distribution.
+ """
+ try:
+ pkg_resources.get_distribution(distribution_name)
+ warnings.warn(
+ "The {pkg} distribution is now obsolete. "
+ "Please `pip uninstall {pkg}`. "
+ "In the future, this warning will become an ImportError.".format(
+ pkg=distribution_name
+ ),
+ DeprecationWarning,
+ )
+ except pkg_resources.DistributionNotFound:
+ pass
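Typical use is a guard at import time of the package that supersedes the obsolete one (the distribution name below is illustrative):

    from google.cloud.obsolete import complain

    # Warn users who still have the superseded distribution installed.
    complain("google-cloud-some-obsolete-package")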
diff --git a/venv/Lib/site-packages/google/cloud/operation.py b/venv/Lib/site-packages/google/cloud/operation.py
new file mode 100644
index 000000000..28ac8cf1c
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/operation.py
@@ -0,0 +1,266 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Wrap long-running operations returned from Google Cloud APIs."""
+
+from google.longrunning import operations_pb2
+from google.protobuf import json_format
+
+
+_GOOGLE_APIS_PREFIX = "type.googleapis.com"
+
+_TYPE_URL_MAP = {}
+
+
+def _compute_type_url(klass, prefix=_GOOGLE_APIS_PREFIX):
+ """Compute a type URL for a klass.
+
+ :type klass: type
+ :param klass: class to be used as a factory for the given type
+
+ :type prefix: str
+ :param prefix: URL prefix for the type
+
+ :rtype: str
+ :returns: the URL, prefixed as appropriate
+ """
+ name = klass.DESCRIPTOR.full_name
+ return "%s/%s" % (prefix, name)
+
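+# For example, ``operations_pb2.Operation`` has the full name
+# ``google.longrunning.Operation``, so its computed type URL is
+# ``type.googleapis.com/google.longrunning.Operation``.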
+
+def register_type(klass, type_url=None):
+ """Register a klass as the factory for a given type URL.
+
+ :type klass: :class:`type`
+ :param klass: class to be used as a factory for the given type
+
+ :type type_url: str
+ :param type_url: (Optional) URL naming the type. If not provided,
+ infers the URL from the type descriptor.
+
+ :raises ValueError: if a registration already exists for the URL.
+ """
+ if type_url is None:
+ type_url = _compute_type_url(klass)
+ if type_url in _TYPE_URL_MAP:
+ if _TYPE_URL_MAP[type_url] is not klass:
+ raise ValueError("Conflict: %s" % (_TYPE_URL_MAP[type_url],))
+
+ _TYPE_URL_MAP[type_url] = klass
+
+
+def _from_any(any_pb):
+ """Convert an ``Any`` protobuf into the actual class.
+
+ Uses the type URL to do the conversion.
+
+ .. note::
+
+ This assumes that the type URL is already registered.
+
+ :type any_pb: :class:`google.protobuf.any_pb2.Any`
+ :param any_pb: An any object to be converted.
+
+ :rtype: object
+ :returns: The instance (of the correct type) stored in the any
+ instance.
+ """
+ klass = _TYPE_URL_MAP[any_pb.type_url]
+ return klass.FromString(any_pb.value)
+
+
+class Operation(object):
+ """Representation of a Google API Long-Running Operation.
+
+ .. _protobuf: https://github.com/googleapis/googleapis/blob/\
+ 050400df0fdb16f63b63e9dee53819044bffc857/\
+ google/longrunning/operations.proto#L80
+ .. _service: https://github.com/googleapis/googleapis/blob/\
+ 050400df0fdb16f63b63e9dee53819044bffc857/\
+ google/longrunning/operations.proto#L38
+ .. _JSON: https://cloud.google.com/speech/reference/rest/\
+ v1beta1/operations#Operation
+
+ This wraps an operation `protobuf`_ object and attempts to
+ interact with the long-running operations `service`_ (specific
+ to a given API). (Some services also offer a `JSON`_
+ API that maps the same underlying data type.)
+
+ :type name: str
+ :param name: The fully-qualified path naming the operation.
+
+ :type client: :class:`~google.cloud.client.Client`
+ :param client: The client used to poll for the status of the operation.
+ If the operation was created via JSON/HTTP, the client
+ must own a :class:`~google.cloud._http.Connection`
+ to send polling requests. If created via protobuf, the
+ client must have a gRPC stub in the ``_operations_stub``
+ attribute.
+
+ :type caller_metadata: dict
+ :param caller_metadata: caller-assigned metadata about the operation
+ """
+
+ target = None
+    """Instance associated with the operations: callers may set."""
+
+ response = None
+ """Response returned from completed operation.
+
+ Only one of this and :attr:`error` can be populated.
+ """
+
+ error = None
+ """Error that resulted from a failed (complete) operation.
+
+ Only one of this and :attr:`response` can be populated.
+ """
+
+ metadata = None
+ """Metadata about the current operation (as a protobuf).
+
+ Code that uses operations must register the metadata types (via
+ :func:`register_type`) to ensure that the metadata fields can be
+ converted into the correct types.
+ """
+
+ _from_grpc = True
+
+ def __init__(self, name, client, **caller_metadata):
+ self.name = name
+ self.client = client
+ self.caller_metadata = caller_metadata.copy()
+ self._complete = False
+
+ @classmethod
+ def from_pb(cls, operation_pb, client, **caller_metadata):
+ """Factory: construct an instance from a protobuf.
+
+ :type operation_pb:
+ :class:`~google.longrunning.operations_pb2.Operation`
+ :param operation_pb: Protobuf to be parsed.
+
+        :type client: object (must provide an ``_operations_stub`` accessor)
+ :param client: The client used to poll for the status of the operation.
+
+ :type caller_metadata: dict
+ :param caller_metadata: caller-assigned metadata about the operation
+
+ :rtype: :class:`Operation`
+ :returns: new instance, with attributes based on the protobuf.
+ """
+ result = cls(operation_pb.name, client, **caller_metadata)
+ result._update_state(operation_pb)
+ result._from_grpc = True
+ return result
+
+ @classmethod
+ def from_dict(cls, operation, client, **caller_metadata):
+ """Factory: construct an instance from a dictionary.
+
+ :type operation: dict
+ :param operation: Operation as a JSON object.
+
+ :type client: :class:`~google.cloud.client.Client`
+ :param client: The client used to poll for the status of the operation.
+
+ :type caller_metadata: dict
+ :param caller_metadata: caller-assigned metadata about the operation
+
+ :rtype: :class:`Operation`
+ :returns: new instance, with attributes based on the protobuf.
+ """
+ operation_pb = json_format.ParseDict(operation, operations_pb2.Operation())
+ result = cls(operation_pb.name, client, **caller_metadata)
+ result._update_state(operation_pb)
+ result._from_grpc = False
+ return result
+
+ @property
+ def complete(self):
+ """Has the operation already completed?
+
+ :rtype: bool
+        :returns: True if already completed, else False.
+ """
+ return self._complete
+
+ def _get_operation_rpc(self):
+ """Polls the status of the current operation.
+
+ Uses gRPC request to check.
+
+ :rtype: :class:`~google.longrunning.operations_pb2.Operation`
+ :returns: The latest status of the current operation.
+ """
+ request_pb = operations_pb2.GetOperationRequest(name=self.name)
+ return self.client._operations_stub.GetOperation(request_pb)
+
+ def _get_operation_http(self):
+ """Checks the status of the current operation.
+
+ Uses HTTP request to check.
+
+ :rtype: :class:`~google.longrunning.operations_pb2.Operation`
+ :returns: The latest status of the current operation.
+ """
+ path = "operations/%s" % (self.name,)
+ api_response = self.client._connection.api_request(method="GET", path=path)
+ return json_format.ParseDict(api_response, operations_pb2.Operation())
+
+ def _get_operation(self):
+ """Checks the status of the current operation.
+
+ :rtype: :class:`~google.longrunning.operations_pb2.Operation`
+ :returns: The latest status of the current operation.
+ """
+ if self._from_grpc:
+ return self._get_operation_rpc()
+ else:
+ return self._get_operation_http()
+
+ def _update_state(self, operation_pb):
+ """Update the state of the current object based on operation.
+
+ :type operation_pb:
+ :class:`~google.longrunning.operations_pb2.Operation`
+ :param operation_pb: Protobuf to be parsed.
+ """
+ if operation_pb.done:
+ self._complete = True
+
+ if operation_pb.HasField("metadata"):
+ self.metadata = _from_any(operation_pb.metadata)
+
+ result_type = operation_pb.WhichOneof("result")
+ if result_type == "error":
+ self.error = operation_pb.error
+ elif result_type == "response":
+ self.response = _from_any(operation_pb.response)
+
+ def poll(self):
+ """Check if the operation has finished.
+
+ :rtype: bool
+ :returns: A boolean indicating if the current operation has completed.
+        :raises ValueError: if the operation has already completed.
+ """
+ if self.complete:
+ raise ValueError("The operation has completed.")
+
+ operation_pb = self._get_operation()
+ self._update_state(operation_pb)
+
+ return self.complete
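+
+
+# Illustrative polling sketch (``my_pb2``, ``operation_pb`` and ``client`` are
+# placeholders; for the gRPC path the client must expose an
+# ``_operations_stub`` attribute):
+#
+#     import time
+#     register_type(my_pb2.MyMetadata)
+#     register_type(my_pb2.MyResponse)
+#     operation = Operation.from_pb(operation_pb, client)
+#     while not operation.poll():
+#         time.sleep(1)
+#     result = operation.response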
diff --git a/venv/Lib/site-packages/google/cloud/storage/__init__.py b/venv/Lib/site-packages/google/cloud/storage/__init__.py
new file mode 100644
index 000000000..2a9629dfb
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/storage/__init__.py
@@ -0,0 +1,45 @@
+# Copyright 2014 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Shortcut methods for getting set up with Google Cloud Storage.
+
+You'll typically use these to get started with the API:
+
+.. literalinclude:: snippets.py
+ :start-after: [START storage_get_started]
+ :end-before: [END storage_get_started]
+ :dedent: 4
+
+The main concepts with this API are:
+
+- :class:`~google.cloud.storage.bucket.Bucket` which represents a particular
+ bucket (akin to a mounted disk on a computer).
+
+- :class:`~google.cloud.storage.blob.Blob` which represents a pointer to a
+ particular entity in Cloud Storage (akin to a file path on a remote
+ machine).
+"""
+
+
+from pkg_resources import get_distribution
+
+__version__ = get_distribution("google-cloud-storage").version
+
+from google.cloud.storage.batch import Batch
+from google.cloud.storage.blob import Blob
+from google.cloud.storage.bucket import Bucket
+from google.cloud.storage.client import Client
+
+
+__all__ = ["__version__", "Batch", "Blob", "Bucket", "Client"]
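+
+# Minimal end-to-end sketch (bucket and file names are placeholders):
+#
+#     from google.cloud import storage
+#     client = storage.Client()
+#     bucket = client.bucket("my-bucket")
+#     blob = bucket.blob("remote/path.txt")
+#     blob.upload_from_filename("local.txt")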
diff --git a/venv/Lib/site-packages/google/cloud/storage/_helpers.py b/venv/Lib/site-packages/google/cloud/storage/_helpers.py
new file mode 100644
index 000000000..a1075eac7
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/storage/_helpers.py
@@ -0,0 +1,514 @@
+# Copyright 2014 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helper functions for Cloud Storage utility classes.
+
+These are *not* part of the API.
+"""
+
+import base64
+from hashlib import md5
+from datetime import datetime
+import os
+
+from six.moves.urllib.parse import urlsplit
+from google.cloud.storage.constants import _DEFAULT_TIMEOUT
+
+
+STORAGE_EMULATOR_ENV_VAR = "STORAGE_EMULATOR_HOST"
+"""Environment variable defining host for Storage emulator."""
+
+_DEFAULT_STORAGE_HOST = u"https://storage.googleapis.com"
+
+# generation match parameters in camel and snake cases
+_GENERATION_MATCH_PARAMETERS = (
+ ("if_generation_match", "ifGenerationMatch"),
+ ("if_generation_not_match", "ifGenerationNotMatch"),
+ ("if_metageneration_match", "ifMetagenerationMatch"),
+ ("if_metageneration_not_match", "ifMetagenerationNotMatch"),
+ ("if_source_generation_match", "ifSourceGenerationMatch"),
+ ("if_source_generation_not_match", "ifSourceGenerationNotMatch"),
+ ("if_source_metageneration_match", "ifSourceMetagenerationMatch"),
+ ("if_source_metageneration_not_match", "ifSourceMetagenerationNotMatch"),
+)
+
+
+def _get_storage_host():
+ return os.environ.get(STORAGE_EMULATOR_ENV_VAR, _DEFAULT_STORAGE_HOST)
+
+
+def _validate_name(name):
+ """Pre-flight ``Bucket`` name validation.
+
+ :type name: str or :data:`NoneType`
+ :param name: Proposed bucket name.
+
+ :rtype: str or :data:`NoneType`
+ :returns: ``name`` if valid.
+ """
+ if name is None:
+ return
+
+ # The first and last characters must be alphanumeric.
+ if not all([name[0].isalnum(), name[-1].isalnum()]):
+ raise ValueError("Bucket names must start and end with a number or letter.")
+ return name
+
+
+class _PropertyMixin(object):
+ """Abstract mixin for cloud storage classes with associated properties.
+
+ Non-abstract subclasses should implement:
+ - path
+ - client
+ - user_project
+
+ :type name: str
+ :param name: The name of the object. Bucket names must start and end with a
+ number or letter.
+ """
+
+ def __init__(self, name=None):
+ self.name = name
+ self._properties = {}
+ self._changes = set()
+
+ @property
+ def path(self):
+ """Abstract getter for the object path."""
+ raise NotImplementedError
+
+ @property
+ def client(self):
+ """Abstract getter for the object client."""
+ raise NotImplementedError
+
+ @property
+ def user_project(self):
+ """Abstract getter for the object user_project."""
+ raise NotImplementedError
+
+ def _require_client(self, client):
+ """Check client or verify over-ride.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current object.
+
+ :rtype: :class:`google.cloud.storage.client.Client`
+ :returns: The client passed in or the currently bound client.
+ """
+ if client is None:
+ client = self.client
+ return client
+
+ def _encryption_headers(self):
+ """Return any encryption headers needed to fetch the object.
+
+ .. note::
+ Defined here because :meth:`reload` calls it, but this method is
+ really only relevant for :class:`~google.cloud.storage.blob.Blob`.
+
+ :rtype: dict
+ :returns: a mapping of encryption-related headers.
+ """
+ return {}
+
+ @property
+ def _query_params(self):
+ """Default query parameters."""
+ params = {}
+ if self.user_project is not None:
+ params["userProject"] = self.user_project
+ return params
+
+ def reload(
+ self,
+ client=None,
+ projection="noAcl",
+ timeout=_DEFAULT_TIMEOUT,
+ if_generation_match=None,
+ if_generation_not_match=None,
+ if_metageneration_match=None,
+ if_metageneration_not_match=None,
+ ):
+ """Reload properties from Cloud Storage.
+
+ If :attr:`user_project` is set, bills the API request to that project.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current object.
+
+ :type projection: str
+ :param projection: (Optional) If used, must be 'full' or 'noAcl'.
+ Defaults to ``'noAcl'``. Specifies the set of
+ properties to return.
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :type if_generation_match: long
+ :param if_generation_match: (Optional) Make the operation conditional on whether
+ the blob's current generation matches the given value.
+ Setting to 0 makes the operation succeed only if there
+ are no live versions of the blob.
+
+ :type if_generation_not_match: long
+ :param if_generation_not_match: (Optional) Make the operation conditional on whether
+ the blob's current generation does not match the given
+ value. If no live blob exists, the precondition fails.
+ Setting to 0 makes the operation succeed only if there
+ is a live version of the blob.
+
+ :type if_metageneration_match: long
+ :param if_metageneration_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration matches the given value.
+
+ :type if_metageneration_not_match: long
+ :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration does not match the given value.
+ """
+ client = self._require_client(client)
+ query_params = self._query_params
+ # Pass only '?projection=noAcl' here because 'acl' and related
+ # are handled via custom endpoints.
+ query_params["projection"] = projection
+ _add_generation_match_parameters(
+ query_params,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ )
+ api_response = client._connection.api_request(
+ method="GET",
+ path=self.path,
+ query_params=query_params,
+ headers=self._encryption_headers(),
+ _target_object=self,
+ timeout=timeout,
+ )
+ self._set_properties(api_response)
+
+ def _patch_property(self, name, value):
+ """Update field of this object's properties.
+
+ This method will only update the field provided and will not
+ touch the other fields.
+
+ It **will not** reload the properties from the server. The behavior is
+ local only and syncing occurs via :meth:`patch`.
+
+ :type name: str
+ :param name: The field name to update.
+
+ :type value: object
+ :param value: The value being updated.
+ """
+ self._changes.add(name)
+ self._properties[name] = value
+
+ def _set_properties(self, value):
+ """Set the properties for the current object.
+
+ :type value: dict or :class:`google.cloud.storage.batch._FutureDict`
+ :param value: The properties to be set.
+ """
+ self._properties = value
+ # If the values are reset, the changes must as well.
+ self._changes = set()
+
+ def patch(
+ self,
+ client=None,
+ timeout=_DEFAULT_TIMEOUT,
+ if_generation_match=None,
+ if_generation_not_match=None,
+ if_metageneration_match=None,
+ if_metageneration_not_match=None,
+ ):
+ """Sends all changed properties in a PATCH request.
+
+ Updates the ``_properties`` with the response from the backend.
+
+ If :attr:`user_project` is set, bills the API request to that project.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current object.
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :type if_generation_match: long
+ :param if_generation_match: (Optional) Make the operation conditional on whether
+ the blob's current generation matches the given value.
+ Setting to 0 makes the operation succeed only if there
+ are no live versions of the blob.
+
+ :type if_generation_not_match: long
+ :param if_generation_not_match: (Optional) Make the operation conditional on whether
+ the blob's current generation does not match the given
+ value. If no live blob exists, the precondition fails.
+ Setting to 0 makes the operation succeed only if there
+ is a live version of the blob.
+
+ :type if_metageneration_match: long
+ :param if_metageneration_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration matches the given value.
+
+ :type if_metageneration_not_match: long
+ :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration does not match the given value.
+ """
+ client = self._require_client(client)
+ query_params = self._query_params
+        # Pass '?projection=full' here because 'PATCH' is documented
+        # not to work properly w/ 'noAcl'.
+ query_params["projection"] = "full"
+ _add_generation_match_parameters(
+ query_params,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ )
+ update_properties = {key: self._properties[key] for key in self._changes}
+
+ # Make the API call.
+ api_response = client._connection.api_request(
+ method="PATCH",
+ path=self.path,
+ data=update_properties,
+ query_params=query_params,
+ _target_object=self,
+ timeout=timeout,
+ )
+ self._set_properties(api_response)
+
+ def update(
+ self,
+ client=None,
+ timeout=_DEFAULT_TIMEOUT,
+ if_generation_match=None,
+ if_generation_not_match=None,
+ if_metageneration_match=None,
+ if_metageneration_not_match=None,
+ ):
+ """Sends all properties in a PUT request.
+
+ Updates the ``_properties`` with the response from the backend.
+
+ If :attr:`user_project` is set, bills the API request to that project.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current object.
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :type if_generation_match: long
+ :param if_generation_match: (Optional) Make the operation conditional on whether
+ the blob's current generation matches the given value.
+ Setting to 0 makes the operation succeed only if there
+ are no live versions of the blob.
+
+ :type if_generation_not_match: long
+ :param if_generation_not_match: (Optional) Make the operation conditional on whether
+ the blob's current generation does not match the given
+ value. If no live blob exists, the precondition fails.
+ Setting to 0 makes the operation succeed only if there
+ is a live version of the blob.
+
+ :type if_metageneration_match: long
+ :param if_metageneration_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration matches the given value.
+
+ :type if_metageneration_not_match: long
+ :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration does not match the given value.
+ """
+ client = self._require_client(client)
+
+ query_params = self._query_params
+ query_params["projection"] = "full"
+ _add_generation_match_parameters(
+ query_params,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ )
+ api_response = client._connection.api_request(
+ method="PUT",
+ path=self.path,
+ data=self._properties,
+ query_params=query_params,
+ _target_object=self,
+ timeout=timeout,
+ )
+ self._set_properties(api_response)
+
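+# Illustrative contrast (``bucket`` stands in for any ``_PropertyMixin``
+# subclass instance): ``patch`` sends only the fields recorded in
+# ``_changes``, while ``update`` sends the full ``_properties`` map.
+#
+#     bucket._patch_property("storageClass", "COLDLINE")  # one pending change
+#     bucket.patch()   # PATCH request carrying just {'storageClass': ...}
+#     bucket.update()  # PUT request carrying every known property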
+
+def _scalar_property(fieldname):
+ """Create a property descriptor around the :class:`_PropertyMixin` helpers.
+ """
+
+ def _getter(self):
+ """Scalar property getter."""
+ return self._properties.get(fieldname)
+
+ def _setter(self, value):
+ """Scalar property setter."""
+ self._patch_property(fieldname, value)
+
+ return property(_getter, _setter)
+
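+# Typical use (mirroring how ``Blob`` and ``Bucket`` bind JSON API fields):
+#
+#     class Example(_PropertyMixin):
+#         cache_control = _scalar_property("cacheControl")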
+
+def _write_buffer_to_hash(buffer_object, hash_obj, digest_block_size=8192):
+ """Read blocks from a buffer and update a hash with them.
+
+ :type buffer_object: bytes buffer
+ :param buffer_object: Buffer containing bytes used to update a hash object.
+
+ :type hash_obj: object that implements update
+ :param hash_obj: A hash object (MD5 or CRC32-C).
+
+ :type digest_block_size: int
+ :param digest_block_size: The block size to write to the hash.
+ Defaults to 8192.
+ """
+ block = buffer_object.read(digest_block_size)
+
+ while len(block) > 0:
+ hash_obj.update(block)
+ # Update the block for the next iteration.
+ block = buffer_object.read(digest_block_size)
+
+
+def _base64_md5hash(buffer_object):
+ """Get MD5 hash of bytes (as base64).
+
+ :type buffer_object: bytes buffer
+ :param buffer_object: Buffer containing bytes used to compute an MD5
+ hash (as base64).
+
+ :rtype: str
+ :returns: A base64 encoded digest of the MD5 hash.
+ """
+ hash_obj = md5()
+ _write_buffer_to_hash(buffer_object, hash_obj)
+ digest_bytes = hash_obj.digest()
+ return base64.b64encode(digest_bytes)
+
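+# Deterministic example (shown for illustration):
+#
+#     import io
+#     _base64_md5hash(io.BytesIO(b"hello"))  # -> b'XUFAKrxLKna5cZ2REBfFkg=='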
+
+def _convert_to_timestamp(value):
+ """Convert non-none datetime to timestamp.
+
+ :type value: :class:`datetime.datetime`
+ :param value: The datetime to convert.
+
+ :rtype: int
+ :returns: The timestamp.
+ """
+ utc_naive = value.replace(tzinfo=None) - value.utcoffset()
+ mtime = (utc_naive - datetime(1970, 1, 1)).total_seconds()
+ return mtime
+
+
+def _add_generation_match_parameters(parameters, **match_parameters):
+ """Add generation match parameters into the given parameters list.
+
+ :type parameters: list or dict
+ :param parameters: Parameters list or dict.
+
+ :type match_parameters: dict
+ :param match_parameters: if*generation*match parameters to add.
+
+ :raises: :exc:`ValueError` if ``parameters`` is not a ``list()``
+ or a ``dict()``.
+ """
+ for snakecase_name, camelcase_name in _GENERATION_MATCH_PARAMETERS:
+ value = match_parameters.get(snakecase_name)
+
+ if value is not None:
+ if isinstance(parameters, list):
+ parameters.append((camelcase_name, value))
+
+ elif isinstance(parameters, dict):
+ parameters[camelcase_name] = value
+
+ else:
+ raise ValueError(
+ "`parameters` argument should be a dict() or a list()."
+ )
+
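+# For example, with a dict of parameters:
+#
+#     query_params = {}
+#     _add_generation_match_parameters(query_params, if_generation_match=0)
+#     # query_params == {'ifGenerationMatch': 0}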
+
+def _raise_if_more_than_one_set(**kwargs):
+ """Raise ``ValueError`` exception if more than one parameter was set.
+
+    :type kwargs: dict
+    :param kwargs: Mapping of parameter names to values; ``None`` means unset.
+
+ :raises: :class:`~ValueError` containing the fields that were set
+ """
+ if sum(arg is not None for arg in kwargs.values()) > 1:
+ escaped_keys = ["'%s'" % name for name in kwargs.keys()]
+
+ keys_but_last = ", ".join(escaped_keys[:-1])
+ last_key = escaped_keys[-1]
+
+ msg = "Pass at most one of {keys_but_last} and {last_key}".format(
+ keys_but_last=keys_but_last, last_key=last_key
+ )
+
+ raise ValueError(msg)
+
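+# For example:
+#
+#     _raise_if_more_than_one_set(foo=1, bar=None)  # no error
+#     _raise_if_more_than_one_set(foo=1, bar=2)
+#     # ValueError: Pass at most one of 'foo' and 'bar'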
+
+def _bucket_bound_hostname_url(host, scheme=None):
+ """Helper to build bucket bound hostname URL.
+
+ :type host: str
+ :param host: Host name.
+
+ :type scheme: str
+ :param scheme: (Optional) Web scheme. If passed, use it
+ as a scheme in the result URL.
+
+ :rtype: str
+ :returns: A bucket bound hostname URL.
+ """
+ url_parts = urlsplit(host)
+ if url_parts.scheme and url_parts.netloc:
+ return host
+
+ return "{scheme}://{host}/".format(scheme=scheme, host=host)
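+
+# For example:
+#
+#     _bucket_bound_hostname_url("cdn.example.com", scheme="https")
+#     # -> 'https://cdn.example.com/'
+#     _bucket_bound_hostname_url("https://cdn.example.com")
+#     # -> 'https://cdn.example.com'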
diff --git a/venv/Lib/site-packages/google/cloud/storage/_http.py b/venv/Lib/site-packages/google/cloud/storage/_http.py
new file mode 100644
index 000000000..032f70e02
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/storage/_http.py
@@ -0,0 +1,48 @@
+# Copyright 2014 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Create / interact with Google Cloud Storage connections."""
+
+from google.cloud import _http
+
+from google.cloud.storage import __version__
+
+
+class Connection(_http.JSONConnection):
+ """A connection to Google Cloud Storage via the JSON REST API.
+
+ :type client: :class:`~google.cloud.storage.client.Client`
+ :param client: The client that owns the current connection.
+
+ :type client_info: :class:`~google.api_core.client_info.ClientInfo`
+ :param client_info: (Optional) instance used to generate user agent.
+ """
+
+ DEFAULT_API_ENDPOINT = "https://storage.googleapis.com"
+
+ def __init__(self, client, client_info=None, api_endpoint=DEFAULT_API_ENDPOINT):
+ super(Connection, self).__init__(client, client_info)
+ self.API_BASE_URL = api_endpoint
+ self._client_info.client_library_version = __version__
+
+ # TODO: When metrics all use gccl, this should be removed #9552
+ if self._client_info.user_agent is None: # pragma: no branch
+ self._client_info.user_agent = ""
+ self._client_info.user_agent += " gcloud-python/{} ".format(__version__)
+
+ API_VERSION = "v1"
+ """The version of the API, used in building the API call's URL."""
+
+ API_URL_TEMPLATE = "{api_base_url}/storage/{api_version}{path}"
+ """A template for the URL of a particular API call."""
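+
+# Illustrative URL construction from the class constants:
+#
+#     Connection.API_URL_TEMPLATE.format(
+#         api_base_url=Connection.DEFAULT_API_ENDPOINT,
+#         api_version=Connection.API_VERSION,
+#         path="/b/my-bucket",
+#     )
+#     # -> 'https://storage.googleapis.com/storage/v1/b/my-bucket'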
diff --git a/venv/Lib/site-packages/google/cloud/storage/_signing.py b/venv/Lib/site-packages/google/cloud/storage/_signing.py
new file mode 100644
index 000000000..1382ebc77
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/storage/_signing.py
@@ -0,0 +1,720 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import base64
+import binascii
+import collections
+import datetime
+import hashlib
+import json
+
+import six
+
+import google.auth.credentials
+
+from google.auth import exceptions
+from google.auth.transport import requests
+from google.cloud import _helpers
+
+
+NOW = datetime.datetime.utcnow # To be replaced by tests.
+
+SERVICE_ACCOUNT_URL = (
+ "https://googleapis.dev/python/google-api-core/latest/"
+ "auth.html#setting-up-a-service-account"
+)
+
+
+def ensure_signed_credentials(credentials):
+ """Raise AttributeError if the credentials are unsigned.
+
+ :type credentials: :class:`google.auth.credentials.Signing`
+ :param credentials: The credentials used to create a private key
+ for signing text.
+
+ :raises: :exc:`AttributeError` if credentials is not an instance
+ of :class:`google.auth.credentials.Signing`.
+ """
+ if not isinstance(credentials, google.auth.credentials.Signing):
+        raise AttributeError(
+            "You need a private key to sign credentials. "
+            "The credentials you are currently using ({}) "
+            "contain just a token. See {} for more "
+            "details.".format(type(credentials), SERVICE_ACCOUNT_URL)
+        )
+
+
+def get_signed_query_params_v2(credentials, expiration, string_to_sign):
+ """Gets query parameters for creating a signed URL.
+
+ :type credentials: :class:`google.auth.credentials.Signing`
+ :param credentials: The credentials used to create a private key
+ for signing text.
+
+ :type expiration: int or long
+ :param expiration: When the signed URL should expire.
+
+ :type string_to_sign: str
+ :param string_to_sign: The string to be signed by the credentials.
+
+ :raises: :exc:`AttributeError` if credentials is not an instance
+ of :class:`google.auth.credentials.Signing`.
+
+ :rtype: dict
+ :returns: Query parameters matching the signing credentials with a
+ signed payload.
+ """
+ ensure_signed_credentials(credentials)
+ signature_bytes = credentials.sign_bytes(string_to_sign)
+ signature = base64.b64encode(signature_bytes)
+ service_account_name = credentials.signer_email
+ return {
+ "GoogleAccessId": service_account_name,
+ "Expires": expiration,
+ "Signature": signature,
+ }
+
+
+def get_expiration_seconds_v2(expiration):
+ """Convert 'expiration' to a number of seconds in the future.
+
+ :type expiration: Union[Integer, datetime.datetime, datetime.timedelta]
+ :param expiration: Point in time when the signed URL should expire. If
+ a ``datetime`` instance is passed without an explicit
+ ``tzinfo`` set, it will be assumed to be ``UTC``.
+
+ :raises: :exc:`TypeError` when expiration is not a valid type.
+
+ :rtype: int
+ :returns: a timestamp as an absolute number of seconds since epoch.
+ """
+ # If it's a timedelta, add it to `now` in UTC.
+ if isinstance(expiration, datetime.timedelta):
+ now = NOW().replace(tzinfo=_helpers.UTC)
+ expiration = now + expiration
+
+ # If it's a datetime, convert to a timestamp.
+ if isinstance(expiration, datetime.datetime):
+ micros = _helpers._microseconds_from_datetime(expiration)
+ expiration = micros // 10 ** 6
+
+ if not isinstance(expiration, six.integer_types):
+ raise TypeError(
+ "Expected an integer timestamp, datetime, or "
+ "timedelta. Got %s" % type(expiration)
+ )
+ return expiration
+
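+# For example, an integer timestamp passes through unchanged, while a
+# ``timedelta`` is resolved against the current UTC time:
+#
+#     get_expiration_seconds_v2(1609459200)  # -> 1609459200
+#     get_expiration_seconds_v2(datetime.timedelta(hours=1))
+#     # -> epoch seconds for roughly one hour from now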
+
+_EXPIRATION_TYPES = six.integer_types + (datetime.datetime, datetime.timedelta)
+
+
+def get_expiration_seconds_v4(expiration):
+ """Convert 'expiration' to a number of seconds offset from the current time.
+
+ :type expiration: Union[Integer, datetime.datetime, datetime.timedelta]
+ :param expiration: Point in time when the signed URL should expire. If
+ a ``datetime`` instance is passed without an explicit
+ ``tzinfo`` set, it will be assumed to be ``UTC``.
+
+ :raises: :exc:`TypeError` when expiration is not a valid type.
+ :raises: :exc:`ValueError` when expiration is too large.
+ :rtype: Integer
+ :returns: seconds in the future when the signed URL will expire
+ """
+ if not isinstance(expiration, _EXPIRATION_TYPES):
+ raise TypeError(
+ "Expected an integer timestamp, datetime, or "
+ "timedelta. Got %s" % type(expiration)
+ )
+
+ now = NOW().replace(tzinfo=_helpers.UTC)
+
+ if isinstance(expiration, six.integer_types):
+ seconds = expiration
+
+ if isinstance(expiration, datetime.datetime):
+
+ if expiration.tzinfo is None:
+ expiration = expiration.replace(tzinfo=_helpers.UTC)
+
+ expiration = expiration - now
+
+ if isinstance(expiration, datetime.timedelta):
+ seconds = int(expiration.total_seconds())
+
+ if seconds > SEVEN_DAYS:
+        raise ValueError(
+            "Max allowed expiration interval is seven days "
+            "({} seconds).".format(SEVEN_DAYS)
+        )
+
+ return seconds
+
+
+def get_canonical_headers(headers):
+ """Canonicalize headers for signing.
+
+ See:
+ https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers
+
+ :type headers: Union[dict|List(Tuple(str,str))]
+ :param headers:
+ (Optional) Additional HTTP headers to be included as part of the
+ signed URLs. See:
+ https://cloud.google.com/storage/docs/xml-api/reference-headers
+ Requests using the signed URL *must* pass the specified header
+ (name and value) with each request for the URL.
+
+    :rtype: Tuple[List[str], List[Tuple[str, str]]]
+    :returns: Canonical header lines and ordered (name, value) pairs,
+        normalized / sorted per the URL referenced above.
+ """
+ if headers is None:
+ headers = []
+ elif isinstance(headers, dict):
+ headers = list(headers.items())
+
+ if not headers:
+ return [], []
+
+ normalized = collections.defaultdict(list)
+ for key, val in headers:
+ key = key.lower().strip()
+ val = " ".join(val.split())
+ normalized[key].append(val)
+
+ ordered_headers = sorted((key, ",".join(val)) for key, val in normalized.items())
+
+ canonical_headers = ["{}:{}".format(*item) for item in ordered_headers]
+ return canonical_headers, ordered_headers
+
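+# For example (names lower-cased, whitespace collapsed, pairs sorted):
+#
+#     get_canonical_headers({"X-Goog-Meta-Foo": " a  b ", "Host": "example.com"})
+#     # -> (['host:example.com', 'x-goog-meta-foo:a b'],
+#     #     [('host', 'example.com'), ('x-goog-meta-foo', 'a b')])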
+
+_Canonical = collections.namedtuple(
+ "_Canonical", ["method", "resource", "query_parameters", "headers"]
+)
+
+
+def canonicalize_v2(method, resource, query_parameters, headers):
+ """Canonicalize method, resource per the V2 spec.
+
+ :type method: str
+ :param method: The HTTP verb that will be used when requesting the URL.
+ Defaults to ``'GET'``. If method is ``'RESUMABLE'`` then the
+ signature will additionally contain the `x-goog-resumable`
+ header, and the method changed to POST. See the signed URL
+ docs regarding this flow:
+ https://cloud.google.com/storage/docs/access-control/signed-urls
+
+ :type resource: str
+ :param resource: A pointer to a specific resource
+ (typically, ``/bucket-name/path/to/blob.txt``).
+
+ :type query_parameters: dict
+ :param query_parameters:
+ (Optional) Additional query parameters to be included as part of the
+ signed URLs. See:
+ https://cloud.google.com/storage/docs/xml-api/reference-headers#query
+
+ :type headers: Union[dict|List(Tuple(str,str))]
+ :param headers:
+ (Optional) Additional HTTP headers to be included as part of the
+ signed URLs. See:
+ https://cloud.google.com/storage/docs/xml-api/reference-headers
+ Requests using the signed URL *must* pass the specified header
+ (name and value) with each request for the URL.
+
+ :rtype: :class:_Canonical
+ :returns: Canonical method, resource, query_parameters, and headers.
+ """
+ headers, _ = get_canonical_headers(headers)
+
+ if method == "RESUMABLE":
+ method = "POST"
+ headers.append("x-goog-resumable:start")
+
+ if query_parameters is None:
+ return _Canonical(method, resource, [], headers)
+
+ normalized_qp = sorted(
+ (key.lower(), value and value.strip() or "")
+ for key, value in query_parameters.items()
+ )
+ encoded_qp = six.moves.urllib.parse.urlencode(normalized_qp)
+ canonical_resource = "{}?{}".format(resource, encoded_qp)
+ return _Canonical(method, canonical_resource, normalized_qp, headers)
+
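+# For example, a ``'RESUMABLE'`` request becomes a POST carrying the
+# ``x-goog-resumable:start`` header, with query parameters sorted:
+#
+#     canonicalize_v2("RESUMABLE", "/bucket/blob.txt", {"b": "2", "a": "1"}, None)
+#     # -> _Canonical(method='POST',
+#     #               resource='/bucket/blob.txt?a=1&b=2',
+#     #               query_parameters=[('a', '1'), ('b', '2')],
+#     #               headers=['x-goog-resumable:start'])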
+
+def generate_signed_url_v2(
+ credentials,
+ resource,
+ expiration,
+ api_access_endpoint="",
+ method="GET",
+ content_md5=None,
+ content_type=None,
+ response_type=None,
+ response_disposition=None,
+ generation=None,
+ headers=None,
+ query_parameters=None,
+ service_account_email=None,
+ access_token=None,
+):
+ """Generate a V2 signed URL to provide query-string auth'n to a resource.
+
+ .. note::
+
+ Assumes ``credentials`` implements the
+ :class:`google.auth.credentials.Signing` interface. Also assumes
+ ``credentials`` has a ``service_account_email`` property which
+ identifies the credentials.
+
+ .. note::
+
+ If you are on Google Compute Engine, you can't generate a signed URL.
+ Follow `Issue 922`_ for updates on this. If you'd like to be able to
+ generate a signed URL from GCE, you can use a standard service account
+ from a JSON file rather than a GCE service account.
+
+ See headers `reference`_ for more details on optional arguments.
+
+ .. _Issue 922: https://github.com/GoogleCloudPlatform/\
+ google-cloud-python/issues/922
+ .. _reference: https://cloud.google.com/storage/docs/reference-headers
+
+ :type credentials: :class:`google.auth.credentials.Signing`
+ :param credentials: Credentials object with an associated private key to
+ sign text.
+
+ :type resource: str
+ :param resource: A pointer to a specific resource
+ (typically, ``/bucket-name/path/to/blob.txt``).
+ Caller should have already URL-encoded the value.
+
+ :type expiration: Union[Integer, datetime.datetime, datetime.timedelta]
+ :param expiration: Point in time when the signed URL should expire. If
+ a ``datetime`` instance is passed without an explicit
+ ``tzinfo`` set, it will be assumed to be ``UTC``.
+
+ :type api_access_endpoint: str
+ :param api_access_endpoint: (Optional) URI base. Defaults to empty string.
+
+ :type method: str
+ :param method: The HTTP verb that will be used when requesting the URL.
+ Defaults to ``'GET'``. If method is ``'RESUMABLE'`` then the
+ signature will additionally contain the `x-goog-resumable`
+ header, and the method changed to POST. See the signed URL
+ docs regarding this flow:
+ https://cloud.google.com/storage/docs/access-control/signed-urls
+
+
+ :type content_md5: str
+ :param content_md5: (Optional) The MD5 hash of the object referenced by
+ ``resource``.
+
+ :type content_type: str
+ :param content_type: (Optional) The content type of the object referenced
+ by ``resource``.
+
+ :type response_type: str
+ :param response_type: (Optional) Content type of responses to requests for
+ the signed URL. Ignored if content_type is set on
+ object/blob metadata.
+
+ :type response_disposition: str
+ :param response_disposition: (Optional) Content disposition of responses to
+ requests for the signed URL.
+
+ :type generation: str
+ :param generation: (Optional) A value that indicates which generation of
+ the resource to fetch.
+
+ :type headers: Union[dict|List(Tuple(str,str))]
+ :param headers:
+ (Optional) Additional HTTP headers to be included as part of the
+ signed URLs. See:
+ https://cloud.google.com/storage/docs/xml-api/reference-headers
+ Requests using the signed URL *must* pass the specified header
+ (name and value) with each request for the URL.
+
+ :type service_account_email: str
+ :param service_account_email: (Optional) E-mail address of the service account.
+
+ :type access_token: str
+ :param access_token: (Optional) Access token for a service account.
+
+ :type query_parameters: dict
+ :param query_parameters:
+ (Optional) Additional query parameters to be included as part of the
+ signed URLs. See:
+ https://cloud.google.com/storage/docs/xml-api/reference-headers#query
+
+ :raises: :exc:`TypeError` when expiration is not a valid type.
+ :raises: :exc:`AttributeError` if credentials is not an instance
+ of :class:`google.auth.credentials.Signing`.
+
+ :rtype: str
+ :returns: A signed URL you can use to access the resource
+ until expiration.
+ """
+ expiration_stamp = get_expiration_seconds_v2(expiration)
+
+ canonical = canonicalize_v2(method, resource, query_parameters, headers)
+
+ # Generate the string to sign.
+ elements_to_sign = [
+ canonical.method,
+ content_md5 or "",
+ content_type or "",
+ str(expiration_stamp),
+ ]
+ elements_to_sign.extend(canonical.headers)
+ elements_to_sign.append(canonical.resource)
+ string_to_sign = "\n".join(elements_to_sign)
+
+ # Set the right query parameters.
+ if access_token and service_account_email:
+ signature = _sign_message(string_to_sign, access_token, service_account_email)
+ signed_query_params = {
+ "GoogleAccessId": service_account_email,
+ "Expires": expiration_stamp,
+ "Signature": signature,
+ }
+ else:
+ signed_query_params = get_signed_query_params_v2(
+ credentials, expiration_stamp, string_to_sign
+ )
+
+ if response_type is not None:
+ signed_query_params["response-content-type"] = response_type
+ if response_disposition is not None:
+ signed_query_params["response-content-disposition"] = response_disposition
+ if generation is not None:
+ signed_query_params["generation"] = generation
+
+ signed_query_params.update(canonical.query_parameters)
+ sorted_signed_query_params = sorted(signed_query_params.items())
+
+ # Return the built URL.
+ return "{endpoint}{resource}?{querystring}".format(
+ endpoint=api_access_endpoint,
+ resource=resource,
+ querystring=six.moves.urllib.parse.urlencode(sorted_signed_query_params),
+ )
+
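+# Illustrative call (``credentials`` must implement
+# ``google.auth.credentials.Signing``, e.g. service-account credentials;
+# names are placeholders):
+#
+#     url = generate_signed_url_v2(
+#         credentials,
+#         resource="/my-bucket/blob.txt",
+#         expiration=datetime.timedelta(hours=1),
+#         api_access_endpoint="https://storage.googleapis.com",
+#     )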
+
+SEVEN_DAYS = 7 * 24 * 60 * 60 # max age for V4 signed URLs.
+DEFAULT_ENDPOINT = "https://storage.googleapis.com"
+
+
+def generate_signed_url_v4(
+ credentials,
+ resource,
+ expiration,
+ api_access_endpoint=DEFAULT_ENDPOINT,
+ method="GET",
+ content_md5=None,
+ content_type=None,
+ response_type=None,
+ response_disposition=None,
+ generation=None,
+ headers=None,
+ query_parameters=None,
+ service_account_email=None,
+ access_token=None,
+ _request_timestamp=None, # for testing only
+):
+ """Generate a V4 signed URL to provide query-string auth'n to a resource.
+
+ .. note::
+
+ Assumes ``credentials`` implements the
+ :class:`google.auth.credentials.Signing` interface. Also assumes
+ ``credentials`` has a ``service_account_email`` property which
+ identifies the credentials.
+
+ .. note::
+
+ If you are on Google Compute Engine, you can't generate a signed URL.
+ Follow `Issue 922`_ for updates on this. If you'd like to be able to
+ generate a signed URL from GCE, you can use a standard service account
+ from a JSON file rather than a GCE service account.
+
+ See headers `reference`_ for more details on optional arguments.
+
+ .. _Issue 922: https://github.com/GoogleCloudPlatform/\
+ google-cloud-python/issues/922
+ .. _reference: https://cloud.google.com/storage/docs/reference-headers
+
+ :type credentials: :class:`google.auth.credentials.Signing`
+ :param credentials: Credentials object with an associated private key to
+ sign text.
+
+ :type resource: str
+ :param resource: A pointer to a specific resource
+ (typically, ``/bucket-name/path/to/blob.txt``).
+ Caller should have already URL-encoded the value.
+
+ :type expiration: Union[Integer, datetime.datetime, datetime.timedelta]
+ :param expiration: Point in time when the signed URL should expire. If
+ a ``datetime`` instance is passed without an explicit
+ ``tzinfo`` set, it will be assumed to be ``UTC``.
+
+ :type api_access_endpoint: str
+ :param api_access_endpoint: (Optional) URI base. Defaults to
+                                "https://storage.googleapis.com".
+
+ :type method: str
+ :param method: The HTTP verb that will be used when requesting the URL.
+ Defaults to ``'GET'``. If method is ``'RESUMABLE'`` then the
+ signature will additionally contain the `x-goog-resumable`
+ header, and the method changed to POST. See the signed URL
+ docs regarding this flow:
+ https://cloud.google.com/storage/docs/access-control/signed-urls
+
+
+ :type content_md5: str
+ :param content_md5: (Optional) The MD5 hash of the object referenced by
+ ``resource``.
+
+ :type content_type: str
+ :param content_type: (Optional) The content type of the object referenced
+ by ``resource``.
+
+ :type response_type: str
+ :param response_type: (Optional) Content type of responses to requests for
+ the signed URL. Ignored if content_type is set on
+ object/blob metadata.
+
+ :type response_disposition: str
+ :param response_disposition: (Optional) Content disposition of responses to
+ requests for the signed URL.
+
+ :type generation: str
+ :param generation: (Optional) A value that indicates which generation of
+ the resource to fetch.
+
+ :type headers: dict
+ :param headers:
+ (Optional) Additional HTTP headers to be included as part of the
+ signed URLs. See:
+ https://cloud.google.com/storage/docs/xml-api/reference-headers
+ Requests using the signed URL *must* pass the specified header
+ (name and value) with each request for the URL.
+
+ :type query_parameters: dict
+ :param query_parameters:
+ (Optional) Additional query parameters to be included as part of the
+ signed URLs. See:
+ https://cloud.google.com/storage/docs/xml-api/reference-headers#query
+
+ :type service_account_email: str
+ :param service_account_email: (Optional) E-mail address of the service account.
+
+ :type access_token: str
+ :param access_token: (Optional) Access token for a service account.
+
+ :raises: :exc:`TypeError` when expiration is not a valid type.
+ :raises: :exc:`AttributeError` if credentials is not an instance
+ of :class:`google.auth.credentials.Signing`.
+
+ :rtype: str
+ :returns: A signed URL you can use to access the resource
+ until expiration.
+ """
+ ensure_signed_credentials(credentials)
+ expiration_seconds = get_expiration_seconds_v4(expiration)
+
+ if _request_timestamp is None:
+ request_timestamp, datestamp = get_v4_now_dtstamps()
+ else:
+ request_timestamp = _request_timestamp
+ datestamp = _request_timestamp[:8]
+
+ client_email = credentials.signer_email
+ credential_scope = "{}/auto/storage/goog4_request".format(datestamp)
+ credential = "{}/{}".format(client_email, credential_scope)
+
+ if headers is None:
+ headers = {}
+
+ if content_type is not None:
+ headers["Content-Type"] = content_type
+
+ if content_md5 is not None:
+ headers["Content-MD5"] = content_md5
+
+ header_names = [key.lower() for key in headers]
+ if "host" not in header_names:
+ headers["Host"] = six.moves.urllib.parse.urlparse(api_access_endpoint).netloc
+
+ if method.upper() == "RESUMABLE":
+ method = "POST"
+ headers["x-goog-resumable"] = "start"
+
+ canonical_headers, ordered_headers = get_canonical_headers(headers)
+ canonical_header_string = (
+ "\n".join(canonical_headers) + "\n"
+ ) # Yes, Virginia, the extra newline is part of the spec.
+ signed_headers = ";".join([key for key, _ in ordered_headers])
+
+ if query_parameters is None:
+ query_parameters = {}
+ else:
+ query_parameters = {key: value or "" for key, value in query_parameters.items()}
+
+ query_parameters["X-Goog-Algorithm"] = "GOOG4-RSA-SHA256"
+ query_parameters["X-Goog-Credential"] = credential
+ query_parameters["X-Goog-Date"] = request_timestamp
+ query_parameters["X-Goog-Expires"] = expiration_seconds
+ query_parameters["X-Goog-SignedHeaders"] = signed_headers
+
+ if response_type is not None:
+ query_parameters["response-content-type"] = response_type
+
+ if response_disposition is not None:
+ query_parameters["response-content-disposition"] = response_disposition
+
+ if generation is not None:
+ query_parameters["generation"] = generation
+
+ canonical_query_string = _url_encode(query_parameters)
+
+ lowercased_headers = dict(ordered_headers)
+
+ if "x-goog-content-sha256" in lowercased_headers:
+ payload = lowercased_headers["x-goog-content-sha256"]
+ else:
+ payload = "UNSIGNED-PAYLOAD"
+
+ canonical_elements = [
+ method,
+ resource,
+ canonical_query_string,
+ canonical_header_string,
+ signed_headers,
+ payload,
+ ]
+ canonical_request = "\n".join(canonical_elements)
+
+ canonical_request_hash = hashlib.sha256(
+ canonical_request.encode("ascii")
+ ).hexdigest()
+
+ string_elements = [
+ "GOOG4-RSA-SHA256",
+ request_timestamp,
+ credential_scope,
+ canonical_request_hash,
+ ]
+ string_to_sign = "\n".join(string_elements)
+
+ if access_token and service_account_email:
+ signature = _sign_message(string_to_sign, access_token, service_account_email)
+ signature_bytes = base64.b64decode(signature)
+ signature = binascii.hexlify(signature_bytes).decode("ascii")
+ else:
+ signature_bytes = credentials.sign_bytes(string_to_sign.encode("ascii"))
+ signature = binascii.hexlify(signature_bytes).decode("ascii")
+
+ return "{}{}?{}&X-Goog-Signature={}".format(
+ api_access_endpoint, resource, canonical_query_string, signature
+ )
+
+
+def get_v4_now_dtstamps():
+ """Get current timestamp and datestamp in V4 valid format.
+
+ :rtype: str, str
+ :returns: Current timestamp, datestamp.
+ """
+ now = NOW()
+ timestamp = now.strftime("%Y%m%dT%H%M%SZ")
+ datestamp = now.date().strftime("%Y%m%d")
+ return timestamp, datestamp
+
+
+def _sign_message(message, access_token, service_account_email):
+    """Signs a message.
+
+ :type message: str
+ :param message: The message to be signed.
+
+ :type access_token: str
+ :param access_token: Access token for a service account.
+
+
+ :type service_account_email: str
+ :param service_account_email: E-mail address of the service account.
+
+ :raises: :exc:`TransportError` if an `access_token` is unauthorized.
+
+ :rtype: str
+ :returns: The signature of the message.
+
+ """
+ message = _helpers._to_bytes(message)
+
+ method = "POST"
+ url = "https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/{}:signBlob?alt=json".format(
+ service_account_email
+ )
+ headers = {
+ "Authorization": "Bearer " + access_token,
+ "Content-type": "application/json",
+ }
+ body = json.dumps({"payload": base64.b64encode(message).decode("utf-8")})
+
+ request = requests.Request()
+ response = request(url=url, method=method, body=body, headers=headers)
+
+ if response.status != six.moves.http_client.OK:
+ raise exceptions.TransportError(
+ "Error calling the IAM signBytes API: {}".format(response.data)
+ )
+
+ data = json.loads(response.data.decode("utf-8"))
+ return data["signedBlob"]
+
+
+def _url_encode(query_params):
+ """Encode query params into URL.
+
+ :type query_params: dict
+ :param query_params: Query params to be encoded.
+
+ :rtype: str
+ :returns: URL encoded query params.
+ """
+ params = [
+ "{}={}".format(_quote_param(name), _quote_param(value))
+ for name, value in query_params.items()
+ ]
+
+ return "&".join(sorted(params))
+
+
+def _quote_param(param):
+ """Quote query param.
+
+ :type param: Any
+ :param param: Query param to be encoded.
+
+ :rtype: str
+ :returns: URL encoded query param.
+ """
+ if not isinstance(param, bytes):
+ param = str(param)
+ return six.moves.urllib.parse.quote(param, safe="~")
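+
+# For example (pairs are sorted; everything outside alphanumerics and
+# ``-``, ``_``, ``.``, ``~`` is percent-encoded):
+#
+#     _url_encode({"X-Goog-Date": "20200101T000000Z", "a b": "c/d"})
+#     # -> 'X-Goog-Date=20200101T000000Z&a%20b=c%2Fd'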
diff --git a/venv/Lib/site-packages/google/cloud/storage/acl.py b/venv/Lib/site-packages/google/cloud/storage/acl.py
new file mode 100644
index 000000000..765590f94
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/storage/acl.py
@@ -0,0 +1,657 @@
+# Copyright 2014 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Manipulate access control lists that Cloud Storage provides.
+
+:class:`google.cloud.storage.bucket.Bucket` has a getter that creates
+an ACL object under the hood, and you can interact with that using
+:func:`google.cloud.storage.bucket.Bucket.acl`:
+
+.. literalinclude:: snippets.py
+ :start-after: [START client_bucket_acl]
+ :end-before: [END client_bucket_acl]
+ :dedent: 4
+
+
+Adding and removing permissions can be done with the following methods
+(in increasing order of granularity):
+
+- :func:`ACL.all`
+ corresponds to access for all users.
+- :func:`ACL.all_authenticated` corresponds
+ to access for all users that are signed into a Google account.
+- :func:`ACL.domain` corresponds to access on a
+  per Google Apps domain (i.e., ``example.com``).
+- :func:`ACL.group` corresponds to access on a
+ per group basis (either by ID or e-mail address).
+- :func:`ACL.user` corresponds to access on a
+ per user basis (either by ID or e-mail address).
+
+And you are able to ``grant`` and ``revoke`` the following roles:
+
+- **Reading**:
+ :func:`_ACLEntity.grant_read` and :func:`_ACLEntity.revoke_read`
+- **Writing**:
+ :func:`_ACLEntity.grant_write` and :func:`_ACLEntity.revoke_write`
+- **Owning**:
+ :func:`_ACLEntity.grant_owner` and :func:`_ACLEntity.revoke_owner`
+
+You can use any of these like any other factory method (these happen to
+be :class:`_ACLEntity` factories):
+
+.. literalinclude:: snippets.py
+ :start-after: [START acl_user_settings]
+ :end-before: [END acl_user_settings]
+ :dedent: 4
+
+After that, you can save any changes you make with the
+:func:`google.cloud.storage.acl.ACL.save` method:
+
+.. literalinclude:: snippets.py
+ :start-after: [START acl_save]
+ :end-before: [END acl_save]
+ :dedent: 4
+
+You can alternatively save any existing :class:`google.cloud.storage.acl.ACL`
+object (whether it was created by a factory method or not) from a
+:class:`google.cloud.storage.bucket.Bucket`:
+
+.. literalinclude:: snippets.py
+ :start-after: [START acl_save_bucket]
+ :end-before: [END acl_save_bucket]
+ :dedent: 4
+
+To get the list of ``entity`` and ``role`` for each unique pair, the
+:class:`ACL` class is iterable:
+
+.. literalinclude:: snippets.py
+ :start-after: [START acl_print]
+ :end-before: [END acl_print]
+ :dedent: 4
+
+This list of mappings can be used as the ``entity`` and ``role`` fields
+when sending metadata for ACLs to the API.
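+
+A minimal end-to-end sketch (assuming ``client`` is an authenticated
+:class:`google.cloud.storage.client.Client` and the bucket exists):
+
+.. code-block:: python
+
+    bucket = client.get_bucket("my-bucket-name")
+    acl = bucket.acl
+    acl.user("me@example.org").grant_read()
+    acl.all_authenticated().grant_write()
+    acl.save()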
+"""
+
+from google.cloud.storage.constants import _DEFAULT_TIMEOUT
+
+
+class _ACLEntity(object):
+ """Class representing a set of roles for an entity.
+
+    This is a helper class that you likely won't ever construct
+    outside of using the factory methods on the :class:`ACL` object.
+
+    :type entity_type: str
+    :param entity_type: The type of entity (i.e., 'group' or 'user').
+
+    :type identifier: str
+    :param identifier: (Optional) The ID or e-mail of the entity. Not
+                       required for the special entity types (like 'allUsers').
+ """
+
+ READER_ROLE = "READER"
+ WRITER_ROLE = "WRITER"
+ OWNER_ROLE = "OWNER"
+
+ def __init__(self, entity_type, identifier=None):
+ self.identifier = identifier
+ self.roles = set([])
+ self.type = entity_type
+
+ def __str__(self):
+ if not self.identifier:
+ return str(self.type)
+ else:
+ return "{acl.type}-{acl.identifier}".format(acl=self)
+
+ def __repr__(self):
+        return "<ACL entity: {acl} ({roles})>".format(
+            acl=self, roles=", ".join(self.roles)
+        )
+
+ def get_roles(self):
+ """Get the list of roles permitted by this entity.
+
+        :rtype: set of strings
+        :returns: The set of roles associated with this entity.
+ """
+ return self.roles
+
+ def grant(self, role):
+ """Add a role to the entity.
+
+ :type role: str
+ :param role: The role to add to the entity.
+ """
+ self.roles.add(role)
+
+ def revoke(self, role):
+ """Remove a role from the entity.
+
+ :type role: str
+ :param role: The role to remove from the entity.
+ """
+ if role in self.roles:
+ self.roles.remove(role)
+
+ def grant_read(self):
+ """Grant read access to the current entity."""
+ self.grant(_ACLEntity.READER_ROLE)
+
+ def grant_write(self):
+ """Grant write access to the current entity."""
+ self.grant(_ACLEntity.WRITER_ROLE)
+
+ def grant_owner(self):
+ """Grant owner access to the current entity."""
+ self.grant(_ACLEntity.OWNER_ROLE)
+
+ def revoke_read(self):
+ """Revoke read access from the current entity."""
+ self.revoke(_ACLEntity.READER_ROLE)
+
+ def revoke_write(self):
+ """Revoke write access from the current entity."""
+ self.revoke(_ACLEntity.WRITER_ROLE)
+
+ def revoke_owner(self):
+ """Revoke owner access from the current entity."""
+ self.revoke(_ACLEntity.OWNER_ROLE)
+
+
+class ACL(object):
+ """Container class representing a list of access controls."""
+
+ _URL_PATH_ELEM = "acl"
+ _PREDEFINED_QUERY_PARAM = "predefinedAcl"
+
+ PREDEFINED_XML_ACLS = {
+ # XML API name -> JSON API name
+ "project-private": "projectPrivate",
+ "public-read": "publicRead",
+ "public-read-write": "publicReadWrite",
+ "authenticated-read": "authenticatedRead",
+ "bucket-owner-read": "bucketOwnerRead",
+ "bucket-owner-full-control": "bucketOwnerFullControl",
+ }
+
+ PREDEFINED_JSON_ACLS = frozenset(
+ [
+ "private",
+ "projectPrivate",
+ "publicRead",
+ "publicReadWrite",
+ "authenticatedRead",
+ "bucketOwnerRead",
+ "bucketOwnerFullControl",
+ ]
+ )
+ """See
+ https://cloud.google.com/storage/docs/access-control/lists#predefined-acl
+ """
+
+ loaded = False
+
+ # Subclasses must override to provide these attributes (typically,
+ # as properties).
+ reload_path = None
+ save_path = None
+ user_project = None
+
+ def __init__(self):
+ self.entities = {}
+
+ def _ensure_loaded(self, timeout=_DEFAULT_TIMEOUT):
+ """Load if not already loaded.
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+ """
+ if not self.loaded:
+ self.reload(timeout=timeout)
+
+ @classmethod
+ def validate_predefined(cls, predefined):
+        """Ensure the predefined name is in the list of predefined JSON values.
+
+        :type predefined: str
+        :param predefined: name of a predefined acl
+
+        :rtype: str
+        :returns: validated JSON name of the predefined acl
+
+        :raises: :exc:`ValueError` if predefined is not a valid acl
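+
+        Example (XML-style aliases map to their JSON names):
+
+        >>> ACL.validate_predefined("public-read")
+        'publicRead'
+        >>> ACL.validate_predefined("publicRead")
+        'publicRead'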
+ """
+ predefined = cls.PREDEFINED_XML_ACLS.get(predefined, predefined)
+ if predefined and predefined not in cls.PREDEFINED_JSON_ACLS:
+ raise ValueError("Invalid predefined ACL: %s" % (predefined,))
+ return predefined
+
+ def reset(self):
+ """Remove all entities from the ACL, and clear the ``loaded`` flag."""
+ self.entities.clear()
+ self.loaded = False
+
+ def __iter__(self):
+ self._ensure_loaded()
+
+ for entity in self.entities.values():
+ for role in entity.get_roles():
+ if role:
+ yield {"entity": str(entity), "role": role}
+
+ def entity_from_dict(self, entity_dict):
+ """Build an _ACLEntity object from a dictionary of data.
+
+ An entity is a mutable object that represents a list of roles
+ belonging to either a user or group or the special types for all
+ users and all authenticated users.
+
+ :type entity_dict: dict
+ :param entity_dict: Dictionary full of data from an ACL lookup.
+
+ :rtype: :class:`_ACLEntity`
+ :returns: An Entity constructed from the dictionary.
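+
+        Example (a minimal sketch; ``loaded`` is forced so that no API
+        call is made):
+
+        >>> acl = ACL()
+        >>> acl.loaded = True
+        >>> entity = acl.entity_from_dict({"entity": "allUsers", "role": "READER"})
+        >>> str(entity), sorted(entity.get_roles())
+        ('allUsers', ['READER'])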
+ """
+ entity = entity_dict["entity"]
+ role = entity_dict["role"]
+
+ if entity == "allUsers":
+ entity = self.all()
+
+ elif entity == "allAuthenticatedUsers":
+ entity = self.all_authenticated()
+
+ elif "-" in entity:
+ entity_type, identifier = entity.split("-", 1)
+ entity = self.entity(entity_type=entity_type, identifier=identifier)
+
+ if not isinstance(entity, _ACLEntity):
+ raise ValueError("Invalid dictionary: %s" % entity_dict)
+
+ entity.grant(role)
+ return entity
+
+ def has_entity(self, entity):
+ """Returns whether or not this ACL has any entries for an entity.
+
+ :type entity: :class:`_ACLEntity`
+ :param entity: The entity to check for existence in this ACL.
+
+ :rtype: bool
+        :returns: True if the entity exists in the ACL.
+ """
+ self._ensure_loaded()
+ return str(entity) in self.entities
+
+ def get_entity(self, entity, default=None):
+ """Gets an entity object from the ACL.
+
+ :type entity: :class:`_ACLEntity` or string
+        :param entity: The entity to look up in the ACL.
+
+ :type default: anything
+ :param default: This value will be returned if the entity
+ doesn't exist.
+
+ :rtype: :class:`_ACLEntity`
+ :returns: The corresponding entity or the value provided
+ to ``default``.
+ """
+ self._ensure_loaded()
+ return self.entities.get(str(entity), default)
+
+ def add_entity(self, entity):
+ """Add an entity to the ACL.
+
+ :type entity: :class:`_ACLEntity`
+ :param entity: The entity to add to this ACL.
+ """
+ self._ensure_loaded()
+ self.entities[str(entity)] = entity
+
+ def entity(self, entity_type, identifier=None):
+ """Factory method for creating an Entity.
+
+ If an entity with the same type and identifier already exists,
+ this will return a reference to that entity. If not, it will
+ create a new one and add it to the list of known entities for
+ this ACL.
+
+ :type entity_type: str
+        :param entity_type: The type of entity to create
+                            (i.e., ``user``, ``group``, etc.)
+
+ :type identifier: str
+ :param identifier: The ID of the entity (if applicable).
+ This can be either an ID or an e-mail address.
+
+ :rtype: :class:`_ACLEntity`
+ :returns: A new Entity or a reference to an existing identical entity.
+ """
+ entity = _ACLEntity(entity_type=entity_type, identifier=identifier)
+ if self.has_entity(entity):
+ entity = self.get_entity(entity)
+ else:
+ self.add_entity(entity)
+ return entity
+
+ def user(self, identifier):
+ """Factory method for a user Entity.
+
+ :type identifier: str
+ :param identifier: An id or e-mail for this particular user.
+
+ :rtype: :class:`_ACLEntity`
+ :returns: An Entity corresponding to this user.
+ """
+ return self.entity("user", identifier=identifier)
+
+ def group(self, identifier):
+ """Factory method for a group Entity.
+
+ :type identifier: str
+ :param identifier: An id or e-mail for this particular group.
+
+ :rtype: :class:`_ACLEntity`
+ :returns: An Entity corresponding to this group.
+ """
+ return self.entity("group", identifier=identifier)
+
+ def domain(self, domain):
+ """Factory method for a domain Entity.
+
+ :type domain: str
+ :param domain: The domain for this entity.
+
+ :rtype: :class:`_ACLEntity`
+ :returns: An entity corresponding to this domain.
+ """
+ return self.entity("domain", identifier=domain)
+
+ def all(self):
+ """Factory method for an Entity representing all users.
+
+ :rtype: :class:`_ACLEntity`
+ :returns: An entity representing all users.
+ """
+ return self.entity("allUsers")
+
+ def all_authenticated(self):
+ """Factory method for an Entity representing all authenticated users.
+
+ :rtype: :class:`_ACLEntity`
+ :returns: An entity representing all authenticated users.
+ """
+ return self.entity("allAuthenticatedUsers")
+
+ def get_entities(self):
+ """Get a list of all Entity objects.
+
+ :rtype: list of :class:`_ACLEntity` objects
+ :returns: A list of all Entity objects.
+ """
+ self._ensure_loaded()
+ return list(self.entities.values())
+
+ @property
+ def client(self):
+ """Abstract getter for the object client."""
+ raise NotImplementedError
+
+ def _require_client(self, client):
+ """Check client or verify over-ride.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current ACL.
+
+ :rtype: :class:`google.cloud.storage.client.Client`
+ :returns: The client passed in or the currently bound client.
+ """
+ if client is None:
+ client = self.client
+ return client
+
+ def reload(self, client=None, timeout=_DEFAULT_TIMEOUT):
+ """Reload the ACL data from Cloud Storage.
+
+ If :attr:`user_project` is set, bills the API request to that project.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the ACL's parent.
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+ """
+ path = self.reload_path
+ client = self._require_client(client)
+ query_params = {}
+
+ if self.user_project is not None:
+ query_params["userProject"] = self.user_project
+
+ self.entities.clear()
+
+ found = client._connection.api_request(
+ method="GET", path=path, query_params=query_params, timeout=timeout
+ )
+ self.loaded = True
+ for entry in found.get("items", ()):
+ self.add_entity(self.entity_from_dict(entry))
+
+ def _save(self, acl, predefined, client, timeout=_DEFAULT_TIMEOUT):
+ """Helper for :meth:`save` and :meth:`save_predefined`.
+
+ :type acl: :class:`google.cloud.storage.acl.ACL`, or a compatible list.
+ :param acl: The ACL object to save. If left blank, this will save
+ current entries.
+
+ :type predefined: str
+        :param predefined: An identifier for a predefined ACL. Must be one of the
+            keys in :attr:`PREDEFINED_JSON_ACLS`. If passed, ``acl`` must be None.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the ACL's parent.
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+ """
+ query_params = {"projection": "full"}
+ if predefined is not None:
+ acl = []
+ query_params[self._PREDEFINED_QUERY_PARAM] = predefined
+
+ if self.user_project is not None:
+ query_params["userProject"] = self.user_project
+
+ path = self.save_path
+ client = self._require_client(client)
+
+ result = client._connection.api_request(
+ method="PATCH",
+ path=path,
+ data={self._URL_PATH_ELEM: list(acl)},
+ query_params=query_params,
+ timeout=timeout,
+ )
+ self.entities.clear()
+ for entry in result.get(self._URL_PATH_ELEM, ()):
+ self.add_entity(self.entity_from_dict(entry))
+ self.loaded = True
+
+ def save(self, acl=None, client=None, timeout=_DEFAULT_TIMEOUT):
+ """Save this ACL for the current bucket.
+
+ If :attr:`user_project` is set, bills the API request to that project.
+
+ :type acl: :class:`google.cloud.storage.acl.ACL`, or a compatible list.
+ :param acl: The ACL object to save. If left blank, this will save
+ current entries.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the ACL's parent.
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+ """
+ if acl is None:
+ acl = self
+ save_to_backend = acl.loaded
+ else:
+ save_to_backend = True
+
+ if save_to_backend:
+ self._save(acl, None, client, timeout=timeout)
+
+ def save_predefined(self, predefined, client=None, timeout=_DEFAULT_TIMEOUT):
+ """Save this ACL for the current bucket using a predefined ACL.
+
+ If :attr:`user_project` is set, bills the API request to that project.
+
+ :type predefined: str
+ :param predefined: An identifier for a predefined ACL. Must be one
+ of the keys in :attr:`PREDEFINED_JSON_ACLS`
+ or :attr:`PREDEFINED_XML_ACLS` (which will be
+ aliased to the corresponding JSON name).
+ If passed, `acl` must be None.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the ACL's parent.
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
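+
+        Example (a minimal sketch, assuming ``bucket`` is an existing
+        :class:`google.cloud.storage.bucket.Bucket`):
+
+        >>> bucket.acl.save_predefined("publicRead")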
+ """
+ predefined = self.validate_predefined(predefined)
+ self._save(None, predefined, client, timeout=timeout)
+
+ def clear(self, client=None, timeout=_DEFAULT_TIMEOUT):
+ """Remove all ACL entries.
+
+ If :attr:`user_project` is set, bills the API request to that project.
+
+ Note that this won't actually remove *ALL* the rules, but it
+ will remove all the non-default rules. In short, you'll still
+ have access to a bucket that you created even after you clear
+ ACL rules with this method.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the ACL's parent.
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
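+
+        Example (a minimal sketch, assuming ``bucket`` exists; only
+        non-default entries are removed):
+
+        >>> bucket.acl.clear()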
+ """
+ self.save([], client=client, timeout=timeout)
+
+
+class BucketACL(ACL):
+ """An ACL specifically for a bucket.
+
+ :type bucket: :class:`google.cloud.storage.bucket.Bucket`
+ :param bucket: The bucket to which this ACL relates.
+ """
+
+ def __init__(self, bucket):
+ super(BucketACL, self).__init__()
+ self.bucket = bucket
+
+ @property
+ def client(self):
+ """The client bound to this ACL's bucket."""
+ return self.bucket.client
+
+ @property
+ def reload_path(self):
+ """Compute the path for GET API requests for this ACL."""
+ return "%s/%s" % (self.bucket.path, self._URL_PATH_ELEM)
+
+ @property
+ def save_path(self):
+ """Compute the path for PATCH API requests for this ACL."""
+ return self.bucket.path
+
+ @property
+ def user_project(self):
+ """Compute the user project charged for API requests for this ACL."""
+ return self.bucket.user_project
+
+
+class DefaultObjectACL(BucketACL):
+ """A class representing the default object ACL for a bucket."""
+
+ _URL_PATH_ELEM = "defaultObjectAcl"
+ _PREDEFINED_QUERY_PARAM = "predefinedDefaultObjectAcl"
+
+
+class ObjectACL(ACL):
+ """An ACL specifically for a Cloud Storage object / blob.
+
+ :type blob: :class:`google.cloud.storage.blob.Blob`
+ :param blob: The blob that this ACL corresponds to.
+ """
+
+ def __init__(self, blob):
+ super(ObjectACL, self).__init__()
+ self.blob = blob
+
+ @property
+ def client(self):
+ """The client bound to this ACL's blob."""
+ return self.blob.client
+
+ @property
+ def reload_path(self):
+ """Compute the path for GET API requests for this ACL."""
+ return "%s/acl" % self.blob.path
+
+ @property
+ def save_path(self):
+ """Compute the path for PATCH API requests for this ACL."""
+ return self.blob.path
+
+ @property
+ def user_project(self):
+ """Compute the user project charged for API requests for this ACL."""
+ return self.blob.user_project
diff --git a/venv/Lib/site-packages/google/cloud/storage/batch.py b/venv/Lib/site-packages/google/cloud/storage/batch.py
new file mode 100644
index 000000000..abfc88412
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/storage/batch.py
@@ -0,0 +1,348 @@
+# Copyright 2014 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Batch updates / deletes of storage buckets / blobs.
+
+See https://cloud.google.com/storage/docs/json_api/v1/how-tos/batch
+"""
+from email.encoders import encode_noop
+from email.generator import Generator
+from email.mime.application import MIMEApplication
+from email.mime.multipart import MIMEMultipart
+from email.parser import Parser
+import io
+import json
+
+import requests
+import six
+
+from google.cloud import _helpers
+from google.cloud import exceptions
+from google.cloud.storage._http import Connection
+from google.cloud.storage.constants import _DEFAULT_TIMEOUT
+
+
+class MIMEApplicationHTTP(MIMEApplication):
+ """MIME type for ``application/http``.
+
+ Constructs payload from headers and body
+
+ :type method: str
+ :param method: HTTP method
+
+ :type uri: str
+ :param uri: URI for HTTP request
+
+ :type headers: dict
+ :param headers: HTTP headers
+
+ :type body: str
+ :param body: (Optional) HTTP payload
+
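+    Example (a sketch of the serialized subrequest payload):
+
+    >>> msg = MIMEApplicationHTTP("DELETE", "/storage/v1/b/bkt/o/obj", {}, None)
+    >>> msg.get_payload()
+    'DELETE /storage/v1/b/bkt/o/obj HTTP/1.1\r\n\r\n'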
+ """
+
+ def __init__(self, method, uri, headers, body):
+ if isinstance(body, dict):
+ body = json.dumps(body)
+ headers["Content-Type"] = "application/json"
+ headers["Content-Length"] = len(body)
+ if body is None:
+ body = ""
+ lines = ["%s %s HTTP/1.1" % (method, uri)]
+ lines.extend(
+ ["%s: %s" % (key, value) for key, value in sorted(headers.items())]
+ )
+ lines.append("")
+ lines.append(body)
+ payload = "\r\n".join(lines)
+ if six.PY2:
+ # email.message.Message is an old-style class, so we
+ # cannot use 'super()'.
+ MIMEApplication.__init__(self, payload, "http", encode_noop)
+ else: # pragma: NO COVER Python3
+ super_init = super(MIMEApplicationHTTP, self).__init__
+ super_init(payload, "http", encode_noop)
+
+
+class _FutureDict(object):
+ """Class to hold a future value for a deferred request.
+
+    Used for requests that get sent in a :class:`Batch`.
+ """
+
+ @staticmethod
+ def get(key, default=None):
+ """Stand-in for dict.get.
+
+ :type key: object
+ :param key: Hashable dictionary key.
+
+ :type default: object
+ :param default: Fallback value to dict.get.
+
+ :raises: :class:`KeyError` always since the future is intended to fail
+ as a dictionary.
+ """
+ raise KeyError("Cannot get(%r, default=%r) on a future" % (key, default))
+
+ def __getitem__(self, key):
+ """Stand-in for dict[key].
+
+ :type key: object
+ :param key: Hashable dictionary key.
+
+ :raises: :class:`KeyError` always since the future is intended to fail
+ as a dictionary.
+ """
+ raise KeyError("Cannot get item %r from a future" % (key,))
+
+ def __setitem__(self, key, value):
+ """Stand-in for dict[key] = value.
+
+ :type key: object
+ :param key: Hashable dictionary key.
+
+ :type value: object
+ :param value: Dictionary value.
+
+ :raises: :class:`KeyError` always since the future is intended to fail
+ as a dictionary.
+ """
+ raise KeyError("Cannot set %r -> %r on a future" % (key, value))
+
+
+class _FutureResponse(requests.Response):
+    """Response that returns a placeholder dictionary for a batched request."""
+
+ def __init__(self, future_dict):
+ super(_FutureResponse, self).__init__()
+ self._future_dict = future_dict
+ self.status_code = 204
+
+ def json(self):
+ return self._future_dict
+
+ @property
+ def content(self):
+ return self._future_dict
+
+
+class Batch(Connection):
+ """Proxy an underlying connection, batching up change operations.
+
+ :type client: :class:`google.cloud.storage.client.Client`
+ :param client: The client to use for making connections.
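+
+    Example (a minimal sketch; ``client``, ``bucket``, and the blob
+    names are assumptions):
+
+    >>> with client.batch():
+    ...     bucket.delete_blob("stale-1.png")
+    ...     bucket.delete_blob("stale-2.png")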
+ """
+
+ _MAX_BATCH_SIZE = 1000
+
+ def __init__(self, client):
+ super(Batch, self).__init__(client)
+ self._requests = []
+ self._target_objects = []
+
+ def _do_request(
+ self, method, url, headers, data, target_object, timeout=_DEFAULT_TIMEOUT
+ ):
+ """Override Connection: defer actual HTTP request.
+
+ Only allow up to ``_MAX_BATCH_SIZE`` requests to be deferred.
+
+ :type method: str
+ :param method: The HTTP method to use in the request.
+
+ :type url: str
+ :param url: The URL to send the request to.
+
+ :type headers: dict
+ :param headers: A dictionary of HTTP headers to send with the request.
+
+ :type data: str
+ :param data: The data to send as the body of the request.
+
+ :type target_object: object
+ :param target_object:
+ (Optional) This allows us to enable custom behavior in our batch
+ connection. Here we defer an HTTP request and complete
+ initialization of the object at a later time.
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+        :rtype: :class:`_FutureResponse`
+        :returns: The deferred response object; its contents are populated
+                  once the batch request is finished.
+ """
+ if len(self._requests) >= self._MAX_BATCH_SIZE:
+ raise ValueError(
+ "Too many deferred requests (max %d)" % self._MAX_BATCH_SIZE
+ )
+ self._requests.append((method, url, headers, data, timeout))
+ result = _FutureDict()
+ self._target_objects.append(target_object)
+ if target_object is not None:
+ target_object._properties = result
+ return _FutureResponse(result)
+
+ def _prepare_batch_request(self):
+ """Prepares headers and body for a batch request.
+
+        :rtype: tuple (dict, str, float)
+        :returns: The headers, body, and timeout of the batch request to be sent.
+ :raises: :class:`ValueError` if no requests have been deferred.
+ """
+ if len(self._requests) == 0:
+ raise ValueError("No deferred requests")
+
+ multi = MIMEMultipart()
+
+ # Use timeout of last request, default to _DEFAULT_TIMEOUT
+ timeout = _DEFAULT_TIMEOUT
+ for method, uri, headers, body, _timeout in self._requests:
+ subrequest = MIMEApplicationHTTP(method, uri, headers, body)
+ multi.attach(subrequest)
+ timeout = _timeout
+
+ # The `email` package expects to deal with "native" strings
+ if six.PY2: # pragma: NO COVER Python3
+ buf = io.BytesIO()
+ else:
+ buf = io.StringIO()
+ generator = Generator(buf, False, 0)
+ generator.flatten(multi)
+ payload = buf.getvalue()
+
+ # Strip off redundant header text
+ _, body = payload.split("\n\n", 1)
+ return dict(multi._headers), body, timeout
+
+ def _finish_futures(self, responses):
+ """Apply all the batch responses to the futures created.
+
+        :type responses: list of :class:`requests.Response` objects.
+        :param responses: List of subresponses, one for each request in
+                          the batch.
+
+ :raises: :class:`ValueError` if no requests have been deferred.
+ """
+ # If a bad status occurs, we track it, but don't raise an exception
+ # until all futures have been populated.
+ exception_args = None
+
+ if len(self._target_objects) != len(responses): # pragma: NO COVER
+ raise ValueError("Expected a response for every request.")
+
+ for target_object, subresponse in zip(self._target_objects, responses):
+ if not 200 <= subresponse.status_code < 300:
+ exception_args = exception_args or subresponse
+ elif target_object is not None:
+ try:
+ target_object._properties = subresponse.json()
+ except ValueError:
+ target_object._properties = subresponse.content
+
+ if exception_args is not None:
+ raise exceptions.from_http_response(exception_args)
+
+ def finish(self):
+ """Submit a single `multipart/mixed` request with deferred requests.
+
+        :rtype: list of :class:`requests.Response`
+        :returns: one response object per deferred request.
+ """
+ headers, body, timeout = self._prepare_batch_request()
+
+ url = "%s/batch/storage/v1" % self.API_BASE_URL
+
+ # Use the private ``_base_connection`` rather than the property
+ # ``_connection``, since the property may be this
+ # current batch.
+ response = self._client._base_connection._make_request(
+ "POST", url, data=body, headers=headers, timeout=timeout
+ )
+ responses = list(_unpack_batch_response(response))
+ self._finish_futures(responses)
+ return responses
+
+ def current(self):
+ """Return the topmost batch, or None."""
+ return self._client.current_batch
+
+ def __enter__(self):
+ self._client._push_batch(self)
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ try:
+ if exc_type is None:
+ self.finish()
+ finally:
+ self._client._pop_batch()
+
+
+def _generate_faux_mime_message(parser, response):
+ """Convert response, content -> (multipart) email.message.
+
+ Helper for _unpack_batch_response.
+ """
+    # We coerce to bytes to get consistent concatenation across
+    # Py2 and Py3. Percent formatting is insufficient since
+    # it includes the ``b''`` prefix in Py3.
+ content_type = _helpers._to_bytes(response.headers.get("content-type", ""))
+
+ faux_message = b"".join(
+ [b"Content-Type: ", content_type, b"\nMIME-Version: 1.0\n\n", response.content]
+ )
+
+ if six.PY2:
+ return parser.parsestr(faux_message)
+ else: # pragma: NO COVER Python3
+ return parser.parsestr(faux_message.decode("utf-8"))
+
+
+def _unpack_batch_response(response):
+    """Convert a batch requests.Response into a generator of subresponses.
+
+    Creates a generator of :class:`requests.Response` objects, emulating
+    the responses to :meth:`requests.Session.request`.
+
+ :type response: :class:`requests.Response`
+ :param response: HTTP response / headers from a request.
+ """
+ parser = Parser()
+ message = _generate_faux_mime_message(parser, response)
+
+ if not isinstance(message._payload, list): # pragma: NO COVER
+ raise ValueError("Bad response: not multi-part")
+
+ for subrequest in message._payload:
+ status_line, rest = subrequest._payload.split("\n", 1)
+ _, status, _ = status_line.split(" ", 2)
+ sub_message = parser.parsestr(rest)
+ payload = sub_message._payload
+ msg_headers = dict(sub_message._headers)
+ content_id = msg_headers.get("Content-ID")
+
+ subresponse = requests.Response()
+ subresponse.request = requests.Request(
+ method="BATCH", url="contentid://{}".format(content_id)
+ ).prepare()
+ subresponse.status_code = int(status)
+ subresponse.headers.update(msg_headers)
+ subresponse._content = payload.encode("utf-8")
+
+ yield subresponse
diff --git a/venv/Lib/site-packages/google/cloud/storage/blob.py b/venv/Lib/site-packages/google/cloud/storage/blob.py
new file mode 100644
index 000000000..b1e13788d
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/storage/blob.py
@@ -0,0 +1,3680 @@
+# Copyright 2014 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# pylint: disable=too-many-lines
+
+"""Create / interact with Google Cloud Storage blobs.
+
+.. _API reference docs: https://cloud.google.com/storage/docs/\
+ json_api/v1/objects
+.. _customer-supplied: https://cloud.google.com/storage/docs/\
+ encryption#customer-supplied
+.. _google-resumable-media: https://googleapis.github.io/\
+ google-resumable-media-python/latest/\
+ google.resumable_media.requests.html
+"""
+
+import base64
+import copy
+import hashlib
+from io import BytesIO
+import logging
+import mimetypes
+import os
+import re
+import warnings
+import six
+
+from six.moves.urllib.parse import parse_qsl
+from six.moves.urllib.parse import quote
+from six.moves.urllib.parse import urlencode
+from six.moves.urllib.parse import urlsplit
+from six.moves.urllib.parse import urlunsplit
+
+from google import resumable_media
+from google.resumable_media.requests import ChunkedDownload
+from google.resumable_media.requests import Download
+from google.resumable_media.requests import RawDownload
+from google.resumable_media.requests import RawChunkedDownload
+from google.resumable_media.requests import MultipartUpload
+from google.resumable_media.requests import ResumableUpload
+
+from google.api_core.iam import Policy
+from google.cloud import exceptions
+from google.cloud._helpers import _bytes_to_unicode
+from google.cloud._helpers import _datetime_to_rfc3339
+from google.cloud._helpers import _rfc3339_to_datetime
+from google.cloud._helpers import _to_bytes
+from google.cloud.exceptions import NotFound
+from google.cloud.storage._helpers import _add_generation_match_parameters
+from google.cloud.storage._helpers import _PropertyMixin
+from google.cloud.storage._helpers import _scalar_property
+from google.cloud.storage._helpers import _bucket_bound_hostname_url
+from google.cloud.storage._helpers import _convert_to_timestamp
+from google.cloud.storage._helpers import _raise_if_more_than_one_set
+from google.cloud.storage._signing import generate_signed_url_v2
+from google.cloud.storage._signing import generate_signed_url_v4
+from google.cloud.storage.acl import ACL
+from google.cloud.storage.acl import ObjectACL
+from google.cloud.storage.constants import _DEFAULT_TIMEOUT
+from google.cloud.storage.constants import ARCHIVE_STORAGE_CLASS
+from google.cloud.storage.constants import COLDLINE_STORAGE_CLASS
+from google.cloud.storage.constants import MULTI_REGIONAL_LEGACY_STORAGE_CLASS
+from google.cloud.storage.constants import NEARLINE_STORAGE_CLASS
+from google.cloud.storage.constants import REGIONAL_LEGACY_STORAGE_CLASS
+from google.cloud.storage.constants import STANDARD_STORAGE_CLASS
+
+
+_API_ACCESS_ENDPOINT = "https://storage.googleapis.com"
+_DEFAULT_CONTENT_TYPE = u"application/octet-stream"
+_DOWNLOAD_URL_TEMPLATE = u"{hostname}/download/storage/v1{path}?alt=media"
+_BASE_UPLOAD_TEMPLATE = u"{hostname}/upload/storage/v1{bucket_path}/o?uploadType="
+_MULTIPART_URL_TEMPLATE = _BASE_UPLOAD_TEMPLATE + u"multipart"
+_RESUMABLE_URL_TEMPLATE = _BASE_UPLOAD_TEMPLATE + u"resumable"
+# NOTE: "acl" is also writeable but we defer ACL management to
+# the classes in the google.cloud.storage.acl module.
+_CONTENT_TYPE_FIELD = "contentType"
+_WRITABLE_FIELDS = (
+ "cacheControl",
+ "contentDisposition",
+ "contentEncoding",
+ "contentLanguage",
+ _CONTENT_TYPE_FIELD,
+ "crc32c",
+ "md5Hash",
+ "metadata",
+ "name",
+ "storageClass",
+)
+_NUM_RETRIES_MESSAGE = (
+ "`num_retries` has been deprecated and will be removed in a future "
+ "release. The default behavior (when `num_retries` is not specified) when "
+ "a transient error (e.g. 429 Too Many Requests or 500 Internal Server "
+ "Error) occurs will be as follows: upload requests will be automatically "
+ "retried. Subsequent retries will be sent after waiting 1, 2, 4, 8, etc. "
+ "seconds (exponential backoff) until 10 minutes of wait time have "
+ "elapsed. At that point, there will be no more attempts to retry."
+)
+_READ_LESS_THAN_SIZE = (
+ "Size {:d} was specified but the file-like object only had " "{:d} bytes remaining."
+)
+_CHUNKED_DOWNLOAD_CHECKSUM_MESSAGE = (
+ "A checksum of type `{}` was requested, but checksumming is not available "
+ "for downloads when chunk_size is set."
+)
+
+
+_DEFAULT_CHUNKSIZE = 104857600 # 1024 * 1024 B * 100 = 100 MB
+_MAX_MULTIPART_SIZE = 8388608 # 8 MB
+
+
+class Blob(_PropertyMixin):
+ """A wrapper around Cloud Storage's concept of an ``Object``.
+
+ :type name: str
+ :param name: The name of the blob. This corresponds to the unique path of
+ the object in the bucket. If bytes, will be converted to a
+ unicode object. Blob / object names can contain any sequence
+ of valid unicode characters, of length 1-1024 bytes when
+ UTF-8 encoded.
+
+ :type bucket: :class:`google.cloud.storage.bucket.Bucket`
+ :param bucket: The bucket to which this blob belongs.
+
+ :type chunk_size: int
+ :param chunk_size:
+ (Optional) The size of a chunk of data whenever iterating (in bytes).
+ This must be a multiple of 256 KB per the API specification.
+
+ :type encryption_key: bytes
+ :param encryption_key:
+ (Optional) 32 byte encryption key for customer-supplied encryption.
+ See https://cloud.google.com/storage/docs/encryption#customer-supplied.
+
+ :type kms_key_name: str
+ :param kms_key_name:
+ (Optional) Resource name of Cloud KMS key used to encrypt the blob's
+ contents.
+
+ :type generation: long
+ :param generation: (Optional) If present, selects a specific revision of
+ this object.
+ """
+
+ _chunk_size = None # Default value for each instance.
+ _CHUNK_SIZE_MULTIPLE = 256 * 1024
+ """Number (256 KB, in bytes) that must divide the chunk size."""
+
+ STORAGE_CLASSES = (
+ STANDARD_STORAGE_CLASS,
+ NEARLINE_STORAGE_CLASS,
+ COLDLINE_STORAGE_CLASS,
+ ARCHIVE_STORAGE_CLASS,
+ MULTI_REGIONAL_LEGACY_STORAGE_CLASS,
+ REGIONAL_LEGACY_STORAGE_CLASS,
+ )
+ """Allowed values for :attr:`storage_class`.
+
+ See
+ https://cloud.google.com/storage/docs/json_api/v1/objects#storageClass
+ https://cloud.google.com/storage/docs/per-object-storage-class
+
+ .. note::
+ This list does not include 'DURABLE_REDUCED_AVAILABILITY', which
+ is only documented for buckets (and deprecated).
+ """
+
+ def __init__(
+ self,
+ name,
+ bucket,
+ chunk_size=None,
+ encryption_key=None,
+ kms_key_name=None,
+ generation=None,
+ ):
+ """
+ property :attr:`name`
+ Get the blob's name.
+ """
+ name = _bytes_to_unicode(name)
+ super(Blob, self).__init__(name=name)
+
+ self.chunk_size = chunk_size # Check that setter accepts value.
+ self._bucket = bucket
+ self._acl = ObjectACL(self)
+ _raise_if_more_than_one_set(
+ encryption_key=encryption_key, kms_key_name=kms_key_name
+ )
+
+ self._encryption_key = encryption_key
+
+ if kms_key_name is not None:
+ self._properties["kmsKeyName"] = kms_key_name
+
+ if generation is not None:
+ self._properties["generation"] = generation
+
+ @property
+ def bucket(self):
+ """Bucket which contains the object.
+
+ :rtype: :class:`~google.cloud.storage.bucket.Bucket`
+ :returns: The object's bucket.
+ """
+ return self._bucket
+
+ @property
+ def chunk_size(self):
+ """Get the blob's default chunk size.
+
+ :rtype: int or ``NoneType``
+ :returns: The current blob's chunk size, if it is set.
+ """
+ return self._chunk_size
+
+ @chunk_size.setter
+ def chunk_size(self, value):
+ """Set the blob's default chunk size.
+
+ :type value: int
+ :param value: (Optional) The current blob's chunk size, if it is set.
+
+ :raises: :class:`ValueError` if ``value`` is not ``None`` and is not a
+ multiple of 256 KB.
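+
+        Example (a minimal sketch; 10 MB is a valid multiple of 256 KB):
+
+        >>> blob.chunk_size = 10 * 1024 * 1024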
+ """
+ if value is not None and value > 0 and value % self._CHUNK_SIZE_MULTIPLE != 0:
+ raise ValueError(
+ "Chunk size must be a multiple of %d." % (self._CHUNK_SIZE_MULTIPLE,)
+ )
+ self._chunk_size = value
+
+ @staticmethod
+ def path_helper(bucket_path, blob_name):
+ """Relative URL path for a blob.
+
+ :type bucket_path: str
+ :param bucket_path: The URL path for a bucket.
+
+ :type blob_name: str
+ :param blob_name: The name of the blob.
+
+ :rtype: str
+ :returns: The relative URL path for ``blob_name``.
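+
+        Example (slashes in the blob name are percent-encoded):
+
+        >>> Blob.path_helper("/b/my-bucket", "photos/puppy.png")
+        '/b/my-bucket/o/photos%2Fpuppy.png'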
+ """
+ return bucket_path + "/o/" + _quote(blob_name)
+
+ @property
+ def acl(self):
+ """Create our ACL on demand."""
+ return self._acl
+
+ def __repr__(self):
+ if self.bucket:
+ bucket_name = self.bucket.name
+ else:
+ bucket_name = None
+
+        return "<Blob: %s, %s, %s>" % (bucket_name, self.name, self.generation)
+
+ @property
+ def path(self):
+ """Getter property for the URL path to this Blob.
+
+ :rtype: str
+ :returns: The URL path to this Blob.
+ """
+ if not self.name:
+ raise ValueError("Cannot determine path without a blob name.")
+
+ return self.path_helper(self.bucket.path, self.name)
+
+ @property
+ def client(self):
+ """The client bound to this blob."""
+ return self.bucket.client
+
+ @property
+ def user_project(self):
+ """Project ID billed for API requests made via this blob.
+
+ Derived from bucket's value.
+
+ :rtype: str
+ """
+ return self.bucket.user_project
+
+ def _encryption_headers(self):
+ """Return any encryption headers needed to fetch the object.
+
+ :rtype: List(Tuple(str, str))
+ :returns: a list of tuples to be passed as headers.
+ """
+ return _get_encryption_headers(self._encryption_key)
+
+ @property
+ def _query_params(self):
+ """Default query parameters."""
+ params = {}
+ if self.generation is not None:
+ params["generation"] = self.generation
+ if self.user_project is not None:
+ params["userProject"] = self.user_project
+ return params
+
+ @property
+ def public_url(self):
+ """The public URL for this blob.
+
+ Use :meth:`make_public` to enable anonymous access via the returned
+ URL.
+
+        :rtype: str
+ :returns: The public URL for this blob.
+ """
+ return "{storage_base_url}/{bucket_name}/{quoted_name}".format(
+ storage_base_url=_API_ACCESS_ENDPOINT,
+ bucket_name=self.bucket.name,
+ quoted_name=_quote(self.name, safe=b"/~"),
+ )
+
+ @classmethod
+ def from_string(cls, uri, client=None):
+        """Get a blob object from a ``gs://`` URI.
+
+        :type uri: str
+        :param uri: The blob URI, e.g. ``gs://bucket/object``.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use.
+
+ :rtype: :class:`google.cloud.storage.blob.Blob`
+ :returns: The blob object created.
+
+        Example:
+            Get a blob object by URI.
+
+            >>> from google.cloud import storage
+            >>> from google.cloud.storage.blob import Blob
+            >>> client = storage.Client()
+            >>> blob = Blob.from_string("gs://bucket/object", client=client)
+ """
+ from google.cloud.storage.bucket import Bucket
+
+ scheme, netloc, path, query, frag = urlsplit(uri)
+ if scheme != "gs":
+ raise ValueError("URI scheme must be gs")
+
+ bucket = Bucket(client, name=netloc)
+ return cls(path[1:], bucket)
+
+ def generate_signed_url(
+ self,
+ expiration=None,
+ api_access_endpoint=_API_ACCESS_ENDPOINT,
+ method="GET",
+ content_md5=None,
+ content_type=None,
+ response_disposition=None,
+ response_type=None,
+ generation=None,
+ headers=None,
+ query_parameters=None,
+ client=None,
+ credentials=None,
+ version=None,
+ service_account_email=None,
+ access_token=None,
+ virtual_hosted_style=False,
+ bucket_bound_hostname=None,
+ scheme="http",
+ ):
+ """Generates a signed URL for this blob.
+
+ .. note::
+
+            If you are on Google Compute Engine, you can't generate a signed
+            URL using a GCE service account. Follow `Issue 50`_ for updates on
+ this. If you'd like to be able to generate a signed URL from GCE,
+ you can use a standard service account from a JSON file rather
+ than a GCE service account.
+
+ .. _Issue 50: https://github.com/GoogleCloudPlatform/\
+ google-auth-library-python/issues/50
+
+ If you have a blob that you want to allow access to for a set
+ amount of time, you can use this method to generate a URL that
+ is only valid within a certain time period.
+
+        If ``bucket_bound_hostname`` is passed, it is used in place of
+        ``api_access_endpoint``; an ``https`` scheme works only when the
+        hostname is fronted by a CDN.
+
+ Example:
+ Generates a signed URL for this blob using bucket_bound_hostname and scheme.
+
+ >>> from google.cloud import storage
+ >>> client = storage.Client()
+ >>> bucket = client.get_bucket('my-bucket-name')
+ >>> blob = bucket.get_blob('my-blob-name')
+            >>> url = blob.generate_signed_url(expiration='url-expiration-time',
+            ...     bucket_bound_hostname='mydomain.tld', version='v4')
+            >>> url = blob.generate_signed_url(expiration='url-expiration-time',
+            ...     bucket_bound_hostname='mydomain.tld', version='v4',
+            ...     scheme='https')  # If using ``CDN``
+
+ This is particularly useful if you don't want publicly
+ accessible blobs, but don't want to require users to explicitly
+ log in.
+
+ :type expiration: Union[Integer, datetime.datetime, datetime.timedelta]
+ :param expiration: Point in time when the signed URL should expire. If
+ a ``datetime`` instance is passed without an explicit
+ ``tzinfo`` set, it will be assumed to be ``UTC``.
+
+ :type api_access_endpoint: str
+ :param api_access_endpoint: (Optional) URI base.
+
+ :type method: str
+ :param method: The HTTP verb that will be used when requesting the URL.
+
+ :type content_md5: str
+ :param content_md5: (Optional) The MD5 hash of the object referenced by
+ ``resource``.
+
+ :type content_type: str
+ :param content_type: (Optional) The content type of the object
+ referenced by ``resource``.
+
+ :type response_disposition: str
+ :param response_disposition: (Optional) Content disposition of
+ responses to requests for the signed URL.
+                                     For example, to have the signed URL
+                                     initiate a download of the file as
+                                     ``blob.png``, use the value
+                                     ``'attachment; filename=blob.png'``.
+
+ :type response_type: str
+ :param response_type: (Optional) Content type of responses to requests
+ for the signed URL. Ignored if content_type is
+ set on object/blob metadata.
+
+ :type generation: str
+ :param generation: (Optional) A value that indicates which generation
+ of the resource to fetch.
+
+ :type headers: dict
+ :param headers:
+ (Optional) Additional HTTP headers to be included as part of the
+ signed URLs. See:
+ https://cloud.google.com/storage/docs/xml-api/reference-headers
+ Requests using the signed URL *must* pass the specified header
+ (name and value) with each request for the URL.
+
+ :type query_parameters: dict
+ :param query_parameters:
+ (Optional) Additional query parameters to be included as part of the
+ signed URLs. See:
+ https://cloud.google.com/storage/docs/xml-api/reference-headers#query
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the blob's bucket.
+
+ :type credentials: :class:`google.auth.credentials.Credentials` or
+ :class:`NoneType`
+ :param credentials: The authorization credentials to attach to requests.
+ These credentials identify this application to the service.
+ If none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+
+ :type version: str
+ :param version: (Optional) The version of signed credential to create.
+ Must be one of 'v2' | 'v4'.
+
+ :type service_account_email: str
+ :param service_account_email: (Optional) E-mail address of the service account.
+
+ :type access_token: str
+ :param access_token: (Optional) Access token for a service account.
+
+ :type virtual_hosted_style: bool
+ :param virtual_hosted_style:
+        (Optional) If true, then construct the URL relative to the bucket's
+        virtual hostname, e.g., '<bucket-name>.storage.googleapis.com'.
+
+ :type bucket_bound_hostname: str
+ :param bucket_bound_hostname:
+        (Optional) If passed, then construct the URL relative to the bucket-bound hostname.
+        Value can be a bare hostname or include a scheme, e.g., 'example.com' or 'http://example.com'.
+ See: https://cloud.google.com/storage/docs/request-endpoints#cname
+
+ :type scheme: str
+ :param scheme:
+ (Optional) If ``bucket_bound_hostname`` is passed as a bare hostname, use
+ this value as the scheme. ``https`` will work only when using a CDN.
+ Defaults to ``"http"``.
+
+ :raises: :exc:`ValueError` when version is invalid.
+ :raises: :exc:`TypeError` when expiration is not a valid type.
+ :raises: :exc:`AttributeError` if credentials is not an instance
+ of :class:`google.auth.credentials.Signing`.
+
+ :rtype: str
+ :returns: A signed URL you can use to access the resource
+ until expiration.
+ """
+ if version is None:
+ version = "v2"
+ elif version not in ("v2", "v4"):
+ raise ValueError("'version' must be either 'v2' or 'v4'")
+
+ quoted_name = _quote(self.name, safe=b"/~")
+
+ if virtual_hosted_style:
+ api_access_endpoint = "https://{bucket_name}.storage.googleapis.com".format(
+ bucket_name=self.bucket.name
+ )
+ elif bucket_bound_hostname:
+ api_access_endpoint = _bucket_bound_hostname_url(
+ bucket_bound_hostname, scheme
+ )
+ else:
+ resource = "/{bucket_name}/{quoted_name}".format(
+ bucket_name=self.bucket.name, quoted_name=quoted_name
+ )
+
+ if virtual_hosted_style or bucket_bound_hostname:
+ resource = "/{quoted_name}".format(quoted_name=quoted_name)
+
+ if credentials is None:
+ client = self._require_client(client)
+ credentials = client._credentials
+
+ if version == "v2":
+ helper = generate_signed_url_v2
+ else:
+ helper = generate_signed_url_v4
+
+ if self._encryption_key is not None:
+ encryption_headers = _get_encryption_headers(self._encryption_key)
+ if headers is None:
+ headers = {}
+ if version == "v2":
+ # See: https://cloud.google.com/storage/docs/access-control/signed-urls-v2#about-canonical-extension-headers
+ v2_copy_only = "X-Goog-Encryption-Algorithm"
+ headers[v2_copy_only] = encryption_headers[v2_copy_only]
+ else:
+ headers.update(encryption_headers)
+
+ return helper(
+ credentials,
+ resource=resource,
+ expiration=expiration,
+ api_access_endpoint=api_access_endpoint,
+ method=method.upper(),
+ content_md5=content_md5,
+ content_type=content_type,
+ response_type=response_type,
+ response_disposition=response_disposition,
+ generation=generation,
+ headers=headers,
+ query_parameters=query_parameters,
+ service_account_email=service_account_email,
+ access_token=access_token,
+ )
+
+ def exists(
+ self,
+ client=None,
+ timeout=_DEFAULT_TIMEOUT,
+ if_generation_match=None,
+ if_generation_not_match=None,
+ if_metageneration_match=None,
+ if_metageneration_not_match=None,
+ ):
+ """Determines whether or not this blob exists.
+
+ If :attr:`user_project` is set on the bucket, bills the API request
+ to that project.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the blob's bucket.
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :type if_generation_match: long
+ :param if_generation_match: (Optional) Make the operation conditional on whether
+ the blob's current generation matches the given value.
+ Setting to 0 makes the operation succeed only if there
+ are no live versions of the blob.
+
+ :type if_generation_not_match: long
+ :param if_generation_not_match: (Optional) Make the operation conditional on whether
+ the blob's current generation does not match the given
+ value. If no live blob exists, the precondition fails.
+ Setting to 0 makes the operation succeed only if there
+ is a live version of the blob.
+
+ :type if_metageneration_match: long
+ :param if_metageneration_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration matches the given value.
+
+ :type if_metageneration_not_match: long
+ :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration does not match the given value.
+
+ :rtype: bool
+ :returns: True if the blob exists in Cloud Storage.
+ """
+ client = self._require_client(client)
+ # We only need the status code (200 or not) so we seek to
+ # minimize the returned payload.
+ query_params = self._query_params
+ query_params["fields"] = "name"
+
+ _add_generation_match_parameters(
+ query_params,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ )
+ try:
+ # We intentionally pass `_target_object=None` since fields=name
+ # would limit the local properties.
+ client._connection.api_request(
+ method="GET",
+ path=self.path,
+ query_params=query_params,
+ _target_object=None,
+ timeout=timeout,
+ )
+ # NOTE: This will not fail immediately in a batch. However, when
+ # Batch.finish() is called, the resulting `NotFound` will be
+ # raised.
+ return True
+ except NotFound:
+ return False
+
+ def delete(
+ self,
+ client=None,
+ timeout=_DEFAULT_TIMEOUT,
+ if_generation_match=None,
+ if_generation_not_match=None,
+ if_metageneration_match=None,
+ if_metageneration_not_match=None,
+ ):
+ """Deletes a blob from Cloud Storage.
+
+ If :attr:`user_project` is set on the bucket, bills the API request
+ to that project.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the blob's bucket.
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :type if_generation_match: long
+ :param if_generation_match: (Optional) Make the operation conditional on whether
+ the blob's current generation matches the given value.
+ Setting to 0 makes the operation succeed only if there
+ are no live versions of the blob.
+
+ :type if_generation_not_match: long
+ :param if_generation_not_match: (Optional) Make the operation conditional on whether
+ the blob's current generation does not match the given
+ value. If no live blob exists, the precondition fails.
+ Setting to 0 makes the operation succeed only if there
+ is a live version of the blob.
+
+ :type if_metageneration_match: long
+ :param if_metageneration_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration matches the given value.
+
+ :type if_metageneration_not_match: long
+ :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration does not match the given value.
+
+ :raises: :class:`google.cloud.exceptions.NotFound`
+ (propagated from
+ :meth:`google.cloud.storage.bucket.Bucket.delete_blob`).
+ """
+ self.bucket.delete_blob(
+ self.name,
+ client=client,
+ generation=self.generation,
+ timeout=timeout,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ )
+
+ def _get_transport(self, client):
+ """Return the client's transport.
+
+ :type client: :class:`~google.cloud.storage.client.Client`
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the blob's bucket.
+
+ :rtype transport:
+ :class:`~google.auth.transport.requests.AuthorizedSession`
+ :returns: The transport (with credentials) that will
+ make authenticated requests.
+ """
+ client = self._require_client(client)
+ return client._http
+
+ def _get_download_url(
+ self,
+ client,
+ if_generation_match=None,
+ if_generation_not_match=None,
+ if_metageneration_match=None,
+ if_metageneration_not_match=None,
+ ):
+ """Get the download URL for the current blob.
+
+ If the ``media_link`` has been loaded, it will be used, otherwise
+ the URL will be constructed from the current blob's path (and possibly
+ generation) to avoid a round trip.
+
+ :type client: :class:`~google.cloud.storage.client.Client`
+ :param client: The client to use.
+
+ :type if_generation_match: long
+ :param if_generation_match: (Optional) Make the operation conditional on whether
+ the blob's current generation matches the given value.
+ Setting to 0 makes the operation succeed only if there
+ are no live versions of the blob.
+
+ :type if_generation_not_match: long
+ :param if_generation_not_match: (Optional) Make the operation conditional on whether
+ the blob's current generation does not match the given
+ value. If no live blob exists, the precondition fails.
+ Setting to 0 makes the operation succeed only if there
+ is a live version of the blob.
+
+        :type if_metageneration_match: long
+        :param if_metageneration_match: (Optional) Make the operation conditional on whether the
+                                        blob's current metageneration matches the given value.
+
+ :type if_metageneration_not_match: long
+ :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration does not match the given value.
+
+ :rtype: str
+ :returns: The download URL for the current blob.
+ """
+ name_value_pairs = []
+ if self.media_link is None:
+ base_url = _DOWNLOAD_URL_TEMPLATE.format(
+ hostname=client._connection.API_BASE_URL, path=self.path
+ )
+ if self.generation is not None:
+ name_value_pairs.append(("generation", "{:d}".format(self.generation)))
+ else:
+ base_url = self.media_link
+
+ if self.user_project is not None:
+ name_value_pairs.append(("userProject", self.user_project))
+
+ _add_generation_match_parameters(
+ name_value_pairs,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ )
+ return _add_query_parameters(base_url, name_value_pairs)
+
+ def _extract_headers_from_download(self, response):
+ """Extract headers from a non-chunked request's http object.
+
+ This avoids the need to make a second request for commonly used
+ headers.
+
+        :type response: :class:`~requests.models.Response`
+        :param response: The server response from downloading a non-chunked file.
+ """
+ self.content_encoding = response.headers.get("Content-Encoding", None)
+ self.content_type = response.headers.get("Content-Type", None)
+ self.cache_control = response.headers.get("Cache-Control", None)
+ self.storage_class = response.headers.get("X-Goog-Storage-Class", None)
+ self.content_language = response.headers.get("Content-Language", None)
+ # 'X-Goog-Hash': 'crc32c=4gcgLQ==,md5=CS9tHYTtyFntzj7B9nkkJQ==',
+ x_goog_hash = response.headers.get("X-Goog-Hash", "")
+
+ if x_goog_hash:
+ digests = {}
+ for encoded_digest in x_goog_hash.split(","):
+ match = re.match(r"(crc32c|md5)=([\w\d/\+/]+={0,3})", encoded_digest)
+ if match:
+ method, digest = match.groups()
+ digests[method] = digest
+
+ self.crc32c = digests.get("crc32c", None)
+ self.md5_hash = digests.get("md5", None)
+
+ def _do_download(
+ self,
+ transport,
+ file_obj,
+ download_url,
+ headers,
+ start=None,
+ end=None,
+ raw_download=False,
+ timeout=_DEFAULT_TIMEOUT,
+ checksum="md5",
+ ):
+ """Perform a download without any error handling.
+
+ This is intended to be called by :meth:`download_to_file` so it can
+ be wrapped with error handling / remapping.
+
+ :type transport:
+ :class:`~google.auth.transport.requests.AuthorizedSession`
+ :param transport: The transport (with credentials) that will
+ make authenticated requests.
+
+ :type file_obj: file
+ :param file_obj: A file handle to which to write the blob's data.
+
+ :type download_url: str
+ :param download_url: The URL where the media can be accessed.
+
+ :type headers: dict
+ :param headers: Headers to be sent with the request(s).
+
+ :type start: int
+ :param start: (Optional) The first byte in a range to be downloaded.
+
+ :type end: int
+ :param end: (Optional) The last byte in a range to be downloaded.
+
+ :type raw_download: bool
+ :param raw_download:
+ (Optional) If true, download the object without any expansion.
+
+ :type timeout: float or tuple
+ :param timeout:
+ (Optional) The number of seconds the transport should wait for the
+ server response. Depending on the retry strategy, a request may be
+ repeated several times using the same timeout each time.
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :type checksum: str
+ :param checksum:
+ (Optional) The type of checksum to compute to verify the integrity
+ of the object. The response headers must contain a checksum of the
+ requested type. If the headers lack an appropriate checksum (for
+ instance in the case of transcoded or ranged downloads where the
+ remote service does not know the correct checksum, including
+ downloads where chunk_size is set) an INFO-level log will be
+ emitted. Supported values are "md5", "crc32c" and None. The default
+ is "md5".
+ """
+ if self.chunk_size is None:
+ if raw_download:
+ klass = RawDownload
+ else:
+ klass = Download
+
+ download = klass(
+ download_url,
+ stream=file_obj,
+ headers=headers,
+ start=start,
+ end=end,
+ checksum=checksum,
+ )
+ response = download.consume(transport, timeout=timeout)
+ self._extract_headers_from_download(response)
+        else:
+ if checksum:
+ msg = _CHUNKED_DOWNLOAD_CHECKSUM_MESSAGE.format(checksum)
+ logging.info(msg)
+
+ if raw_download:
+ klass = RawChunkedDownload
+ else:
+ klass = ChunkedDownload
+
+ download = klass(
+ download_url,
+ self.chunk_size,
+ file_obj,
+ headers=headers,
+ start=start if start else 0,
+ end=end,
+ )
+
+ while not download.finished:
+ download.consume_next_chunk(transport, timeout=timeout)
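+
+    # Editor's note: the branch above means a blob with ``chunk_size`` set
+    # is downloaded chunk by chunk and never checksum-verified (an INFO log
+    # is emitted instead), while ``chunk_size=None`` downloads in a single
+    # request and verifies "md5" by default.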
+
+ def download_to_file(
+ self,
+ file_obj,
+ client=None,
+ start=None,
+ end=None,
+ raw_download=False,
+ if_generation_match=None,
+ if_generation_not_match=None,
+ if_metageneration_match=None,
+ if_metageneration_not_match=None,
+ timeout=_DEFAULT_TIMEOUT,
+ checksum="md5",
+ ):
+ """Download the contents of this blob into a file-like object.
+
+ .. note::
+
+ If the server-set property, :attr:`media_link`, is not yet
+ initialized, makes an additional API request to load it.
+
+ Downloading a file that has been encrypted with a `customer-supplied`_
+ encryption key:
+
+ .. literalinclude:: snippets.py
+ :start-after: [START download_to_file]
+ :end-before: [END download_to_file]
+ :dedent: 4
+
+ The ``encryption_key`` should be a str or bytes with a length of at
+ least 32.
+
+        If the blob's :attr:`chunk_size` is `None`, the data is downloaded
+        in a single request; otherwise, :attr:`chunk_size` bytes are
+        downloaded per request.
+
+ For more fine-grained control over the download process, check out
+ `google-resumable-media`_. For example, this library allows
+ downloading **parts** of a blob rather than the whole thing.
+
+ If :attr:`user_project` is set on the bucket, bills the API request
+ to that project.
+
+ :type file_obj: file
+ :param file_obj: A file handle to which to write the blob's data.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the blob's bucket.
+
+ :type start: int
+ :param start: (Optional) The first byte in a range to be downloaded.
+
+ :type end: int
+ :param end: (Optional) The last byte in a range to be downloaded.
+
+ :type raw_download: bool
+ :param raw_download:
+ (Optional) If true, download the object without any expansion.
+
+ :type if_generation_match: long
+ :param if_generation_match: (Optional) Make the operation conditional on whether
+ the blob's current generation matches the given value.
+ Setting to 0 makes the operation succeed only if there
+ are no live versions of the blob.
+
+ :type if_generation_not_match: long
+ :param if_generation_not_match: (Optional) Make the operation conditional on whether
+ the blob's current generation does not match the given
+ value. If no live blob exists, the precondition fails.
+ Setting to 0 makes the operation succeed only if there
+ is a live version of the blob.
+
+        :type if_metageneration_match: long
+        :param if_metageneration_match: (Optional) Make the operation conditional on whether the
+                                        blob's current metageneration matches the given value.
+
+ :type if_metageneration_not_match: long
+ :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration does not match the given value.
+
+ :type timeout: float or tuple
+ :param timeout:
+ (Optional) The number of seconds the transport should wait for the
+ server response. Depending on the retry strategy, a request may be
+ repeated several times using the same timeout each time.
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :type checksum: str
+ :param checksum:
+ (Optional) The type of checksum to compute to verify the integrity
+ of the object. The response headers must contain a checksum of the
+ requested type. If the headers lack an appropriate checksum (for
+ instance in the case of transcoded or ranged downloads where the
+ remote service does not know the correct checksum, including
+ downloads where chunk_size is set) an INFO-level log will be
+ emitted. Supported values are "md5", "crc32c" and None. The default
+ is "md5".
+
+ :raises: :class:`google.cloud.exceptions.NotFound`
+ """
+ client = self._require_client(client)
+
+ download_url = self._get_download_url(
+ client,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ )
+ headers = _get_encryption_headers(self._encryption_key)
+ headers["accept-encoding"] = "gzip"
+
+ transport = self._get_transport(client)
+ try:
+ self._do_download(
+ transport,
+ file_obj,
+ download_url,
+ headers,
+ start,
+ end,
+ raw_download,
+ timeout=timeout,
+ checksum=checksum,
+ )
+ except resumable_media.InvalidResponse as exc:
+ _raise_from_invalid_response(exc)
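+
+    # Editor's sketch (not part of the library): a minimal download_to_file
+    # call, assuming a configured client and the hypothetical names
+    # "my-bucket" and "data/report.csv":
+    #
+    #     from io import BytesIO
+    #     from google.cloud import storage
+    #
+    #     client = storage.Client()
+    #     blob = client.bucket("my-bucket").blob("data/report.csv")
+    #     buffer = BytesIO()
+    #     blob.download_to_file(buffer)
+    #     payload = buffer.getvalue()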
+
+ def download_to_filename(
+ self,
+ filename,
+ client=None,
+ start=None,
+ end=None,
+ raw_download=False,
+ if_generation_match=None,
+ if_generation_not_match=None,
+ if_metageneration_match=None,
+ if_metageneration_not_match=None,
+ timeout=_DEFAULT_TIMEOUT,
+ checksum="md5",
+ ):
+ """Download the contents of this blob into a named file.
+
+ If :attr:`user_project` is set on the bucket, bills the API request
+ to that project.
+
+ :type filename: str
+ :param filename: A filename to be passed to ``open``.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the blob's bucket.
+
+ :type start: int
+ :param start: (Optional) The first byte in a range to be downloaded.
+
+ :type end: int
+ :param end: (Optional) The last byte in a range to be downloaded.
+
+ :type raw_download: bool
+ :param raw_download:
+ (Optional) If true, download the object without any expansion.
+
+ :type if_generation_match: long
+ :param if_generation_match: (Optional) Make the operation conditional on whether
+ the blob's current generation matches the given value.
+ Setting to 0 makes the operation succeed only if there
+ are no live versions of the blob.
+
+ :type if_generation_not_match: long
+ :param if_generation_not_match: (Optional) Make the operation conditional on whether
+ the blob's current generation does not match the given
+ value. If no live blob exists, the precondition fails.
+ Setting to 0 makes the operation succeed only if there
+ is a live version of the blob.
+
+        :type if_metageneration_match: long
+        :param if_metageneration_match: (Optional) Make the operation conditional on whether the
+                                        blob's current metageneration matches the given value.
+
+ :type if_metageneration_not_match: long
+ :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration does not match the given value.
+
+ :type timeout: float or tuple
+ :param timeout:
+ (Optional) The number of seconds the transport should wait for the
+ server response. Depending on the retry strategy, a request may be
+ repeated several times using the same timeout each time.
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :type checksum: str
+ :param checksum:
+ (Optional) The type of checksum to compute to verify the integrity
+ of the object. The response headers must contain a checksum of the
+ requested type. If the headers lack an appropriate checksum (for
+ instance in the case of transcoded or ranged downloads where the
+ remote service does not know the correct checksum, including
+ downloads where chunk_size is set) an INFO-level log will be
+ emitted. Supported values are "md5", "crc32c" and None. The default
+ is "md5".
+
+ :raises: :class:`google.cloud.exceptions.NotFound`
+ """
+ try:
+ with open(filename, "wb") as file_obj:
+ self.download_to_file(
+ file_obj,
+ client=client,
+ start=start,
+ end=end,
+ raw_download=raw_download,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ timeout=timeout,
+ checksum=checksum,
+ )
+ except resumable_media.DataCorruption:
+ # Delete the corrupt downloaded file.
+ os.remove(filename)
+ raise
+
+ updated = self.updated
+ if updated is not None:
+ if six.PY2:
+ mtime = _convert_to_timestamp(updated)
+ else:
+ mtime = updated.timestamp()
+            os.utime(filename, (mtime, mtime))
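+
+    # Editor's sketch (not part of the library): download_to_filename also
+    # mirrors the blob's ``updated`` time onto the local file and removes
+    # the file again if checksum validation fails (hypothetical names):
+    #
+    #     blob = client.bucket("my-bucket").blob("data/report.csv")
+    #     blob.download_to_filename("/tmp/report.csv", checksum="crc32c")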
+
+ def download_as_bytes(
+ self,
+ client=None,
+ start=None,
+ end=None,
+ raw_download=False,
+ if_generation_match=None,
+ if_generation_not_match=None,
+ if_metageneration_match=None,
+ if_metageneration_not_match=None,
+ timeout=_DEFAULT_TIMEOUT,
+ checksum="md5",
+ ):
+ """Download the contents of this blob as a bytes object.
+
+ If :attr:`user_project` is set on the bucket, bills the API request
+ to that project.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the blob's bucket.
+
+ :type start: int
+ :param start: (Optional) The first byte in a range to be downloaded.
+
+ :type end: int
+ :param end: (Optional) The last byte in a range to be downloaded.
+
+ :type raw_download: bool
+ :param raw_download:
+ (Optional) If true, download the object without any expansion.
+
+ :type if_generation_match: long
+ :param if_generation_match: (Optional) Make the operation conditional on whether
+ the blob's current generation matches the given value.
+ Setting to 0 makes the operation succeed only if there
+ are no live versions of the blob.
+
+ :type if_generation_not_match: long
+ :param if_generation_not_match: (Optional) Make the operation conditional on whether
+ the blob's current generation does not match the given
+ value. If no live blob exists, the precondition fails.
+ Setting to 0 makes the operation succeed only if there
+ is a live version of the blob.
+
+        :type if_metageneration_match: long
+        :param if_metageneration_match: (Optional) Make the operation conditional on whether the
+                                        blob's current metageneration matches the given value.
+
+ :type if_metageneration_not_match: long
+ :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration does not match the given value.
+
+ :type timeout: float or tuple
+ :param timeout:
+ (Optional) The number of seconds the transport should wait for the
+ server response. Depending on the retry strategy, a request may be
+ repeated several times using the same timeout each time.
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :type checksum: str
+ :param checksum:
+ (Optional) The type of checksum to compute to verify the integrity
+ of the object. The response headers must contain a checksum of the
+ requested type. If the headers lack an appropriate checksum (for
+ instance in the case of transcoded or ranged downloads where the
+ remote service does not know the correct checksum, including
+ downloads where chunk_size is set) an INFO-level log will be
+ emitted. Supported values are "md5", "crc32c" and None. The default
+ is "md5".
+
+ :rtype: bytes
+ :returns: The data stored in this blob.
+
+ :raises: :class:`google.cloud.exceptions.NotFound`
+ """
+ string_buffer = BytesIO()
+ self.download_to_file(
+ string_buffer,
+ client=client,
+ start=start,
+ end=end,
+ raw_download=raw_download,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ timeout=timeout,
+ checksum=checksum,
+ )
+ return string_buffer.getvalue()
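+
+    # Editor's sketch (not part of the library): download_as_bytes with a
+    # generation precondition; pinning the read to the generation observed
+    # at metadata-fetch time guards against concurrent overwrites
+    # (hypothetical names):
+    #
+    #     blob = client.bucket("my-bucket").get_blob("data/report.csv")
+    #     data = blob.download_as_bytes(if_generation_match=blob.generation)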
+
+ def download_as_string(
+ self,
+ client=None,
+ start=None,
+ end=None,
+ raw_download=False,
+ if_generation_match=None,
+ if_generation_not_match=None,
+ if_metageneration_match=None,
+ if_metageneration_not_match=None,
+ timeout=_DEFAULT_TIMEOUT,
+ ):
+ """(Deprecated) Download the contents of this blob as a bytes object.
+
+ If :attr:`user_project` is set on the bucket, bills the API request
+ to that project.
+
+ .. note::
+ Deprecated alias for :meth:`download_as_bytes`.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the blob's bucket.
+
+ :type start: int
+ :param start: (Optional) The first byte in a range to be downloaded.
+
+ :type end: int
+ :param end: (Optional) The last byte in a range to be downloaded.
+
+ :type raw_download: bool
+ :param raw_download:
+ (Optional) If true, download the object without any expansion.
+
+ :type if_generation_match: long
+ :param if_generation_match: (Optional) Make the operation conditional on whether
+ the blob's current generation matches the given value.
+ Setting to 0 makes the operation succeed only if there
+ are no live versions of the blob.
+
+ :type if_generation_not_match: long
+ :param if_generation_not_match: (Optional) Make the operation conditional on whether
+ the blob's current generation does not match the given
+ value. If no live blob exists, the precondition fails.
+ Setting to 0 makes the operation succeed only if there
+ is a live version of the blob.
+
+        :type if_metageneration_match: long
+        :param if_metageneration_match: (Optional) Make the operation conditional on whether the
+                                        blob's current metageneration matches the given value.
+
+ :type if_metageneration_not_match: long
+ :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration does not match the given value.
+
+ :type timeout: float or tuple
+ :param timeout:
+ (Optional) The number of seconds the transport should wait for the
+ server response. Depending on the retry strategy, a request may be
+ repeated several times using the same timeout each time.
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :rtype: bytes
+ :returns: The data stored in this blob.
+
+ :raises: :class:`google.cloud.exceptions.NotFound`
+ """
+ warnings.warn(
+ "Blob.download_as_string() is deprecated and will be removed in future."
+ "Use Blob.download_as_bytes() instead.",
+ PendingDeprecationWarning,
+            stacklevel=2,
+ )
+ return self.download_as_bytes(
+ client=client,
+ start=start,
+ end=end,
+ raw_download=raw_download,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ timeout=timeout,
+ )
+
+ def download_as_text(
+ self,
+ client=None,
+ start=None,
+ end=None,
+ raw_download=False,
+ encoding="utf-8",
+ if_generation_match=None,
+ if_generation_not_match=None,
+ if_metageneration_match=None,
+ if_metageneration_not_match=None,
+ timeout=_DEFAULT_TIMEOUT,
+ ):
+ """Download the contents of this blob as a string.
+
+ If :attr:`user_project` is set on the bucket, bills the API request
+ to that project.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the blob's bucket.
+
+ :type start: int
+ :param start: (Optional) The first byte in a range to be downloaded.
+
+ :type end: int
+ :param end: (Optional) The last byte in a range to be downloaded.
+
+ :type raw_download: bool
+ :param raw_download:
+ (Optional) If true, download the object without any expansion.
+
+ :type encoding: str
+        :param encoding: (Optional) The encoding to be used when decoding the
+                         blob's data. Defaults to UTF-8. Applies only if the
+                         value of ``blob.content_encoding`` is None.
+
+ :type if_generation_match: long
+ :param if_generation_match: (Optional) Make the operation conditional on whether
+ the blob's current generation matches the given value.
+ Setting to 0 makes the operation succeed only if there
+ are no live versions of the blob.
+
+ :type if_generation_not_match: long
+ :param if_generation_not_match: (Optional) Make the operation conditional on whether
+ the blob's current generation does not match the given
+ value. If no live blob exists, the precondition fails.
+ Setting to 0 makes the operation succeed only if there
+ is a live version of the blob.
+
+        :type if_metageneration_match: long
+        :param if_metageneration_match: (Optional) Make the operation conditional on whether the
+                                        blob's current metageneration matches the given value.
+
+ :type if_metageneration_not_match: long
+ :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration does not match the given value.
+
+ :type timeout: float or tuple
+ :param timeout:
+ (Optional) The number of seconds the transport should wait for the
+ server response. Depending on the retry strategy, a request may be
+ repeated several times using the same timeout each time.
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+        :rtype: str
+ :returns: The data stored in this blob.
+
+ :raises: :class:`google.cloud.exceptions.NotFound`
+ """
+ data = self.download_as_bytes(
+ client=client,
+ start=start,
+ end=end,
+ raw_download=raw_download,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ timeout=timeout,
+ )
+
+ if self.content_encoding:
+ return data.decode(self.content_encoding)
+ else:
+ return data.decode(encoding)
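+
+    # Editor's note: per the code above, ``blob.content_encoding`` (when
+    # set) takes precedence over the ``encoding`` argument when decoding,
+    # e.g. (hypothetical names):
+    #
+    #     blob = client.bucket("my-bucket").blob("notes/readme.txt")
+    #     text = blob.download_as_text(encoding="utf-8")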
+
+ def _get_content_type(self, content_type, filename=None):
+ """Determine the content type from the current object.
+
+ The return value will be determined in order of precedence:
+
+ - The value passed in to this method (if not :data:`None`)
+ - The value stored on the current blob
+ - The default value ('application/octet-stream')
+
+ :type content_type: str
+ :param content_type: (Optional) Type of content.
+
+ :type filename: str
+ :param filename: (Optional) The name of the file where the content
+ is stored.
+
+ :rtype: str
+ :returns: Type of content gathered from the object.
+ """
+ if content_type is None:
+ content_type = self.content_type
+
+ if content_type is None and filename is not None:
+ content_type, _ = mimetypes.guess_type(filename)
+
+ if content_type is None:
+ content_type = _DEFAULT_CONTENT_TYPE
+
+ return content_type
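+
+    # Editor's sketch (not part of the library): the filename fallback
+    # above uses the standard library's guess, e.g.:
+    #
+    #     import mimetypes
+    #     guessed, _ = mimetypes.guess_type("photo.png")
+    #     assert guessed == "image/png"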
+
+ def _get_writable_metadata(self):
+ """Get the object / blob metadata which is writable.
+
+ This is intended to be used when creating a new object / blob.
+
+ See the `API reference docs`_ for more information, the fields
+ marked as writable are:
+
+ * ``acl``
+ * ``cacheControl``
+ * ``contentDisposition``
+ * ``contentEncoding``
+ * ``contentLanguage``
+ * ``contentType``
+ * ``crc32c``
+ * ``md5Hash``
+ * ``metadata``
+ * ``name``
+ * ``storageClass``
+
+        For now, we don't support ``acl``; access control lists should be
+        managed directly through :class:`ObjectACL` methods.
+ """
+ # NOTE: This assumes `self.name` is unicode.
+ object_metadata = {"name": self.name}
+ for key in self._changes:
+ if key in _WRITABLE_FIELDS:
+ object_metadata[key] = self._properties[key]
+
+ return object_metadata
+
+ def _get_upload_arguments(self, content_type):
+ """Get required arguments for performing an upload.
+
+ The content type returned will be determined in order of precedence:
+
+ - The value passed in to this method (if not :data:`None`)
+ - The value stored on the current blob
+ - The default value ('application/octet-stream')
+
+ :type content_type: str
+ :param content_type: Type of content being uploaded (or :data:`None`).
+
+ :rtype: tuple
+ :returns: A triple of
+
+ * A header dictionary
+ * An object metadata dictionary
+ * The ``content_type`` as a string (according to precedence)
+ """
+ headers = _get_encryption_headers(self._encryption_key)
+ object_metadata = self._get_writable_metadata()
+ content_type = self._get_content_type(content_type)
+ return headers, object_metadata, content_type
+
+ def _do_multipart_upload(
+ self,
+ client,
+ stream,
+ content_type,
+ size,
+ num_retries,
+ predefined_acl,
+ if_generation_match,
+ if_generation_not_match,
+ if_metageneration_match,
+ if_metageneration_not_match,
+ timeout=_DEFAULT_TIMEOUT,
+ checksum=None,
+ ):
+ """Perform a multipart upload.
+
+ The content type of the upload will be determined in order
+ of precedence:
+
+ - The value passed in to this method (if not :data:`None`)
+ - The value stored on the current blob
+ - The default value ('application/octet-stream')
+
+ :type client: :class:`~google.cloud.storage.client.Client`
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the blob's bucket.
+
+ :type stream: IO[bytes]
+ :param stream: A bytes IO object open for reading.
+
+ :type content_type: str
+ :param content_type: Type of content being uploaded (or :data:`None`).
+
+ :type size: int
+ :param size: The number of bytes to be uploaded (which will be read
+ from ``stream``). If not provided, the upload will be
+ concluded once ``stream`` is exhausted (or :data:`None`).
+
+ :type num_retries: int
+ :param num_retries: Number of upload retries. (Deprecated: This
+ argument will be removed in a future release.)
+
+ :type predefined_acl: str
+ :param predefined_acl: (Optional) Predefined access control list
+
+ :type if_generation_match: long
+ :param if_generation_match: (Optional) Make the operation conditional on whether
+ the blob's current generation matches the given value.
+ Setting to 0 makes the operation succeed only if there
+ are no live versions of the blob.
+
+ :type if_generation_not_match: long
+ :param if_generation_not_match: (Optional) Make the operation conditional on whether
+ the blob's current generation does not match the given
+ value. If no live blob exists, the precondition fails.
+ Setting to 0 makes the operation succeed only if there
+ is a live version of the blob.
+
+ :type if_metageneration_match: long
+ :param if_metageneration_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration matches the given value.
+
+ :type if_metageneration_not_match: long
+ :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration does not match the given value.
+
+ :type timeout: float or tuple
+ :param timeout:
+ (Optional) The number of seconds the transport should wait for the
+ server response. Depending on the retry strategy, a request may be
+ repeated several times using the same timeout each time.
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :type checksum: str
+ :param checksum:
+ (Optional) The type of checksum to compute to verify
+ the integrity of the object. The request metadata will be amended
+ to include the computed value. Using this option will override a
+ manually-set checksum value. Supported values are "md5",
+ "crc32c" and None. The default is None.
+
+ :rtype: :class:`~requests.Response`
+ :returns: The "200 OK" response object returned after the multipart
+ upload request.
+ :raises: :exc:`ValueError` if ``size`` is not :data:`None` but the
+ ``stream`` has fewer than ``size`` bytes remaining.
+ """
+ if size is None:
+ data = stream.read()
+ else:
+ data = stream.read(size)
+ if len(data) < size:
+ msg = _READ_LESS_THAN_SIZE.format(size, len(data))
+ raise ValueError(msg)
+
+ transport = self._get_transport(client)
+ info = self._get_upload_arguments(content_type)
+ headers, object_metadata, content_type = info
+
+ base_url = _MULTIPART_URL_TEMPLATE.format(
+ hostname=self.client._connection.API_BASE_URL, bucket_path=self.bucket.path
+ )
+ name_value_pairs = []
+
+ if self.user_project is not None:
+ name_value_pairs.append(("userProject", self.user_project))
+
+ # When a Customer Managed Encryption Key is used to encrypt Cloud Storage object
+ # at rest, object resource metadata will store the version of the Key Management
+ # Service cryptographic material. If a Blob instance with KMS Key metadata set is
+ # used to upload a new version of the object then the existing kmsKeyName version
+ # value can't be used in the upload request and the client instead ignores it.
+ if (
+ self.kms_key_name is not None
+ and "cryptoKeyVersions" not in self.kms_key_name
+ ):
+ name_value_pairs.append(("kmsKeyName", self.kms_key_name))
+
+ if predefined_acl is not None:
+ name_value_pairs.append(("predefinedAcl", predefined_acl))
+
+ if if_generation_match is not None:
+ name_value_pairs.append(("ifGenerationMatch", if_generation_match))
+
+ if if_generation_not_match is not None:
+ name_value_pairs.append(("ifGenerationNotMatch", if_generation_not_match))
+
+ if if_metageneration_match is not None:
+ name_value_pairs.append(("ifMetagenerationMatch", if_metageneration_match))
+
+ if if_metageneration_not_match is not None:
+ name_value_pairs.append(
+ ("ifMetaGenerationNotMatch", if_metageneration_not_match)
+ )
+
+ upload_url = _add_query_parameters(base_url, name_value_pairs)
+ upload = MultipartUpload(upload_url, headers=headers, checksum=checksum)
+
+ if num_retries is not None:
+ upload._retry_strategy = resumable_media.RetryStrategy(
+ max_retries=num_retries
+ )
+
+ response = upload.transmit(
+ transport, data, object_metadata, content_type, timeout=timeout
+ )
+
+ return response
+
+ def _initiate_resumable_upload(
+ self,
+ client,
+ stream,
+ content_type,
+ size,
+ num_retries,
+ predefined_acl=None,
+ extra_headers=None,
+ chunk_size=None,
+ if_generation_match=None,
+ if_generation_not_match=None,
+ if_metageneration_match=None,
+ if_metageneration_not_match=None,
+ timeout=_DEFAULT_TIMEOUT,
+ checksum=None,
+ ):
+ """Initiate a resumable upload.
+
+ The content type of the upload will be determined in order
+ of precedence:
+
+ - The value passed in to this method (if not :data:`None`)
+ - The value stored on the current blob
+ - The default value ('application/octet-stream')
+
+ :type client: :class:`~google.cloud.storage.client.Client`
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the blob's bucket.
+
+ :type stream: IO[bytes]
+ :param stream: A bytes IO object open for reading.
+
+ :type content_type: str
+ :param content_type: Type of content being uploaded (or :data:`None`).
+
+ :type size: int
+ :param size: The number of bytes to be uploaded (which will be read
+ from ``stream``). If not provided, the upload will be
+ concluded once ``stream`` is exhausted (or :data:`None`).
+
+ :type predefined_acl: str
+ :param predefined_acl: (Optional) Predefined access control list
+
+ :type num_retries: int
+ :param num_retries: Number of upload retries. (Deprecated: This
+ argument will be removed in a future release.)
+
+ :type extra_headers: dict
+ :param extra_headers: (Optional) Extra headers to add to standard
+ headers.
+
+ :type chunk_size: int
+ :param chunk_size:
+ (Optional) Chunk size to use when creating a
+ :class:`~google.resumable_media.requests.ResumableUpload`.
+            If not passed, falls back to the chunk size on the current
+            blob; if that is also `None`, the default value of 100 MB
+            is used.
+
+ :type if_generation_match: long
+ :param if_generation_match: (Optional) Make the operation conditional on whether
+ the blob's current generation matches the given value.
+ Setting to 0 makes the operation succeed only if there
+ are no live versions of the blob.
+
+ :type if_generation_not_match: long
+ :param if_generation_not_match: (Optional) Make the operation conditional on whether
+ the blob's current generation does not match the given
+ value. If no live blob exists, the precondition fails.
+ Setting to 0 makes the operation succeed only if there
+ is a live version of the blob.
+
+ :type if_metageneration_match: long
+ :param if_metageneration_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration matches the given value.
+
+ :type if_metageneration_not_match: long
+ :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration does not match the given value.
+
+ :type timeout: float or tuple
+ :param timeout:
+ (Optional) The number of seconds the transport should wait for the
+ server response. Depending on the retry strategy, a request may be
+ repeated several times using the same timeout each time.
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :type checksum: str
+ :param checksum:
+ (Optional) The type of checksum to compute to verify
+ the integrity of the object. After the upload is complete, the
+ server-computed checksum of the resulting object will be checked
+ and google.resumable_media.common.DataCorruption will be raised on
+ a mismatch. On a validation failure, the client will attempt to
+ delete the uploaded object automatically. Supported values
+ are "md5", "crc32c" and None. The default is None.
+
+ :rtype: tuple
+ :returns:
+ Pair of
+
+ * The :class:`~google.resumable_media.requests.ResumableUpload`
+ that was created
+ * The ``transport`` used to initiate the upload.
+ """
+ if chunk_size is None:
+ chunk_size = self.chunk_size
+ if chunk_size is None:
+ chunk_size = _DEFAULT_CHUNKSIZE
+
+ transport = self._get_transport(client)
+ info = self._get_upload_arguments(content_type)
+ headers, object_metadata, content_type = info
+ if extra_headers is not None:
+ headers.update(extra_headers)
+
+ base_url = _RESUMABLE_URL_TEMPLATE.format(
+ hostname=self.client._connection.API_BASE_URL, bucket_path=self.bucket.path
+ )
+ name_value_pairs = []
+
+ if self.user_project is not None:
+ name_value_pairs.append(("userProject", self.user_project))
+
+ # When a Customer Managed Encryption Key is used to encrypt Cloud Storage object
+ # at rest, object resource metadata will store the version of the Key Management
+ # Service cryptographic material. If a Blob instance with KMS Key metadata set is
+ # used to upload a new version of the object then the existing kmsKeyName version
+ # value can't be used in the upload request and the client instead ignores it.
+ if (
+ self.kms_key_name is not None
+ and "cryptoKeyVersions" not in self.kms_key_name
+ ):
+ name_value_pairs.append(("kmsKeyName", self.kms_key_name))
+
+ if predefined_acl is not None:
+ name_value_pairs.append(("predefinedAcl", predefined_acl))
+
+ if if_generation_match is not None:
+ name_value_pairs.append(("ifGenerationMatch", if_generation_match))
+
+ if if_generation_not_match is not None:
+ name_value_pairs.append(("ifGenerationNotMatch", if_generation_not_match))
+
+ if if_metageneration_match is not None:
+ name_value_pairs.append(("ifMetagenerationMatch", if_metageneration_match))
+
+ if if_metageneration_not_match is not None:
+ name_value_pairs.append(
+ ("ifMetaGenerationNotMatch", if_metageneration_not_match)
+ )
+
+ upload_url = _add_query_parameters(base_url, name_value_pairs)
+ upload = ResumableUpload(
+ upload_url, chunk_size, headers=headers, checksum=checksum
+ )
+
+ if num_retries is not None:
+ upload._retry_strategy = resumable_media.RetryStrategy(
+ max_retries=num_retries
+ )
+
+ upload.initiate(
+ transport,
+ stream,
+ object_metadata,
+ content_type,
+ total_bytes=size,
+ stream_final=False,
+ timeout=timeout,
+ )
+
+ return upload, transport
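+
+    # Editor's note: ``upload.initiate(...)`` performs the POST that opens
+    # the resumable session; afterwards ``upload.resumable_url`` holds the
+    # session URI that subsequent ``transmit_next_chunk`` calls PUT chunks
+    # against.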
+
+ def _do_resumable_upload(
+ self,
+ client,
+ stream,
+ content_type,
+ size,
+ num_retries,
+ predefined_acl,
+ if_generation_match,
+ if_generation_not_match,
+ if_metageneration_match,
+ if_metageneration_not_match,
+ timeout=_DEFAULT_TIMEOUT,
+ checksum=None,
+ ):
+ """Perform a resumable upload.
+
+ Assumes ``chunk_size`` is not :data:`None` on the current blob.
+ The default value of ``chunk_size`` is 100 MB.
+
+ The content type of the upload will be determined in order
+ of precedence:
+
+ - The value passed in to this method (if not :data:`None`)
+ - The value stored on the current blob
+ - The default value ('application/octet-stream')
+
+ :type client: :class:`~google.cloud.storage.client.Client`
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the blob's bucket.
+
+ :type stream: IO[bytes]
+ :param stream: A bytes IO object open for reading.
+
+ :type content_type: str
+ :param content_type: Type of content being uploaded (or :data:`None`).
+
+ :type size: int
+ :param size: The number of bytes to be uploaded (which will be read
+ from ``stream``). If not provided, the upload will be
+ concluded once ``stream`` is exhausted (or :data:`None`).
+
+ :type num_retries: int
+ :param num_retries: Number of upload retries. (Deprecated: This
+ argument will be removed in a future release.)
+
+ :type predefined_acl: str
+ :param predefined_acl: (Optional) Predefined access control list
+
+ :type if_generation_match: long
+ :param if_generation_match: (Optional) Make the operation conditional on whether
+ the blob's current generation matches the given value.
+ Setting to 0 makes the operation succeed only if there
+ are no live versions of the blob.
+
+ :type if_generation_not_match: long
+ :param if_generation_not_match: (Optional) Make the operation conditional on whether
+ the blob's current generation does not match the given
+ value. If no live blob exists, the precondition fails.
+ Setting to 0 makes the operation succeed only if there
+ is a live version of the blob.
+
+ :type if_metageneration_match: long
+ :param if_metageneration_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration matches the given value.
+
+ :type if_metageneration_not_match: long
+ :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration does not match the given value.
+
+ :type timeout: float or tuple
+ :param timeout:
+ (Optional) The number of seconds the transport should wait for the
+ server response. Depending on the retry strategy, a request may be
+ repeated several times using the same timeout each time.
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :type checksum: str
+ :param checksum:
+ (Optional) The type of checksum to compute to verify
+ the integrity of the object. After the upload is complete, the
+ server-computed checksum of the resulting object will be checked
+ and google.resumable_media.common.DataCorruption will be raised on
+ a mismatch. On a validation failure, the client will attempt to
+ delete the uploaded object automatically. Supported values
+ are "md5", "crc32c" and None. The default is None.
+
+ :rtype: :class:`~requests.Response`
+ :returns: The "200 OK" response object returned after the final chunk
+ is uploaded.
+ """
+ upload, transport = self._initiate_resumable_upload(
+ client,
+ stream,
+ content_type,
+ size,
+ num_retries,
+ predefined_acl=predefined_acl,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ timeout=timeout,
+ checksum=checksum,
+ )
+
+ while not upload.finished:
+ try:
+ response = upload.transmit_next_chunk(transport, timeout=timeout)
+ except resumable_media.DataCorruption:
+ # Attempt to delete the corrupted object.
+ self.delete()
+ raise
+
+ return response
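+
+    # Editor's note: on a checksum mismatch the loop above deletes the
+    # just-uploaded object before re-raising, so a corrupt upload is not
+    # left visible under the target name.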
+
+ def _do_upload(
+ self,
+ client,
+ stream,
+ content_type,
+ size,
+ num_retries,
+ predefined_acl,
+ if_generation_match,
+ if_generation_not_match,
+ if_metageneration_match,
+ if_metageneration_not_match,
+ timeout=_DEFAULT_TIMEOUT,
+ checksum=None,
+ ):
+ """Determine an upload strategy and then perform the upload.
+
+ If the size of the data to be uploaded exceeds 8 MB a resumable media
+ request will be used, otherwise the content and the metadata will be
+ uploaded in a single multipart upload request.
+
+ The content type of the upload will be determined in order
+ of precedence:
+
+ - The value passed in to this method (if not :data:`None`)
+ - The value stored on the current blob
+ - The default value ('application/octet-stream')
+
+ :type client: :class:`~google.cloud.storage.client.Client`
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the blob's bucket.
+
+ :type stream: IO[bytes]
+ :param stream: A bytes IO object open for reading.
+
+ :type content_type: str
+ :param content_type: Type of content being uploaded (or :data:`None`).
+
+ :type size: int
+ :param size: The number of bytes to be uploaded (which will be read
+ from ``stream``). If not provided, the upload will be
+ concluded once ``stream`` is exhausted (or :data:`None`).
+
+ :type num_retries: int
+ :param num_retries: Number of upload retries. (Deprecated: This
+ argument will be removed in a future release.)
+
+ :type predefined_acl: str
+ :param predefined_acl: (Optional) Predefined access control list
+
+ :type if_generation_match: long
+ :param if_generation_match: (Optional) Make the operation conditional on whether
+ the blob's current generation matches the given value.
+ Setting to 0 makes the operation succeed only if there
+ are no live versions of the blob.
+
+ :type if_generation_not_match: long
+ :param if_generation_not_match: (Optional) Make the operation conditional on whether
+ the blob's current generation does not match the given
+ value. If no live blob exists, the precondition fails.
+ Setting to 0 makes the operation succeed only if there
+ is a live version of the blob.
+
+ :type if_metageneration_match: long
+ :param if_metageneration_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration matches the given value.
+
+ :type if_metageneration_not_match: long
+ :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration does not match the given value.
+
+ :type timeout: float or tuple
+ :param timeout:
+ (Optional) The number of seconds the transport should wait for the
+ server response. Depending on the retry strategy, a request may be
+ repeated several times using the same timeout each time.
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :type checksum: str
+ :param checksum:
+ (Optional) The type of checksum to compute to verify
+ the integrity of the object. If the upload is completed in a single
+ request, the checksum will be entirely precomputed and the remote
+ server will handle verification and error handling. If the upload
+ is too large and must be transmitted in multiple requests, the
+ checksum will be incrementally computed and the client will handle
+ verification and error handling, raising
+ google.resumable_media.common.DataCorruption on a mismatch and
+ attempting to delete the corrupted file. Supported values are
+ "md5", "crc32c" and None. The default is None.
+
+ :rtype: dict
+ :returns: The parsed JSON from the "200 OK" response. This will be the
+ **only** response in the multipart case and it will be the
+ **final** response in the resumable case.
+ """
+ if size is not None and size <= _MAX_MULTIPART_SIZE:
+ response = self._do_multipart_upload(
+ client,
+ stream,
+ content_type,
+ size,
+ num_retries,
+ predefined_acl,
+ if_generation_match,
+ if_generation_not_match,
+ if_metageneration_match,
+ if_metageneration_not_match,
+ timeout=timeout,
+ checksum=checksum,
+ )
+ else:
+ response = self._do_resumable_upload(
+ client,
+ stream,
+ content_type,
+ size,
+ num_retries,
+ predefined_acl,
+ if_generation_match,
+ if_generation_not_match,
+ if_metageneration_match,
+ if_metageneration_not_match,
+ timeout=timeout,
+ checksum=checksum,
+ )
+
+ return response.json()
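+
+    # Editor's sketch (not part of the library): the strategy split above
+    # in one line; _MAX_MULTIPART_SIZE is 8 MB, so small payloads of known
+    # size go through a single multipart request and everything else is
+    # sent as a resumable upload:
+    #
+    #     use_multipart = size is not None and size <= _MAX_MULTIPART_SIZE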
+
+ def upload_from_file(
+ self,
+ file_obj,
+ rewind=False,
+ size=None,
+ content_type=None,
+ num_retries=None,
+ client=None,
+ predefined_acl=None,
+ if_generation_match=None,
+ if_generation_not_match=None,
+ if_metageneration_match=None,
+ if_metageneration_not_match=None,
+ timeout=_DEFAULT_TIMEOUT,
+ checksum=None,
+ ):
+ """Upload the contents of this blob from a file-like object.
+
+ The content type of the upload will be determined in order
+ of precedence:
+
+ - The value passed in to this method (if not :data:`None`)
+ - The value stored on the current blob
+ - The default value ('application/octet-stream')
+
+ .. note::
+ The effect of uploading to an existing blob depends on the
+ "versioning" and "lifecycle" policies defined on the blob's
+ bucket. In the absence of those policies, upload will
+ overwrite any existing contents.
+
+ See the `object versioning`_ and `lifecycle`_ API documents
+ for details.
+
+ Uploading a file with a `customer-supplied`_ encryption key:
+
+ .. literalinclude:: snippets.py
+ :start-after: [START upload_from_file]
+ :end-before: [END upload_from_file]
+ :dedent: 4
+
+ The ``encryption_key`` should be a str or bytes with a length of at
+ least 32.
+
+ If the size of the data to be uploaded exceeds 8 MB a resumable media
+ request will be used, otherwise the content and the metadata will be
+ uploaded in a single multipart upload request.
+
+        For more fine-grained control over the upload process, check out
+ `google-resumable-media`_.
+
+ If :attr:`user_project` is set on the bucket, bills the API request
+ to that project.
+
+ :type file_obj: file
+ :param file_obj: A file handle open for reading.
+
+ :type rewind: bool
+ :param rewind: If True, seek to the beginning of the file handle before
+ writing the file to Cloud Storage.
+
+ :type size: int
+ :param size: The number of bytes to be uploaded (which will be read
+ from ``file_obj``). If not provided, the upload will be
+ concluded once ``file_obj`` is exhausted.
+
+ :type content_type: str
+ :param content_type: (Optional) Type of content being uploaded.
+
+ :type num_retries: int
+ :param num_retries: Number of upload retries. (Deprecated: This
+ argument will be removed in a future release.)
+
+ :type client: :class:`~google.cloud.storage.client.Client`
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the blob's bucket.
+
+ :type predefined_acl: str
+ :param predefined_acl: (Optional) Predefined access control list
+
+ :type if_generation_match: long
+ :param if_generation_match: (Optional) Make the operation conditional on whether
+ the blob's current generation matches the given value.
+ Setting to 0 makes the operation succeed only if there
+ are no live versions of the blob.
+
+ :type if_generation_not_match: long
+ :param if_generation_not_match: (Optional) Make the operation conditional on whether
+ the blob's current generation does not match the given
+ value. If no live blob exists, the precondition fails.
+ Setting to 0 makes the operation succeed only if there
+ is a live version of the blob.
+
+ :type if_metageneration_match: long
+ :param if_metageneration_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration matches the given value.
+
+ :type if_metageneration_not_match: long
+ :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration does not match the given value.
+
+ :type timeout: float or tuple
+ :param timeout:
+ (Optional) The number of seconds the transport should wait for the
+ server response. Depending on the retry strategy, a request may be
+ repeated several times using the same timeout each time.
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :type checksum: str
+ :param checksum:
+ (Optional) The type of checksum to compute to verify
+ the integrity of the object. If the upload is completed in a single
+ request, the checksum will be entirely precomputed and the remote
+ server will handle verification and error handling. If the upload
+ is too large and must be transmitted in multiple requests, the
+ checksum will be incrementally computed and the client will handle
+ verification and error handling, raising
+ google.resumable_media.common.DataCorruption on a mismatch and
+ attempting to delete the corrupted file. Supported values are
+ "md5", "crc32c" and None. The default is None.
+
+ :raises: :class:`~google.cloud.exceptions.GoogleCloudError`
+ if the upload response returns an error status.
+
+ .. _object versioning: https://cloud.google.com/storage/\
+ docs/object-versioning
+ .. _lifecycle: https://cloud.google.com/storage/docs/lifecycle
+ """
+ if num_retries is not None:
+ warnings.warn(_NUM_RETRIES_MESSAGE, DeprecationWarning, stacklevel=2)
+
+ _maybe_rewind(file_obj, rewind=rewind)
+ predefined_acl = ACL.validate_predefined(predefined_acl)
+
+ try:
+ created_json = self._do_upload(
+ client,
+ file_obj,
+ content_type,
+ size,
+ num_retries,
+ predefined_acl,
+ if_generation_match,
+ if_generation_not_match,
+ if_metageneration_match,
+ if_metageneration_not_match,
+ timeout=timeout,
+ checksum=checksum,
+ )
+ self._set_properties(created_json)
+ except resumable_media.InvalidResponse as exc:
+ _raise_from_invalid_response(exc)
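+
+    # Editor's sketch (not part of the library): upload_from_file with the
+    # documented if_generation_match=0 precondition, which makes the upload
+    # succeed only if no live object exists yet (hypothetical names):
+    #
+    #     with open("local/report.csv", "rb") as fh:
+    #         blob = client.bucket("my-bucket").blob("data/report.csv")
+    #         blob.upload_from_file(fh, if_generation_match=0)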
+
+ def upload_from_filename(
+ self,
+ filename,
+ content_type=None,
+ client=None,
+ predefined_acl=None,
+ if_generation_match=None,
+ if_generation_not_match=None,
+ if_metageneration_match=None,
+ if_metageneration_not_match=None,
+ timeout=_DEFAULT_TIMEOUT,
+ checksum=None,
+ ):
+ """Upload this blob's contents from the content of a named file.
+
+ The content type of the upload will be determined in order
+ of precedence:
+
+ - The value passed in to this method (if not :data:`None`)
+ - The value stored on the current blob
+ - The value given by ``mimetypes.guess_type``
+ - The default value ('application/octet-stream')
+
+ .. note::
+ The effect of uploading to an existing blob depends on the
+ "versioning" and "lifecycle" policies defined on the blob's
+ bucket. In the absence of those policies, upload will
+ overwrite any existing contents.
+
+            See the `object versioning
+            <https://cloud.google.com/storage/docs/object-versioning>`_ and
+            `lifecycle <https://cloud.google.com/storage/docs/lifecycle>`_
+            API documents for details.
+
+ If :attr:`user_project` is set on the bucket, bills the API request
+ to that project.
+
+ :type filename: str
+ :param filename: The path to the file.
+
+ :type content_type: str
+ :param content_type: (Optional) Type of content being uploaded.
+
+ :type client: :class:`~google.cloud.storage.client.Client`
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the blob's bucket.
+
+ :type predefined_acl: str
+ :param predefined_acl: (Optional) Predefined access control list
+
+ :type if_generation_match: long
+ :param if_generation_match: (Optional) Make the operation conditional on whether
+ the blob's current generation matches the given value.
+ Setting to 0 makes the operation succeed only if there
+ are no live versions of the blob.
+
+ :type if_generation_not_match: long
+ :param if_generation_not_match: (Optional) Make the operation conditional on whether
+ the blob's current generation does not match the given
+ value. If no live blob exists, the precondition fails.
+ Setting to 0 makes the operation succeed only if there
+ is a live version of the blob.
+
+ :type if_metageneration_match: long
+ :param if_metageneration_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration matches the given value.
+
+ :type if_metageneration_not_match: long
+ :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration does not match the given value.
+
+ :type timeout: float or tuple
+ :param timeout:
+ (Optional) The number of seconds the transport should wait for the
+ server response. Depending on the retry strategy, a request may be
+ repeated several times using the same timeout each time.
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :type checksum: str
+ :param checksum:
+ (Optional) The type of checksum to compute to verify
+ the integrity of the object. If the upload is completed in a single
+ request, the checksum will be entirely precomputed and the remote
+ server will handle verification and error handling. If the upload
+ is too large and must be transmitted in multiple requests, the
+ checksum will be incrementally computed and the client will handle
+ verification and error handling, raising
+ google.resumable_media.common.DataCorruption on a mismatch and
+ attempting to delete the corrupted file. Supported values are
+ "md5", "crc32c" and None. The default is None.
+ """
+ content_type = self._get_content_type(content_type, filename=filename)
+
+ with open(filename, "rb") as file_obj:
+ total_bytes = os.fstat(file_obj.fileno()).st_size
+ self.upload_from_file(
+ file_obj,
+ content_type=content_type,
+ client=client,
+ size=total_bytes,
+ predefined_acl=predefined_acl,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ timeout=timeout,
+ checksum=checksum,
+ )
+
+ def upload_from_string(
+ self,
+ data,
+ content_type="text/plain",
+ client=None,
+ predefined_acl=None,
+ if_generation_match=None,
+ if_generation_not_match=None,
+ if_metageneration_match=None,
+ if_metageneration_not_match=None,
+ timeout=_DEFAULT_TIMEOUT,
+ checksum=None,
+ ):
+ """Upload contents of this blob from the provided string.
+
+ .. note::
+ The effect of uploading to an existing blob depends on the
+ "versioning" and "lifecycle" policies defined on the blob's
+ bucket. In the absence of those policies, upload will
+ overwrite any existing contents.
+
+            See the `object versioning
+            <https://cloud.google.com/storage/docs/object-versioning>`_ and
+            `lifecycle <https://cloud.google.com/storage/docs/lifecycle>`_
+            API documents for details.
+
+ If :attr:`user_project` is set on the bucket, bills the API request
+ to that project.
+
+ :type data: bytes or str
+ :param data: The data to store in this blob. If the value is
+ text, it will be encoded as UTF-8.
+
+ :type content_type: str
+ :param content_type: (Optional) Type of content being uploaded. Defaults
+ to ``'text/plain'``.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the blob's bucket.
+
+ :type predefined_acl: str
+ :param predefined_acl: (Optional) Predefined access control list
+
+ :type if_generation_match: long
+ :param if_generation_match: (Optional) Make the operation conditional on whether
+ the blob's current generation matches the given value.
+ Setting to 0 makes the operation succeed only if there
+ are no live versions of the blob.
+
+ :type if_generation_not_match: long
+ :param if_generation_not_match: (Optional) Make the operation conditional on whether
+ the blob's current generation does not match the given
+ value. If no live blob exists, the precondition fails.
+ Setting to 0 makes the operation succeed only if there
+ is a live version of the blob.
+
+ :type if_metageneration_match: long
+ :param if_metageneration_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration matches the given value.
+
+ :type if_metageneration_not_match: long
+ :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration does not match the given value.
+
+ :type timeout: float or tuple
+ :param timeout:
+ (Optional) The number of seconds the transport should wait for the
+ server response. Depending on the retry strategy, a request may be
+ repeated several times using the same timeout each time.
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :type checksum: str
+ :param checksum:
+ (Optional) The type of checksum to compute to verify
+ the integrity of the object. If the upload is completed in a single
+ request, the checksum will be entirely precomputed and the remote
+ server will handle verification and error handling. If the upload
+ is too large and must be transmitted in multiple requests, the
+ checksum will be incrementally computed and the client will handle
+ verification and error handling, raising
+ google.resumable_media.common.DataCorruption on a mismatch and
+ attempting to delete the corrupted file. Supported values are
+ "md5", "crc32c" and None. The default is None.
+ """
+ data = _to_bytes(data, encoding="utf-8")
+ string_buffer = BytesIO(data)
+ self.upload_from_file(
+ file_obj=string_buffer,
+ size=len(data),
+ content_type=content_type,
+ client=client,
+ predefined_acl=predefined_acl,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+            timeout=timeout,
+            checksum=checksum,
+        )
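+
+    # Editor's sketch (not part of the library): upload_from_string accepts
+    # text or bytes; text is encoded as UTF-8 before upload (hypothetical
+    # names):
+    #
+    #     blob = client.bucket("my-bucket").blob("notes/hello.txt")
+    #     blob.upload_from_string("hello world", content_type="text/plain")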
+
+ def create_resumable_upload_session(
+ self,
+ content_type=None,
+ size=None,
+ origin=None,
+ client=None,
+ timeout=_DEFAULT_TIMEOUT,
+ checksum=None,
+ ):
+ """Create a resumable upload session.
+
+ Resumable upload sessions allow you to start an upload session from
+ one client and complete the session in another. This method is called
+ by the initiator to set the metadata and limits. The initiator then
+ passes the session URL to the client that will upload the binary data.
+ The client performs a PUT request on the session URL to complete the
+ upload. This process allows untrusted clients to upload to an
+ access-controlled bucket. For more details, see the
+ `documentation on signed URLs`_.
+
+ .. _documentation on signed URLs:
+ https://cloud.google.com/storage/\
+ docs/access-control/signed-urls#signing-resumable
+
+ The content type of the upload will be determined in order
+ of precedence:
+
+ - The value passed in to this method (if not :data:`None`)
+ - The value stored on the current blob
+ - The default value ('application/octet-stream')
+
+ .. note::
+ The effect of uploading to an existing blob depends on the
+ "versioning" and "lifecycle" policies defined on the blob's
+ bucket. In the absence of those policies, upload will
+ overwrite any existing contents.
+
+ See the `object versioning
+ <https://cloud.google.com/storage/docs/object-versioning>`_ and
+ `lifecycle <https://cloud.google.com/storage/docs/lifecycle>`_
+ API documents for details.
+
+ If :attr:`encryption_key` is set, the blob will be encrypted with
+ a `customer-supplied`_ encryption key.
+
+ If :attr:`user_project` is set on the bucket, bills the API request
+ to that project.
+
+ :type size: int
+ :param size: (Optional) The maximum number of bytes that can be
+ uploaded using this session. If the size is not known
+ when creating the session, this should be left blank.
+
+ :type content_type: str
+ :param content_type: (Optional) Type of content being uploaded.
+
+ :type origin: str
+ :param origin: (Optional) If set, the upload can only be completed
+ by a user-agent that uploads from the given origin. This
+ can be useful when passing the session to a web client.
+
+ :type client: :class:`~google.cloud.storage.client.Client`
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the blob's bucket.
+
+ :type timeout: float or tuple
+ :param timeout:
+ (Optional) The number of seconds the transport should wait for the
+ server response. Depending on the retry strategy, a request may be
+ repeated several times using the same timeout each time.
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :type checksum: str
+ :param checksum:
+ (Optional) The type of checksum to compute to verify
+ the integrity of the object. After the upload is complete, the
+ server-computed checksum of the resulting object will be checked
+ and google.resumable_media.common.DataCorruption will be raised on
+ a mismatch. On a validation failure, the client will attempt to
+ delete the uploaded object automatically. Supported values
+ are "md5", "crc32c" and None. The default is None.
+
+ :rtype: str
+ :returns: The resumable upload session URL. The upload can be
+ completed by making an HTTP PUT request with the
+ file's contents.
+
+ :raises: :class:`google.cloud.exceptions.GoogleCloudError`
+ if the session creation response returns an error status.
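+
+ Example:
+ Create a session and hand its URL to the uploading client (a
+ minimal sketch; ``bucket-name`` and ``blob-name`` are
+ placeholder names).
+
+ >>> from google.cloud import storage
+ >>> client = storage.Client()
+ >>> bucket = client.bucket("bucket-name")
+ >>> blob = bucket.blob("blob-name")
+ >>> session_url = blob.create_resumable_upload_session(content_type="text/plain")
+ >>> # The uploading client completes the upload with an HTTP PUT
+ >>> # of the file's contents to ``session_url``.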
+ """
+ extra_headers = {}
+ if origin is not None:
+ # This header is specifically for client-side uploads, it
+ # determines the origins allowed for CORS.
+ extra_headers["Origin"] = origin
+
+ try:
+ dummy_stream = BytesIO(b"")
+ # Send a fake chunk size which we **know** will be acceptable
+ # to the `ResumableUpload` constructor. The chunk size only
+ # matters when **sending** bytes to an upload.
+ upload, _ = self._initiate_resumable_upload(
+ client,
+ dummy_stream,
+ content_type,
+ size,
+ None,
+ predefined_acl=None,
+ extra_headers=extra_headers,
+ chunk_size=self._CHUNK_SIZE_MULTIPLE,
+ timeout=timeout,
+ checksum=checksum,
+ )
+
+ return upload.resumable_url
+ except resumable_media.InvalidResponse as exc:
+ _raise_from_invalid_response(exc)
+
+ def get_iam_policy(
+ self, client=None, requested_policy_version=None, timeout=_DEFAULT_TIMEOUT
+ ):
+ """Retrieve the IAM policy for the object.
+
+ .. note::
+
+ Blob- / object-level IAM support does not yet exist and methods
+ currently call an internal ACL backend not providing any utility
+ beyond the blob's :attr:`acl` at this time. The API may be enhanced
+ in the future and is currently undocumented. Use :attr:`acl` for
+ managing object access control.
+
+ If :attr:`user_project` is set on the bucket, bills the API request
+ to that project.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the current object's bucket.
+
+ :type requested_policy_version: int or ``NoneType``
+ :param requested_policy_version: (Optional) The version of IAM policies to request.
+ If a policy with a condition is requested without
+ setting this, the server will return an error.
+ This must be set to a value of 3 to retrieve IAM
+ policies containing conditions. This is to prevent
+ client code that isn't aware of IAM conditions from
+ interpreting and modifying policies incorrectly.
+ The service might return a policy with version lower
+ than the one that was requested, based on the
+ feature syntax in the policy fetched.
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :rtype: :class:`google.api_core.iam.Policy`
+ :returns: the policy instance, based on the resource returned from
+ the ``getIamPolicy`` API request.
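+
+ Example:
+ Inspect the policy's bindings (illustrative only; per the note
+ above, prefer :attr:`acl` for managing object access control).
+
+ >>> from google.cloud import storage
+ >>> client = storage.Client()
+ >>> blob = client.bucket("bucket-name").blob("blob-name")
+ >>> policy = blob.get_iam_policy()
+ >>> for role, members in policy.items():
+ ... print(role, members)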
+ """
+ client = self._require_client(client)
+
+ query_params = {}
+
+ if self.user_project is not None:
+ query_params["userProject"] = self.user_project
+
+ if requested_policy_version is not None:
+ query_params["optionsRequestedPolicyVersion"] = requested_policy_version
+
+ info = client._connection.api_request(
+ method="GET",
+ path="%s/iam" % (self.path,),
+ query_params=query_params,
+ _target_object=None,
+ timeout=timeout,
+ )
+ return Policy.from_api_repr(info)
+
+ def set_iam_policy(self, policy, client=None, timeout=_DEFAULT_TIMEOUT):
+ """Update the IAM policy for the bucket.
+
+ .. note::
+
+ Blob- / object-level IAM support does not yet exist and methods
+ currently call an internal ACL backend not providing any utility
+ beyond the blob's :attr:`acl` at this time. The API may be enhanced
+ in the future and is currently undocumented. Use :attr:`acl` for
+ managing object access control.
+
+ If :attr:`user_project` is set on the bucket, bills the API request
+ to that project.
+
+ :type policy: :class:`google.api_core.iam.Policy`
+ :param policy: policy instance used to update bucket's IAM policy.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the current bucket.
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :rtype: :class:`google.api_core.iam.Policy`
+ :returns: the policy instance, based on the resource returned from
+ the ``setIamPolicy`` API request.
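+
+ Example:
+ Grant a role and write the policy back (illustrative only; per
+ the note above, prefer :attr:`acl` for managing object access
+ control).
+
+ >>> from google.cloud import storage
+ >>> client = storage.Client()
+ >>> blob = client.bucket("bucket-name").blob("blob-name")
+ >>> policy = blob.get_iam_policy()
+ >>> policy["roles/storage.objectViewer"] = ["allUsers"]
+ >>> policy = blob.set_iam_policy(policy)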
+ """
+ client = self._require_client(client)
+
+ query_params = {}
+
+ if self.user_project is not None:
+ query_params["userProject"] = self.user_project
+
+ resource = policy.to_api_repr()
+ resource["resourceId"] = self.path
+ info = client._connection.api_request(
+ method="PUT",
+ path="%s/iam" % (self.path,),
+ query_params=query_params,
+ data=resource,
+ _target_object=None,
+ timeout=timeout,
+ )
+ return Policy.from_api_repr(info)
+
+ def test_iam_permissions(self, permissions, client=None, timeout=_DEFAULT_TIMEOUT):
+ """API call: test permissions
+
+ .. note::
+
+ Blob- / object-level IAM support does not yet exist and methods
+ currently call an internal ACL backend not providing any utility
+ beyond the blob's :attr:`acl` at this time. The API may be enhanced
+ in the future and is currently undocumented. Use :attr:`acl` for
+ managing object access control.
+
+ If :attr:`user_project` is set on the bucket, bills the API request
+ to that project.
+
+ :type permissions: list of string
+ :param permissions: the permissions to check
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the current bucket.
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :rtype: list of string
+ :returns: the permissions returned by the ``testIamPermissions`` API
+ request.
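+
+ Example:
+ Check which permissions the caller holds (a minimal sketch; the
+ names below are standard Cloud Storage IAM permissions).
+
+ >>> from google.cloud import storage
+ >>> client = storage.Client()
+ >>> blob = client.bucket("bucket-name").blob("blob-name")
+ >>> permissions = ["storage.objects.get", "storage.objects.delete"]
+ >>> allowed = blob.test_iam_permissions(permissions) # the granted subset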
+ """
+ client = self._require_client(client)
+ query_params = {"permissions": permissions}
+
+ if self.user_project is not None:
+ query_params["userProject"] = self.user_project
+
+ path = "%s/iam/testPermissions" % (self.path,)
+ resp = client._connection.api_request(
+ method="GET", path=path, query_params=query_params, timeout=timeout
+ )
+
+ return resp.get("permissions", [])
+
+ def make_public(self, client=None):
+ """Update blob's ACL, granting read access to anonymous users.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the blob's bucket.
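+
+ Example:
+ A minimal sketch; ``bucket-name`` and ``blob-name`` are
+ placeholder names.
+
+ >>> from google.cloud import storage
+ >>> client = storage.Client()
+ >>> blob = client.bucket("bucket-name").blob("blob-name")
+ >>> blob.make_public()
+ >>> url = blob.public_url # anonymous users may now read the blob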
+ """
+ self.acl.all().grant_read()
+ self.acl.save(client=client)
+
+ def make_private(self, client=None):
+ """Update blob's ACL, revoking read access for anonymous users.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the blob's bucket.
+ """
+ self.acl.all().revoke_read()
+ self.acl.save(client=client)
+
+ def compose(
+ self,
+ sources,
+ client=None,
+ timeout=_DEFAULT_TIMEOUT,
+ if_generation_match=None,
+ if_metageneration_match=None,
+ ):
+ """Concatenate source blobs into this one.
+
+ If :attr:`user_project` is set on the bucket, bills the API request
+ to that project.
+
+ :type sources: list of :class:`Blob`
+ :param sources: Blobs whose contents will be composed into this blob.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the blob's bucket.
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :type if_generation_match: list of long
+ :param if_generation_match: (Optional) Make the operation conditional on whether
+ each source object's current generation matches the given
+ value. Setting to 0 makes the operation succeed only if
+ there are no live versions of the object. The list must
+ match ``sources`` item-to-item.
+
+ :type if_metageneration_match: list of long
+ :param if_metageneration_match: (Optional) Make the operation conditional on whether
+ each source object's current metageneration matches the
+ given value. The list must match ``sources`` item-to-item.
+
+ Example:
+ Compose blobs using generation match preconditions.
+
+ >>> from google.cloud import storage
+ >>> client = storage.Client()
+ >>> bucket = client.bucket("bucket-name")
+
+ >>> blobs = [bucket.blob("blob-name-1"), bucket.blob("blob-name-2")]
+ >>> if_generation_match = [None] * len(blobs)
+ >>> if_generation_match[0] = 123 # precondition for "blob-name-1"
+
+ >>> composed_blob = bucket.blob("composed-name")
+ >>> composed_blob.compose(blobs, if_generation_match=if_generation_match)
+ """
+ sources_len = len(sources)
+ if if_generation_match is not None and len(if_generation_match) != sources_len:
+ raise ValueError(
+ "'if_generation_match' length must be the same as 'sources' length"
+ )
+
+ if (
+ if_metageneration_match is not None
+ and len(if_metageneration_match) != sources_len
+ ):
+ raise ValueError(
+ "'if_metageneration_match' length must be the same as 'sources' length"
+ )
+
+ client = self._require_client(client)
+ query_params = {}
+
+ if self.user_project is not None:
+ query_params["userProject"] = self.user_project
+
+ source_objects = []
+ for index, source in enumerate(sources):
+ source_object = {"name": source.name}
+
+ preconditions = {}
+ if (
+ if_generation_match is not None
+ and if_generation_match[index] is not None
+ ):
+ preconditions["ifGenerationMatch"] = if_generation_match[index]
+
+ if (
+ if_metageneration_match is not None
+ and if_metageneration_match[index] is not None
+ ):
+ preconditions["ifMetagenerationMatch"] = if_metageneration_match[index]
+
+ if preconditions:
+ source_object["objectPreconditions"] = preconditions
+
+ source_objects.append(source_object)
+
+ request = {
+ "sourceObjects": source_objects,
+ "destination": self._properties.copy(),
+ }
+ api_response = client._connection.api_request(
+ method="POST",
+ path=self.path + "/compose",
+ query_params=query_params,
+ data=request,
+ _target_object=self,
+ timeout=timeout,
+ )
+ self._set_properties(api_response)
+
+ def rewrite(
+ self,
+ source,
+ token=None,
+ client=None,
+ timeout=_DEFAULT_TIMEOUT,
+ if_generation_match=None,
+ if_generation_not_match=None,
+ if_metageneration_match=None,
+ if_metageneration_not_match=None,
+ if_source_generation_match=None,
+ if_source_generation_not_match=None,
+ if_source_metageneration_match=None,
+ if_source_metageneration_not_match=None,
+ ):
+ """Rewrite source blob into this one.
+
+ If :attr:`user_project` is set on the bucket, bills the API request
+ to that project.
+
+ :type source: :class:`Blob`
+ :param source: blob whose contents will be rewritten into this blob.
+
+ :type token: str
+ :param token: (Optional) Token returned from an earlier, incomplete
+ call to rewrite the same source blob. If passed, the
+ result will include the updated status and total bytes written.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the blob's bucket.
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :type if_generation_match: long
+ :param if_generation_match: (Optional) Makes the operation
+ conditional on whether the destination
+ object's current generation matches the
+ given value. Setting to 0 makes the
+ operation succeed only if there are no
+ live versions of the object.
+
+ :type if_generation_not_match: long
+ :param if_generation_not_match: (Optional) Makes the operation
+ conditional on whether the
+ destination object's current
+ generation does not match the given
+ value. If no live object exists,
+ the precondition fails. Setting to
+ 0 makes the operation succeed only
+ if there is a live version
+ of the object.
+
+ :type if_metageneration_match: long
+ :param if_metageneration_match: (Optional) Makes the operation
+ conditional on whether the
+ destination object's current
+ metageneration matches the given
+ value.
+
+ :type if_metageneration_not_match: long
+ :param if_metageneration_not_match: (Optional) Makes the operation
+ conditional on whether the
+ destination object's current
+ metageneration does not match
+ the given value.
+
+ :type if_source_generation_match: long
+ :param if_source_generation_match: (Optional) Makes the operation
+ conditional on whether the source
+ object's generation matches the
+ given value.
+
+ :type if_source_generation_not_match: long
+ :param if_source_generation_not_match: (Optional) Makes the operation
+ conditional on whether the source
+ object's generation does not match
+ the given value.
+
+ :type if_source_metageneration_match: long
+ :param if_source_metageneration_match: (Optional) Makes the operation
+ conditional on whether the source
+ object's current metageneration
+ matches the given value.
+
+ :type if_source_metageneration_not_match: long
+ :param if_source_metageneration_not_match: (Optional) Makes the operation
+ conditional on whether the source
+ object's current metageneration
+ does not match the given value.
+
+ :rtype: tuple
+ :returns: ``(token, bytes_rewritten, total_bytes)``, where ``token``
+ is a rewrite token (``None`` if the rewrite is complete),
+ ``bytes_rewritten`` is the number of bytes rewritten so far,
+ and ``total_bytes`` is the total number of bytes to be
+ rewritten.
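+
+ Example:
+ Drive a rewrite to completion by re-passing the token (a
+ minimal sketch; the bucket and blob names are placeholders).
+
+ >>> from google.cloud import storage
+ >>> client = storage.Client()
+ >>> bucket = client.bucket("bucket-name")
+ >>> source = bucket.blob("source-name")
+ >>> dest = bucket.blob("dest-name")
+ >>> token, bytes_rewritten, total_bytes = dest.rewrite(source)
+ >>> while token is not None:
+ ... token, bytes_rewritten, total_bytes = dest.rewrite(source, token=token)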
+ """
+ client = self._require_client(client)
+ headers = _get_encryption_headers(self._encryption_key)
+ headers.update(_get_encryption_headers(source._encryption_key, source=True))
+
+ query_params = self._query_params
+ if "generation" in query_params:
+ del query_params["generation"]
+
+ if token:
+ query_params["rewriteToken"] = token
+
+ if source.generation:
+ query_params["sourceGeneration"] = source.generation
+
+ if self.kms_key_name is not None:
+ query_params["destinationKmsKeyName"] = self.kms_key_name
+
+ _add_generation_match_parameters(
+ query_params,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ if_source_generation_match=if_source_generation_match,
+ if_source_generation_not_match=if_source_generation_not_match,
+ if_source_metageneration_match=if_source_metageneration_match,
+ if_source_metageneration_not_match=if_source_metageneration_not_match,
+ )
+
+ api_response = client._connection.api_request(
+ method="POST",
+ path=source.path + "/rewriteTo" + self.path,
+ query_params=query_params,
+ data=self._properties,
+ headers=headers,
+ _target_object=self,
+ timeout=timeout,
+ )
+ rewritten = int(api_response["totalBytesRewritten"])
+ size = int(api_response["objectSize"])
+
+ # The resource key is set if and only if the API response is
+ # completely done. Additionally, there is no rewrite token to return
+ # in this case.
+ if api_response["done"]:
+ self._set_properties(api_response["resource"])
+ return None, rewritten, size
+
+ return api_response["rewriteToken"], rewritten, size
+
+ def update_storage_class(
+ self,
+ new_class,
+ client=None,
+ if_generation_match=None,
+ if_generation_not_match=None,
+ if_metageneration_match=None,
+ if_metageneration_not_match=None,
+ if_source_generation_match=None,
+ if_source_generation_not_match=None,
+ if_source_metageneration_match=None,
+ if_source_metageneration_not_match=None,
+ timeout=_DEFAULT_TIMEOUT,
+ ):
+ """Update blob's storage class via a rewrite-in-place. This helper will
+ wait for the rewrite to complete before returning, so it may take some
+ time for large files.
+
+ See
+ https://cloud.google.com/storage/docs/per-object-storage-class
+
+ If :attr:`user_project` is set on the bucket, bills the API request
+ to that project.
+
+ :type new_class: str
+ :param new_class:
+ new storage class for the object. One of:
+ :attr:`~google.cloud.storage.constants.NEARLINE_STORAGE_CLASS`,
+ :attr:`~google.cloud.storage.constants.COLDLINE_STORAGE_CLASS`,
+ :attr:`~google.cloud.storage.constants.ARCHIVE_STORAGE_CLASS`,
+ :attr:`~google.cloud.storage.constants.STANDARD_STORAGE_CLASS`,
+ :attr:`~google.cloud.storage.constants.MULTI_REGIONAL_LEGACY_STORAGE_CLASS`,
+ or
+ :attr:`~google.cloud.storage.constants.REGIONAL_LEGACY_STORAGE_CLASS`.
+
+ :type client: :class:`~google.cloud.storage.client.Client`
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the blob's bucket.
+
+ :type if_generation_match: long
+ :param if_generation_match: (Optional) Makes the operation
+ conditional on whether the destination
+ object's current generation matches the
+ given value. Setting to 0 makes the
+ operation succeed only if there are no
+ live versions of the object.
+
+ :type if_generation_not_match: long
+ :param if_generation_not_match: (Optional) Makes the operation
+ conditional on whether the
+ destination object's current
+ generation does not match the given
+ value. If no live object exists,
+ the precondition fails. Setting to
+ 0 makes the operation succeed only
+ if there is a live version
+ of the object.
+
+ :type if_metageneration_match: long
+ :param if_metageneration_match: (Optional) Makes the operation
+ conditional on whether the
+ destination object's current
+ metageneration matches the given
+ value.
+
+ :type if_metageneration_not_match: long
+ :param if_metageneration_not_match: (Optional) Makes the operation
+ conditional on whether the
+ destination object's current
+ metageneration does not match
+ the given value.
+
+ :type if_source_generation_match: long
+ :param if_source_generation_match: (Optional) Makes the operation
+ conditional on whether the source
+ object's generation matches the
+ given value.
+
+ :type if_source_generation_not_match: long
+ :param if_source_generation_not_match: (Optional) Makes the operation
+ conditional on whether the source
+ object's generation does not match
+ the given value.
+
+ :type if_source_metageneration_match: long
+ :param if_source_metageneration_match: (Optional) Makes the operation
+ conditional on whether the source
+ object's current metageneration
+ matches the given value.
+
+ :type if_source_metageneration_not_match: long
+ :param if_source_metageneration_not_match: (Optional) Makes the operation
+ conditional on whether the source
+ object's current metageneration
+ does not match the given value.
+
+ :type timeout: float or tuple
+ :param timeout:
+ (Optional) The number of seconds the transport should wait for the
+ server response. Depending on the retry strategy, a request may be
+ repeated several times using the same timeout each time.
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
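+
+ Example:
+ Move an object to Nearline storage (a minimal sketch; the
+ bucket and blob names are placeholders).
+
+ >>> from google.cloud import storage
+ >>> from google.cloud.storage import constants
+ >>> client = storage.Client()
+ >>> blob = client.bucket("bucket-name").blob("blob-name")
+ >>> blob.update_storage_class(constants.NEARLINE_STORAGE_CLASS)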
+ """
+ if new_class not in self.STORAGE_CLASSES:
+ raise ValueError("Invalid storage class: %s" % (new_class,))
+
+ # Update current blob's storage class prior to rewrite
+ self._patch_property("storageClass", new_class)
+
+ # Execute consecutive rewrite operations until operation is done
+ token, _, _ = self.rewrite(
+ self,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ if_source_generation_match=if_source_generation_match,
+ if_source_generation_not_match=if_source_generation_not_match,
+ if_source_metageneration_match=if_source_metageneration_match,
+ if_source_metageneration_not_match=if_source_metageneration_not_match,
+ timeout=timeout,
+ )
+ while token is not None:
+ token, _, _ = self.rewrite(
+ self,
+ token=token,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ if_source_generation_match=if_source_generation_match,
+ if_source_generation_not_match=if_source_generation_not_match,
+ if_source_metageneration_match=if_source_metageneration_match,
+ if_source_metageneration_not_match=if_source_metageneration_not_match,
+ timeout=timeout,
+ )
+
+ cache_control = _scalar_property("cacheControl")
+ """HTTP 'Cache-Control' header for this object.
+
+ See `RFC 7234`_ and `API reference docs`_.
+
+ :rtype: str or ``NoneType``
+
+ .. _RFC 7234: https://tools.ietf.org/html/rfc7234#section-5.2
+ """
+
+ content_disposition = _scalar_property("contentDisposition")
+ """HTTP 'Content-Disposition' header for this object.
+
+ See `RFC 6266`_ and `API reference docs`_.
+
+ :rtype: str or ``NoneType``
+
+ .. _RFC 6266: https://tools.ietf.org/html/rfc6266
+ """
+
+ content_encoding = _scalar_property("contentEncoding")
+ """HTTP 'Content-Encoding' header for this object.
+
+ See `RFC 7231`_ and `API reference docs`_.
+
+ :rtype: str or ``NoneType``
+
+ .. _RFC 7231: https://tools.ietf.org/html/rfc7231#section-3.1.2.2
+ """
+
+ content_language = _scalar_property("contentLanguage")
+ """HTTP 'Content-Language' header for this object.
+
+ See `BCP47`_ and `API reference docs`_.
+
+ :rtype: str or ``NoneType``
+
+ .. _BCP47: https://tools.ietf.org/html/bcp47
+ """
+
+ content_type = _scalar_property(_CONTENT_TYPE_FIELD)
+ """HTTP 'Content-Type' header for this object.
+
+ See `RFC 2616`_ and `API reference docs`_.
+
+ :rtype: str or ``NoneType``
+
+ .. _RFC 2616: https://tools.ietf.org/html/rfc2616#section-14.17
+ """
+
+ crc32c = _scalar_property("crc32c")
+ """CRC32C checksum for this object.
+
+ This returns the blob's CRC32C checksum. To retrieve the value, first
+ call :meth:`reload`, which loads the blob's properties from the server.
+
+ See `RFC 4960`_ and `API reference docs`_.
+
+ If not set before upload, the server will compute the hash.
+
+ :rtype: str or ``NoneType``
+
+ .. _RFC 4960: https://tools.ietf.org/html/rfc4960#appendix-B
+
+ Example:
+ Retrieve the crc32c hash of blob.
+
+ >>> from google.cloud import storage
+ >>> client = storage.Client()
+ >>> bucket = client.get_bucket("my-bucket-name")
+ >>> blob = bucket.blob('my-blob')
+
+ >>> blob.crc32c # return None
+ >>> blob.reload()
+ >>> blob.crc32c # return crc32c hash
+
+ >>> # Another approach
+ >>> blob = bucket.get_blob('my-blob')
+ >>> blob.crc32c # return crc32c hash
+ """
+
+ @property
+ def component_count(self):
+ """Number of underlying components that make up this object.
+
+ See https://cloud.google.com/storage/docs/json_api/v1/objects
+
+ :rtype: int or ``NoneType``
+ :returns: The component count (in case of a composed object) or
+ ``None`` if the blob's resource has not been loaded from
+ the server. This property will not be set on objects
+ not created via ``compose``.
+ """
+ component_count = self._properties.get("componentCount")
+ if component_count is not None:
+ return int(component_count)
+
+ @property
+ def etag(self):
+ """Retrieve the ETag for the object.
+
+ See `RFC 2616 (etags)`_ and `API reference docs`_.
+
+ :rtype: str or ``NoneType``
+ :returns: The blob etag or ``None`` if the blob's resource has not
+ been loaded from the server.
+
+ .. _RFC 2616 (etags): https://tools.ietf.org/html/rfc2616#section-3.11
+ """
+ return self._properties.get("etag")
+
+ event_based_hold = _scalar_property("eventBasedHold")
+ """Is an event-based hold active on the object?
+
+ See `API reference docs`_.
+
+ If the property is not set locally, returns :data:`None`.
+
+ :rtype: bool or ``NoneType``
+ """
+
+ @property
+ def generation(self):
+ """Retrieve the generation for the object.
+
+ See https://cloud.google.com/storage/docs/json_api/v1/objects
+
+ :rtype: int or ``NoneType``
+ :returns: The generation of the blob or ``None`` if the blob's
+ resource has not been loaded from the server.
+ """
+ generation = self._properties.get("generation")
+ if generation is not None:
+ return int(generation)
+
+ @property
+ def id(self):
+ """Retrieve the ID for the object.
+
+ See https://cloud.google.com/storage/docs/json_api/v1/objects
+
+ The ID consists of the bucket name, object name, and generation number.
+
+ :rtype: str or ``NoneType``
+ :returns: The ID of the blob or ``None`` if the blob's
+ resource has not been loaded from the server.
+ """
+ return self._properties.get("id")
+
+ md5_hash = _scalar_property("md5Hash")
+ """MD5 hash for this object.
+
+ This returns the blob's MD5 hash. To retrieve the value, first
+ call :meth:`reload`, which loads the blob's properties from the server.
+
+ See `RFC 1321`_ and `API reference docs`_.
+
+ If not set before upload, the server will compute the hash.
+
+ :rtype: str or ``NoneType``
+
+ .. _RFC 1321: https://tools.ietf.org/html/rfc1321
+
+ Example:
+ Retrieve the md5 hash of blob.
+
+ >>> from google.cloud import storage
+ >>> client = storage.Client()
+ >>> bucket = client.get_bucket("my-bucket-name")
+ >>> blob = bucket.blob('my-blob')
+
+ >>> blob.md5_hash # return None
+ >>> blob.reload()
+ >>> blob.md5_hash # return md5 hash
+
+ >>> # Another approach
+ >>> blob = bucket.get_blob('my-blob')
+ >>> blob.md5_hash # return md5 hash
+ """
+
+ @property
+ def media_link(self):
+ """Retrieve the media download URI for the object.
+
+ See https://cloud.google.com/storage/docs/json_api/v1/objects
+
+ :rtype: str or ``NoneType``
+ :returns: The media link for the blob or ``None`` if the blob's
+ resource has not been loaded from the server.
+ """
+ return self._properties.get("mediaLink")
+
+ @property
+ def metadata(self):
+ """Retrieve arbitrary/application specific metadata for the object.
+
+ See https://cloud.google.com/storage/docs/json_api/v1/objects
+
+ :setter: Update arbitrary/application specific metadata for the
+ object.
+ :getter: Retrieve arbitrary/application specific metadata for
+ the object.
+
+ :rtype: dict or ``NoneType``
+ :returns: The metadata associated with the blob or ``None`` if the
+ property is not set.
+ """
+ return copy.deepcopy(self._properties.get("metadata"))
+
+ @metadata.setter
+ def metadata(self, value):
+ """Update arbitrary/application specific metadata for the object.
+
+ See https://cloud.google.com/storage/docs/json_api/v1/objects
+
+ :type value: dict
+ :param value: The blob metadata to set.
+ """
+ if value is not None:
+ value = {k: str(v) for k, v in value.items()}
+ self._patch_property("metadata", value)
+
+ @property
+ def metageneration(self):
+ """Retrieve the metageneration for the object.
+
+ See https://cloud.google.com/storage/docs/json_api/v1/objects
+
+ :rtype: int or ``NoneType``
+ :returns: The metageneration of the blob or ``None`` if the blob's
+ resource has not been loaded from the server.
+ """
+ metageneration = self._properties.get("metageneration")
+ if metageneration is not None:
+ return int(metageneration)
+
+ @property
+ def owner(self):
+ """Retrieve info about the owner of the object.
+
+ See https://cloud.google.com/storage/docs/json_api/v1/objects
+
+ :rtype: dict or ``NoneType``
+ :returns: Mapping of owner's role/ID, or ``None`` if the blob's
+ resource has not been loaded from the server.
+ """
+ return copy.deepcopy(self._properties.get("owner"))
+
+ @property
+ def retention_expiration_time(self):
+ """Retrieve timestamp at which the object's retention period expires.
+
+ See https://cloud.google.com/storage/docs/json_api/v1/objects
+
+ :rtype: :class:`datetime.datetime` or ``NoneType``
+ :returns: Datetime object parsed from RFC3339 valid timestamp, or
+ ``None`` if the property is not set locally.
+ """
+ value = self._properties.get("retentionExpirationTime")
+ if value is not None:
+ return _rfc3339_to_datetime(value)
+
+ @property
+ def self_link(self):
+ """Retrieve the URI for the object.
+
+ See https://cloud.google.com/storage/docs/json_api/v1/objects
+
+ :rtype: str or ``NoneType``
+ :returns: The self link for the blob or ``None`` if the blob's
+ resource has not been loaded from the server.
+ """
+ return self._properties.get("selfLink")
+
+ @property
+ def size(self):
+ """Size of the object, in bytes.
+
+ See https://cloud.google.com/storage/docs/json_api/v1/objects
+
+ :rtype: int or ``NoneType``
+ :returns: The size of the blob or ``None`` if the blob's
+ resource has not been loaded from the server.
+ """
+ size = self._properties.get("size")
+ if size is not None:
+ return int(size)
+
+ @property
+ def kms_key_name(self):
+ """Resource name of Cloud KMS key used to encrypt the blob's contents.
+
+ :rtype: str or ``NoneType``
+ :returns:
+ The resource name or ``None`` if no Cloud KMS key was used,
+ or the blob's resource has not been loaded from the server.
+ """
+ return self._properties.get("kmsKeyName")
+
+ storage_class = _scalar_property("storageClass")
+ """Retrieve the storage class for the object.
+
+ This can only be set at blob / object **creation** time. If you'd
+ like to change the storage class **after** the blob / object already
+ exists in a bucket, call :meth:`update_storage_class` (which uses
+ :meth:`rewrite`).
+
+ See https://cloud.google.com/storage/docs/storage-classes
+
+ :rtype: str or ``NoneType``
+ :returns:
+ If set, one of
+ :attr:`~google.cloud.storage.constants.STANDARD_STORAGE_CLASS`,
+ :attr:`~google.cloud.storage.constants.NEARLINE_STORAGE_CLASS`,
+ :attr:`~google.cloud.storage.constants.COLDLINE_STORAGE_CLASS`,
+ :attr:`~google.cloud.storage.constants.ARCHIVE_STORAGE_CLASS`,
+ :attr:`~google.cloud.storage.constants.MULTI_REGIONAL_LEGACY_STORAGE_CLASS`,
+ :attr:`~google.cloud.storage.constants.REGIONAL_LEGACY_STORAGE_CLASS`,
+ :attr:`~google.cloud.storage.constants.DURABLE_REDUCED_AVAILABILITY_STORAGE_CLASS`,
+ else ``None``.
+ """
+
+ temporary_hold = _scalar_property("temporaryHold")
+ """Is a temporary hold active on the object?
+
+ See `API reference docs`_.
+
+ If the property is not set locally, returns :data:`None`.
+
+ :rtype: bool or ``NoneType``
+ """
+
+ @property
+ def time_deleted(self):
+ """Retrieve the timestamp at which the object was deleted.
+
+ See https://cloud.google.com/storage/docs/json_api/v1/objects
+
+ :rtype: :class:`datetime.datetime` or ``NoneType``
+ :returns: Datetime object parsed from RFC3339 valid timestamp, or
+ ``None`` if the blob's resource has not been loaded from
+ the server (see :meth:`reload`). If the blob has
+ not been deleted, this will never be set.
+ """
+ value = self._properties.get("timeDeleted")
+ if value is not None:
+ return _rfc3339_to_datetime(value)
+
+ @property
+ def time_created(self):
+ """Retrieve the timestamp at which the object was created.
+
+ See https://cloud.google.com/storage/docs/json_api/v1/objects
+
+ :rtype: :class:`datetime.datetime` or ``NoneType``
+ :returns: Datetime object parsed from RFC3339 valid timestamp, or
+ ``None`` if the blob's resource has not been loaded from
+ the server (see :meth:`reload`).
+ """
+ value = self._properties.get("timeCreated")
+ if value is not None:
+ return _rfc3339_to_datetime(value)
+
+ @property
+ def updated(self):
+ """Retrieve the timestamp at which the object was updated.
+
+ See https://cloud.google.com/storage/docs/json_api/v1/objects
+
+ :rtype: :class:`datetime.datetime` or ``NoneType``
+ :returns: Datetime object parsed from RFC3339 valid timestamp, or
+ ``None`` if the blob's resource has not been loaded from
+ the server (see :meth:`reload`).
+ """
+ value = self._properties.get("updated")
+ if value is not None:
+ return _rfc3339_to_datetime(value)
+
+ @property
+ def custom_time(self):
+ """Retrieve the custom time for the object.
+
+ See https://cloud.google.com/storage/docs/json_api/v1/objects
+
+ :rtype: :class:`datetime.datetime` or ``NoneType``
+ :returns: Datetime object parsed from RFC3339 valid timestamp, or
+ ``None`` if the blob's resource has not been loaded from
+ the server (see :meth:`reload`).
+ """
+ value = self._properties.get("customTime")
+ if value is not None:
+ return _rfc3339_to_datetime(value)
+
+ @custom_time.setter
+ def custom_time(self, value):
+ """Set the custom time for the object.
+
+ Once set on the server-side object, this value can't be unset; it may
+ only be changed to another custom datetime in the future.
+
+ If :attr:`custom_time` must be unset, either perform a rewrite
+ operation or upload the data again.
+
+ See https://cloud.google.com/storage/docs/json_api/v1/objects
+
+ :type value: :class:`datetime.datetime`
+ :param value: (Optional) Set the custom time of blob. Datetime object
+ parsed from RFC3339 valid timestamp.
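+
+ Example:
+ Set a custom time before uploading, so it is included in the
+ object metadata sent to the server (a minimal sketch; the
+ bucket and blob names are placeholders).
+
+ >>> import datetime
+ >>> from google.cloud import storage
+ >>> client = storage.Client()
+ >>> blob = client.bucket("bucket-name").blob("blob-name")
+ >>> blob.custom_time = datetime.datetime(2020, 1, 1, tzinfo=datetime.timezone.utc)
+ >>> blob.upload_from_string("data")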
+ """
+ if value is not None:
+ value = _datetime_to_rfc3339(value)
+
+ self._properties["customTime"] = value
+
+
+def _get_encryption_headers(key, source=False):
+ """Builds customer encryption key headers
+
+ :type key: bytes
+ :param key: 32 byte key to build request key and hash.
+
+ :type source: bool
+ :param source: If true, return headers for the "source" blob; otherwise,
+ return headers for the "destination" blob.
+
+ :rtype: dict
+ :returns: dict of HTTP headers being sent in request.
+ """
+ if key is None:
+ return {}
+
+ key = _to_bytes(key)
+ key_hash = hashlib.sha256(key).digest()
+ key_hash = base64.b64encode(key_hash)
+ key = base64.b64encode(key)
+
+ if source:
+ prefix = "X-Goog-Copy-Source-Encryption-"
+ else:
+ prefix = "X-Goog-Encryption-"
+
+ return {
+ prefix + "Algorithm": "AES256",
+ prefix + "Key": _bytes_to_unicode(key),
+ prefix + "Key-Sha256": _bytes_to_unicode(key_hash),
+ }
+
+
+def _quote(value, safe=b"~"):
+ """URL-quote a string.
+
+ If the value is unicode, this method first UTF-8 encodes it as bytes and
+ then quotes the bytes. (In Python 3, ``urllib.parse.quote`` does this
+ encoding automatically, but in Python 2, non-ASCII characters cannot be
+ quoted.)
+
+ :type value: str or bytes
+ :param value: The value to be URL-quoted.
+
+ :type safe: bytes
+ :param safe: Bytes *not* to be quoted. By default, includes only ``b'~'``.
+
+ :rtype: str
+ :returns: The encoded value (bytes in Python 2, unicode in Python 3).
+ """
+ value = _to_bytes(value, encoding="utf-8")
+ return quote(value, safe=safe)
+
+
+def _maybe_rewind(stream, rewind=False):
+ """Rewind the stream if desired.
+
+ :type stream: IO[bytes]
+ :param stream: A bytes IO object open for reading.
+
+ :type rewind: bool
+ :param rewind: Indicates if we should seek to the beginning of the stream.
+ """
+ if rewind:
+ stream.seek(0, os.SEEK_SET)
+
+
+def _raise_from_invalid_response(error):
+ """Re-wrap and raise an ``InvalidResponse`` exception.
+
+ :type error: :exc:`google.resumable_media.InvalidResponse`
+ :param error: A caught exception from the ``google-resumable-media``
+ library.
+
+ :raises: :class:`~google.cloud.exceptions.GoogleCloudError` corresponding
+ to the failed status code
+ """
+ response = error.response
+
+ # 'response.text' carries the actual reason for the failure, while
+ # 'error' only describes the expected status code.
+ if response.text:
+ error_message = response.text + ": " + str(error)
+ else:
+ error_message = str(error)
+
+ message = u"{method} {url}: {error}".format(
+ method=response.request.method, url=response.request.url, error=error_message
+ )
+
+ raise exceptions.from_http_status(response.status_code, message, response=response)
+
+
+def _add_query_parameters(base_url, name_value_pairs):
+ """Add one query parameter to a base URL.
+
+ :type base_url: string
+ :param base_url: Base URL (may already contain query parameters)
+
+ :type name_value_pairs: list of (string, string) tuples.
+ :param name_value_pairs: Names and values of the query parameters to add
+
+ :rtype: string
+ :returns: URL with additional query strings appended.
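+
+ Example (illustrative values):
+
+ >>> _add_query_parameters("https://example.com/path?a=1", [("b", "2")])
+ 'https://example.com/path?a=1&b=2'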
+ """
+ if len(name_value_pairs) == 0:
+ return base_url
+
+ scheme, netloc, path, query, frag = urlsplit(base_url)
+ query = parse_qsl(query)
+ query.extend(name_value_pairs)
+ return urlunsplit((scheme, netloc, path, urlencode(query), frag))
diff --git a/venv/Lib/site-packages/google/cloud/storage/bucket.py b/venv/Lib/site-packages/google/cloud/storage/bucket.py
new file mode 100644
index 000000000..adf37d398
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/storage/bucket.py
@@ -0,0 +1,3146 @@
+# Copyright 2014 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Create / interact with Google Cloud Storage buckets."""
+
+import base64
+import copy
+import datetime
+import functools
+import json
+import warnings
+
+import six
+from six.moves.urllib.parse import urlsplit
+
+from google.api_core import page_iterator
+from google.api_core import datetime_helpers
+from google.cloud._helpers import _datetime_to_rfc3339
+from google.cloud._helpers import _NOW
+from google.cloud._helpers import _rfc3339_to_datetime
+from google.cloud.exceptions import NotFound
+from google.api_core.iam import Policy
+from google.cloud.storage import _signing
+from google.cloud.storage._helpers import _add_generation_match_parameters
+from google.cloud.storage._helpers import _PropertyMixin
+from google.cloud.storage._helpers import _scalar_property
+from google.cloud.storage._helpers import _validate_name
+from google.cloud.storage._signing import generate_signed_url_v2
+from google.cloud.storage._signing import generate_signed_url_v4
+from google.cloud.storage._helpers import _bucket_bound_hostname_url
+from google.cloud.storage.acl import BucketACL
+from google.cloud.storage.acl import DefaultObjectACL
+from google.cloud.storage.blob import Blob
+from google.cloud.storage.constants import _DEFAULT_TIMEOUT
+from google.cloud.storage.constants import ARCHIVE_STORAGE_CLASS
+from google.cloud.storage.constants import COLDLINE_STORAGE_CLASS
+from google.cloud.storage.constants import DUAL_REGION_LOCATION_TYPE
+from google.cloud.storage.constants import (
+ DURABLE_REDUCED_AVAILABILITY_LEGACY_STORAGE_CLASS,
+)
+from google.cloud.storage.constants import MULTI_REGIONAL_LEGACY_STORAGE_CLASS
+from google.cloud.storage.constants import MULTI_REGION_LOCATION_TYPE
+from google.cloud.storage.constants import NEARLINE_STORAGE_CLASS
+from google.cloud.storage.constants import REGIONAL_LEGACY_STORAGE_CLASS
+from google.cloud.storage.constants import REGION_LOCATION_TYPE
+from google.cloud.storage.constants import STANDARD_STORAGE_CLASS
+from google.cloud.storage.notification import BucketNotification
+from google.cloud.storage.notification import NONE_PAYLOAD_FORMAT
+
+
+_UBLA_BPO_ENABLED_MESSAGE = (
+ "Pass only one of 'uniform_bucket_level_access_enabled' / "
+ "'bucket_policy_only_enabled' to 'IAMConfiguration'."
+)
+_BPO_ENABLED_MESSAGE = (
+ "'IAMConfiguration.bucket_policy_only_enabled' is deprecated. "
+ "Instead, use 'IAMConfiguration.uniform_bucket_level_access_enabled'."
+)
+_UBLA_BPO_LOCK_TIME_MESSAGE = (
+ "Pass only one of 'uniform_bucket_level_access_lock_time' / "
+ "'bucket_policy_only_lock_time' to 'IAMConfiguration'."
+)
+_BPO_LOCK_TIME_MESSAGE = (
+ "'IAMConfiguration.bucket_policy_only_lock_time' is deprecated. "
+ "Instead, use 'IAMConfiguration.uniform_bucket_level_access_lock_time'."
+)
+_LOCATION_SETTER_MESSAGE = (
+ "Assignment to 'Bucket.location' is deprecated, as it is only "
+ "valid before the bucket is created. Instead, pass the location "
+ "to `Bucket.create`."
+)
+_API_ACCESS_ENDPOINT = "https://storage.googleapis.com"
+
+
+def _blobs_page_start(iterator, page, response):
+ """Grab prefixes after a :class:`~google.cloud.iterator.Page` started.
+
+ :type iterator: :class:`~google.api_core.page_iterator.Iterator`
+ :param iterator: The iterator that is currently in use.
+
+ :type page: :class:`~google.api_core.page_iterator.Page`
+ :param page: The page that was just created.
+
+ :type response: dict
+ :param response: The JSON API response for a page of blobs.
+ """
+ page.prefixes = tuple(response.get("prefixes", ()))
+ iterator.prefixes.update(page.prefixes)
+
+
+def _item_to_blob(iterator, item):
+ """Convert a JSON blob to the native object.
+
+ .. note::
+
+ This assumes that the ``bucket`` attribute has been
+ added to the iterator after being created.
+
+ :type iterator: :class:`~google.api_core.page_iterator.Iterator`
+ :param iterator: The iterator that has retrieved the item.
+
+ :type item: dict
+ :param item: An item to be converted to a blob.
+
+ :rtype: :class:`.Blob`
+ :returns: The next blob in the page.
+ """
+ name = item.get("name")
+ blob = Blob(name, bucket=iterator.bucket)
+ blob._set_properties(item)
+ return blob
+
+
+def _item_to_notification(iterator, item):
+ """Convert a JSON blob to the native object.
+
+ .. note::
+
+ This assumes that the ``bucket`` attribute has been
+ added to the iterator after being created.
+
+ :type iterator: :class:`~google.api_core.page_iterator.Iterator`
+ :param iterator: The iterator that has retrieved the item.
+
+ :type item: dict
+ :param item: An item to be converted to a notification.
+
+ :rtype: :class:`.BucketNotification`
+ :returns: The next notification being iterated.
+ """
+ return BucketNotification.from_api_repr(item, bucket=iterator.bucket)
+
+
+class LifecycleRuleConditions(dict):
+ """Map a single lifecycle rule for a bucket.
+
+ See: https://cloud.google.com/storage/docs/lifecycle
+
+ :type age: int
+ :param age: (Optional) Apply rule action to items whose age, in days,
+ exceeds this value.
+
+ :type created_before: datetime.date
+ :param created_before: (Optional) Apply rule action to items created
+ before this date.
+
+ :type is_live: bool
+ :param is_live: (Optional) If true, apply rule action to non-versioned
+ items, or to items with no newer versions. If false, apply
+ rule action to versioned items with at least one newer
+ version.
+
+ :type matches_storage_class: list(str), one or more of
+ :attr:`Bucket.STORAGE_CLASSES`.
+ :param matches_storage_class: (Optional) Apply rule action to items
+ whose storage class matches this value.
+
+ :type number_of_newer_versions: int
+ :param number_of_newer_versions: (Optional) Apply rule action to versioned
+ items having N newer versions.
+
+ :type days_since_custom_time: int
+ :param days_since_custom_time: (Optional) Apply rule action to items whose number of
+ days elapsed since the custom timestamp exceeds this
+ value. The value of the field must be a non-negative
+ integer.
+
+ :type custom_time_before: :class:`datetime.date`
+ :param custom_time_before: (Optional) Date object parsed from an RFC3339-valid date,
+ e.g., 2019-03-16; apply rule action to items whose custom
+ time is before this date.
+
+ :type days_since_noncurrent_time: int
+ :param days_since_noncurrent_time: (Optional) Apply rule action to items whose number of days
+ elapsed since the noncurrent timestamp exceeds this value.
+ This condition is relevant only for versioned objects. The
+ value of the field must be a non-negative integer. If it's
+ zero, the object version becomes eligible for lifecycle
+ action as soon as it becomes noncurrent.
+
+ :type noncurrent_time_before: :class:`datetime.date`
+ :param noncurrent_time_before: (Optional) Date object parsed from an RFC3339-valid date,
+ e.g., 2019-03-16; apply rule action to items whose noncurrent
+ time is before this date. This condition is relevant only for
+ versioned objects.
+
+ :raises ValueError: if no arguments are passed.
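+
+ Example:
+ Build conditions matching non-live items older than 30 days (a
+ minimal sketch of the mapping this class produces).
+
+ >>> from google.cloud.storage.bucket import LifecycleRuleConditions
+ >>> conditions = LifecycleRuleConditions(age=30, is_live=False)
+ >>> dict(conditions)
+ {'age': 30, 'isLive': False}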
+ """
+
+ def __init__(
+ self,
+ age=None,
+ created_before=None,
+ is_live=None,
+ matches_storage_class=None,
+ number_of_newer_versions=None,
+ days_since_custom_time=None,
+ custom_time_before=None,
+ days_since_noncurrent_time=None,
+ noncurrent_time_before=None,
+ _factory=False,
+ ):
+ conditions = {}
+
+ if age is not None:
+ conditions["age"] = age
+
+ if created_before is not None:
+ conditions["createdBefore"] = created_before.isoformat()
+
+ if is_live is not None:
+ conditions["isLive"] = is_live
+
+ if matches_storage_class is not None:
+ conditions["matchesStorageClass"] = matches_storage_class
+
+ if number_of_newer_versions is not None:
+ conditions["numNewerVersions"] = number_of_newer_versions
+
+ if days_since_custom_time is not None:
+ conditions["daysSinceCustomTime"] = days_since_custom_time
+
+ if custom_time_before is not None:
+ conditions["customTimeBefore"] = custom_time_before.isoformat()
+
+ if days_since_noncurrent_time is not None:
+ conditions["daysSinceNoncurrentTime"] = days_since_noncurrent_time
+
+ if noncurrent_time_before is not None:
+ conditions["noncurrentTimeBefore"] = noncurrent_time_before.isoformat()
+
+ # Validate only after every supplied condition has been collected,
+ # so rules using only the noncurrent-time conditions are accepted.
+ if not _factory and not conditions:
+ raise ValueError("Supply at least one condition")
+
+ super(LifecycleRuleConditions, self).__init__(conditions)
+
+ @classmethod
+ def from_api_repr(cls, resource):
+ """Factory: construct instance from resource.
+
+ :type resource: dict
+ :param resource: mapping as returned from API call.
+
+ :rtype: :class:`LifecycleRuleConditions`
+ :returns: Instance created from resource.
+ """
+ instance = cls(_factory=True)
+ instance.update(resource)
+ return instance
+
+ @property
+ def age(self):
+ """Conditon's age value."""
+ return self.get("age")
+
+ @property
+ def created_before(self):
+ """Conditon's created_before value."""
+ before = self.get("createdBefore")
+ if before is not None:
+ return datetime_helpers.from_iso8601_date(before)
+
+ @property
+ def is_live(self):
+ """Conditon's 'is_live' value."""
+ return self.get("isLive")
+
+ @property
+ def matches_storage_class(self):
+ """Conditon's 'matches_storage_class' value."""
+ return self.get("matchesStorageClass")
+
+ @property
+ def number_of_newer_versions(self):
+ """Conditon's 'number_of_newer_versions' value."""
+ return self.get("numNewerVersions")
+
+ @property
+ def days_since_custom_time(self):
+ """Conditon's 'days_since_custom_time' value."""
+ return self.get("daysSinceCustomTime")
+
+ @property
+ def custom_time_before(self):
+ """Conditon's 'custom_time_before' value."""
+ before = self.get("customTimeBefore")
+ if before is not None:
+ return datetime_helpers.from_iso8601_date(before)
+
+ @property
+ def days_since_noncurrent_time(self):
+ """Conditon's 'days_since_noncurrent_time' value."""
+ return self.get("daysSinceNoncurrentTime")
+
+ @property
+ def noncurrent_time_before(self):
+ """Conditon's 'noncurrent_time_before' value."""
+ before = self.get("noncurrentTimeBefore")
+ if before is not None:
+ return datetime_helpers.from_iso8601_date(before)
+
+
+class LifecycleRuleDelete(dict):
+ """Map a lifecycle rule deleting matching items.
+
+ :type kw: dict
+ :param kw: arguments passed to :class:`LifecycleRuleConditions`.
+ """
+
+ def __init__(self, **kw):
+ conditions = LifecycleRuleConditions(**kw)
+ rule = {"action": {"type": "Delete"}, "condition": dict(conditions)}
+ super(LifecycleRuleDelete, self).__init__(rule)
+
+ @classmethod
+ def from_api_repr(cls, resource):
+ """Factory: construct instance from resource.
+
+ :type resource: dict
+ :param resource: mapping as returned from API call.
+
+ :rtype: :class:`LifecycleRuleDelete`
+ :returns: Instance created from resource.
+ """
+ instance = cls(_factory=True)
+ instance.update(resource)
+ return instance
+
+
+class LifecycleRuleSetStorageClass(dict):
+ """Map a lifecycle rule updating storage class of matching items.
+
+ :type storage_class: str, one of :attr:`Bucket.STORAGE_CLASSES`.
+ :param storage_class: new storage class to assign to matching items.
+
+ :type kw: dict
+ :param kw: arguments passed to :class:`LifecycleRuleConditions`.
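+
+ Example:
+ Build a rule moving year-old items to Coldline (a minimal
+ sketch of the mapping this class produces).
+
+ >>> from google.cloud.storage.bucket import LifecycleRuleSetStorageClass
+ >>> rule = LifecycleRuleSetStorageClass("COLDLINE", age=365)
+ >>> dict(rule)
+ {'action': {'type': 'SetStorageClass', 'storageClass': 'COLDLINE'}, 'condition': {'age': 365}}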
+ """
+
+ def __init__(self, storage_class, **kw):
+ conditions = LifecycleRuleConditions(**kw)
+ rule = {
+ "action": {"type": "SetStorageClass", "storageClass": storage_class},
+ "condition": dict(conditions),
+ }
+ super(LifecycleRuleSetStorageClass, self).__init__(rule)
+
+ @classmethod
+ def from_api_repr(cls, resource):
+ """Factory: construct instance from resource.
+
+ :type resource: dict
+ :param resource: mapping as returned from API call.
+
+ :rtype: :class:`LifecycleRuleSetStorageClass`
+ :returns: Instance created from resource.
+ """
+ action = resource["action"]
+ instance = cls(action["storageClass"], _factory=True)
+ instance.update(resource)
+ return instance
+
+
+_default = object()
+
+
+class IAMConfiguration(dict):
+ """Map a bucket's IAM configuration.
+
+ :type bucket: :class:`Bucket`
+ :param bucket: Bucket for which this instance is the policy.
+
+ :type uniform_bucket_level_access_enabled: bool
+ :param uniform_bucket_level_access_enabled:
+ (Optional) Whether the IAM-only policy is enabled for the bucket.
+
+ :type uniform_bucket_level_access_locked_time: :class:`datetime.datetime`
+ :param uniform_bucket_level_access_locked_time:
+ (Optional) When the bucket's IAM-only policy was enabled.
+ This value should normally only be set by the back-end API.
+
+ :type bucket_policy_only_enabled: bool
+ :param bucket_policy_only_enabled:
+ Deprecated alias for :data:`uniform_bucket_level_access_enabled`.
+
+ :type bucket_policy_only_locked_time: :class:`datetime.datetime`
+ :param bucket_policy_only_locked_time:
+ Deprecated alias for :data:`uniform_bucket_level_access_locked_time`.
+ """
+
+ def __init__(
+ self,
+ bucket,
+ uniform_bucket_level_access_enabled=_default,
+ uniform_bucket_level_access_locked_time=_default,
+ bucket_policy_only_enabled=_default,
+ bucket_policy_only_locked_time=_default,
+ ):
+ if bucket_policy_only_enabled is not _default:
+
+ if uniform_bucket_level_access_enabled is not _default:
+ raise ValueError(_UBLA_BPO_ENABLED_MESSAGE)
+
+ warnings.warn(_BPO_ENABLED_MESSAGE, DeprecationWarning, stacklevel=2)
+ uniform_bucket_level_access_enabled = bucket_policy_only_enabled
+
+ if bucket_policy_only_locked_time is not _default:
+
+ if uniform_bucket_level_access_locked_time is not _default:
+ raise ValueError(_UBLA_BPO_LOCK_TIME_MESSAGE)
+
+ warnings.warn(_BPO_LOCK_TIME_MESSAGE, DeprecationWarning, stacklevel=2)
+ uniform_bucket_level_access_locked_time = bucket_policy_only_locked_time
+
+ if uniform_bucket_level_access_enabled is _default:
+ uniform_bucket_level_access_enabled = False
+
+ data = {
+ "uniformBucketLevelAccess": {"enabled": uniform_bucket_level_access_enabled}
+ }
+ if uniform_bucket_level_access_locked_time is not _default:
+ data["uniformBucketLevelAccess"]["lockedTime"] = _datetime_to_rfc3339(
+ uniform_bucket_level_access_locked_time
+ )
+ super(IAMConfiguration, self).__init__(data)
+ self._bucket = bucket
+
+ @classmethod
+ def from_api_repr(cls, resource, bucket):
+ """Factory: construct instance from resource.
+
+ :type bucket: :class:`Bucket`
+ :param bucket: Bucket for which this instance is the policy.
+
+ :type resource: dict
+ :param resource: mapping as returned from API call.
+
+ :rtype: :class:`IAMConfiguration`
+ :returns: Instance created from resource.
+ """
+ instance = cls(bucket)
+ instance.update(resource)
+ return instance
+
+ @property
+ def bucket(self):
+ """Bucket for which this instance is the policy.
+
+ :rtype: :class:`Bucket`
+ :returns: the instance's bucket.
+ """
+ return self._bucket
+
+ @property
+ def uniform_bucket_level_access_enabled(self):
+ """If set, access checks only use bucket-level IAM policies or above.
+
+ :rtype: bool
+ :returns: whether the bucket is configured to allow only IAM.
+ """
+ ubla = self.get("uniformBucketLevelAccess", {})
+ return ubla.get("enabled", False)
+
+ @uniform_bucket_level_access_enabled.setter
+ def uniform_bucket_level_access_enabled(self, value):
+ ubla = self.setdefault("uniformBucketLevelAccess", {})
+ ubla["enabled"] = bool(value)
+ self.bucket._patch_property("iamConfiguration", self)
+
+ @property
+ def uniform_bucket_level_access_locked_time(self):
+ """Deadline for changing :attr:`uniform_bucket_level_access_enabled` from true to false.
+
+ If the bucket's :attr:`uniform_bucket_level_access_enabled` is true, this property
+ is the time after which that setting becomes immutable.
+
+ If the bucket's :attr:`uniform_bucket_level_access_enabled` is false, this property
+ is ``None``.
+
+ :rtype: Union[:class:`datetime.datetime`, None]
+ :returns: (readonly) Time after which :attr:`uniform_bucket_level_access_enabled` will
+ be frozen as true.
+ """
+ ubla = self.get("uniformBucketLevelAccess", {})
+ stamp = ubla.get("lockedTime")
+ if stamp is not None:
+ stamp = _rfc3339_to_datetime(stamp)
+ return stamp
+
+ @property
+ def bucket_policy_only_enabled(self):
+ """Deprecated alias for :attr:`uniform_bucket_level_access_enabled`.
+
+ :rtype: bool
+ :returns: whether the bucket is configured to allow only IAM.
+ """
+ return self.uniform_bucket_level_access_enabled
+
+ @bucket_policy_only_enabled.setter
+ def bucket_policy_only_enabled(self, value):
+ warnings.warn(_BPO_ENABLED_MESSAGE, DeprecationWarning, stacklevel=2)
+ self.uniform_bucket_level_access_enabled = value
+
+ @property
+ def bucket_policy_only_locked_time(self):
+ """Deprecated alias for :attr:`uniform_bucket_level_access_locked_time`.
+
+ :rtype: Union[:class:`datetime.datetime`, None]
+ :returns:
+ (readonly) Time after which :attr:`bucket_policy_only_enabled` will
+ be frozen as true.
+ """
+ return self.uniform_bucket_level_access_locked_time
+
+
+class Bucket(_PropertyMixin):
+ """A class representing a Bucket on Cloud Storage.
+
+ :type client: :class:`google.cloud.storage.client.Client`
+ :param client: A client which holds credentials and project configuration
+ for the bucket (which requires a project).
+
+ :type name: str
+ :param name: The name of the bucket. Bucket names must start and end with a
+ number or letter.
+
+ :type user_project: str
+ :param user_project: (Optional) the project ID to be billed for API
+ requests made via this instance.
+ """
+
+ _MAX_OBJECTS_FOR_ITERATION = 256
+ """Maximum number of existing objects allowed in iteration.
+
+ This is used in Bucket.delete() and Bucket.make_public().
+ """
+
+ STORAGE_CLASSES = (
+ STANDARD_STORAGE_CLASS,
+ NEARLINE_STORAGE_CLASS,
+ COLDLINE_STORAGE_CLASS,
+ ARCHIVE_STORAGE_CLASS,
+ MULTI_REGIONAL_LEGACY_STORAGE_CLASS, # legacy
+ REGIONAL_LEGACY_STORAGE_CLASS, # legacy
+ DURABLE_REDUCED_AVAILABILITY_LEGACY_STORAGE_CLASS, # legacy
+ )
+ """Allowed values for :attr:`storage_class`.
+
+ Default value is :attr:`STANDARD_STORAGE_CLASS`.
+
+ See
+ https://cloud.google.com/storage/docs/json_api/v1/buckets#storageClass
+ https://cloud.google.com/storage/docs/storage-classes
+ """
+
+ _LOCATION_TYPES = (
+ MULTI_REGION_LOCATION_TYPE,
+ REGION_LOCATION_TYPE,
+ DUAL_REGION_LOCATION_TYPE,
+ )
+ """Allowed values for :attr:`location_type`."""
+
+ def __init__(self, client, name=None, user_project=None):
+ """
+ property :attr:`name`
+ Get the bucket's name.
+ """
+ name = _validate_name(name)
+ super(Bucket, self).__init__(name=name)
+ self._client = client
+ self._acl = BucketACL(self)
+ self._default_object_acl = DefaultObjectACL(self)
+ self._label_removals = set()
+ self._user_project = user_project
+
+ def __repr__(self):
+        return "<Bucket: %s>" % (self.name,)
+
+ @property
+ def client(self):
+ """The client bound to this bucket."""
+ return self._client
+
+ def _set_properties(self, value):
+ """Set the properties for the current object.
+
+ :type value: dict or :class:`google.cloud.storage.batch._FutureDict`
+ :param value: The properties to be set.
+ """
+ self._label_removals.clear()
+ return super(Bucket, self)._set_properties(value)
+
+ @property
+ def user_project(self):
+ """Project ID to be billed for API requests made via this bucket.
+
+ If unset, API requests are billed to the bucket owner.
+
+ :rtype: str
+ """
+ return self._user_project
+
+ @classmethod
+ def from_string(cls, uri, client=None):
+        """Get a bucket instance from a URI.
+
+ :type uri: str
+        :param uri: The bucket URI to parse, e.g. ``gs://bucket-name``.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use.
+
+ :rtype: :class:`google.cloud.storage.bucket.Bucket`
+ :returns: The bucket object created.
+
+ Example:
+            Get a bucket instance from a URI.
+
+ >>> from google.cloud import storage
+ >>> from google.cloud.storage.bucket import Bucket
+ >>> client = storage.Client()
+ >>> bucket = Bucket.from_string("gs://bucket", client)
+ """
+ scheme, netloc, path, query, frag = urlsplit(uri)
+
+ if scheme != "gs":
+ raise ValueError("URI scheme must be gs")
+
+ return cls(client, name=netloc)
+
+ def blob(
+ self,
+ blob_name,
+ chunk_size=None,
+ encryption_key=None,
+ kms_key_name=None,
+ generation=None,
+ ):
+ """Factory constructor for blob object.
+
+ .. note::
+ This will not make an HTTP request; it simply instantiates
+ a blob object owned by this bucket.
+
+ :type blob_name: str
+ :param blob_name: The name of the blob to be instantiated.
+
+ :type chunk_size: int
+ :param chunk_size: The size of a chunk of data whenever iterating
+ (in bytes). This must be a multiple of 256 KB per
+ the API specification.
+
+ :type encryption_key: bytes
+ :param encryption_key:
+ (Optional) 32 byte encryption key for customer-supplied encryption.
+
+ :type kms_key_name: str
+ :param kms_key_name:
+ (Optional) Resource name of KMS key used to encrypt blob's content.
+
+ :type generation: long
+ :param generation: (Optional) If present, selects a specific revision of
+ this object.
+
+ :rtype: :class:`google.cloud.storage.blob.Blob`
+ :returns: The blob object created.
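+
+        Example:
+            Instantiate a blob locally, without any API request (a minimal
+            sketch; the blob name is hypothetical):
+
+            >>> blob = bucket.blob("path/to/file.txt")
+            >>> blob.name
+            'path/to/file.txt'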
+ """
+ return Blob(
+ name=blob_name,
+ bucket=self,
+ chunk_size=chunk_size,
+ encryption_key=encryption_key,
+ kms_key_name=kms_key_name,
+ generation=generation,
+ )
+
+ def notification(
+ self,
+ topic_name=None,
+ topic_project=None,
+ custom_attributes=None,
+ event_types=None,
+ blob_name_prefix=None,
+ payload_format=NONE_PAYLOAD_FORMAT,
+ notification_id=None,
+ ):
+ """Factory: create a notification resource for the bucket.
+
+ See: :class:`.BucketNotification` for parameters.
+
+ :rtype: :class:`.BucketNotification`
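+
+        Example:
+            Build a notification resource locally, then persist it with an
+            API request (a sketch; the topic name is hypothetical and the
+            topic must already exist):
+
+            >>> notification = bucket.notification(topic_name="my-topic")
+            >>> notification.create()  # API request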
+ """
+ return BucketNotification(
+ self,
+ topic_name=topic_name,
+ topic_project=topic_project,
+ custom_attributes=custom_attributes,
+ event_types=event_types,
+ blob_name_prefix=blob_name_prefix,
+ payload_format=payload_format,
+ notification_id=notification_id,
+ )
+
+ def exists(
+ self,
+ client=None,
+ timeout=_DEFAULT_TIMEOUT,
+ if_metageneration_match=None,
+ if_metageneration_not_match=None,
+ ):
+ """Determines whether or not this bucket exists.
+
+ If :attr:`user_project` is set, bills the API request to that project.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the current bucket.
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :type if_metageneration_match: long
+        :param if_metageneration_match: (Optional) Make the operation conditional on whether the
+                                        bucket's current metageneration matches the given value.
+
+ :type if_metageneration_not_match: long
+        :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
+                                            bucket's current metageneration does not match the given value.
+
+ :rtype: bool
+ :returns: True if the bucket exists in Cloud Storage.
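+
+        Example:
+            Check for a bucket's existence (a sketch; assumes default
+            credentials and a hypothetical bucket name):
+
+            >>> from google.cloud import storage
+            >>> client = storage.Client()
+            >>> client.bucket("my-bucket-name").exists()
+            True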
+ """
+ client = self._require_client(client)
+ # We only need the status code (200 or not) so we seek to
+ # minimize the returned payload.
+ query_params = {"fields": "name"}
+
+ if self.user_project is not None:
+ query_params["userProject"] = self.user_project
+
+ _add_generation_match_parameters(
+ query_params,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ )
+ try:
+ # We intentionally pass `_target_object=None` since fields=name
+ # would limit the local properties.
+ client._connection.api_request(
+ method="GET",
+ path=self.path,
+ query_params=query_params,
+ _target_object=None,
+ timeout=timeout,
+ )
+ # NOTE: This will not fail immediately in a batch. However, when
+ # Batch.finish() is called, the resulting `NotFound` will be
+ # raised.
+ return True
+ except NotFound:
+ return False
+
+ def create(
+ self,
+ client=None,
+ project=None,
+ location=None,
+ predefined_acl=None,
+ predefined_default_object_acl=None,
+ timeout=_DEFAULT_TIMEOUT,
+ ):
+ """DEPRECATED. Creates current bucket.
+
+ If the bucket already exists, will raise
+ :class:`google.cloud.exceptions.Conflict`.
+
+ This implements "storage.buckets.insert".
+
+ If :attr:`user_project` is set, bills the API request to that project.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the current bucket.
+
+ :type project: str
+ :param project: (Optional) The project under which the bucket is to
+ be created. If not passed, uses the project set on
+ the client.
+ :raises ValueError: if :attr:`user_project` is set.
+ :raises ValueError: if ``project`` is None and client's
+ :attr:`project` is also None.
+
+ :type location: str
+ :param location: (Optional) The location of the bucket. If not passed,
+ the default location, US, will be used. See
+ https://cloud.google.com/storage/docs/bucket-locations
+
+ :type predefined_acl: str
+ :param predefined_acl:
+ (Optional) Name of predefined ACL to apply to bucket. See:
+ https://cloud.google.com/storage/docs/access-control/lists#predefined-acl
+
+ :type predefined_default_object_acl: str
+ :param predefined_default_object_acl:
+ (Optional) Name of predefined ACL to apply to bucket's objects. See:
+ https://cloud.google.com/storage/docs/access-control/lists#predefined-acl
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+ """
+ warnings.warn(
+            "Bucket.create() is deprecated and will be removed in the future. "
+ "Use Client.create_bucket() instead.",
+ PendingDeprecationWarning,
+ stacklevel=1,
+ )
+ if self.user_project is not None:
+ raise ValueError("Cannot create bucket with 'user_project' set.")
+
+ client = self._require_client(client)
+ client.create_bucket(
+ bucket_or_name=self,
+ project=project,
+ location=location,
+ predefined_acl=predefined_acl,
+ predefined_default_object_acl=predefined_default_object_acl,
+ timeout=timeout,
+ )
+
+ def update(
+ self,
+ client=None,
+ timeout=_DEFAULT_TIMEOUT,
+ if_metageneration_match=None,
+ if_metageneration_not_match=None,
+ ):
+ """Sends all properties in a PUT request.
+
+ Updates the ``_properties`` with the response from the backend.
+
+ If :attr:`user_project` is set, bills the API request to that project.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current object.
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :type if_metageneration_match: long
+        :param if_metageneration_match: (Optional) Make the operation conditional on whether the
+                                        bucket's current metageneration matches the given value.
+
+ :type if_metageneration_not_match: long
+        :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
+                                            bucket's current metageneration does not match the given value.
+ """
+ super(Bucket, self).update(
+ client=client,
+ timeout=timeout,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ )
+
+ def reload(
+ self,
+ client=None,
+ projection="noAcl",
+ timeout=_DEFAULT_TIMEOUT,
+ if_metageneration_match=None,
+ if_metageneration_not_match=None,
+ ):
+ """Reload properties from Cloud Storage.
+
+ If :attr:`user_project` is set, bills the API request to that project.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current object.
+
+ :type projection: str
+ :param projection: (Optional) If used, must be 'full' or 'noAcl'.
+ Defaults to ``'noAcl'``. Specifies the set of
+ properties to return.
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :type if_metageneration_match: long
+        :param if_metageneration_match: (Optional) Make the operation conditional on whether the
+                                        bucket's current metageneration matches the given value.
+
+ :type if_metageneration_not_match: long
+        :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
+                                            bucket's current metageneration does not match the given value.
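+
+        Example:
+            Refresh the locally cached bucket properties (a sketch):
+
+            >>> bucket.reload()  # GET request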
+ """
+ super(Bucket, self).reload(
+ client=client,
+ projection=projection,
+ timeout=timeout,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ )
+
+ def patch(
+ self,
+ client=None,
+ timeout=_DEFAULT_TIMEOUT,
+ if_metageneration_match=None,
+ if_metageneration_not_match=None,
+ ):
+ """Sends all changed properties in a PATCH request.
+
+ Updates the ``_properties`` with the response from the backend.
+
+ If :attr:`user_project` is set, bills the API request to that project.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current object.
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :type if_metageneration_match: long
+        :param if_metageneration_match: (Optional) Make the operation conditional on whether the
+                                        bucket's current metageneration matches the given value.
+
+ :type if_metageneration_not_match: long
+        :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
+                                            bucket's current metageneration does not match the given value.
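+
+        Example:
+            Stage a change locally, then send only the changed fields
+            (a sketch):
+
+            >>> bucket.labels = {"env": "dev"}
+            >>> bucket.patch()  # PATCH request carrying only ``labels``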
+ """
+ # Special case: For buckets, it is possible that labels are being
+ # removed; this requires special handling.
+ if self._label_removals:
+ self._changes.add("labels")
+ self._properties.setdefault("labels", {})
+ for removed_label in self._label_removals:
+ self._properties["labels"][removed_label] = None
+
+ # Call the superclass method.
+ super(Bucket, self).patch(
+ client=client,
+ timeout=timeout,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ )
+
+ @property
+ def acl(self):
+ """Create our ACL on demand."""
+ return self._acl
+
+ @property
+ def default_object_acl(self):
+ """Create our defaultObjectACL on demand."""
+ return self._default_object_acl
+
+ @staticmethod
+ def path_helper(bucket_name):
+ """Relative URL path for a bucket.
+
+ :type bucket_name: str
+ :param bucket_name: The bucket name in the path.
+
+ :rtype: str
+ :returns: The relative URL path for ``bucket_name``.
+ """
+ return "/b/" + bucket_name
+
+ @property
+ def path(self):
+ """The URL path to this bucket."""
+ if not self.name:
+ raise ValueError("Cannot determine path without bucket name.")
+
+ return self.path_helper(self.name)
+
+ def get_blob(
+ self,
+ blob_name,
+ client=None,
+ encryption_key=None,
+ generation=None,
+ timeout=_DEFAULT_TIMEOUT,
+ if_generation_match=None,
+ if_generation_not_match=None,
+ if_metageneration_match=None,
+ if_metageneration_not_match=None,
+ **kwargs
+ ):
+ """Get a blob object by name.
+
+ This will return None if the blob doesn't exist:
+
+ .. literalinclude:: snippets.py
+ :start-after: [START get_blob]
+ :end-before: [END get_blob]
+ :dedent: 4
+
+ If :attr:`user_project` is set, bills the API request to that project.
+
+ :type blob_name: str
+ :param blob_name: The name of the blob to retrieve.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the current bucket.
+
+ :type encryption_key: bytes
+ :param encryption_key:
+ (Optional) 32 byte encryption key for customer-supplied encryption.
+ See
+ https://cloud.google.com/storage/docs/encryption#customer-supplied.
+
+ :type generation: long
+ :param generation: (Optional) If present, selects a specific revision of
+ this object.
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :type if_generation_match: long
+ :param if_generation_match: (Optional) Make the operation conditional on whether
+ the blob's current generation matches the given value.
+ Setting to 0 makes the operation succeed only if there
+ are no live versions of the blob.
+
+ :type if_generation_not_match: long
+ :param if_generation_not_match: (Optional) Make the operation conditional on whether
+ the blob's current generation does not match the given
+ value. If no live blob exists, the precondition fails.
+ Setting to 0 makes the operation succeed only if there
+ is a live version of the blob.
+
+ :type if_metageneration_match: long
+ :param if_metageneration_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration matches the given value.
+
+ :type if_metageneration_not_match: long
+ :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration does not match the given value.
+
+ :param kwargs: Keyword arguments to pass to the
+ :class:`~google.cloud.storage.blob.Blob` constructor.
+
+ :rtype: :class:`google.cloud.storage.blob.Blob` or None
+ :returns: The blob object if it exists, otherwise None.
+ """
+ blob = Blob(
+ bucket=self,
+ name=blob_name,
+ encryption_key=encryption_key,
+ generation=generation,
+ **kwargs
+ )
+ try:
+ # NOTE: This will not fail immediately in a batch. However, when
+ # Batch.finish() is called, the resulting `NotFound` will be
+ # raised.
+ blob.reload(
+ client=client,
+ timeout=timeout,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ )
+ except NotFound:
+ return None
+ else:
+ return blob
+
+ def list_blobs(
+ self,
+ max_results=None,
+ page_token=None,
+ prefix=None,
+ delimiter=None,
+ start_offset=None,
+ end_offset=None,
+ include_trailing_delimiter=None,
+ versions=None,
+ projection="noAcl",
+ fields=None,
+ client=None,
+ timeout=_DEFAULT_TIMEOUT,
+ ):
+ """Return an iterator used to find blobs in the bucket.
+
+ .. note::
+ Direct use of this method is deprecated. Use ``Client.list_blobs`` instead.
+
+ If :attr:`user_project` is set, bills the API request to that project.
+
+ :type max_results: int
+ :param max_results:
+ (Optional) The maximum number of blobs to return.
+
+ :type page_token: str
+ :param page_token:
+ (Optional) If present, return the next batch of blobs, using the
+ value, which must correspond to the ``nextPageToken`` value
+ returned in the previous response. Deprecated: use the ``pages``
+ property of the returned iterator instead of manually passing the
+ token.
+
+ :type prefix: str
+ :param prefix: (Optional) Prefix used to filter blobs.
+
+ :type delimiter: str
+ :param delimiter: (Optional) Delimiter, used with ``prefix`` to
+ emulate hierarchy.
+
+ :type start_offset: str
+ :param start_offset:
+ (Optional) Filter results to objects whose names are
+ lexicographically equal to or after ``startOffset``. If
+ ``endOffset`` is also set, the objects listed will have names
+ between ``startOffset`` (inclusive) and ``endOffset`` (exclusive).
+
+ :type end_offset: str
+ :param end_offset:
+ (Optional) Filter results to objects whose names are
+ lexicographically before ``endOffset``. If ``startOffset`` is also
+ set, the objects listed will have names between ``startOffset``
+ (inclusive) and ``endOffset`` (exclusive).
+
+ :type include_trailing_delimiter: boolean
+ :param include_trailing_delimiter:
+ (Optional) If true, objects that end in exactly one instance of
+ ``delimiter`` will have their metadata included in ``items`` in
+ addition to ``prefixes``.
+
+ :type versions: bool
+ :param versions: (Optional) Whether object versions should be returned
+ as separate blobs.
+
+ :type projection: str
+ :param projection: (Optional) If used, must be 'full' or 'noAcl'.
+ Defaults to ``'noAcl'``. Specifies the set of
+ properties to return.
+
+ :type fields: str
+ :param fields:
+ (Optional) Selector specifying which fields to include
+ in a partial response. Must be a list of fields. For
+ example to get a partial response with just the next
+ page token and the name and language of each blob returned:
+ ``'items(name,contentLanguage),nextPageToken'``.
+ See: https://cloud.google.com/storage/docs/json_api/v1/parameters#fields
+
+ :type client: :class:`~google.cloud.storage.client.Client`
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the current bucket.
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :rtype: :class:`~google.api_core.page_iterator.Iterator`
+ :returns: Iterator of all :class:`~google.cloud.storage.blob.Blob`
+ in this bucket matching the arguments.
+
+ Example:
+ List blobs in the bucket with user_project.
+
+ >>> from google.cloud import storage
+ >>> client = storage.Client()
+
+            >>> bucket = storage.Bucket(client, "my-bucket-name", user_project="my-project")
+ >>> all_blobs = list(bucket.list_blobs())
+ """
+ extra_params = {"projection": projection}
+
+ if prefix is not None:
+ extra_params["prefix"] = prefix
+
+ if delimiter is not None:
+ extra_params["delimiter"] = delimiter
+
+ if start_offset is not None:
+ extra_params["startOffset"] = start_offset
+
+ if end_offset is not None:
+ extra_params["endOffset"] = end_offset
+
+ if include_trailing_delimiter is not None:
+ extra_params["includeTrailingDelimiter"] = include_trailing_delimiter
+
+ if versions is not None:
+ extra_params["versions"] = versions
+
+ if fields is not None:
+ extra_params["fields"] = fields
+
+ if self.user_project is not None:
+ extra_params["userProject"] = self.user_project
+
+ client = self._require_client(client)
+ path = self.path + "/o"
+ api_request = functools.partial(client._connection.api_request, timeout=timeout)
+ iterator = page_iterator.HTTPIterator(
+ client=client,
+ api_request=api_request,
+ path=path,
+ item_to_value=_item_to_blob,
+ page_token=page_token,
+ max_results=max_results,
+ extra_params=extra_params,
+ page_start=_blobs_page_start,
+ )
+ iterator.bucket = self
+ iterator.prefixes = set()
+ return iterator
+
+ def list_notifications(self, client=None, timeout=_DEFAULT_TIMEOUT):
+ """List Pub / Sub notifications for this bucket.
+
+ See:
+ https://cloud.google.com/storage/docs/json_api/v1/notifications/list
+
+ If :attr:`user_project` is set, bills the API request to that project.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the current bucket.
+
+        :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+        :rtype: :class:`~google.api_core.page_iterator.Iterator`
+        :returns: iterator over :class:`.BucketNotification` instances.
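+
+        Example:
+            Iterate over this bucket's notifications (a sketch):
+
+            >>> for notification in bucket.list_notifications():
+            ...     print(notification.notification_id)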
+ """
+ client = self._require_client(client)
+ path = self.path + "/notificationConfigs"
+ api_request = functools.partial(client._connection.api_request, timeout=timeout)
+ iterator = page_iterator.HTTPIterator(
+ client=client,
+ api_request=api_request,
+ path=path,
+ item_to_value=_item_to_notification,
+ )
+ iterator.bucket = self
+ return iterator
+
+ def get_notification(self, notification_id, client=None, timeout=_DEFAULT_TIMEOUT):
+ """Get Pub / Sub notification for this bucket.
+
+ See:
+ https://cloud.google.com/storage/docs/json_api/v1/notifications/get
+
+ If :attr:`user_project` is set, bills the API request to that project.
+
+ :type notification_id: str
+        :param notification_id: The ID of the notification configuration to retrieve.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the current bucket.
+
+        :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :rtype: :class:`.BucketNotification`
+ :returns: notification instance.
+
+ Example:
+ Get notification using notification id.
+
+ >>> from google.cloud import storage
+ >>> client = storage.Client()
+ >>> bucket = client.get_bucket('my-bucket-name') # API request.
+ >>> notification = bucket.get_notification(notification_id='id') # API request.
+
+ """
+ notification = self.notification(notification_id=notification_id)
+ notification.reload(client=client, timeout=timeout)
+ return notification
+
+ def delete(
+ self,
+ force=False,
+ client=None,
+ timeout=_DEFAULT_TIMEOUT,
+ if_metageneration_match=None,
+ if_metageneration_not_match=None,
+ ):
+ """Delete this bucket.
+
+ The bucket **must** be empty in order to submit a delete request. If
+ ``force=True`` is passed, this will first attempt to delete all the
+ objects / blobs in the bucket (i.e. try to empty the bucket).
+
+ If the bucket doesn't exist, this will raise
+ :class:`google.cloud.exceptions.NotFound`. If the bucket is not empty
+ (and ``force=False``), will raise :class:`google.cloud.exceptions.Conflict`.
+
+ If ``force=True`` and the bucket contains more than 256 objects / blobs
+ this will cowardly refuse to delete the objects (or the bucket). This
+ is to prevent accidental bucket deletion and to prevent extremely long
+ runtime of this method.
+
+ If :attr:`user_project` is set, bills the API request to that project.
+
+ :type force: bool
+ :param force: If True, empties the bucket's objects then deletes it.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the current bucket.
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response on each request.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :type if_metageneration_match: long
+        :param if_metageneration_match: (Optional) Make the operation conditional on whether the
+                                        bucket's current metageneration matches the given value.
+
+ :type if_metageneration_not_match: long
+        :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
+                                            bucket's current metageneration does not match the given value.
+
+ :raises: :class:`ValueError` if ``force`` is ``True`` and the bucket
+ contains more than 256 objects / blobs.
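+
+        Example:
+            Delete a bucket, tolerating its absence (a sketch; assumes the
+            bucket holds fewer than 256 objects):
+
+            >>> from google.cloud.exceptions import NotFound
+            >>> try:
+            ...     bucket.delete(force=True)
+            ... except NotFound:
+            ...     pass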
+ """
+ client = self._require_client(client)
+ query_params = {}
+
+ if self.user_project is not None:
+ query_params["userProject"] = self.user_project
+
+ _add_generation_match_parameters(
+ query_params,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ )
+ if force:
+ blobs = list(
+ self.list_blobs(
+ max_results=self._MAX_OBJECTS_FOR_ITERATION + 1,
+ client=client,
+ timeout=timeout,
+ )
+ )
+ if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION:
+ message = (
+ "Refusing to delete bucket with more than "
+ "%d objects. If you actually want to delete "
+ "this bucket, please delete the objects "
+ "yourself before calling Bucket.delete()."
+ ) % (self._MAX_OBJECTS_FOR_ITERATION,)
+ raise ValueError(message)
+
+ # Ignore 404 errors on delete.
+ self.delete_blobs(
+ blobs, on_error=lambda blob: None, client=client, timeout=timeout
+ )
+
+ # We intentionally pass `_target_object=None` since a DELETE
+ # request has no response value (whether in a standard request or
+ # in a batch request).
+ client._connection.api_request(
+ method="DELETE",
+ path=self.path,
+ query_params=query_params,
+ _target_object=None,
+ timeout=timeout,
+ )
+
+ def delete_blob(
+ self,
+ blob_name,
+ client=None,
+ generation=None,
+ timeout=_DEFAULT_TIMEOUT,
+ if_generation_match=None,
+ if_generation_not_match=None,
+ if_metageneration_match=None,
+ if_metageneration_not_match=None,
+ ):
+ """Deletes a blob from the current bucket.
+
+ If the blob isn't found (backend 404), raises a
+ :class:`google.cloud.exceptions.NotFound`.
+
+ For example:
+
+ .. literalinclude:: snippets.py
+ :start-after: [START delete_blob]
+ :end-before: [END delete_blob]
+ :dedent: 4
+
+ If :attr:`user_project` is set, bills the API request to that project.
+
+ :type blob_name: str
+ :param blob_name: A blob name to delete.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the current bucket.
+
+ :type generation: long
+ :param generation: (Optional) If present, permanently deletes a specific
+ revision of this object.
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :type if_generation_match: long
+ :param if_generation_match: (Optional) Make the operation conditional on whether
+ the blob's current generation matches the given value.
+ Setting to 0 makes the operation succeed only if there
+ are no live versions of the blob.
+
+ :type if_generation_not_match: long
+ :param if_generation_not_match: (Optional) Make the operation conditional on whether
+ the blob's current generation does not match the given
+ value. If no live blob exists, the precondition fails.
+ Setting to 0 makes the operation succeed only if there
+ is a live version of the blob.
+
+ :type if_metageneration_match: long
+ :param if_metageneration_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration matches the given value.
+
+ :type if_metageneration_not_match: long
+ :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration does not match the given value.
+
+ :raises: :class:`google.cloud.exceptions.NotFound` (to suppress
+ the exception, call ``delete_blobs``, passing a no-op
+ ``on_error`` callback, e.g.:
+
+ .. literalinclude:: snippets.py
+ :start-after: [START delete_blobs]
+ :end-before: [END delete_blobs]
+ :dedent: 4
+
+ """
+ client = self._require_client(client)
+ blob = Blob(blob_name, bucket=self, generation=generation)
+
+ query_params = copy.deepcopy(blob._query_params)
+ _add_generation_match_parameters(
+ query_params,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ )
+ # We intentionally pass `_target_object=None` since a DELETE
+ # request has no response value (whether in a standard request or
+ # in a batch request).
+ client._connection.api_request(
+ method="DELETE",
+ path=blob.path,
+ query_params=query_params,
+ _target_object=None,
+ timeout=timeout,
+ )
+
+ def delete_blobs(
+ self,
+ blobs,
+ on_error=None,
+ client=None,
+ timeout=_DEFAULT_TIMEOUT,
+ if_generation_match=None,
+ if_generation_not_match=None,
+ if_metageneration_match=None,
+ if_metageneration_not_match=None,
+ ):
+ """Deletes a list of blobs from the current bucket.
+
+ Uses :meth:`delete_blob` to delete each individual blob.
+
+ If :attr:`user_project` is set, bills the API request to that project.
+
+ :type blobs: list
+ :param blobs: A list of :class:`~google.cloud.storage.blob.Blob`-s or
+ blob names to delete.
+
+ :type on_error: callable
+        :param on_error: (Optional) Takes single argument: ``blob``. Called
+                         once for each blob raising
+ :class:`~google.cloud.exceptions.NotFound`;
+ otherwise, the exception is propagated.
+
+ :type client: :class:`~google.cloud.storage.client.Client`
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the current bucket.
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response. The timeout applies to each individual
+ blob delete request.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :type if_generation_match: list of long
+ :param if_generation_match: (Optional) Make the operation conditional on whether
+ the blob's current generation matches the given value.
+ Setting to 0 makes the operation succeed only if there
+ are no live versions of the blob. The list must match
+ ``blobs`` item-to-item.
+
+ :type if_generation_not_match: list of long
+ :param if_generation_not_match: (Optional) Make the operation conditional on whether
+ the blob's current generation does not match the given
+ value. If no live blob exists, the precondition fails.
+ Setting to 0 makes the operation succeed only if there
+ is a live version of the blob. The list must match
+ ``blobs`` item-to-item.
+
+ :type if_metageneration_match: list of long
+ :param if_metageneration_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration matches the given value.
+ The list must match ``blobs`` item-to-item.
+
+ :type if_metageneration_not_match: list of long
+ :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration does not match the given value.
+ The list must match ``blobs`` item-to-item.
+
+ :raises: :class:`~google.cloud.exceptions.NotFound` (if
+ `on_error` is not passed).
+
+ Example:
+ Delete blobs using generation match preconditions.
+
+ >>> from google.cloud import storage
+
+ >>> client = storage.Client()
+ >>> bucket = client.bucket("bucket-name")
+
+ >>> blobs = [bucket.blob("blob-name-1"), bucket.blob("blob-name-2")]
+ >>> if_generation_match = [None] * len(blobs)
+ >>> if_generation_match[0] = "123" # precondition for "blob-name-1"
+
+ >>> bucket.delete_blobs(blobs, if_generation_match=if_generation_match)
+ """
+ _raise_if_len_differs(
+ len(blobs),
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ )
+ if_generation_match = iter(if_generation_match or [])
+ if_generation_not_match = iter(if_generation_not_match or [])
+ if_metageneration_match = iter(if_metageneration_match or [])
+ if_metageneration_not_match = iter(if_metageneration_not_match or [])
+
+ for blob in blobs:
+ try:
+ blob_name = blob
+ if not isinstance(blob_name, six.string_types):
+ blob_name = blob.name
+ self.delete_blob(
+ blob_name,
+ client=client,
+ timeout=timeout,
+ if_generation_match=next(if_generation_match, None),
+ if_generation_not_match=next(if_generation_not_match, None),
+ if_metageneration_match=next(if_metageneration_match, None),
+ if_metageneration_not_match=next(if_metageneration_not_match, None),
+ )
+ except NotFound:
+ if on_error is not None:
+ on_error(blob)
+ else:
+ raise
+
+ def copy_blob(
+ self,
+ blob,
+ destination_bucket,
+ new_name=None,
+ client=None,
+ preserve_acl=True,
+ source_generation=None,
+ timeout=_DEFAULT_TIMEOUT,
+ if_generation_match=None,
+ if_generation_not_match=None,
+ if_metageneration_match=None,
+ if_metageneration_not_match=None,
+ if_source_generation_match=None,
+ if_source_generation_not_match=None,
+ if_source_metageneration_match=None,
+ if_source_metageneration_not_match=None,
+ ):
+ """Copy the given blob to the given bucket, optionally with a new name.
+
+ If :attr:`user_project` is set, bills the API request to that project.
+
+ :type blob: :class:`google.cloud.storage.blob.Blob`
+ :param blob: The blob to be copied.
+
+ :type destination_bucket: :class:`google.cloud.storage.bucket.Bucket`
+ :param destination_bucket: The bucket into which the blob should be
+ copied.
+
+ :type new_name: str
+ :param new_name: (Optional) The new name for the copied file.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the current bucket.
+
+ :type preserve_acl: bool
+ :param preserve_acl: DEPRECATED. This argument is not functional!
+ (Optional) Copies ACL from old blob to new blob.
+ Default: True.
+
+ :type source_generation: long
+ :param source_generation: (Optional) The generation of the blob to be
+ copied.
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :type if_generation_match: long
+ :param if_generation_match: (Optional) Makes the operation
+ conditional on whether the destination
+ object's current generation matches the
+ given value. Setting to 0 makes the
+ operation succeed only if there are no
+ live versions of the object.
+
+ :type if_generation_not_match: long
+ :param if_generation_not_match: (Optional) Makes the operation
+ conditional on whether the
+ destination object's current
+ generation does not match the given
+ value. If no live object exists,
+ the precondition fails. Setting to
+ 0 makes the operation succeed only
+ if there is a live version
+ of the object.
+
+ :type if_metageneration_match: long
+ :param if_metageneration_match: (Optional) Makes the operation
+ conditional on whether the
+ destination object's current
+ metageneration matches the given
+ value.
+
+ :type if_metageneration_not_match: long
+ :param if_metageneration_not_match: (Optional) Makes the operation
+ conditional on whether the
+ destination object's current
+ metageneration does not match
+ the given value.
+
+ :type if_source_generation_match: long
+ :param if_source_generation_match: (Optional) Makes the operation
+ conditional on whether the source
+ object's generation matches the
+ given value.
+
+ :type if_source_generation_not_match: long
+ :param if_source_generation_not_match: (Optional) Makes the operation
+ conditional on whether the source
+ object's generation does not match
+ the given value.
+
+ :type if_source_metageneration_match: long
+ :param if_source_metageneration_match: (Optional) Makes the operation
+ conditional on whether the source
+ object's current metageneration
+ matches the given value.
+
+ :type if_source_metageneration_not_match: long
+ :param if_source_metageneration_not_match: (Optional) Makes the operation
+ conditional on whether the source
+ object's current metageneration
+ does not match the given value.
+
+ :rtype: :class:`google.cloud.storage.blob.Blob`
+ :returns: The new Blob.
+
+ Example:
+ Copy a blob including ACL.
+
+ >>> from google.cloud import storage
+
+ >>> client = storage.Client(project="project")
+
+ >>> bucket = client.bucket("bucket")
+ >>> dst_bucket = client.bucket("destination-bucket")
+
+ >>> blob = bucket.blob("file.ext")
+ >>> new_blob = bucket.copy_blob(blob, dst_bucket)
+ >>> new_blob.acl.save(blob.acl)
+ """
+ client = self._require_client(client)
+ query_params = {}
+
+ if self.user_project is not None:
+ query_params["userProject"] = self.user_project
+
+ if source_generation is not None:
+ query_params["sourceGeneration"] = source_generation
+
+ _add_generation_match_parameters(
+ query_params,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ if_source_generation_match=if_source_generation_match,
+ if_source_generation_not_match=if_source_generation_not_match,
+ if_source_metageneration_match=if_source_metageneration_match,
+ if_source_metageneration_not_match=if_source_metageneration_not_match,
+ )
+
+ if new_name is None:
+ new_name = blob.name
+
+ new_blob = Blob(bucket=destination_bucket, name=new_name)
+ api_path = blob.path + "/copyTo" + new_blob.path
+ copy_result = client._connection.api_request(
+ method="POST",
+ path=api_path,
+ query_params=query_params,
+ _target_object=new_blob,
+ timeout=timeout,
+ )
+
+ if not preserve_acl:
+ new_blob.acl.save(acl={}, client=client, timeout=timeout)
+
+ new_blob._set_properties(copy_result)
+ return new_blob
+
+ def rename_blob(
+ self,
+ blob,
+ new_name,
+ client=None,
+ timeout=_DEFAULT_TIMEOUT,
+ if_generation_match=None,
+ if_generation_not_match=None,
+ if_metageneration_match=None,
+ if_metageneration_not_match=None,
+ if_source_generation_match=None,
+ if_source_generation_not_match=None,
+ if_source_metageneration_match=None,
+ if_source_metageneration_not_match=None,
+ ):
+ """Rename the given blob using copy and delete operations.
+
+ If :attr:`user_project` is set, bills the API request to that project.
+
+ Effectively, copies blob to the same bucket with a new name, then
+ deletes the blob.
+
+ .. warning::
+
+ This method will first duplicate the data and then delete the
+ old blob. This means that with very large objects renaming
+ could be a very (temporarily) costly or a very slow operation.
+
+ :type blob: :class:`google.cloud.storage.blob.Blob`
+ :param blob: The blob to be renamed.
+
+ :type new_name: str
+ :param new_name: The new name for this blob.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the current bucket.
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response. The timeout applies to each individual
+ request.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :type if_generation_match: long
+ :param if_generation_match: (Optional) Makes the operation
+ conditional on whether the destination
+ object's current generation matches the
+ given value. Setting to 0 makes the
+ operation succeed only if there are no
+ live versions of the object.
+
+ :type if_generation_not_match: long
+ :param if_generation_not_match: (Optional) Makes the operation
+ conditional on whether the
+ destination object's current
+ generation does not match the given
+ value. If no live object exists,
+ the precondition fails. Setting to
+ 0 makes the operation succeed only
+ if there is a live version
+ of the object.
+
+ :type if_metageneration_match: long
+ :param if_metageneration_match: (Optional) Makes the operation
+ conditional on whether the
+ destination object's current
+ metageneration matches the given
+ value.
+
+ :type if_metageneration_not_match: long
+ :param if_metageneration_not_match: (Optional) Makes the operation
+ conditional on whether the
+ destination object's current
+ metageneration does not match
+ the given value.
+
+ :type if_source_generation_match: long
+ :param if_source_generation_match: (Optional) Makes the operation
+ conditional on whether the source
+ object's generation matches the
+ given value.
+
+ :type if_source_generation_not_match: long
+ :param if_source_generation_not_match: (Optional) Makes the operation
+ conditional on whether the source
+ object's generation does not match
+ the given value.
+
+ :type if_source_metageneration_match: long
+ :param if_source_metageneration_match: (Optional) Makes the operation
+ conditional on whether the source
+ object's current metageneration
+ matches the given value.
+
+ :type if_source_metageneration_not_match: long
+ :param if_source_metageneration_not_match: (Optional) Makes the operation
+ conditional on whether the source
+ object's current metageneration
+ does not match the given value.
+
+ :rtype: :class:`Blob`
+ :returns: The newly-renamed blob.
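+
+        Example:
+            Rename a blob (a sketch; names are hypothetical and the source
+            blob is assumed to exist):
+
+            >>> blob = bucket.blob("old-name.txt")
+            >>> new_blob = bucket.rename_blob(blob, "new-name.txt")
+            >>> new_blob.name
+            'new-name.txt'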
+ """
+ same_name = blob.name == new_name
+
+ new_blob = self.copy_blob(
+ blob,
+ self,
+ new_name,
+ client=client,
+ timeout=timeout,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ if_source_generation_match=if_source_generation_match,
+ if_source_generation_not_match=if_source_generation_not_match,
+ if_source_metageneration_match=if_source_metageneration_match,
+ if_source_metageneration_not_match=if_source_metageneration_not_match,
+ )
+
+ if not same_name:
+ blob.delete(
+ client=client,
+ timeout=timeout,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ )
+ return new_blob
+
+ @property
+ def cors(self):
+ """Retrieve or set CORS policies configured for this bucket.
+
+ See http://www.w3.org/TR/cors/ and
+ https://cloud.google.com/storage/docs/json_api/v1/buckets
+
+ .. note::
+
+ The getter for this property returns a list which contains
+ *copies* of the bucket's CORS policy mappings. Mutating the list
+ or one of its dicts has no effect unless you then re-assign the
+ dict via the setter. E.g.:
+
+ >>> policies = bucket.cors
+ >>> policies.append({'origin': '/foo', ...})
+ >>> policies[1]['maxAgeSeconds'] = 3600
+ >>> del policies[0]
+ >>> bucket.cors = policies
+ >>> bucket.update()
+
+ :setter: Set CORS policies for this bucket.
+ :getter: Gets the CORS policies for this bucket.
+
+ :rtype: list of dictionaries
+ :returns: A sequence of mappings describing each CORS policy.
+ """
+ return [copy.deepcopy(policy) for policy in self._properties.get("cors", ())]
+
+ @cors.setter
+ def cors(self, entries):
+ """Set CORS policies configured for this bucket.
+
+ See http://www.w3.org/TR/cors/ and
+ https://cloud.google.com/storage/docs/json_api/v1/buckets
+
+ :type entries: list of dictionaries
+ :param entries: A sequence of mappings describing each CORS policy.
+ """
+ self._patch_property("cors", entries)
+
+ default_event_based_hold = _scalar_property("defaultEventBasedHold")
+    """Are uploaded objects automatically placed under an event-based hold?
+
+ If True, uploaded objects will be placed under an event-based hold to
+    be released at a future time. When released, an object will begin the
+    retention period determined by the retention policy of its bucket.
+
+ See https://cloud.google.com/storage/docs/json_api/v1/buckets
+
+ If the property is not set locally, returns ``None``.
+
+ :rtype: bool or ``NoneType``
+ """
+
+ @property
+ def default_kms_key_name(self):
+ """Retrieve / set default KMS encryption key for objects in the bucket.
+
+ See https://cloud.google.com/storage/docs/json_api/v1/buckets
+
+ :setter: Set default KMS encryption key for items in this bucket.
+ :getter: Get default KMS encryption key for items in this bucket.
+
+ :rtype: str
+ :returns: Default KMS encryption key, or ``None`` if not set.
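+
+        Example:
+            Set a default KMS key and persist the change (a sketch; the
+            key resource name is hypothetical):
+
+            >>> key = "projects/p/locations/us/keyRings/kr/cryptoKeys/k"
+            >>> bucket.default_kms_key_name = key
+            >>> bucket.patch()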
+ """
+ encryption_config = self._properties.get("encryption", {})
+ return encryption_config.get("defaultKmsKeyName")
+
+ @default_kms_key_name.setter
+ def default_kms_key_name(self, value):
+ """Set default KMS encryption key for objects in the bucket.
+
+ :type value: str or None
+ :param value: new KMS key name (None to clear any existing key).
+ """
+ encryption_config = self._properties.get("encryption", {})
+ encryption_config["defaultKmsKeyName"] = value
+ self._patch_property("encryption", encryption_config)
+
+ @property
+ def labels(self):
+ """Retrieve or set labels assigned to this bucket.
+
+ See
+ https://cloud.google.com/storage/docs/json_api/v1/buckets#labels
+
+ .. note::
+
+ The getter for this property returns a dict which is a *copy*
+ of the bucket's labels. Mutating that dict has no effect unless
+ you then re-assign the dict via the setter. E.g.:
+
+ >>> labels = bucket.labels
+ >>> labels['new_key'] = 'some-label'
+ >>> del labels['old_key']
+ >>> bucket.labels = labels
+ >>> bucket.update()
+
+ :setter: Set labels for this bucket.
+ :getter: Gets the labels for this bucket.
+
+ :rtype: :class:`dict`
+ :returns: Name-value pairs (string->string) labelling the bucket.
+ """
+ labels = self._properties.get("labels")
+ if labels is None:
+ return {}
+ return copy.deepcopy(labels)
+
+ @labels.setter
+ def labels(self, mapping):
+ """Set labels assigned to this bucket.
+
+ See
+ https://cloud.google.com/storage/docs/json_api/v1/buckets#labels
+
+ :type mapping: :class:`dict`
+ :param mapping: Name-value pairs (string->string) labelling the bucket.
+ """
+ # If any labels have been expressly removed, we need to track this
+ # so that a future .patch() call can do the correct thing.
+ existing = set([k for k in self.labels.keys()])
+ incoming = set([k for k in mapping.keys()])
+ self._label_removals = self._label_removals.union(existing.difference(incoming))
+ mapping = {k: str(v) for k, v in mapping.items()}
+
+ # Actually update the labels on the object.
+ self._patch_property("labels", copy.deepcopy(mapping))
+
+ @property
+ def etag(self):
+ """Retrieve the ETag for the bucket.
+
+ See https://tools.ietf.org/html/rfc2616#section-3.11 and
+ https://cloud.google.com/storage/docs/json_api/v1/buckets
+
+ :rtype: str or ``NoneType``
+ :returns: The bucket etag or ``None`` if the bucket's
+ resource has not been loaded from the server.
+ """
+ return self._properties.get("etag")
+
+ @property
+ def id(self):
+ """Retrieve the ID for the bucket.
+
+ See https://cloud.google.com/storage/docs/json_api/v1/buckets
+
+ :rtype: str or ``NoneType``
+ :returns: The ID of the bucket or ``None`` if the bucket's
+ resource has not been loaded from the server.
+ """
+ return self._properties.get("id")
+
+ @property
+ def iam_configuration(self):
+ """Retrieve IAM configuration for this bucket.
+
+ :rtype: :class:`IAMConfiguration`
+ :returns: an instance for managing the bucket's IAM configuration.
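+
+        Example:
+            Enable uniform bucket-level access and persist the change
+            (a sketch):
+
+            >>> config = bucket.iam_configuration
+            >>> config.uniform_bucket_level_access_enabled = True
+            >>> bucket.patch()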
+ """
+ info = self._properties.get("iamConfiguration", {})
+ return IAMConfiguration.from_api_repr(info, self)
+
+ @property
+ def lifecycle_rules(self):
+ """Retrieve or set lifecycle rules configured for this bucket.
+
+ See https://cloud.google.com/storage/docs/lifecycle and
+ https://cloud.google.com/storage/docs/json_api/v1/buckets
+
+        .. note::
+
+           The getter for this property yields *copies* of the bucket's
+           lifecycle rule mappings. Mutating a yielded mapping has no
+           effect unless you then re-assign the full list of rules via
+           the setter. E.g.:
+
+           >>> rules = list(bucket.lifecycle_rules)
+           >>> rules.append({'action': {'type': 'Delete'}, 'condition': {'age': 365}})
+           >>> rules[1]['action']['type'] = 'Delete'
+ >>> del rules[0]
+ >>> bucket.lifecycle_rules = rules
+ >>> bucket.update()
+
+        :setter: Set lifecycle rules for this bucket.
+        :getter: Gets the lifecycle rules for this bucket.
+
+ :rtype: generator(dict)
+ :returns: A sequence of mappings describing each lifecycle rule.
+ """
+ info = self._properties.get("lifecycle", {})
+ for rule in info.get("rule", ()):
+ action_type = rule["action"]["type"]
+ if action_type == "Delete":
+ yield LifecycleRuleDelete.from_api_repr(rule)
+ elif action_type == "SetStorageClass":
+ yield LifecycleRuleSetStorageClass.from_api_repr(rule)
+ else:
+ raise ValueError("Unknown lifecycle rule: {}".format(rule))
+
+ @lifecycle_rules.setter
+ def lifecycle_rules(self, rules):
+        """Set lifecycle rules configured for this bucket.
+
+ See https://cloud.google.com/storage/docs/lifecycle and
+ https://cloud.google.com/storage/docs/json_api/v1/buckets
+
+ :type rules: list of dictionaries
+ :param rules: A sequence of mappings describing each lifecycle rule.
+ """
+ rules = [dict(rule) for rule in rules] # Convert helpers if needed
+ self._patch_property("lifecycle", {"rule": rules})
+
+ def clear_lifecyle_rules(self):
+        """Clear lifecycle rules configured for this bucket.
+
+ See https://cloud.google.com/storage/docs/lifecycle and
+ https://cloud.google.com/storage/docs/json_api/v1/buckets
+ """
+ self.lifecycle_rules = []
+
+ def add_lifecycle_delete_rule(self, **kw):
+        """Add a "delete" rule to lifecycle rules configured for this bucket.
+
+ See https://cloud.google.com/storage/docs/lifecycle and
+ https://cloud.google.com/storage/docs/json_api/v1/buckets
+
+ .. literalinclude:: snippets.py
+ :start-after: [START add_lifecycle_delete_rule]
+ :end-before: [END add_lifecycle_delete_rule]
+ :dedent: 4
+
+ :type kw: dict
+ :params kw: arguments passed to :class:`LifecycleRuleConditions`.
+ """
+ rules = list(self.lifecycle_rules)
+ rules.append(LifecycleRuleDelete(**kw))
+ self.lifecycle_rules = rules
+
+ def add_lifecycle_set_storage_class_rule(self, storage_class, **kw):
+        """Add a "set storage class" rule to lifecycle rules configured for this bucket.
+
+ See https://cloud.google.com/storage/docs/lifecycle and
+ https://cloud.google.com/storage/docs/json_api/v1/buckets
+
+ .. literalinclude:: snippets.py
+ :start-after: [START add_lifecycle_set_storage_class_rule]
+ :end-before: [END add_lifecycle_set_storage_class_rule]
+ :dedent: 4
+
+ :type storage_class: str, one of :attr:`STORAGE_CLASSES`.
+ :param storage_class: new storage class to assign to matching items.
+
+ :type kw: dict
+ :params kw: arguments passed to :class:`LifecycleRuleConditions`.
+ """
+ rules = list(self.lifecycle_rules)
+ rules.append(LifecycleRuleSetStorageClass(storage_class, **kw))
+ self.lifecycle_rules = rules
+
+ _location = _scalar_property("location")
+
+ @property
+ def location(self):
+ """Retrieve location configured for this bucket.
+
+ See https://cloud.google.com/storage/docs/json_api/v1/buckets and
+ https://cloud.google.com/storage/docs/bucket-locations
+
+ Returns ``None`` if the property has not been set before creation,
+ or if the bucket's resource has not been loaded from the server.
+
+        :rtype: str or ``NoneType``
+ """
+ return self._location
+
+ @location.setter
+ def location(self, value):
+ """(Deprecated) Set `Bucket.location`
+
+ This can only be set at bucket **creation** time.
+
+ See https://cloud.google.com/storage/docs/json_api/v1/buckets and
+ https://cloud.google.com/storage/docs/bucket-locations
+
+ .. warning::
+
+ Assignment to 'Bucket.location' is deprecated, as it is only
+ valid before the bucket is created. Instead, pass the location
+ to `Bucket.create`.
+ """
+ warnings.warn(_LOCATION_SETTER_MESSAGE, DeprecationWarning, stacklevel=2)
+ self._location = value
+
+ @property
+ def location_type(self):
+        """Retrieve the location type for the bucket.
+
+        See https://cloud.google.com/storage/docs/storage-classes
+
+        :getter: Gets the location type for this bucket.
+
+ :rtype: str or ``NoneType``
+ :returns:
+ If set, one of
+ :attr:`~google.cloud.storage.constants.MULTI_REGION_LOCATION_TYPE`,
+ :attr:`~google.cloud.storage.constants.REGION_LOCATION_TYPE`, or
+ :attr:`~google.cloud.storage.constants.DUAL_REGION_LOCATION_TYPE`,
+ else ``None``.
+ """
+ return self._properties.get("locationType")
+
+ def get_logging(self):
+ """Return info about access logging for this bucket.
+
+ See https://cloud.google.com/storage/docs/access-logs#status
+
+ :rtype: dict or None
+ :returns: a dict w/ keys, ``logBucket`` and ``logObjectPrefix``
+ (if logging is enabled), or None (if not).
+ """
+ info = self._properties.get("logging")
+ return copy.deepcopy(info)
+
+ def enable_logging(self, bucket_name, object_prefix=""):
+ """Enable access logging for this bucket.
+
+ See https://cloud.google.com/storage/docs/access-logs
+
+ :type bucket_name: str
+ :param bucket_name: name of bucket in which to store access logs
+
+ :type object_prefix: str
+ :param object_prefix: prefix for access log filenames
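+
+        Example:
+            Enable logging and persist the change (a sketch; the log
+            bucket name is hypothetical):
+
+            >>> bucket.enable_logging("my-log-bucket", object_prefix="access/")
+            >>> bucket.patch()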
+ """
+ info = {"logBucket": bucket_name, "logObjectPrefix": object_prefix}
+ self._patch_property("logging", info)
+
+ def disable_logging(self):
+ """Disable access logging for this bucket.
+
+ See https://cloud.google.com/storage/docs/access-logs#disabling
+ """
+ self._patch_property("logging", None)
+
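+ # Hedged sketch (editor's addition): routing access logs to a hypothetical
+ # ``my-logs-bucket``; like other property updates, the change only reaches
+ # the server on ``patch()``, and the log bucket must grant write access to
+ # Cloud Storage per the access-logs docs:
+ #
+ #     bucket.enable_logging("my-logs-bucket", object_prefix="access-log")
+ #     bucket.patch()
+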
+ @property
+ def metageneration(self):
+ """Retrieve the metageneration for the bucket.
+
+ See https://cloud.google.com/storage/docs/json_api/v1/buckets
+
+ :rtype: int or ``NoneType``
+ :returns: The metageneration of the bucket or ``None`` if the bucket's
+ resource has not been loaded from the server.
+ """
+ metageneration = self._properties.get("metageneration")
+ if metageneration is not None:
+ return int(metageneration)
+
+ @property
+ def owner(self):
+ """Retrieve info about the owner of the bucket.
+
+ See https://cloud.google.com/storage/docs/json_api/v1/buckets
+
+ :rtype: dict or ``NoneType``
+ :returns: Mapping of owner's role/ID. Returns ``None`` if the bucket's
+ resource has not been loaded from the server.
+ """
+ return copy.deepcopy(self._properties.get("owner"))
+
+ @property
+ def project_number(self):
+ """Retrieve the number of the project to which the bucket is assigned.
+
+ See https://cloud.google.com/storage/docs/json_api/v1/buckets
+
+ :rtype: int or ``NoneType``
+ :returns: The project number that owns the bucket or ``None`` if
+ the bucket's resource has not been loaded from the server.
+ """
+ project_number = self._properties.get("projectNumber")
+ if project_number is not None:
+ return int(project_number)
+
+ @property
+ def retention_policy_effective_time(self):
+ """Retrieve the effective time of the bucket's retention policy.
+
+ :rtype: datetime.datetime or ``NoneType``
+ :returns: point-in time at which the bucket's retention policy is
+ effective, or ``None`` if the property is not
+ set locally.
+ """
+ policy = self._properties.get("retentionPolicy")
+ if policy is not None:
+ timestamp = policy.get("effectiveTime")
+ if timestamp is not None:
+ return _rfc3339_to_datetime(timestamp)
+
+ @property
+ def retention_policy_locked(self):
+ """Retrieve whthere the bucket's retention policy is locked.
+
+ :rtype: bool or ``NoneType``
+ :returns: True if the bucket's policy is locked, False if a policy
+ is set but not locked, or ``None`` if no policy is
+ set locally.
+ """
+ policy = self._properties.get("retentionPolicy")
+ if policy is not None:
+ return policy.get("isLocked")
+
+ @property
+ def retention_period(self):
+ """Retrieve or set the retention period for items in the bucket.
+
+ :rtype: int or ``NoneType``
+ :returns: number of seconds to retain items after upload or release
+ from event-based lock, or ``None`` if the property is not
+ set locally.
+ """
+ policy = self._properties.get("retentionPolicy")
+ if policy is not None:
+ period = policy.get("retentionPeriod")
+ if period is not None:
+ return int(period)
+
+ @retention_period.setter
+ def retention_period(self, value):
+ """Set the retention period for items in the bucket.
+
+ :type value: int
+ :param value:
+ number of seconds to retain items after upload or release from
+ event-based lock.
+
+ :raises ValueError: if the bucket's retention policy is locked.
+ """
+ policy = self._properties.setdefault("retentionPolicy", {})
+ if value is not None:
+ policy["retentionPeriod"] = str(value)
+ else:
+ policy = None
+ self._patch_property("retentionPolicy", policy)
+
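+ # Hedged sketch (editor's addition): the retention period is expressed in
+ # seconds and, as with other properties, takes effect on ``patch()``:
+ #
+ #     bucket.retention_period = 30 * 86400  # thirty days
+ #     bucket.patch()
+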
+ @property
+ def self_link(self):
+ """Retrieve the URI for the bucket.
+
+ See https://cloud.google.com/storage/docs/json_api/v1/buckets
+
+ :rtype: str or ``NoneType``
+ :returns: The self link for the bucket or ``None`` if
+ the bucket's resource has not been loaded from the server.
+ """
+ return self._properties.get("selfLink")
+
+ @property
+ def storage_class(self):
+ """Retrieve or set the storage class for the bucket.
+
+ See https://cloud.google.com/storage/docs/storage-classes
+
+ :setter: Set the storage class for this bucket.
+ :getter: Gets the storage class for this bucket.
+
+ :rtype: str or ``NoneType``
+ :returns:
+ If set, one of
+ :attr:`~google.cloud.storage.constants.NEARLINE_STORAGE_CLASS`,
+ :attr:`~google.cloud.storage.constants.COLDLINE_STORAGE_CLASS`,
+ :attr:`~google.cloud.storage.constants.ARCHIVE_STORAGE_CLASS`,
+ :attr:`~google.cloud.storage.constants.STANDARD_STORAGE_CLASS`,
+ :attr:`~google.cloud.storage.constants.MULTI_REGIONAL_LEGACY_STORAGE_CLASS`,
+ :attr:`~google.cloud.storage.constants.REGIONAL_LEGACY_STORAGE_CLASS`,
+ or
+ :attr:`~google.cloud.storage.constants.DURABLE_REDUCED_AVAILABILITY_LEGACY_STORAGE_CLASS`,
+ else ``None``.
+ """
+ return self._properties.get("storageClass")
+
+ @storage_class.setter
+ def storage_class(self, value):
+ """Set the storage class for the bucket.
+
+ See https://cloud.google.com/storage/docs/storage-classes
+
+ :type value: str
+ :param value:
+ One of
+ :attr:`~google.cloud.storage.constants.NEARLINE_STORAGE_CLASS`,
+ :attr:`~google.cloud.storage.constants.COLDLINE_STORAGE_CLASS`,
+ :attr:`~google.cloud.storage.constants.ARCHIVE_STORAGE_CLASS`,
+ :attr:`~google.cloud.storage.constants.STANDARD_STORAGE_CLASS`,
+ :attr:`~google.cloud.storage.constants.MULTI_REGIONAL_LEGACY_STORAGE_CLASS`,
+ :attr:`~google.cloud.storage.constants.REGIONAL_LEGACY_STORAGE_CLASS`,
+ or
+ :attr:`~google.cloud.storage.constants.DURABLE_REDUCED_AVAILABILITY_LEGACY_STORAGE_CLASS`.
+ """
+ if value not in self.STORAGE_CLASSES:
+ raise ValueError("Invalid storage class: %s" % (value,))
+ self._patch_property("storageClass", value)
+
+ @property
+ def time_created(self):
+ """Retrieve the timestamp at which the bucket was created.
+
+ See https://cloud.google.com/storage/docs/json_api/v1/buckets
+
+ :rtype: :class:`datetime.datetime` or ``NoneType``
+ :returns: Datetime object parsed from RFC3339 valid timestamp, or
+ ``None`` if the bucket's resource has not been loaded
+ from the server.
+ """
+ value = self._properties.get("timeCreated")
+ if value is not None:
+ return _rfc3339_to_datetime(value)
+
+ @property
+ def versioning_enabled(self):
+ """Is versioning enabled for this bucket?
+
+ See https://cloud.google.com/storage/docs/object-versioning for
+ details.
+
+ :setter: Update whether versioning is enabled for this bucket.
+ :getter: Query whether versioning is enabled for this bucket.
+
+ :rtype: bool
+ :returns: True if enabled, else False.
+ """
+ versioning = self._properties.get("versioning", {})
+ return versioning.get("enabled", False)
+
+ @versioning_enabled.setter
+ def versioning_enabled(self, value):
+ """Enable versioning for this bucket.
+
+ See https://cloud.google.com/storage/docs/object-versioning for
+ details.
+
+ :type value: convertible to boolean
+ :param value: should versioning be enabled for the bucket?
+ """
+ self._patch_property("versioning", {"enabled": bool(value)})
+
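+ # Hedged sketch (editor's addition): enabling object versioning, persisted
+ # with ``patch()``:
+ #
+ #     bucket.versioning_enabled = True
+ #     bucket.patch()
+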
+ @property
+ def requester_pays(self):
+ """Does the requester pay for API requests for this bucket?
+
+ See https://cloud.google.com/storage/docs/requester-pays for
+ details.
+
+ :setter: Update whether requester pays for this bucket.
+ :getter: Query whether requester pays for this bucket.
+
+ :rtype: bool
+ :returns: True if requester pays for API requests for the bucket,
+ else False.
+ """
+ versioning = self._properties.get("billing", {})
+ return versioning.get("requesterPays", False)
+
+ @requester_pays.setter
+ def requester_pays(self, value):
+ """Update whether requester pays for API requests for this bucket.
+
+ See https://cloud.google.com/storage/docs/using-requester-pays for
+ details.
+
+ :type value: convertible to boolean
+ :param value: should requester pay for API requests for the bucket?
+ """
+ self._patch_property("billing", {"requesterPays": bool(value)})
+
+ def configure_website(self, main_page_suffix=None, not_found_page=None):
+ """Configure website-related properties.
+
+ See https://cloud.google.com/storage/docs/hosting-static-website
+
+ .. note::
+ This only works if your bucket name is a domain name
+ (which requires verifying ownership of the domain with Google).
+
+ If you want this bucket to host a website, just provide the name
+ of an index page and a page to use when a blob isn't found:
+
+ .. literalinclude:: snippets.py
+ :start-after: [START configure_website]
+ :end-before: [END configure_website]
+ :dedent: 4
+
+ You probably should also make the whole bucket public:
+
+ .. literalinclude:: snippets.py
+ :start-after: [START make_public]
+ :end-before: [END make_public]
+ :dedent: 4
+
+ This says: "Make the bucket public, and all the stuff already in
+ the bucket, and anything else I add to the bucket. Just make it
+ all public."
+
+ :type main_page_suffix: str
+ :param main_page_suffix: The page to use as the main page
+ of a directory.
+ Typically something like index.html.
+
+ :type not_found_page: str
+ :param not_found_page: The file to use when a page isn't found.
+ """
+ data = {"mainPageSuffix": main_page_suffix, "notFoundPage": not_found_page}
+ self._patch_property("website", data)
+
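+ # Hedged sketch (editor's addition): a minimal static-website setup with
+ # illustrative page names; the configuration is persisted with ``patch()``:
+ #
+ #     bucket.configure_website(main_page_suffix="index.html",
+ #                              not_found_page="404.html")
+ #     bucket.patch()
+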
+ def disable_website(self):
+ """Disable the website configuration for this bucket.
+
+ This is really just a shortcut for setting the website-related
+ attributes to ``None``.
+ """
+ return self.configure_website(None, None)
+
+ def get_iam_policy(
+ self, client=None, requested_policy_version=None, timeout=_DEFAULT_TIMEOUT
+ ):
+ """Retrieve the IAM policy for the bucket.
+
+ See
+ https://cloud.google.com/storage/docs/json_api/v1/buckets/getIamPolicy
+
+ If :attr:`user_project` is set, bills the API request to that project.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the current bucket.
+
+ :type requested_policy_version: int or ``NoneType``
+ :param requested_policy_version: (Optional) The version of IAM policies to request.
+ If a policy with a condition is requested without
+ setting this, the server will return an error.
+ This must be set to a value of 3 to retrieve IAM
+ policies containing conditions. This is to prevent
+ client code that isn't aware of IAM conditions from
+ interpreting and modifying policies incorrectly.
+ The service might return a policy with version lower
+ than the one that was requested, based on the
+ feature syntax in the policy fetched.
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :rtype: :class:`google.api_core.iam.Policy`
+ :returns: the policy instance, based on the resource returned from
+ the ``getIamPolicy`` API request.
+
+ Example:
+
+ .. code-block:: python
+
+ from google.cloud.storage.iam import STORAGE_OBJECT_VIEWER_ROLE
+
+ policy = bucket.get_iam_policy(requested_policy_version=3)
+
+ policy.version = 3
+
+ # Add a binding to the policy via its ``bindings`` property
+ policy.bindings.append({
+ "role": STORAGE_OBJECT_VIEWER_ROLE,
+ "members": {"serviceAccount:account@project.iam.gserviceaccount.com", ...},
+ # Optional:
+ "condition": {
+ "title": "prefix"
+ "description": "Objects matching prefix"
+ "expression": "resource.name.startsWith(\"projects/project-name/buckets/bucket-name/objects/prefix\")"
+ }
+ })
+
+ bucket.set_iam_policy(policy)
+ """
+ client = self._require_client(client)
+ query_params = {}
+
+ if self.user_project is not None:
+ query_params["userProject"] = self.user_project
+
+ if requested_policy_version is not None:
+ query_params["optionsRequestedPolicyVersion"] = requested_policy_version
+
+ info = client._connection.api_request(
+ method="GET",
+ path="%s/iam" % (self.path,),
+ query_params=query_params,
+ _target_object=None,
+ timeout=timeout,
+ )
+ return Policy.from_api_repr(info)
+
+ def set_iam_policy(self, policy, client=None, timeout=_DEFAULT_TIMEOUT):
+ """Update the IAM policy for the bucket.
+
+ See
+ https://cloud.google.com/storage/docs/json_api/v1/buckets/setIamPolicy
+
+ If :attr:`user_project` is set, bills the API request to that project.
+
+ :type policy: :class:`google.api_core.iam.Policy`
+ :param policy: policy instance used to update bucket's IAM policy.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the current bucket.
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :rtype: :class:`google.api_core.iam.Policy`
+ :returns: the policy instance, based on the resource returned from
+ the ``setIamPolicy`` API request.
+ """
+ client = self._require_client(client)
+ query_params = {}
+
+ if self.user_project is not None:
+ query_params["userProject"] = self.user_project
+
+ resource = policy.to_api_repr()
+ resource["resourceId"] = self.path
+ info = client._connection.api_request(
+ method="PUT",
+ path="%s/iam" % (self.path,),
+ query_params=query_params,
+ data=resource,
+ _target_object=None,
+ timeout=timeout,
+ )
+ return Policy.from_api_repr(info)
+
+ def test_iam_permissions(self, permissions, client=None, timeout=_DEFAULT_TIMEOUT):
+ """API call: test permissions
+
+ See
+ https://cloud.google.com/storage/docs/json_api/v1/buckets/testIamPermissions
+
+ If :attr:`user_project` is set, bills the API request to that project.
+
+ :type permissions: list of string
+ :param permissions: the permissions to check
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the current bucket.
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :rtype: list of string
+ :returns: the permissions returned by the ``testIamPermissions`` API
+ request.
+ """
+ client = self._require_client(client)
+ query_params = {"permissions": permissions}
+
+ if self.user_project is not None:
+ query_params["userProject"] = self.user_project
+
+ path = "%s/iam/testPermissions" % (self.path,)
+ resp = client._connection.api_request(
+ method="GET", path=path, query_params=query_params, timeout=timeout
+ )
+ return resp.get("permissions", [])
+
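+ # Hedged sketch (editor's addition): checking a caller's effective
+ # permissions; the permission names below are standard Cloud Storage
+ # permissions:
+ #
+ #     allowed = bucket.test_iam_permissions(
+ #         ["storage.objects.get", "storage.objects.list"])
+ #     if "storage.objects.list" in allowed:
+ #         print("caller may list objects")
+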
+ def make_public(
+ self, recursive=False, future=False, client=None, timeout=_DEFAULT_TIMEOUT
+ ):
+ """Update bucket's ACL, granting read access to anonymous users.
+
+ :type recursive: bool
+ :param recursive: If True, this will make all blobs inside the bucket
+ public as well.
+
+ :type future: bool
+ :param future: If True, this will make all objects created in the
+ future public as well.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the current bucket.
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response. The timeout applies to each underlying
+ request.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :raises ValueError:
+ If ``recursive`` is True, and the bucket contains more than 256
+ blobs. This is to prevent extremely long runtime of this
+ method. For such buckets, iterate over the blobs returned by
+ :meth:`list_blobs` and call
+ :meth:`~google.cloud.storage.blob.Blob.make_public`
+ for each blob.
+ """
+ self.acl.all().grant_read()
+ self.acl.save(client=client, timeout=timeout)
+
+ if future:
+ doa = self.default_object_acl
+ if not doa.loaded:
+ doa.reload(client=client, timeout=timeout)
+ doa.all().grant_read()
+ doa.save(client=client, timeout=timeout)
+
+ if recursive:
+ blobs = list(
+ self.list_blobs(
+ projection="full",
+ max_results=self._MAX_OBJECTS_FOR_ITERATION + 1,
+ client=client,
+ timeout=timeout,
+ )
+ )
+ if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION:
+ message = (
+ "Refusing to make public recursively with more than "
+ "%d objects. If you actually want to make every object "
+ "in this bucket public, iterate through the blobs "
+ "returned by 'Bucket.list_blobs()' and call "
+ "'make_public' on each one."
+ ) % (self._MAX_OBJECTS_FOR_ITERATION,)
+ raise ValueError(message)
+
+ for blob in blobs:
+ blob.acl.all().grant_read()
+ blob.acl.save(client=client, timeout=timeout)
+
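+ # Hedged sketch (editor's addition): making the bucket itself public and
+ # defaulting future objects to public as well, without touching existing
+ # blobs (``recursive`` stays False):
+ #
+ #     bucket.make_public(future=True)
+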
+ def make_private(
+ self, recursive=False, future=False, client=None, timeout=_DEFAULT_TIMEOUT
+ ):
+ """Update bucket's ACL, revoking read access for anonymous users.
+
+ :type recursive: bool
+ :param recursive: If True, this will make all blobs inside the bucket
+ private as well.
+
+ :type future: bool
+ :param future: If True, this will make all objects created in the
+ future private as well.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the current bucket.
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response. The timeout applies to each underlying
+ request.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :raises ValueError:
+ If ``recursive`` is True, and the bucket contains more than 256
+ blobs. This is to prevent extremely long runtime of this
+ method. For such buckets, iterate over the blobs returned by
+ :meth:`list_blobs` and call
+ :meth:`~google.cloud.storage.blob.Blob.make_private`
+ for each blob.
+ """
+ self.acl.all().revoke_read()
+ self.acl.save(client=client, timeout=timeout)
+
+ if future:
+ doa = self.default_object_acl
+ if not doa.loaded:
+ doa.reload(client=client, timeout=timeout)
+ doa.all().revoke_read()
+ doa.save(client=client, timeout=timeout)
+
+ if recursive:
+ blobs = list(
+ self.list_blobs(
+ projection="full",
+ max_results=self._MAX_OBJECTS_FOR_ITERATION + 1,
+ client=client,
+ timeout=timeout,
+ )
+ )
+ if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION:
+ message = (
+ "Refusing to make private recursively with more than "
+ "%d objects. If you actually want to make every object "
+ "in this bucket private, iterate through the blobs "
+ "returned by 'Bucket.list_blobs()' and call "
+ "'make_private' on each one."
+ ) % (self._MAX_OBJECTS_FOR_ITERATION,)
+ raise ValueError(message)
+
+ for blob in blobs:
+ blob.acl.all().revoke_read()
+ blob.acl.save(client=client, timeout=timeout)
+
+ def generate_upload_policy(self, conditions, expiration=None, client=None):
+ """Create a signed upload policy for uploading objects.
+
+ This method generates and signs a policy document. You can use
+ `policy documents`_ to allow visitors to a website to upload files to
+ Google Cloud Storage without giving them direct write access.
+
+ For example:
+
+ .. literalinclude:: snippets.py
+ :start-after: [START policy_document]
+ :end-before: [END policy_document]
+ :dedent: 4
+
+ .. _policy documents:
+ https://cloud.google.com/storage/docs/xml-api\
+ /post-object#policydocument
+
+ :type expiration: datetime
+ :param expiration: (Optional) Expiration in UTC. If not specified, the
+ policy will expire in 1 hour.
+
+ :type conditions: list
+ :param conditions: A list of conditions as described in the
+ `policy documents`_ documentation.
+
+ :type client: :class:`~google.cloud.storage.client.Client`
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the current bucket.
+
+ :rtype: dict
+ :returns: A dictionary of (form field name, form field value) of form
+ fields that should be added to your HTML upload form in order
+ to attach the signature.
+ """
+ client = self._require_client(client)
+ credentials = client._base_connection.credentials
+ _signing.ensure_signed_credentials(credentials)
+
+ if expiration is None:
+ expiration = _NOW() + datetime.timedelta(hours=1)
+
+ conditions = conditions + [{"bucket": self.name}]
+
+ policy_document = {
+ "expiration": _datetime_to_rfc3339(expiration),
+ "conditions": conditions,
+ }
+
+ encoded_policy_document = base64.b64encode(
+ json.dumps(policy_document).encode("utf-8")
+ )
+ signature = base64.b64encode(credentials.sign_bytes(encoded_policy_document))
+
+ fields = {
+ "bucket": self.name,
+ "GoogleAccessId": credentials.signer_email,
+ "policy": encoded_policy_document.decode("utf-8"),
+ "signature": signature.decode("utf-8"),
+ }
+
+ return fields
+
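+ # Hedged sketch (editor's addition): the returned mapping is meant to be
+ # embedded in an HTML/POST upload form alongside a ``key`` field naming
+ # the object, per the policy-documents reference linked above; the
+ # condition shown allows any object name:
+ #
+ #     policy_fields = bucket.generate_upload_policy(
+ #         [["starts-with", "$key", ""]])
+ #     # merge ``policy_fields`` (bucket, GoogleAccessId, policy, signature)
+ #     # into the form fields of the POST request.
+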
+ def lock_retention_policy(self, client=None, timeout=_DEFAULT_TIMEOUT):
+ """Lock the bucket's retention policy.
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :raises ValueError:
+ if the bucket has no metageneration (i.e., new or never reloaded);
+ if the bucket has no retention policy assigned;
+ if the bucket's retention policy is already locked.
+ """
+ if "metageneration" not in self._properties:
+ raise ValueError("Bucket has no retention policy assigned: try 'reload'?")
+
+ policy = self._properties.get("retentionPolicy")
+
+ if policy is None:
+ raise ValueError("Bucket has no retention policy assigned: try 'reload'?")
+
+ if policy.get("isLocked"):
+ raise ValueError("Bucket's retention policy is already locked.")
+
+ client = self._require_client(client)
+
+ query_params = {"ifMetagenerationMatch": self.metageneration}
+
+ if self.user_project is not None:
+ query_params["userProject"] = self.user_project
+
+ path = "/b/{}/lockRetentionPolicy".format(self.name)
+ api_response = client._connection.api_request(
+ method="POST",
+ path=path,
+ query_params=query_params,
+ _target_object=self,
+ timeout=timeout,
+ )
+ self._set_properties(api_response)
+
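+ # Hedged sketch (editor's addition): a typical lock flow; ``reload()``
+ # fetches the metageneration the lock request is conditioned on, and
+ # locking is irreversible:
+ #
+ #     bucket.retention_period = 30 * 86400
+ #     bucket.patch()
+ #     bucket.reload()
+ #     bucket.lock_retention_policy()
+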
+ def generate_signed_url(
+ self,
+ expiration=None,
+ api_access_endpoint=_API_ACCESS_ENDPOINT,
+ method="GET",
+ headers=None,
+ query_parameters=None,
+ client=None,
+ credentials=None,
+ version=None,
+ virtual_hosted_style=False,
+ bucket_bound_hostname=None,
+ scheme="http",
+ ):
+ """Generates a signed URL for this bucket.
+
+ .. note::
+
+ If you are on Google Compute Engine, you can't generate a signed
+ URL using a GCE service account. Follow `Issue 50`_ for updates on
+ this. If you'd like to be able to generate a signed URL from GCE,
+ you can use a standard service account from a JSON file rather
+ than a GCE service account.
+
+ .. _Issue 50: https://github.com/GoogleCloudPlatform/\
+ google-auth-library-python/issues/50
+
+ If you have a bucket that you want to allow access to for a set
+ amount of time, you can use this method to generate a URL that
+ is only valid within a certain time period.
+
+ If ``bucket_bound_hostname`` is passed, it replaces the default
+ :attr:`api_access_endpoint`; in that case ``https`` works only when
+ serving through a ``CDN``.
+
+ Example:
+ Generates a signed URL for this bucket using bucket_bound_hostname and scheme.
+
+ >>> from google.cloud import storage
+ >>> client = storage.Client()
+ >>> bucket = client.get_bucket('my-bucket-name')
+ >>> url = bucket.generate_signed_url(
+ ... expiration='url-expiration-time',
+ ... bucket_bound_hostname='mydomain.tld', version='v4')
+ >>> url = bucket.generate_signed_url(
+ ... expiration='url-expiration-time',
+ ... bucket_bound_hostname='mydomain.tld',
+ ... version='v4', scheme='https') # If using ``CDN``
+
+ This is particularly useful if you don't want publicly
+ accessible buckets, but don't want to require users to explicitly
+ log in.
+
+ :type expiration: Union[Integer, datetime.datetime, datetime.timedelta]
+ :param expiration: Point in time when the signed URL should expire. If
+ a ``datetime`` instance is passed without an explicit
+ ``tzinfo`` set, it will be assumed to be ``UTC``.
+
+ :type api_access_endpoint: str
+ :param api_access_endpoint: (Optional) URI base.
+
+ :type method: str
+ :param method: The HTTP verb that will be used when requesting the URL.
+
+ :type headers: dict
+ :param headers:
+ (Optional) Additional HTTP headers to be included as part of the
+ signed URLs. See:
+ https://cloud.google.com/storage/docs/xml-api/reference-headers
+ Requests using the signed URL *must* pass the specified header
+ (name and value) with each request for the URL.
+
+ :type query_parameters: dict
+ :param query_parameters:
+ (Optional) Additional query parameters to be included as part of the
+ signed URLs. See:
+ https://cloud.google.com/storage/docs/xml-api/reference-headers#query
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the blob's bucket.
+
+
+ :type credentials: :class:`google.auth.credentials.Credentials` or
+ :class:`NoneType`
+ :param credentials: The authorization credentials to attach to requests.
+ These credentials identify this application to the service.
+ If none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+
+ :type version: str
+ :param version: (Optional) The version of signed credential to create.
+ Must be one of 'v2' | 'v4'.
+
+ :type virtual_hosted_style: bool
+ :param virtual_hosted_style:
+ (Optional) If true, then construct the URL relative to the bucket's
+ virtual hostname, e.g., '.storage.googleapis.com'.
+
+ :type bucket_bound_hostname: str
+ :param bucket_bound_hostname:
+ (Optional) If passed, then construct the URL relative to the bucket-bound hostname.
+ Value can be bare or with a scheme, e.g., 'example.com' or 'http://example.com'.
+ See: https://cloud.google.com/storage/docs/request-endpoints#cname
+
+ :type scheme: str
+ :param scheme:
+ (Optional) If ``bucket_bound_hostname`` is passed as a bare hostname, use
+ this value as the scheme. ``https`` will work only when using a CDN.
+ Defaults to ``"http"``.
+
+ :raises: :exc:`ValueError` when version is invalid.
+ :raises: :exc:`TypeError` when expiration is not a valid type.
+ :raises: :exc:`AttributeError` if credentials is not an instance
+ of :class:`google.auth.credentials.Signing`.
+
+ :rtype: str
+ :returns: A signed URL you can use to access the resource
+ until expiration.
+ """
+ if version is None:
+ version = "v2"
+ elif version not in ("v2", "v4"):
+ raise ValueError("'version' must be either 'v2' or 'v4'")
+
+ if virtual_hosted_style:
+ api_access_endpoint = "https://{bucket_name}.storage.googleapis.com".format(
+ bucket_name=self.name
+ )
+ elif bucket_bound_hostname:
+ api_access_endpoint = _bucket_bound_hostname_url(
+ bucket_bound_hostname, scheme
+ )
+ else:
+ resource = "/{bucket_name}".format(bucket_name=self.name)
+
+ if virtual_hosted_style or bucket_bound_hostname:
+ resource = "/"
+
+ if credentials is None:
+ client = self._require_client(client)
+ credentials = client._credentials
+
+ if version == "v2":
+ helper = generate_signed_url_v2
+ else:
+ helper = generate_signed_url_v4
+
+ return helper(
+ credentials,
+ resource=resource,
+ expiration=expiration,
+ api_access_endpoint=api_access_endpoint,
+ method=method.upper(),
+ headers=headers,
+ query_parameters=query_parameters,
+ )
+
+
+def _raise_if_len_differs(expected_len, **generation_match_args):
+ """
+ Raise an error if any generation match argument
+ is set and its length differs from the given value.
+
+ :type expected_len: int
+ :param expected_len: Expected argument length in case it's set.
+
+ :type generation_match_args: dict
+ :param generation_match_args: Lists whose lengths must be checked.
+
+ :raises: :exc:`ValueError` if any argument is set but has an unexpected length.
+ """
+ for name, value in generation_match_args.items():
+ if value is not None and len(value) != expected_len:
+ raise ValueError(
+ "'{}' length must be the same as 'blobs' length".format(name)
+ )
diff --git a/venv/Lib/site-packages/google/cloud/storage/client.py b/venv/Lib/site-packages/google/cloud/storage/client.py
new file mode 100644
index 000000000..fd29abe9c
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/storage/client.py
@@ -0,0 +1,1139 @@
+# Copyright 2015 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Client for interacting with the Google Cloud Storage API."""
+
+import base64
+import binascii
+import collections
+import datetime
+import functools
+import json
+import warnings
+import google.api_core.client_options
+
+from google.auth.credentials import AnonymousCredentials
+
+from google.api_core import page_iterator
+from google.cloud._helpers import _LocalStack, _NOW
+from google.cloud.client import ClientWithProject
+from google.cloud.exceptions import NotFound
+from google.cloud.storage._helpers import _get_storage_host
+from google.cloud.storage._helpers import _bucket_bound_hostname_url
+from google.cloud.storage._http import Connection
+from google.cloud.storage._signing import (
+ get_expiration_seconds_v4,
+ get_v4_now_dtstamps,
+ ensure_signed_credentials,
+ _sign_message,
+)
+from google.cloud.storage.batch import Batch
+from google.cloud.storage.bucket import Bucket
+from google.cloud.storage.blob import Blob
+from google.cloud.storage.hmac_key import HMACKeyMetadata
+from google.cloud.storage.acl import BucketACL
+from google.cloud.storage.acl import DefaultObjectACL
+from google.cloud.storage.constants import _DEFAULT_TIMEOUT
+
+
+_marker = object()
+
+
+class Client(ClientWithProject):
+ """Client to bundle configuration needed for API requests.
+
+ :type project: str or None
+ :param project: the project which the client acts on behalf of. Will be
+ passed when creating buckets. If not passed,
+ falls back to the default inferred from the environment.
+
+ :type credentials: :class:`~google.auth.credentials.Credentials`
+ :param credentials: (Optional) The OAuth2 Credentials to use for this
+ client. If not passed (and if no ``_http`` object is
+ passed), falls back to the default inferred from the
+ environment.
+
+ :type _http: :class:`~requests.Session`
+ :param _http: (Optional) HTTP object to make requests. Can be any object
+ that defines ``request()`` with the same interface as
+ :meth:`requests.Session.request`. If not passed, an
+ ``_http`` object is created that is bound to the
+ ``credentials`` for the current object.
+ This parameter should be considered private, and could
+ change in the future.
+
+ :type client_info: :class:`~google.api_core.client_info.ClientInfo`
+ :param client_info:
+ The client info used to send a user-agent string along with API
+ requests. If ``None``, then default info will be used. Generally,
+ you only need to set this if you're developing your own library
+ or partner tool.
+
+ :type client_options: :class:`~google.api_core.client_options.ClientOptions` or :class:`dict`
+ :param client_options: (Optional) Client options used to set user options on the client.
+ API Endpoint should be set through client_options.
+ """
+
+ SCOPE = (
+ "https://www.googleapis.com/auth/devstorage.full_control",
+ "https://www.googleapis.com/auth/devstorage.read_only",
+ "https://www.googleapis.com/auth/devstorage.read_write",
+ )
+ """The scopes required for authenticating as a Cloud Storage consumer."""
+
+ def __init__(
+ self,
+ project=_marker,
+ credentials=None,
+ _http=None,
+ client_info=None,
+ client_options=None,
+ ):
+ self._base_connection = None
+
+ if project is None:
+ no_project = True
+ project = ""
+ else:
+ no_project = False
+
+ if project is _marker:
+ project = None
+
+ super(Client, self).__init__(
+ project=project,
+ credentials=credentials,
+ client_options=client_options,
+ _http=_http,
+ )
+
+ kw_args = {"client_info": client_info}
+
+ kw_args["api_endpoint"] = _get_storage_host()
+
+ if client_options:
+ if type(client_options) == dict:
+ client_options = google.api_core.client_options.from_dict(
+ client_options
+ )
+ if client_options.api_endpoint:
+ api_endpoint = client_options.api_endpoint
+ kw_args["api_endpoint"] = api_endpoint
+
+ if no_project:
+ self.project = None
+
+ self._connection = Connection(self, **kw_args)
+ self._batch_stack = _LocalStack()
+
+ @classmethod
+ def create_anonymous_client(cls):
+ """Factory: return client with anonymous credentials.
+
+ .. note::
+
+ Such a client has only limited access to "public" buckets:
+ listing their contents and downloading their blobs.
+
+ :rtype: :class:`google.cloud.storage.client.Client`
+ :returns: Instance w/ anonymous credentials and no project.
+ """
+ client = cls(project="", credentials=AnonymousCredentials())
+ client.project = None
+ return client
+
+ @property
+ def _connection(self):
+ """Get connection or batch on the client.
+
+ :rtype: :class:`google.cloud.storage._http.Connection`
+ :returns: The connection set on the client, or the batch
+ if one is set.
+ """
+ if self.current_batch is not None:
+ return self.current_batch
+ else:
+ return self._base_connection
+
+ @_connection.setter
+ def _connection(self, value):
+ """Set connection on the client.
+
+ Intended to be used by the constructor (since the base class calls
+ ``self._connection = connection``).
+ Will raise if the connection is set more than once.
+
+ :type value: :class:`google.cloud.storage._http.Connection`
+ :param value: The connection set on the client.
+
+ :raises: :class:`ValueError` if connection has already been set.
+ """
+ if self._base_connection is not None:
+ raise ValueError("Connection already set on client")
+ self._base_connection = value
+
+ def _push_batch(self, batch):
+ """Push a batch onto our stack.
+
+ "Protected", intended for use by batch context mgrs.
+
+ :type batch: :class:`google.cloud.storage.batch.Batch`
+ :param batch: newly-active batch
+ """
+ self._batch_stack.push(batch)
+
+ def _pop_batch(self):
+ """Pop a batch from our stack.
+
+ "Protected", intended for use by batch context mgrs.
+
+ :raises: IndexError if the stack is empty.
+ :rtype: :class:`google.cloud.storage.batch.Batch`
+ :returns: the top-most batch/transaction, after removing it.
+ """
+ return self._batch_stack.pop()
+
+ def _bucket_arg_to_bucket(self, bucket_or_name):
+ """Helper to return given bucket or create new by name.
+
+ Args:
+ bucket_or_name (Union[ \
+ :class:`~google.cloud.storage.bucket.Bucket`, \
+ str, \
+ ]):
+ The bucket resource to pass or name to create.
+
+ Returns:
+ google.cloud.storage.bucket.Bucket
+ The newly created bucket or the given one.
+ """
+ if isinstance(bucket_or_name, Bucket):
+ bucket = bucket_or_name
+ else:
+ bucket = Bucket(self, name=bucket_or_name)
+ return bucket
+
+ @property
+ def current_batch(self):
+ """Currently-active batch.
+
+ :rtype: :class:`google.cloud.storage.batch.Batch` or ``NoneType`` (if
+ no batch is active).
+ :returns: The batch at the top of the batch stack.
+ """
+ return self._batch_stack.top
+
+ def get_service_account_email(self, project=None, timeout=_DEFAULT_TIMEOUT):
+ """Get the email address of the project's GCS service account
+
+ :type project: str
+ :param project:
+ (Optional) Project ID to use for retrieving GCS service account
+ email address. Defaults to the client's project.
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :rtype: str
+ :returns: service account email address
+ """
+ if project is None:
+ project = self.project
+ path = "/projects/%s/serviceAccount" % (project,)
+ api_response = self._base_connection.api_request(
+ method="GET", path=path, timeout=timeout
+ )
+ return api_response["email_address"]
+
+ def bucket(self, bucket_name, user_project=None):
+ """Factory constructor for bucket object.
+
+ .. note::
+ This will not make an HTTP request; it simply instantiates
+ a bucket object owned by this client.
+
+ :type bucket_name: str
+ :param bucket_name: The name of the bucket to be instantiated.
+
+ :type user_project: str
+ :param user_project: (Optional) The project ID to be billed for API
+ requests made via the bucket.
+
+ :rtype: :class:`google.cloud.storage.bucket.Bucket`
+ :returns: The bucket object created.
+ """
+ return Bucket(client=self, name=bucket_name, user_project=user_project)
+
+ def batch(self):
+ """Factory constructor for batch object.
+
+ .. note::
+ This will not make an HTTP request; it simply instantiates
+ a batch object owned by this client.
+
+ :rtype: :class:`google.cloud.storage.batch.Batch`
+ :returns: The batch object created.
+ """
+ return Batch(client=self)
+
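+ # Hedged sketch (editor's addition): inside the ``batch()`` context
+ # manager, API calls are buffered and sent as one batched request on exit;
+ # the bucket and blob names are illustrative:
+ #
+ #     bucket = client.bucket("my-bucket-name")
+ #     with client.batch():
+ #         bucket.delete_blob("stale-1.txt")
+ #         bucket.delete_blob("stale-2.txt")
+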
+ def get_bucket(
+ self,
+ bucket_or_name,
+ timeout=_DEFAULT_TIMEOUT,
+ if_metageneration_match=None,
+ if_metageneration_not_match=None,
+ ):
+ """API call: retrieve a bucket via a GET request.
+
+ See
+ https://cloud.google.com/storage/docs/json_api/v1/buckets/get
+
+ Args:
+ bucket_or_name (Union[ \
+ :class:`~google.cloud.storage.bucket.Bucket`, \
+ str, \
+ ]):
+ The bucket resource to pass or name to create.
+
+ timeout (Optional[Union[float, Tuple[float, float]]]):
+ The amount of time, in seconds, to wait for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ if_metageneration_match (Optional[long]):
+ Make the operation conditional on whether the
+ bucket's current metageneration matches the given value.
+
+ if_metageneration_not_match (Optional[long]):
+ Make the operation conditional on whether the bucket's
+ current metageneration does not match the given value.
+
+ Returns:
+ google.cloud.storage.bucket.Bucket
+ The bucket matching the name provided.
+
+ Raises:
+ google.cloud.exceptions.NotFound
+ If the bucket is not found.
+
+ Examples:
+ Retrieve a bucket using a string.
+
+ .. literalinclude:: snippets.py
+ :start-after: [START get_bucket]
+ :end-before: [END get_bucket]
+ :dedent: 4
+
+ Get a bucket using a resource.
+
+ >>> from google.cloud import storage
+ >>> client = storage.Client()
+
+ >>> # Fetch the bucket resource by name.
+ >>> bucket = client.get_bucket("my-bucket-name")
+
+ >>> # Time passes. Another program may have modified the bucket
+ ... # in the meantime, so you want to get the latest state.
+ >>> bucket = client.get_bucket(bucket) # API request.
+
+ """
+ bucket = self._bucket_arg_to_bucket(bucket_or_name)
+ bucket.reload(
+ client=self,
+ timeout=timeout,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ )
+ return bucket
+
+ def lookup_bucket(
+ self,
+ bucket_name,
+ timeout=_DEFAULT_TIMEOUT,
+ if_metageneration_match=None,
+ if_metageneration_not_match=None,
+ ):
+ """Get a bucket by name, returning None if not found.
+
+ You can use this if you would rather check for a None value
+ than catching an exception:
+
+ .. literalinclude:: snippets.py
+ :start-after: [START lookup_bucket]
+ :end-before: [END lookup_bucket]
+ :dedent: 4
+
+ :type bucket_name: str
+ :param bucket_name: The name of the bucket to get.
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :type if_metageneration_match: long
+ :param if_metageneration_match: (Optional) Make the operation conditional on whether the
+ bucket's current metageneration matches the given value.
+
+ :type if_metageneration_not_match: long
+ :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
+ bucket's current metageneration does not match the given value.
+
+ :rtype: :class:`google.cloud.storage.bucket.Bucket`
+ :returns: The bucket matching the name provided or None if not found.
+ """
+ try:
+ return self.get_bucket(
+ bucket_name,
+ timeout=timeout,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ )
+ except NotFound:
+ return None
+
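+ # Hedged sketch (editor's addition): the ``None`` return makes a
+ # get-or-create pattern straightforward; the bucket name is illustrative:
+ #
+ #     bucket = client.lookup_bucket("maybe-missing")
+ #     if bucket is None:
+ #         bucket = client.create_bucket("maybe-missing")
+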
+ def create_bucket(
+ self,
+ bucket_or_name,
+ requester_pays=None,
+ project=None,
+ user_project=None,
+ location=None,
+ predefined_acl=None,
+ predefined_default_object_acl=None,
+ timeout=_DEFAULT_TIMEOUT,
+ ):
+ """API call: create a new bucket via a POST request.
+
+ See
+ https://cloud.google.com/storage/docs/json_api/v1/buckets/insert
+
+ Args:
+ bucket_or_name (Union[ \
+ :class:`~google.cloud.storage.bucket.Bucket`, \
+ str, \
+ ]):
+ The bucket resource to pass or name to create.
+ requester_pays (bool):
+ DEPRECATED. Use Bucket().requester_pays instead.
+ (Optional) Whether requester pays for API requests for
+ this bucket and its blobs.
+ project (str):
+ (Optional) The project under which the bucket is to be created.
+ If not passed, uses the project set on the client.
+ user_project (str):
+ (Optional) The project ID to be billed for API requests
+ made via created bucket.
+ location (str):
+ (Optional) The location of the bucket. If not passed,
+ the default location, US, will be used. See
+ https://cloud.google.com/storage/docs/bucket-locations
+ predefined_acl (str):
+ (Optional) Name of predefined ACL to apply to bucket. See:
+ https://cloud.google.com/storage/docs/access-control/lists#predefined-acl
+ predefined_default_object_acl (str):
+ (Optional) Name of predefined ACL to apply to bucket's objects. See:
+ https://cloud.google.com/storage/docs/access-control/lists#predefined-acl
+ timeout (Optional[Union[float, Tuple[float, float]]]):
+ The amount of time, in seconds, to wait for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ Returns:
+ google.cloud.storage.bucket.Bucket
+ The newly created bucket.
+
+ Raises:
+ google.cloud.exceptions.Conflict
+ If the bucket already exists.
+
+ Examples:
+ Create a bucket using a string.
+
+ .. literalinclude:: snippets.py
+ :start-after: [START create_bucket]
+ :end-before: [END create_bucket]
+ :dedent: 4
+
+ Create a bucket using a resource.
+
+ >>> from google.cloud import storage
+ >>> client = storage.Client()
+
+ >>> # Set properties on a plain resource object.
+ >>> bucket = storage.Bucket("my-bucket-name")
+ >>> bucket.location = "europe-west6"
+ >>> bucket.storage_class = "COLDLINE"
+
+ >>> # Pass that resource object to the client.
+ >>> bucket = client.create_bucket(bucket) # API request.
+
+ """
+ bucket = self._bucket_arg_to_bucket(bucket_or_name)
+
+ if project is None:
+ project = self.project
+
+ if project is None:
+ raise ValueError("Client project not set: pass an explicit project.")
+
+ if requester_pays is not None:
+ warnings.warn(
+ "requester_pays arg is deprecated. Use Bucket().requester_pays instead.",
+ PendingDeprecationWarning,
+ stacklevel=1,
+ )
+ bucket.requester_pays = requester_pays
+
+ query_params = {"project": project}
+
+ if predefined_acl is not None:
+ predefined_acl = BucketACL.validate_predefined(predefined_acl)
+ query_params["predefinedAcl"] = predefined_acl
+
+ if predefined_default_object_acl is not None:
+ predefined_default_object_acl = DefaultObjectACL.validate_predefined(
+ predefined_default_object_acl
+ )
+ query_params["predefinedDefaultObjectAcl"] = predefined_default_object_acl
+
+ if user_project is not None:
+ query_params["userProject"] = user_project
+
+ properties = {key: bucket._properties[key] for key in bucket._changes}
+ properties["name"] = bucket.name
+
+ if location is not None:
+ properties["location"] = location
+
+ api_response = self._connection.api_request(
+ method="POST",
+ path="/b",
+ query_params=query_params,
+ data=properties,
+ _target_object=bucket,
+ timeout=timeout,
+ )
+
+ bucket._set_properties(api_response)
+ return bucket
+
+ def download_blob_to_file(self, blob_or_uri, file_obj, start=None, end=None):
+ """Download the contents of a blob object or blob URI into a file-like object.
+
+ Args:
+ blob_or_uri (Union[ \
+ :class:`~google.cloud.storage.blob.Blob`, \
+ str, \
+ ]):
+ The blob resource to pass or URI to download.
+ file_obj (file):
+ A file handle to which to write the blob's data.
+ start (int):
+ (Optional) The first byte in a range to be downloaded.
+ end (int):
+ (Optional) The last byte in a range to be downloaded.
+
+ Examples:
+ Download a blob using a blob resource.
+
+ >>> from google.cloud import storage
+ >>> client = storage.Client()
+
+ >>> bucket = client.get_bucket('my-bucket-name')
+ >>> blob = storage.Blob('path/to/blob', bucket)
+
+ >>> with open('file-to-download-to', 'wb') as file_obj:
+ ... client.download_blob_to_file(blob, file_obj) # API request.
+
+
+ Download a blob using a URI.
+
+ >>> from google.cloud import storage
+ >>> client = storage.Client()
+
+ >>> with open('file-to-download-to', 'wb') as file_obj:
+ ... client.download_blob_to_file(
+ ... 'gs://bucket_name/path/to/blob', file_obj)
+
+
+ """
+ try:
+ blob_or_uri.download_to_file(file_obj, client=self, start=start, end=end)
+ except AttributeError:
+ blob = Blob.from_string(blob_or_uri)
+ blob.download_to_file(file_obj, client=self, start=start, end=end)
+
+ def list_blobs(
+ self,
+ bucket_or_name,
+ max_results=None,
+ page_token=None,
+ prefix=None,
+ delimiter=None,
+ start_offset=None,
+ end_offset=None,
+ include_trailing_delimiter=None,
+ versions=None,
+ projection="noAcl",
+ fields=None,
+ timeout=_DEFAULT_TIMEOUT,
+ ):
+ """Return an iterator used to find blobs in the bucket.
+
+ If :attr:`user_project` is set, bills the API request to that project.
+
+ Args:
+ bucket_or_name (Union[ \
+ :class:`~google.cloud.storage.bucket.Bucket`, \
+ str, \
+ ]):
+ The bucket resource to pass or name to create.
+
+ max_results (int):
+ (Optional) The maximum number of blobs to return.
+
+ page_token (str):
+ (Optional) If present, return the next batch of blobs, using the
+ value, which must correspond to the ``nextPageToken`` value
+ returned in the previous response. Deprecated: use the ``pages``
+ property of the returned iterator instead of manually passing the
+ token.
+
+ prefix (str):
+ (Optional) Prefix used to filter blobs.
+
+ delimiter (str):
+ (Optional) Delimiter, used with ``prefix`` to
+ emulate hierarchy.
+
+ start_offset (str):
+ (Optional) Filter results to objects whose names are
+ lexicographically equal to or after ``startOffset``. If
+ ``endOffset`` is also set, the objects listed will have names
+ between ``startOffset`` (inclusive) and ``endOffset``
+ (exclusive).
+
+ end_offset (str):
+ (Optional) Filter results to objects whose names are
+ lexicographically before ``endOffset``. If ``startOffset`` is
+ also set, the objects listed will have names between
+ ``startOffset`` (inclusive) and ``endOffset`` (exclusive).
+
+ include_trailing_delimiter (boolean):
+ (Optional) If true, objects that end in exactly one instance of
+ ``delimiter`` will have their metadata included in ``items`` in
+ addition to ``prefixes``.
+
+ versions (bool):
+ (Optional) Whether object versions should be returned
+ as separate blobs.
+
+ projection (str):
+ (Optional) If used, must be 'full' or 'noAcl'.
+ Defaults to ``'noAcl'``. Specifies the set of
+ properties to return.
+
+ fields (str):
+ (Optional) Selector specifying which fields to include
+ in a partial response. Must be a list of fields. For
+ example to get a partial response with just the next
+ page token and the name and language of each blob returned:
+ ``'items(name,contentLanguage),nextPageToken'``.
+ See: https://cloud.google.com/storage/docs/json_api/v1/parameters#fields
+
+ timeout (Optional[Union[float, Tuple[float, float]]]):
+ The amount of time, in seconds, to wait for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ Returns:
+ Iterator of all :class:`~google.cloud.storage.blob.Blob`
+ in this bucket matching the arguments.
+
+ Example:
+ List blobs in the bucket with user_project.
+
+ >>> from google.cloud import storage
+ >>> client = storage.Client()
+
+ >>> bucket = storage.Bucket("my-bucket-name", user_project='my-project')
+ >>> all_blobs = list(client.list_blobs(bucket))
+ """
+ bucket = self._bucket_arg_to_bucket(bucket_or_name)
+ return bucket.list_blobs(
+ max_results=max_results,
+ page_token=page_token,
+ prefix=prefix,
+ delimiter=delimiter,
+ start_offset=start_offset,
+ end_offset=end_offset,
+ include_trailing_delimiter=include_trailing_delimiter,
+ versions=versions,
+ projection=projection,
+ fields=fields,
+ client=self,
+ timeout=timeout,
+ )
+
+ def list_buckets(
+ self,
+ max_results=None,
+ page_token=None,
+ prefix=None,
+ projection="noAcl",
+ fields=None,
+ project=None,
+ timeout=_DEFAULT_TIMEOUT,
+ ):
+ """Get all buckets in the project associated to the client.
+
+ This will not populate the list of blobs available in each
+ bucket.
+
+ .. literalinclude:: snippets.py
+ :start-after: [START list_buckets]
+ :end-before: [END list_buckets]
+ :dedent: 4
+
+ This implements "storage.buckets.list".
+
+ :type max_results: int
+ :param max_results: (Optional) The maximum number of buckets to return.
+
+ :type page_token: str
+ :param page_token:
+ (Optional) If present, return the next batch of buckets, using the
+ value, which must correspond to the ``nextPageToken`` value
+ returned in the previous response. Deprecated: use the ``pages``
+ property of the returned iterator instead of manually passing the
+ token.
+
+ :type prefix: str
+ :param prefix: (Optional) Filter results to buckets whose names begin
+ with this prefix.
+
+ :type projection: str
+ :param projection:
+ (Optional) Specifies the set of properties to return. If used, must
+ be 'full' or 'noAcl'. Defaults to 'noAcl'.
+
+ :type fields: str
+ :param fields:
+ (Optional) Selector specifying which fields to include in a partial
+ response. Must be a list of fields. For example, to get a partial
+ response with just the next page token and the id of each
+ bucket returned: 'items/id,nextPageToken'.
+
+ :type project: str
+ :param project: (Optional) The project whose buckets are to be listed.
+ If not passed, uses the project set on the client.
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :rtype: :class:`~google.api_core.page_iterator.Iterator`
+ :raises ValueError: if both ``project`` is ``None`` and the client's
+ project is also ``None``.
+ :returns: Iterator of all :class:`~google.cloud.storage.bucket.Bucket`
+ belonging to this project.
+ """
+ if project is None:
+ project = self.project
+
+ if project is None:
+ raise ValueError("Client project not set: pass an explicit project.")
+
+ extra_params = {"project": project}
+
+ if prefix is not None:
+ extra_params["prefix"] = prefix
+
+ extra_params["projection"] = projection
+
+ if fields is not None:
+ extra_params["fields"] = fields
+
+ api_request = functools.partial(self._connection.api_request, timeout=timeout)
+
+ return page_iterator.HTTPIterator(
+ client=self,
+ api_request=api_request,
+ path="/b",
+ item_to_value=_item_to_bucket,
+ page_token=page_token,
+ max_results=max_results,
+ extra_params=extra_params,
+ )
+
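+ # Hedged sketch (editor's addition): the iterator fetches pages lazily,
+ # so plain iteration is enough; the prefix is illustrative:
+ #
+ #     for bucket in client.list_buckets(prefix="prod-"):
+ #         print(bucket.name)
+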
+ def create_hmac_key(
+ self,
+ service_account_email,
+ project_id=None,
+ user_project=None,
+ timeout=_DEFAULT_TIMEOUT,
+ ):
+ """Create an HMAC key for a service account.
+
+ :type service_account_email: str
+ :param service_account_email: e-mail address of the service account
+
+ :type project_id: str
+ :param project_id: (Optional) Explicit project ID for the key.
+ Defaults to the client's project.
+
+ :type user_project: str
+ :param user_project: (Optional) This parameter is currently ignored.
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :rtype:
+ Tuple[:class:`~google.cloud.storage.hmac_key.HMACKeyMetadata`, str]
+ :returns: metadata for the created key, plus the key's secret
+ (a 40-character base64-encoded string).
+ """
+ if project_id is None:
+ project_id = self.project
+
+ path = "/projects/{}/hmacKeys".format(project_id)
+ qs_params = {"serviceAccountEmail": service_account_email}
+
+ if user_project is not None:
+ qs_params["userProject"] = user_project
+
+ api_response = self._connection.api_request(
+ method="POST", path=path, query_params=qs_params, timeout=timeout
+ )
+ metadata = HMACKeyMetadata(self)
+ metadata._properties = api_response["metadata"]
+ secret = api_response["secret"]
+ return metadata, secret
+
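+ # Hedged sketch (editor's addition): the secret is returned only at
+ # creation time, so capture and store it immediately; the service-account
+ # address is illustrative:
+ #
+ #     metadata, secret = client.create_hmac_key(
+ #         "svc@my-project.iam.gserviceaccount.com")
+ #     print(metadata.access_id)  # public ID; ``secret`` must be kept safe
+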
+ def list_hmac_keys(
+ self,
+ max_results=None,
+ service_account_email=None,
+ show_deleted_keys=None,
+ project_id=None,
+ user_project=None,
+ timeout=_DEFAULT_TIMEOUT,
+ ):
+ """List HMAC keys for a project.
+
+ :type max_results: int
+ :param max_results:
+ (Optional) Max number of keys to return in a given page.
+
+ :type service_account_email: str
+ :param service_account_email:
+ (Optional) Limit keys to those created by the given service account.
+
+ :type show_deleted_keys: bool
+ :param show_deleted_keys:
+ (Optional) Include deleted keys in the list. Default is to
+ exclude them.
+
+ :type project_id: str
+ :param project_id: (Optional) Explicit project ID for the key.
+ Defaults to the client's project.
+
+ :type user_project: str
+ :param user_project: (Optional) This parameter is currently ignored.
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :rtype: :class:`~google.api_core.page_iterator.Iterator`
+ :returns: Iterator of :class:`~google.cloud.storage.hmac_key.HMACKeyMetadata`
+ instances for HMAC keys in the project.
+ """
+ if project_id is None:
+ project_id = self.project
+
+ path = "/projects/{}/hmacKeys".format(project_id)
+ extra_params = {}
+
+ if service_account_email is not None:
+ extra_params["serviceAccountEmail"] = service_account_email
+
+ if show_deleted_keys is not None:
+ extra_params["showDeletedKeys"] = show_deleted_keys
+
+ if user_project is not None:
+ extra_params["userProject"] = user_project
+
+ api_request = functools.partial(self._connection.api_request, timeout=timeout)
+
+ return page_iterator.HTTPIterator(
+ client=self,
+ api_request=api_request,
+ path=path,
+ item_to_value=_item_to_hmac_key_metadata,
+ max_results=max_results,
+ extra_params=extra_params,
+ )
+
+ def get_hmac_key_metadata(
+ self, access_id, project_id=None, user_project=None, timeout=_DEFAULT_TIMEOUT
+ ):
+ """Return a metadata instance for the given HMAC key.
+
+ :type access_id: str
+ :param access_id: Unique ID of an existing key.
+
+ :type project_id: str
+ :param project_id: (Optional) Project ID of an existing key.
+ Defaults to client's project.
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :type user_project: str
+ :param user_project: (Optional) This parameter is currently ignored.
+ """
+ metadata = HMACKeyMetadata(self, access_id, project_id, user_project)
+ metadata.reload(timeout=timeout) # raises NotFound for missing key
+ return metadata
+
+ def generate_signed_post_policy_v4(
+ self,
+ bucket_name,
+ blob_name,
+ expiration,
+ conditions=None,
+ fields=None,
+ credentials=None,
+ virtual_hosted_style=False,
+ bucket_bound_hostname=None,
+ scheme="http",
+ service_account_email=None,
+ access_token=None,
+ ):
+ """Generate a V4 signed policy object.
+
+ .. note::
+
+ Assumes ``credentials`` implements the
+ :class:`google.auth.credentials.Signing` interface. Also assumes
+ ``credentials`` has a ``service_account_email`` property which
+ identifies the credentials.
+
+        The generated policy object allows a user to upload objects with a
+        POST request.
+
+ :type bucket_name: str
+ :param bucket_name: Bucket name.
+
+ :type blob_name: str
+ :param blob_name: Object name.
+
+ :type expiration: Union[Integer, datetime.datetime, datetime.timedelta]
+ :param expiration: Policy expiration time. If a ``datetime`` instance is
+ passed without an explicit ``tzinfo`` set, it will be
+ assumed to be ``UTC``.
+
+ :type conditions: list
+ :param conditions: (Optional) List of POST policy conditions, which are
+ used to restrict what is allowed in the request.
+
+ :type fields: dict
+ :param fields: (Optional) Additional elements to include into request.
+
+ :type credentials: :class:`google.auth.credentials.Signing`
+ :param credentials: (Optional) Credentials object with an associated private
+ key to sign text.
+
+ :type virtual_hosted_style: bool
+        :param virtual_hosted_style:
+            (Optional) If True, construct the URL relative to the bucket
+            virtual hostname, e.g., '<bucket-name>.storage.googleapis.com'.
+
+ :type bucket_bound_hostname: str
+ :param bucket_bound_hostname:
+ (Optional) If passed, construct the URL relative to the bucket-bound hostname.
+ Value can be bare or with a scheme, e.g., 'example.com' or 'http://example.com'.
+ See: https://cloud.google.com/storage/docs/request-endpoints#cname
+
+ :type scheme: str
+ :param scheme:
+ (Optional) If ``bucket_bound_hostname`` is passed as a bare hostname, use
+ this value as a scheme. ``https`` will work only when using a CDN.
+ Defaults to ``"http"``.
+
+ :type service_account_email: str
+ :param service_account_email: (Optional) E-mail address of the service account.
+
+ :type access_token: str
+ :param access_token: (Optional) Access token for a service account.
+
+ :rtype: dict
+ :returns: Signed POST policy.
+
+ Example:
+ Generate signed POST policy and upload a file.
+
+        >>> import datetime
+        >>> import pytz
+        >>> import requests
+        >>> from google.cloud import storage
+        >>> client = storage.Client()
+        >>> tz = pytz.timezone('America/New_York')
+        >>> policy = client.generate_signed_post_policy_v4(
+        ...     "bucket-name",
+        ...     "blob-name",
+        ...     expiration=datetime.datetime(2020, 3, 17, tzinfo=tz),
+        ...     conditions=[
+        ...         ["content-length-range", 0, 255]
+        ...     ],
+        ...     fields={
+        ...         "x-goog-meta-hello": "world"
+        ...     },
+        ... )
+        >>> with open("file-to-upload", "rb") as f:
+        ...     files = {"file": ("file-to-upload", f)}
+        ...     requests.post(policy["url"], data=policy["fields"], files=files)
+ """
+ credentials = self._credentials if credentials is None else credentials
+ ensure_signed_credentials(credentials)
+
+ # prepare policy conditions and fields
+ timestamp, datestamp = get_v4_now_dtstamps()
+
+ x_goog_credential = "{email}/{datestamp}/auto/storage/goog4_request".format(
+ email=credentials.signer_email, datestamp=datestamp
+ )
+ required_conditions = [
+ {"bucket": bucket_name},
+ {"key": blob_name},
+ {"x-goog-date": timestamp},
+ {"x-goog-credential": x_goog_credential},
+ {"x-goog-algorithm": "GOOG4-RSA-SHA256"},
+ ]
+
+ conditions = conditions or []
+ policy_fields = {}
+ for key, value in sorted((fields or {}).items()):
+ if not key.startswith("x-ignore-"):
+ policy_fields[key] = value
+ conditions.append({key: value})
+
+ conditions += required_conditions
+
+ # calculate policy expiration time
+ now = _NOW()
+ if expiration is None:
+ expiration = now + datetime.timedelta(hours=1)
+
+ policy_expires = now + datetime.timedelta(
+ seconds=get_expiration_seconds_v4(expiration)
+ )
+
+ # encode policy for signing
+ policy = json.dumps(
+ collections.OrderedDict(
+ sorted(
+ {
+ "conditions": conditions,
+ "expiration": policy_expires.isoformat() + "Z",
+ }.items()
+ )
+ ),
+ separators=(",", ":"),
+ )
+ str_to_sign = base64.b64encode(policy.encode("utf-8"))
+
+ # sign the policy and get its cryptographic signature
+ if access_token and service_account_email:
+ signature = _sign_message(str_to_sign, access_token, service_account_email)
+ signature_bytes = base64.b64decode(signature)
+ else:
+ signature_bytes = credentials.sign_bytes(str_to_sign)
+
+ # get hexadecimal representation of the signature
+ signature = binascii.hexlify(signature_bytes).decode("utf-8")
+
+ policy_fields.update(
+ {
+ "key": blob_name,
+ "x-goog-algorithm": "GOOG4-RSA-SHA256",
+ "x-goog-credential": x_goog_credential,
+ "x-goog-date": timestamp,
+ "x-goog-signature": signature,
+ "policy": str_to_sign,
+ }
+ )
+ # designate URL
+ if virtual_hosted_style:
+ url = "https://{}.storage.googleapis.com/".format(bucket_name)
+ elif bucket_bound_hostname:
+ url = _bucket_bound_hostname_url(bucket_bound_hostname, scheme)
+ else:
+ url = "https://storage.googleapis.com/{}/".format(bucket_name)
+
+ return {"url": url, "fields": policy_fields}
+
+
+def _item_to_bucket(iterator, item):
+ """Convert a JSON bucket to the native object.
+
+ :type iterator: :class:`~google.api_core.page_iterator.Iterator`
+ :param iterator: The iterator that has retrieved the item.
+
+ :type item: dict
+ :param item: An item to be converted to a bucket.
+
+ :rtype: :class:`.Bucket`
+ :returns: The next bucket in the page.
+ """
+ name = item.get("name")
+ bucket = Bucket(iterator.client, name)
+ bucket._set_properties(item)
+ return bucket
+
+
+def _item_to_hmac_key_metadata(iterator, item):
+ """Convert a JSON key metadata resource to the native object.
+
+ :type iterator: :class:`~google.api_core.page_iterator.Iterator`
+ :param iterator: The iterator that has retrieved the item.
+
+ :type item: dict
+ :param item: An item to be converted to a key metadata instance.
+
+ :rtype: :class:`~google.cloud.storage.hmac_key.HMACKeyMetadata`
+ :returns: The next key metadata instance in the page.
+ """
+ metadata = HMACKeyMetadata(iterator.client)
+ metadata._properties = item
+ return metadata
diff --git a/venv/Lib/site-packages/google/cloud/storage/constants.py b/venv/Lib/site-packages/google/cloud/storage/constants.py
new file mode 100644
index 000000000..621508669
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/storage/constants.py
@@ -0,0 +1,98 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Constants used acros google.cloud.storage modules."""
+
+# Storage classes
+
+STANDARD_STORAGE_CLASS = "STANDARD"
+"""Storage class for objects accessed more than once per month.
+
+See: https://cloud.google.com/storage/docs/storage-classes
+"""
+
+NEARLINE_STORAGE_CLASS = "NEARLINE"
+"""Storage class for objects accessed at most once per month.
+
+See: https://cloud.google.com/storage/docs/storage-classes
+"""
+
+COLDLINE_STORAGE_CLASS = "COLDLINE"
+"""Storage class for objects accessed at most once per year.
+
+See: https://cloud.google.com/storage/docs/storage-classes
+"""
+
+ARCHIVE_STORAGE_CLASS = "ARCHIVE"
+"""Storage class for objects accessed less frequently than once per year.
+
+See: https://cloud.google.com/storage/docs/storage-classes
+"""
+
+MULTI_REGIONAL_LEGACY_STORAGE_CLASS = "MULTI_REGIONAL"
+"""Legacy storage class.
+
+Alias for :attr:`STANDARD_STORAGE_CLASS`.
+
+Can only be used for objects in buckets whose
+:attr:`~google.cloud.storage.bucket.Bucket.location_type` is
+:attr:`~google.cloud.storage.bucket.Bucket.MULTI_REGION_LOCATION_TYPE`.
+
+See: https://cloud.google.com/storage/docs/storage-classes
+"""
+
+REGIONAL_LEGACY_STORAGE_CLASS = "REGIONAL"
+"""Legacy storage class.
+
+Alias for :attr:`STANDARD_STORAGE_CLASS`.
+
+Can only be used for objects in buckets whose
+:attr:`~google.cloud.storage.bucket.Bucket.location_type` is
+:attr:`~google.cloud.storage.bucket.Bucket.REGION_LOCATION_TYPE`.
+
+See: https://cloud.google.com/storage/docs/storage-classes
+"""
+
+DURABLE_REDUCED_AVAILABILITY_LEGACY_STORAGE_CLASS = "DURABLE_REDUCED_AVAILABILITY"
+"""Legacy storage class.
+
+Similar to :attr:`NEARLINE_STORAGE_CLASS`.
+"""
+
+
+# Location types
+
+MULTI_REGION_LOCATION_TYPE = "multi-region"
+"""Location type: data will be replicated across regions in a multi-region.
+
+Provides highest availability across largest area.
+"""
+
+REGION_LOCATION_TYPE = "region"
+"""Location type: data will be stored within a single region.
+
+Provides lowest latency within a single region.
+"""
+
+DUAL_REGION_LOCATION_TYPE = "dual-region"
+"""Location type: data will be stored within two primary regions.
+
+Provides high availability and low latency across two regions.
+"""
+
+
+# Internal constants
+
+_DEFAULT_TIMEOUT = 60 # in seconds
+"""The default request timeout in seconds if a timeout is not explicitly given.
+"""
diff --git a/venv/Lib/site-packages/google/cloud/storage/hmac_key.py b/venv/Lib/site-packages/google/cloud/storage/hmac_key.py
new file mode 100644
index 000000000..d9c451c68
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/storage/hmac_key.py
@@ -0,0 +1,287 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from google.cloud.exceptions import NotFound
+from google.cloud._helpers import _rfc3339_to_datetime
+
+from google.cloud.storage.constants import _DEFAULT_TIMEOUT
+
+
+class HMACKeyMetadata(object):
+ """Metadata about an HMAC service account key withn Cloud Storage.
+
+ :type client: :class:`~google.cloud.stoage.client.Client`
+ :param client: client associated with the key metadata.
+
+ :type access_id: str
+ :param access_id: (Optional) Unique ID of an existing key.
+
+ :type project_id: str
+ :param project_id: (Optional) Project ID of an existing key.
+ Defaults to client's project.
+
+ :type user_project: str
+ :param user_project: (Optional) This parameter is currently ignored.
+ """
+
+ ACTIVE_STATE = "ACTIVE"
+ """Key is active, and may be used to sign requests."""
+ INACTIVE_STATE = "INACTIVE"
+ """Key is inactive, and may not be used to sign requests.
+
+ It can be re-activated via :meth:`update`.
+ """
+ DELETED_STATE = "DELETED"
+ """Key is deleted. It cannot be re-activated."""
+
+ _SETTABLE_STATES = (ACTIVE_STATE, INACTIVE_STATE)
+
+ def __init__(self, client, access_id=None, project_id=None, user_project=None):
+ self._client = client
+ self._properties = {}
+
+ if access_id is not None:
+ self._properties["accessId"] = access_id
+
+ if project_id is not None:
+ self._properties["projectId"] = project_id
+
+ self._user_project = user_project
+
+ def __eq__(self, other):
+ if not isinstance(other, self.__class__):
+ return NotImplemented
+
+ return self._client == other._client and self.access_id == other.access_id
+
+ def __hash__(self):
+ return hash(self._client) + hash(self.access_id)
+
+ @property
+ def access_id(self):
+ """Access ID of the key.
+
+ :rtype: str or None
+ :returns: unique identifier of the key within a project.
+ """
+ return self._properties.get("accessId")
+
+ @property
+ def etag(self):
+ """ETag identifying the version of the key metadata.
+
+ :rtype: str or None
+ :returns: ETag for the version of the key's metadata.
+ """
+ return self._properties.get("etag")
+
+ @property
+ def id(self):
+ """ID of the key, including the Project ID and the Access ID.
+
+ :rtype: str or None
+ :returns: ID of the key.
+ """
+ return self._properties.get("id")
+
+ @property
+ def project(self):
+ """Project ID associated with the key.
+
+ :rtype: str or None
+        :returns: project identifier for the key.
+ """
+ return self._properties.get("projectId")
+
+ @property
+ def service_account_email(self):
+ """Service account e-mail address associated with the key.
+
+ :rtype: str or None
+ :returns: e-mail address for the service account which created the key.
+ """
+ return self._properties.get("serviceAccountEmail")
+
+ @property
+ def state(self):
+ """Get / set key's state.
+
+ One of:
+ - ``ACTIVE``
+ - ``INACTIVE``
+ - ``DELETED``
+
+ :rtype: str or None
+ :returns: key's current state.
+ """
+ return self._properties.get("state")
+
+ @state.setter
+ def state(self, value):
+ if value not in self._SETTABLE_STATES:
+ raise ValueError(
+ "State may only be set to one of: {}".format(
+ ", ".join(self._SETTABLE_STATES)
+ )
+ )
+
+ self._properties["state"] = value
+
+ @property
+ def time_created(self):
+ """Retrieve the timestamp at which the HMAC key was created.
+
+ :rtype: :class:`datetime.datetime` or ``NoneType``
+        :returns: Datetime object parsed from RFC3339 valid timestamp, or
+                  ``None`` if the key's metadata has not been loaded
+                  from the server.
+ """
+ value = self._properties.get("timeCreated")
+ if value is not None:
+ return _rfc3339_to_datetime(value)
+
+ @property
+ def updated(self):
+ """Retrieve the timestamp at which the HMAC key was created.
+
+ :rtype: :class:`datetime.datetime` or ``NoneType``
+ :returns: Datetime object parsed from RFC3339 valid timestamp, or
+ ``None`` if the bucket's resource has not been loaded
+ from the server.
+ """
+ value = self._properties.get("updated")
+ if value is not None:
+ return _rfc3339_to_datetime(value)
+
+ @property
+ def path(self):
+ """Resource path for the metadata's key."""
+
+ if self.access_id is None:
+ raise ValueError("No 'access_id' set.")
+
+ project = self.project
+ if project is None:
+ project = self._client.project
+
+ return "/projects/{}/hmacKeys/{}".format(project, self.access_id)
+
+ @property
+ def user_project(self):
+ """Project ID to be billed for API requests made via this bucket.
+
+ This property is currently ignored by the server.
+
+ :rtype: str
+ """
+ return self._user_project
+
+ def exists(self, timeout=_DEFAULT_TIMEOUT):
+ """Determine whether or not the key for this metadata exists.
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :rtype: bool
+ :returns: True if the key exists in Cloud Storage.
+ """
+ try:
+ qs_params = {}
+
+ if self.user_project is not None:
+ qs_params["userProject"] = self.user_project
+
+ self._client._connection.api_request(
+ method="GET", path=self.path, query_params=qs_params, timeout=timeout
+ )
+ except NotFound:
+ return False
+ else:
+ return True
+
+ def reload(self, timeout=_DEFAULT_TIMEOUT):
+ """Reload properties from Cloud Storage.
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :raises :class:`~google.api_core.exceptions.NotFound`:
+ if the key does not exist on the back-end.
+ """
+ qs_params = {}
+
+ if self.user_project is not None:
+ qs_params["userProject"] = self.user_project
+
+ self._properties = self._client._connection.api_request(
+ method="GET", path=self.path, query_params=qs_params, timeout=timeout
+ )
+
+ def update(self, timeout=_DEFAULT_TIMEOUT):
+ """Save writable properties to Cloud Storage.
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :raises :class:`~google.api_core.exceptions.NotFound`:
+ if the key does not exist on the back-end.
+ """
+ qs_params = {}
+ if self.user_project is not None:
+ qs_params["userProject"] = self.user_project
+
+ payload = {"state": self.state}
+ self._properties = self._client._connection.api_request(
+ method="PUT",
+ path=self.path,
+ data=payload,
+ query_params=qs_params,
+ timeout=timeout,
+ )
+
+ def delete(self, timeout=_DEFAULT_TIMEOUT):
+ """Delete the key from Cloud Storage.
+
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :raises :class:`~google.api_core.exceptions.NotFound`:
+ if the key does not exist on the back-end.
+ """
+ if self.state != self.INACTIVE_STATE:
+ raise ValueError("Cannot delete key if not in 'INACTIVE' state.")
+
+ qs_params = {}
+ if self.user_project is not None:
+ qs_params["userProject"] = self.user_project
+
+ self._client._connection.api_request(
+ method="DELETE", path=self.path, query_params=qs_params, timeout=timeout
+ )
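+
+    # Illustrative lifecycle sketch (comment only): a key must be made
+    # INACTIVE before it can be deleted, mirroring the check above; the
+    # access ID and ``client`` are assumed placeholders.
+    #
+    #   metadata = client.get_hmac_key_metadata("GOOG1EXAMPLEACCESSID")
+    #   metadata.state = HMACKeyMetadata.INACTIVE_STATE
+    #   metadata.update()
+    #   metadata.delete()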
diff --git a/venv/Lib/site-packages/google/cloud/storage/iam.py b/venv/Lib/site-packages/google/cloud/storage/iam.py
new file mode 100644
index 000000000..36c7412b8
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/storage/iam.py
@@ -0,0 +1,86 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Storage API IAM policy definitions
+
+For allowed roles / permissions, see:
+https://cloud.google.com/storage/docs/access-control/iam
+"""
+
+# Storage-specific IAM roles
+
+STORAGE_OBJECT_CREATOR_ROLE = "roles/storage.objectCreator"
+"""Role implying rights to create objects, but not delete or overwrite them."""
+
+STORAGE_OBJECT_VIEWER_ROLE = "roles/storage.objectViewer"
+"""Role implying rights to view object properties, excluding ACLs."""
+
+STORAGE_OBJECT_ADMIN_ROLE = "roles/storage.objectAdmin"
+"""Role implying full control of objects."""
+
+STORAGE_ADMIN_ROLE = "roles/storage.admin"
+"""Role implying full control of objects and buckets."""
+
+STORAGE_VIEWER_ROLE = "Viewer"
+"""Can list buckets."""
+
+STORAGE_EDITOR_ROLE = "Editor"
+"""Can create, list, and delete buckets."""
+
+STORAGE_OWNER_ROLE = "Owners"
+"""Can create, list, and delete buckets."""
+
+
+# Storage-specific permissions
+
+STORAGE_BUCKETS_CREATE = "storage.buckets.create"
+"""Permission: create buckets."""
+
+STORAGE_BUCKETS_DELETE = "storage.buckets.delete"
+"""Permission: delete buckets."""
+
+STORAGE_BUCKETS_GET = "storage.buckets.get"
+"""Permission: read bucket metadata, excluding ACLs."""
+
+STORAGE_BUCKETS_GET_IAM_POLICY = "storage.buckets.getIamPolicy"
+"""Permission: read bucket ACLs."""
+
+STORAGE_BUCKETS_LIST = "storage.buckets.list"
+"""Permission: list buckets."""
+
+STORAGE_BUCKETS_SET_IAM_POLICY = "storage.buckets.setIamPolicy"
+"""Permission: update bucket ACLs."""
+
+STORAGE_BUCKETS_UPDATE = "storage.buckets.list"
+"""Permission: update buckets, excluding ACLS."""
+
+STORAGE_OBJECTS_CREATE = "storage.objects.create"
+"""Permission: add new objects to a bucket."""
+
+STORAGE_OBJECTS_DELETE = "storage.objects.delete"
+"""Permission: delete objects."""
+
+STORAGE_OBJECTS_GET = "storage.objects.get"
+"""Permission: read object data / metadata, excluding ACLs."""
+
+STORAGE_OBJECTS_GET_IAM_POLICY = "storage.objects.getIamPolicy"
+"""Permission: read object ACLs."""
+
+STORAGE_OBJECTS_LIST = "storage.objects.list"
+"""Permission: list objects in a bucket."""
+
+STORAGE_OBJECTS_SET_IAM_POLICY = "storage.objects.setIamPolicy"
+"""Permission: update object ACLs."""
+
+STORAGE_OBJECTS_UPDATE = "storage.objects.update"
+"""Permission: update object metadat, excluding ACLs."""
diff --git a/venv/Lib/site-packages/google/cloud/storage/notification.py b/venv/Lib/site-packages/google/cloud/storage/notification.py
new file mode 100644
index 000000000..434a44dd1
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/storage/notification.py
@@ -0,0 +1,426 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Support for bucket notification resources."""
+
+import re
+
+from google.api_core.exceptions import NotFound
+
+from google.cloud.storage.constants import _DEFAULT_TIMEOUT
+
+
+OBJECT_FINALIZE_EVENT_TYPE = "OBJECT_FINALIZE"
+OBJECT_METADATA_UPDATE_EVENT_TYPE = "OBJECT_METADATA_UPDATE"
+OBJECT_DELETE_EVENT_TYPE = "OBJECT_DELETE"
+OBJECT_ARCHIVE_EVENT_TYPE = "OBJECT_ARCHIVE"
+
+JSON_API_V1_PAYLOAD_FORMAT = "JSON_API_V1"
+NONE_PAYLOAD_FORMAT = "NONE"
+
+_TOPIC_REF_FMT = "//pubsub.googleapis.com/projects/{}/topics/{}"
+_PROJECT_PATTERN = r"(?P[a-z][a-z0-9-]{4,28}[a-z0-9])"
+_TOPIC_NAME_PATTERN = r"(?P[A-Za-z](\w|[-_.~+%])+)"
+_TOPIC_REF_PATTERN = _TOPIC_REF_FMT.format(_PROJECT_PATTERN, _TOPIC_NAME_PATTERN)
+_TOPIC_REF_RE = re.compile(_TOPIC_REF_PATTERN)
+_BAD_TOPIC = (
+ "Resource has invalid topic: {}; see "
+ "https://cloud.google.com/storage/docs/json_api/v1/"
+ "notifications/insert#topic"
+)
+
+
+class BucketNotification(object):
+ """Represent a single notification resource for a bucket.
+
+ See: https://cloud.google.com/storage/docs/json_api/v1/notifications
+
+ :type bucket: :class:`google.cloud.storage.bucket.Bucket`
+ :param bucket: Bucket to which the notification is bound.
+
+ :type topic_name: str
+ :param topic_name:
+ (Optional) Topic name to which notifications are published.
+
+ :type topic_project: str
+ :param topic_project:
+ (Optional) Project ID of topic to which notifications are published.
+ If not passed, uses the project ID of the bucket's client.
+
+ :type custom_attributes: dict
+ :param custom_attributes:
+ (Optional) Additional attributes passed with notification events.
+
+ :type event_types: list(str)
+ :param event_types:
+ (Optional) Event types for which notification events are published.
+
+ :type blob_name_prefix: str
+ :param blob_name_prefix:
+ (Optional) Prefix of blob names for which notification events are
+ published.
+
+ :type payload_format: str
+ :param payload_format:
+ (Optional) Format of payload for notification events.
+
+ :type notification_id: str
+ :param notification_id:
+ (Optional) The ID of the notification.
+ """
+
+ def __init__(
+ self,
+ bucket,
+ topic_name=None,
+ topic_project=None,
+ custom_attributes=None,
+ event_types=None,
+ blob_name_prefix=None,
+ payload_format=NONE_PAYLOAD_FORMAT,
+ notification_id=None,
+ ):
+ self._bucket = bucket
+ self._topic_name = topic_name
+
+ if topic_project is None:
+ topic_project = bucket.client.project
+
+ if topic_project is None:
+ raise ValueError("Client project not set: pass an explicit topic_project.")
+
+ self._topic_project = topic_project
+
+ self._properties = {}
+
+ if custom_attributes is not None:
+ self._properties["custom_attributes"] = custom_attributes
+
+ if event_types is not None:
+ self._properties["event_types"] = event_types
+
+ if blob_name_prefix is not None:
+ self._properties["object_name_prefix"] = blob_name_prefix
+
+ if notification_id is not None:
+ self._properties["id"] = notification_id
+
+ self._properties["payload_format"] = payload_format
+
+ @classmethod
+ def from_api_repr(cls, resource, bucket):
+ """Construct an instance from the JSON repr returned by the server.
+
+ See: https://cloud.google.com/storage/docs/json_api/v1/notifications
+
+ :type resource: dict
+ :param resource: JSON repr of the notification
+
+ :type bucket: :class:`google.cloud.storage.bucket.Bucket`
+ :param bucket: Bucket to which the notification is bound.
+
+ :rtype: :class:`BucketNotification`
+ :returns: the new notification instance
+ """
+ topic_path = resource.get("topic")
+ if topic_path is None:
+ raise ValueError("Resource has no topic")
+
+ name, project = _parse_topic_path(topic_path)
+ instance = cls(bucket, name, topic_project=project)
+ instance._properties = resource
+
+ return instance
+
+ @property
+ def bucket(self):
+ """Bucket to which the notification is bound."""
+ return self._bucket
+
+ @property
+ def topic_name(self):
+ """Topic name to which notifications are published."""
+ return self._topic_name
+
+ @property
+ def topic_project(self):
+ """Project ID of topic to which notifications are published.
+ """
+ return self._topic_project
+
+ @property
+ def custom_attributes(self):
+ """Custom attributes passed with notification events.
+ """
+ return self._properties.get("custom_attributes")
+
+ @property
+ def event_types(self):
+ """Event types for which notification events are published.
+ """
+ return self._properties.get("event_types")
+
+ @property
+ def blob_name_prefix(self):
+ """Prefix of blob names for which notification events are published.
+ """
+ return self._properties.get("object_name_prefix")
+
+ @property
+ def payload_format(self):
+ """Format of payload of notification events."""
+ return self._properties.get("payload_format")
+
+ @property
+ def notification_id(self):
+ """Server-set ID of notification resource."""
+ return self._properties.get("id")
+
+ @property
+ def etag(self):
+ """Server-set ETag of notification resource."""
+ return self._properties.get("etag")
+
+ @property
+ def self_link(self):
+ """Server-set ETag of notification resource."""
+ return self._properties.get("selfLink")
+
+ @property
+ def client(self):
+ """The client bound to this notfication."""
+ return self.bucket.client
+
+ @property
+ def path(self):
+ """The URL path for this notification."""
+ return "/b/{}/notificationConfigs/{}".format(
+ self.bucket.name, self.notification_id
+ )
+
+ def _require_client(self, client):
+ """Check client or verify over-ride.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: the client to use.
+
+ :rtype: :class:`google.cloud.storage.client.Client`
+ :returns: The client passed in or the bucket's client.
+ """
+ if client is None:
+ client = self.client
+ return client
+
+ def _set_properties(self, response):
+ """Helper for :meth:`reload`.
+
+ :type response: dict
+ :param response: resource mapping from server
+ """
+ self._properties.clear()
+ self._properties.update(response)
+
+ def create(self, client=None, timeout=_DEFAULT_TIMEOUT):
+ """API wrapper: create the notification.
+
+ See:
+ https://cloud.google.com/storage/docs/json_api/v1/notifications/insert
+
+ If :attr:`user_project` is set on the bucket, bills the API request
+ to that project.
+
+ :type client: :class:`~google.cloud.storage.client.Client`
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the notification's bucket.
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+ """
+ if self.notification_id is not None:
+ raise ValueError(
+ "Notification already exists w/ id: {}".format(self.notification_id)
+ )
+
+ client = self._require_client(client)
+
+ query_params = {}
+ if self.bucket.user_project is not None:
+ query_params["userProject"] = self.bucket.user_project
+
+ path = "/b/{}/notificationConfigs".format(self.bucket.name)
+ properties = self._properties.copy()
+ properties["topic"] = _TOPIC_REF_FMT.format(self.topic_project, self.topic_name)
+ self._properties = client._connection.api_request(
+ method="POST",
+ path=path,
+ query_params=query_params,
+ data=properties,
+ timeout=timeout,
+ )
+
+ def exists(self, client=None, timeout=_DEFAULT_TIMEOUT):
+ """Test whether this notification exists.
+
+ See:
+ https://cloud.google.com/storage/docs/json_api/v1/notifications/get
+
+ If :attr:`user_project` is set on the bucket, bills the API request
+ to that project.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the current bucket.
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :rtype: bool
+ :returns: True, if the notification exists, else False.
+ :raises ValueError: if the notification has no ID.
+ """
+ if self.notification_id is None:
+ raise ValueError("Notification not intialized by server")
+
+ client = self._require_client(client)
+
+ query_params = {}
+ if self.bucket.user_project is not None:
+ query_params["userProject"] = self.bucket.user_project
+
+ try:
+ client._connection.api_request(
+ method="GET", path=self.path, query_params=query_params, timeout=timeout
+ )
+ except NotFound:
+ return False
+ else:
+ return True
+
+ def reload(self, client=None, timeout=_DEFAULT_TIMEOUT):
+ """Update this notification from the server configuration.
+
+ See:
+ https://cloud.google.com/storage/docs/json_api/v1/notifications/get
+
+ If :attr:`user_project` is set on the bucket, bills the API request
+ to that project.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the current bucket.
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :raises ValueError: if the notification has no ID.
+ """
+ if self.notification_id is None:
+ raise ValueError("Notification not intialized by server")
+
+ client = self._require_client(client)
+
+ query_params = {}
+ if self.bucket.user_project is not None:
+ query_params["userProject"] = self.bucket.user_project
+
+ response = client._connection.api_request(
+ method="GET", path=self.path, query_params=query_params, timeout=timeout
+ )
+ self._set_properties(response)
+
+ def delete(self, client=None, timeout=_DEFAULT_TIMEOUT):
+ """Delete this notification.
+
+ See:
+ https://cloud.google.com/storage/docs/json_api/v1/notifications/delete
+
+ If :attr:`user_project` is set on the bucket, bills the API request
+ to that project.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the current bucket.
+ :type timeout: float or tuple
+ :param timeout: (Optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ :raises: :class:`google.api_core.exceptions.NotFound`:
+ if the notification does not exist.
+ :raises ValueError: if the notification has no ID.
+ """
+ if self.notification_id is None:
+ raise ValueError("Notification not intialized by server")
+
+ client = self._require_client(client)
+
+ query_params = {}
+ if self.bucket.user_project is not None:
+ query_params["userProject"] = self.bucket.user_project
+
+ client._connection.api_request(
+ method="DELETE", path=self.path, query_params=query_params, timeout=timeout
+ )
+
+
+def _parse_topic_path(topic_path):
+ """Verify that a topic path is in the correct format.
+
+ .. _resource manager docs: https://cloud.google.com/resource-manager/\
+ reference/rest/v1beta1/projects#\
+ Project.FIELDS.project_id
+ .. _topic spec: https://cloud.google.com/storage/docs/json_api/v1/\
+ notifications/insert#topic
+
+ Expected to be of the form:
+
+ //pubsub.googleapis.com/projects/{project}/topics/{topic}
+
+ where the ``project`` value must be "6 to 30 lowercase letters, digits,
+ or hyphens. It must start with a letter. Trailing hyphens are prohibited."
+ (see `resource manager docs`_) and ``topic`` must have length at least two,
+ must start with a letter and may only contain alphanumeric characters or
+    ``-``, ``_``, ``.``, ``~``, ``+`` or ``%`` (i.e. characters used for URL
+ encoding, see `topic spec`_).
+
+ Args:
+ topic_path (str): The topic path to be verified.
+
+ Returns:
+ Tuple[str, str]: The ``project`` and ``topic`` parsed from the
+ ``topic_path``.
+
+ Raises:
+ ValueError: If the topic path is invalid.
+ """
+ match = _TOPIC_REF_RE.match(topic_path)
+ if match is None:
+ raise ValueError(_BAD_TOPIC.format(topic_path))
+
+ return match.group("name"), match.group("project")
diff --git a/venv/Lib/site-packages/google/cloud/version.py b/venv/Lib/site-packages/google/cloud/version.py
new file mode 100644
index 000000000..1f57e5028
--- /dev/null
+++ b/venv/Lib/site-packages/google/cloud/version.py
@@ -0,0 +1,15 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+__version__ = "1.4.3"
diff --git a/venv/Lib/site-packages/google/oauth2/__init__.py b/venv/Lib/site-packages/google/oauth2/__init__.py
new file mode 100644
index 000000000..4fb71fd1a
--- /dev/null
+++ b/venv/Lib/site-packages/google/oauth2/__init__.py
@@ -0,0 +1,15 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Google OAuth 2.0 Library for Python."""
diff --git a/venv/Lib/site-packages/google/oauth2/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/google/oauth2/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..6b7547387
Binary files /dev/null and b/venv/Lib/site-packages/google/oauth2/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/oauth2/__pycache__/_client.cpython-36.pyc b/venv/Lib/site-packages/google/oauth2/__pycache__/_client.cpython-36.pyc
new file mode 100644
index 000000000..c43914d2d
Binary files /dev/null and b/venv/Lib/site-packages/google/oauth2/__pycache__/_client.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/oauth2/__pycache__/_client_async.cpython-36.pyc b/venv/Lib/site-packages/google/oauth2/__pycache__/_client_async.cpython-36.pyc
new file mode 100644
index 000000000..0f6faadc0
Binary files /dev/null and b/venv/Lib/site-packages/google/oauth2/__pycache__/_client_async.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/oauth2/__pycache__/_credentials_async.cpython-36.pyc b/venv/Lib/site-packages/google/oauth2/__pycache__/_credentials_async.cpython-36.pyc
new file mode 100644
index 000000000..dad413480
Binary files /dev/null and b/venv/Lib/site-packages/google/oauth2/__pycache__/_credentials_async.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/oauth2/__pycache__/_id_token_async.cpython-36.pyc b/venv/Lib/site-packages/google/oauth2/__pycache__/_id_token_async.cpython-36.pyc
new file mode 100644
index 000000000..33a5d5fdb
Binary files /dev/null and b/venv/Lib/site-packages/google/oauth2/__pycache__/_id_token_async.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/oauth2/__pycache__/_service_account_async.cpython-36.pyc b/venv/Lib/site-packages/google/oauth2/__pycache__/_service_account_async.cpython-36.pyc
new file mode 100644
index 000000000..0dd9f6889
Binary files /dev/null and b/venv/Lib/site-packages/google/oauth2/__pycache__/_service_account_async.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/oauth2/__pycache__/credentials.cpython-36.pyc b/venv/Lib/site-packages/google/oauth2/__pycache__/credentials.cpython-36.pyc
new file mode 100644
index 000000000..3f9967756
Binary files /dev/null and b/venv/Lib/site-packages/google/oauth2/__pycache__/credentials.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/oauth2/__pycache__/id_token.cpython-36.pyc b/venv/Lib/site-packages/google/oauth2/__pycache__/id_token.cpython-36.pyc
new file mode 100644
index 000000000..9f3c4d8a4
Binary files /dev/null and b/venv/Lib/site-packages/google/oauth2/__pycache__/id_token.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/oauth2/__pycache__/service_account.cpython-36.pyc b/venv/Lib/site-packages/google/oauth2/__pycache__/service_account.cpython-36.pyc
new file mode 100644
index 000000000..b57d6292a
Binary files /dev/null and b/venv/Lib/site-packages/google/oauth2/__pycache__/service_account.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/oauth2/_client.py b/venv/Lib/site-packages/google/oauth2/_client.py
new file mode 100644
index 000000000..448716329
--- /dev/null
+++ b/venv/Lib/site-packages/google/oauth2/_client.py
@@ -0,0 +1,259 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""OAuth 2.0 client.
+
+This is a client for interacting with an OAuth 2.0 authorization server's
+token endpoint.
+
+For more information about the token endpoint, see
+`Section 3.2 of rfc6749`_
+
+.. _Section 3.2 of rfc6749: https://tools.ietf.org/html/rfc6749#section-3.2
+"""
+
+import datetime
+import json
+
+import six
+from six.moves import http_client
+from six.moves import urllib
+
+from google.auth import _helpers
+from google.auth import exceptions
+from google.auth import jwt
+
+_URLENCODED_CONTENT_TYPE = "application/x-www-form-urlencoded"
+_JWT_GRANT_TYPE = "urn:ietf:params:oauth:grant-type:jwt-bearer"
+_REFRESH_GRANT_TYPE = "refresh_token"
+
+
+def _handle_error_response(response_body):
+ """"Translates an error response into an exception.
+
+ Args:
+ response_body (str): The decoded response data.
+
+ Raises:
+ google.auth.exceptions.RefreshError
+ """
+ try:
+ error_data = json.loads(response_body)
+ error_details = "{}: {}".format(
+ error_data["error"], error_data.get("error_description")
+ )
+ # If no details could be extracted, use the response data.
+ except (KeyError, ValueError):
+ error_details = response_body
+
+ raise exceptions.RefreshError(error_details, response_body)
+
+
+def _parse_expiry(response_data):
+ """Parses the expiry field from a response into a datetime.
+
+ Args:
+ response_data (Mapping): The JSON-parsed response data.
+
+ Returns:
+ Optional[datetime]: The expiration or ``None`` if no expiration was
+ specified.
+ """
+ expires_in = response_data.get("expires_in", None)
+
+ if expires_in is not None:
+ return _helpers.utcnow() + datetime.timedelta(seconds=expires_in)
+ else:
+ return None
+
+
+def _token_endpoint_request(request, token_uri, body):
+ """Makes a request to the OAuth 2.0 authorization server's token endpoint.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+ token_uri (str): The OAuth 2.0 authorizations server's token endpoint
+ URI.
+ body (Mapping[str, str]): The parameters to send in the request body.
+
+ Returns:
+ Mapping[str, str]: The JSON-decoded response data.
+
+ Raises:
+ google.auth.exceptions.RefreshError: If the token endpoint returned
+ an error.
+ """
+ body = urllib.parse.urlencode(body).encode("utf-8")
+ headers = {"content-type": _URLENCODED_CONTENT_TYPE}
+
+ retry = 0
+    # Retry the token fetch at most once more (two attempts in total) if the
+    # server reports an "internal_failure" error.
+ while True:
+ response = request(method="POST", url=token_uri, headers=headers, body=body)
+ response_body = (
+ response.data.decode("utf-8")
+ if hasattr(response.data, "decode")
+ else response.data
+ )
+ response_data = json.loads(response_body)
+
+ if response.status == http_client.OK:
+ break
+ else:
+ error_desc = response_data.get("error_description") or ""
+ error_code = response_data.get("error") or ""
+ if (
+ any(e == "internal_failure" for e in (error_code, error_desc))
+ and retry < 1
+ ):
+ retry += 1
+ continue
+ _handle_error_response(response_body)
+
+ return response_data
+
+
+def jwt_grant(request, token_uri, assertion):
+ """Implements the JWT Profile for OAuth 2.0 Authorization Grants.
+
+ For more details, see `rfc7523 section 4`_.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+ token_uri (str): The OAuth 2.0 authorizations server's token endpoint
+ URI.
+ assertion (str): The OAuth 2.0 assertion.
+
+ Returns:
+ Tuple[str, Optional[datetime], Mapping[str, str]]: The access token,
+ expiration, and additional data returned by the token endpoint.
+
+ Raises:
+ google.auth.exceptions.RefreshError: If the token endpoint returned
+ an error.
+
+ .. _rfc7523 section 4: https://tools.ietf.org/html/rfc7523#section-4
+ """
+ body = {"assertion": assertion, "grant_type": _JWT_GRANT_TYPE}
+
+ response_data = _token_endpoint_request(request, token_uri, body)
+
+ try:
+ access_token = response_data["access_token"]
+ except KeyError as caught_exc:
+ new_exc = exceptions.RefreshError("No access token in response.", response_data)
+ six.raise_from(new_exc, caught_exc)
+
+ expiry = _parse_expiry(response_data)
+
+ return access_token, expiry, response_data
+
+
+def id_token_jwt_grant(request, token_uri, assertion):
+ """Implements the JWT Profile for OAuth 2.0 Authorization Grants, but
+ requests an OpenID Connect ID Token instead of an access token.
+
+ This is a variant on the standard JWT Profile that is currently unique
+ to Google. This was added for the benefit of authenticating to services
+ that require ID Tokens instead of access tokens or JWT bearer tokens.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+ token_uri (str): The OAuth 2.0 authorization server's token endpoint
+ URI.
+ assertion (str): JWT token signed by a service account. The token's
+ payload must include a ``target_audience`` claim.
+
+ Returns:
+ Tuple[str, Optional[datetime], Mapping[str, str]]:
+ The (encoded) Open ID Connect ID Token, expiration, and additional
+ data returned by the endpoint.
+
+ Raises:
+ google.auth.exceptions.RefreshError: If the token endpoint returned
+ an error.
+ """
+ body = {"assertion": assertion, "grant_type": _JWT_GRANT_TYPE}
+
+ response_data = _token_endpoint_request(request, token_uri, body)
+
+ try:
+ id_token = response_data["id_token"]
+ except KeyError as caught_exc:
+ new_exc = exceptions.RefreshError("No ID token in response.", response_data)
+ six.raise_from(new_exc, caught_exc)
+
+ payload = jwt.decode(id_token, verify=False)
+ expiry = datetime.datetime.utcfromtimestamp(payload["exp"])
+
+ return id_token, expiry, response_data
+
+
+def refresh_grant(
+ request, token_uri, refresh_token, client_id, client_secret, scopes=None
+):
+ """Implements the OAuth 2.0 refresh token grant.
+
+    For more details, see `rfc6749 section 6`_.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+ token_uri (str): The OAuth 2.0 authorizations server's token endpoint
+ URI.
+ refresh_token (str): The refresh token to use to get a new access
+ token.
+ client_id (str): The OAuth 2.0 application's client ID.
+        client_secret (str): The OAuth 2.0 application's client secret.
+ scopes (Optional(Sequence[str])): Scopes to request. If present, all
+ scopes must be authorized for the refresh token. Useful if refresh
+ token has a wild card scope (e.g.
+ 'https://www.googleapis.com/auth/any-api').
+
+ Returns:
+ Tuple[str, Optional[str], Optional[datetime], Mapping[str, str]]: The
+ access token, new refresh token, expiration, and additional data
+ returned by the token endpoint.
+
+ Raises:
+ google.auth.exceptions.RefreshError: If the token endpoint returned
+ an error.
+
+    .. _rfc6749 section 6: https://tools.ietf.org/html/rfc6749#section-6
+ """
+ body = {
+ "grant_type": _REFRESH_GRANT_TYPE,
+ "client_id": client_id,
+ "client_secret": client_secret,
+ "refresh_token": refresh_token,
+ }
+ if scopes:
+ body["scope"] = " ".join(scopes)
+
+ response_data = _token_endpoint_request(request, token_uri, body)
+
+ try:
+ access_token = response_data["access_token"]
+ except KeyError as caught_exc:
+ new_exc = exceptions.RefreshError("No access token in response.", response_data)
+ six.raise_from(new_exc, caught_exc)
+
+ refresh_token = response_data.get("refresh_token", refresh_token)
+ expiry = _parse_expiry(response_data)
+
+ return access_token, refresh_token, expiry, response_data
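+
+# Illustrative sketch (comment only): ``refresh_grant`` is normally driven by
+# a credentials object, but it can be called directly with a transport
+# request; the client ID, client secret, and refresh token below are
+# placeholders.
+#
+#   import google.auth.transport.requests
+#
+#   request = google.auth.transport.requests.Request()
+#   access_token, new_refresh_token, expiry, _ = refresh_grant(
+#       request,
+#       "https://oauth2.googleapis.com/token",
+#       "1//0example-refresh-token",
+#       "example-client-id.apps.googleusercontent.com",
+#       "example-client-secret",
+#   )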
diff --git a/venv/Lib/site-packages/google/oauth2/_client_async.py b/venv/Lib/site-packages/google/oauth2/_client_async.py
new file mode 100644
index 000000000..4817ea40e
--- /dev/null
+++ b/venv/Lib/site-packages/google/oauth2/_client_async.py
@@ -0,0 +1,264 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""OAuth 2.0 async client.
+
+This is a client for interacting with an OAuth 2.0 authorization server's
+token endpoint.
+
+For more information about the token endpoint, see
+`Section 3.2 of rfc6749`_
+
+.. _Section 3.2 of rfc6749: https://tools.ietf.org/html/rfc6749#section-3.2
+"""
+
+import datetime
+import json
+
+import six
+from six.moves import http_client
+from six.moves import urllib
+
+from google.auth import _helpers
+from google.auth import exceptions
+from google.auth import jwt
+from google.oauth2 import _client as client
+
+
+def _handle_error_response(response_body):
+ """"Translates an error response into an exception.
+
+ Args:
+ response_body (str): The decoded response data.
+
+ Raises:
+ google.auth.exceptions.RefreshError
+ """
+ try:
+ error_data = json.loads(response_body)
+ error_details = "{}: {}".format(
+ error_data["error"], error_data.get("error_description")
+ )
+ # If no details could be extracted, use the response data.
+ except (KeyError, ValueError):
+ error_details = response_body
+
+ raise exceptions.RefreshError(error_details, response_body)
+
+
+def _parse_expiry(response_data):
+ """Parses the expiry field from a response into a datetime.
+
+ Args:
+ response_data (Mapping): The JSON-parsed response data.
+
+ Returns:
+ Optional[datetime]: The expiration or ``None`` if no expiration was
+ specified.
+ """
+ expires_in = response_data.get("expires_in", None)
+
+ if expires_in is not None:
+ return _helpers.utcnow() + datetime.timedelta(seconds=expires_in)
+ else:
+ return None
+
+
+async def _token_endpoint_request(request, token_uri, body):
+ """Makes a request to the OAuth 2.0 authorization server's token endpoint.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+ token_uri (str): The OAuth 2.0 authorizations server's token endpoint
+ URI.
+ body (Mapping[str, str]): The parameters to send in the request body.
+
+ Returns:
+ Mapping[str, str]: The JSON-decoded response data.
+
+ Raises:
+ google.auth.exceptions.RefreshError: If the token endpoint returned
+ an error.
+ """
+ body = urllib.parse.urlencode(body).encode("utf-8")
+ headers = {"content-type": client._URLENCODED_CONTENT_TYPE}
+
+ retry = 0
+    # Retry the token fetch at most once more (two attempts in total) if the
+    # server reports an "internal_failure" error.
+ while True:
+
+ response = await request(
+ method="POST", url=token_uri, headers=headers, body=body
+ )
+
+ # Using data.read() resulted in zlib decompression errors. This may require future investigation.
+ response_body1 = await response.content()
+
+ response_body = (
+ response_body1.decode("utf-8")
+ if hasattr(response_body1, "decode")
+ else response_body1
+ )
+
+ response_data = json.loads(response_body)
+
+ if response.status == http_client.OK:
+ break
+ else:
+ error_desc = response_data.get("error_description") or ""
+ error_code = response_data.get("error") or ""
+ if (
+ any(e == "internal_failure" for e in (error_code, error_desc))
+ and retry < 1
+ ):
+ retry += 1
+ continue
+ _handle_error_response(response_body)
+
+ return response_data
+
+
+async def jwt_grant(request, token_uri, assertion):
+ """Implements the JWT Profile for OAuth 2.0 Authorization Grants.
+
+ For more details, see `rfc7523 section 4`_.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+ token_uri (str): The OAuth 2.0 authorizations server's token endpoint
+ URI.
+ assertion (str): The OAuth 2.0 assertion.
+
+ Returns:
+ Tuple[str, Optional[datetime], Mapping[str, str]]: The access token,
+ expiration, and additional data returned by the token endpoint.
+
+ Raises:
+ google.auth.exceptions.RefreshError: If the token endpoint returned
+ an error.
+
+ .. _rfc7523 section 4: https://tools.ietf.org/html/rfc7523#section-4
+ """
+ body = {"assertion": assertion, "grant_type": client._JWT_GRANT_TYPE}
+
+ response_data = await _token_endpoint_request(request, token_uri, body)
+
+ try:
+ access_token = response_data["access_token"]
+ except KeyError as caught_exc:
+ new_exc = exceptions.RefreshError("No access token in response.", response_data)
+ six.raise_from(new_exc, caught_exc)
+
+ expiry = _parse_expiry(response_data)
+
+ return access_token, expiry, response_data
+
+
+async def id_token_jwt_grant(request, token_uri, assertion):
+ """Implements the JWT Profile for OAuth 2.0 Authorization Grants, but
+ requests an OpenID Connect ID Token instead of an access token.
+
+ This is a variant on the standard JWT Profile that is currently unique
+ to Google. This was added for the benefit of authenticating to services
+ that require ID Tokens instead of access tokens or JWT bearer tokens.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+ token_uri (str): The OAuth 2.0 authorization server's token endpoint
+ URI.
+ assertion (str): JWT token signed by a service account. The token's
+ payload must include a ``target_audience`` claim.
+
+ Returns:
+ Tuple[str, Optional[datetime], Mapping[str, str]]:
+ The (encoded) Open ID Connect ID Token, expiration, and additional
+ data returned by the endpoint.
+
+ Raises:
+ google.auth.exceptions.RefreshError: If the token endpoint returned
+ an error.
+ """
+ body = {"assertion": assertion, "grant_type": client._JWT_GRANT_TYPE}
+
+ response_data = await _token_endpoint_request(request, token_uri, body)
+
+ try:
+ id_token = response_data["id_token"]
+ except KeyError as caught_exc:
+ new_exc = exceptions.RefreshError("No ID token in response.", response_data)
+ six.raise_from(new_exc, caught_exc)
+
+ payload = jwt.decode(id_token, verify=False)
+ expiry = datetime.datetime.utcfromtimestamp(payload["exp"])
+
+ return id_token, expiry, response_data
+
+
+async def refresh_grant(
+ request, token_uri, refresh_token, client_id, client_secret, scopes=None
+):
+ """Implements the OAuth 2.0 refresh token grant.
+
+    For more details, see `rfc6749 section 6`_.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+ token_uri (str): The OAuth 2.0 authorizations server's token endpoint
+ URI.
+ refresh_token (str): The refresh token to use to get a new access
+ token.
+ client_id (str): The OAuth 2.0 application's client ID.
+        client_secret (str): The OAuth 2.0 application's client secret.
+ scopes (Optional(Sequence[str])): Scopes to request. If present, all
+ scopes must be authorized for the refresh token. Useful if refresh
+ token has a wild card scope (e.g.
+ 'https://www.googleapis.com/auth/any-api').
+
+ Returns:
+ Tuple[str, Optional[str], Optional[datetime], Mapping[str, str]]: The
+ access token, new refresh token, expiration, and additional data
+ returned by the token endpoint.
+
+ Raises:
+ google.auth.exceptions.RefreshError: If the token endpoint returned
+ an error.
+
+    .. _rfc6749 section 6: https://tools.ietf.org/html/rfc6749#section-6
+ """
+ body = {
+ "grant_type": client._REFRESH_GRANT_TYPE,
+ "client_id": client_id,
+ "client_secret": client_secret,
+ "refresh_token": refresh_token,
+ }
+ if scopes:
+ body["scope"] = " ".join(scopes)
+
+ response_data = await _token_endpoint_request(request, token_uri, body)
+
+ try:
+ access_token = response_data["access_token"]
+ except KeyError as caught_exc:
+ new_exc = exceptions.RefreshError("No access token in response.", response_data)
+ six.raise_from(new_exc, caught_exc)
+
+ refresh_token = response_data.get("refresh_token", refresh_token)
+ expiry = _parse_expiry(response_data)
+
+ return access_token, refresh_token, expiry, response_data
diff --git a/venv/Lib/site-packages/google/oauth2/_credentials_async.py b/venv/Lib/site-packages/google/oauth2/_credentials_async.py
new file mode 100644
index 000000000..eb3e97c08
--- /dev/null
+++ b/venv/Lib/site-packages/google/oauth2/_credentials_async.py
@@ -0,0 +1,108 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""OAuth 2.0 Async Credentials.
+
+This module provides credentials based on OAuth 2.0 access and refresh tokens.
+These credentials usually access resources on behalf of a user (resource
+owner).
+
+Specifically, this is intended to use access tokens acquired using the
+`Authorization Code grant`_ and can refresh those tokens using a
+optional `refresh token`_.
+
+Obtaining the initial access and refresh token is outside of the scope of this
+module. Consult `rfc6749 section 4.1`_ for complete details on the
+Authorization Code grant flow.
+
+.. _Authorization Code grant: https://tools.ietf.org/html/rfc6749#section-1.3.1
+.. _refresh token: https://tools.ietf.org/html/rfc6749#section-6
+.. _rfc6749 section 4.1: https://tools.ietf.org/html/rfc6749#section-4.1
+"""
+
+from google.auth import _credentials_async as credentials
+from google.auth import _helpers
+from google.auth import exceptions
+from google.oauth2 import _client_async as _client
+from google.oauth2 import credentials as oauth2_credentials
+
+
+class Credentials(oauth2_credentials.Credentials):
+ """Credentials using OAuth 2.0 access and refresh tokens.
+
+ The credentials are considered immutable. If you want to modify the
+ quota project, use :meth:`with_quota_project` or ::
+
+        credentials = credentials.with_quota_project('myproject-123')
+ """
+
+ @_helpers.copy_docstring(credentials.Credentials)
+ async def refresh(self, request):
+ if (
+ self._refresh_token is None
+ or self._token_uri is None
+ or self._client_id is None
+ or self._client_secret is None
+ ):
+ raise exceptions.RefreshError(
+ "The credentials do not contain the necessary fields need to "
+ "refresh the access token. You must specify refresh_token, "
+ "token_uri, client_id, and client_secret."
+ )
+
+ (
+ access_token,
+ refresh_token,
+ expiry,
+ grant_response,
+ ) = await _client.refresh_grant(
+ request,
+ self._token_uri,
+ self._refresh_token,
+ self._client_id,
+ self._client_secret,
+ self._scopes,
+ )
+
+ self.token = access_token
+ self.expiry = expiry
+ self._refresh_token = refresh_token
+ self._id_token = grant_response.get("id_token")
+
+ if self._scopes and "scopes" in grant_response:
+ requested_scopes = frozenset(self._scopes)
+ granted_scopes = frozenset(grant_response["scopes"].split())
+ scopes_requested_but_not_granted = requested_scopes - granted_scopes
+ if scopes_requested_but_not_granted:
+ raise exceptions.RefreshError(
+ "Not all requested scopes were granted by the "
+ "authorization server, missing scopes {}.".format(
+ ", ".join(scopes_requested_but_not_granted)
+ )
+ )
+
+
+class UserAccessTokenCredentials(oauth2_credentials.UserAccessTokenCredentials):
+ """Access token credentials for user account.
+
+ Obtain the access token for a given user account or the current active
+ user account with the ``gcloud auth print-access-token`` command.
+
+ Args:
+ account (Optional[str]): Account to get the access token for. If not
+ specified, the current active account will be used.
+ quota_project_id (Optional[str]): The project ID used for quota
+ and billing.
+
+ """
diff --git a/venv/Lib/site-packages/google/oauth2/_id_token_async.py b/venv/Lib/site-packages/google/oauth2/_id_token_async.py
new file mode 100644
index 000000000..f5ef8baff
--- /dev/null
+++ b/venv/Lib/site-packages/google/oauth2/_id_token_async.py
@@ -0,0 +1,267 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Google ID Token helpers.
+
+Provides support for verifying `OpenID Connect ID Tokens`_, especially ones
+generated by Google infrastructure.
+
+To parse and verify an ID Token issued by Google's OAuth 2.0 authorization
+server use :func:`verify_oauth2_token`. To verify an ID Token issued by
+Firebase, use :func:`verify_firebase_token`.
+
+A general purpose ID Token verifier is available as :func:`verify_token`.
+
+Example::
+
+ from google.oauth2 import _id_token_async
+ from google.auth.transport import aiohttp_requests
+
+ request = aiohttp_requests.Request()
+
+ id_info = await _id_token_async.verify_oauth2_token(
+ token, request, 'my-client-id.example.com')
+
+ if id_info['iss'] != 'https://accounts.google.com':
+ raise ValueError('Wrong issuer.')
+
+ userid = id_info['sub']
+
+By default, this will re-fetch certificates for each verification. Because
+Google's public keys are only changed infrequently (on the order of once per
+day), you may wish to take advantage of caching to reduce latency and the
+potential for network errors. This can be accomplished using an external
+library like `CacheControl`_ to create a cache-aware
+:class:`google.auth.transport.Request`::
+
+ import cachecontrol
+ import google.auth.transport.requests
+ import requests
+
+ session = requests.session()
+ cached_session = cachecontrol.CacheControl(session)
+ request = google.auth.transport.requests.Request(session=cached_session)
+
+.. _OpenID Connect ID Tokens:
+ http://openid.net/specs/openid-connect-core-1_0.html#IDToken
+.. _CacheControl: https://cachecontrol.readthedocs.io
+"""
+
+import json
+import os
+
+import six
+from six.moves import http_client
+
+from google.auth import environment_vars
+from google.auth import exceptions
+from google.auth import jwt
+from google.auth.transport import requests
+from google.oauth2 import id_token as sync_id_token
+
+
+async def _fetch_certs(request, certs_url):
+ """Fetches certificates.
+
+    Google-style certificate endpoints return JSON in the format of
+ ``{'key id': 'x509 certificate'}``.
+
+ Args:
+ request (google.auth.transport.Request): The object used to make
+ HTTP requests. This must be an aiohttp request.
+ certs_url (str): The certificate endpoint URL.
+
+ Returns:
+ Mapping[str, str]: A mapping of public key ID to x.509 certificate
+ data.
+ """
+ response = await request(certs_url, method="GET")
+
+ if response.status != http_client.OK:
+ raise exceptions.TransportError(
+ "Could not fetch certificates at {}".format(certs_url)
+ )
+
+    data = await response.data.read()
+
+    # ``data`` is the raw response body; json.loads accepts bytes directly.
+    return json.loads(data)
+
+
+async def verify_token(
+ id_token, request, audience=None, certs_url=sync_id_token._GOOGLE_OAUTH2_CERTS_URL
+):
+ """Verifies an ID token and returns the decoded token.
+
+ Args:
+ id_token (Union[str, bytes]): The encoded token.
+ request (google.auth.transport.Request): The object used to make
+ HTTP requests. This must be an aiohttp request.
+ audience (str): The audience that this token is intended for. If None
+ then the audience is not verified.
+ certs_url (str): The URL that specifies the certificates to use to
+ verify the token. This URL should return JSON in the format of
+ ``{'key id': 'x509 certificate'}``.
+
+ Returns:
+ Mapping[str, Any]: The decoded token.
+ """
+ certs = await _fetch_certs(request, certs_url)
+
+ return jwt.decode(id_token, certs=certs, audience=audience)
+
+
+async def verify_oauth2_token(id_token, request, audience=None):
+ """Verifies an ID Token issued by Google's OAuth 2.0 authorization server.
+
+ Args:
+ id_token (Union[str, bytes]): The encoded token.
+ request (google.auth.transport.Request): The object used to make
+ HTTP requests. This must be an aiohttp request.
+ audience (str): The audience that this token is intended for. This is
+ typically your application's OAuth 2.0 client ID. If None then the
+ audience is not verified.
+
+ Returns:
+ Mapping[str, Any]: The decoded token.
+
+ Raises:
+ exceptions.GoogleAuthError: If the issuer is invalid.
+ """
+ idinfo = await verify_token(
+ id_token,
+ request,
+ audience=audience,
+ certs_url=sync_id_token._GOOGLE_OAUTH2_CERTS_URL,
+ )
+
+ if idinfo["iss"] not in sync_id_token._GOOGLE_ISSUERS:
+ raise exceptions.GoogleAuthError(
+ "Wrong issuer. 'iss' should be one of the following: {}".format(
+ sync_id_token._GOOGLE_ISSUERS
+ )
+ )
+
+ return idinfo
+
+
+async def verify_firebase_token(id_token, request, audience=None):
+ """Verifies an ID Token issued by Firebase Authentication.
+
+ Args:
+ id_token (Union[str, bytes]): The encoded token.
+ request (google.auth.transport.Request): The object used to make
+ HTTP requests. This must be an aiohttp request.
+ audience (str): The audience that this token is intended for. This is
+ typically your Firebase application ID. If None then the audience
+ is not verified.
+
+ Returns:
+ Mapping[str, Any]: The decoded token.
+ """
+ return await verify_token(
+ id_token,
+ request,
+ audience=audience,
+ certs_url=sync_id_token._GOOGLE_APIS_CERTS_URL,
+ )
+
+
+async def fetch_id_token(request, audience):
+ """Fetch the ID Token from the current environment.
+
+    This function acquires an ID token from the environment in the following
+    order:
+
+    1. If the application is running in Compute Engine, App Engine or Cloud Run,
+       then the ID token is obtained from the metadata server.
+    2. If the environment variable ``GOOGLE_APPLICATION_CREDENTIALS`` is set
+       to the path of a valid service account JSON file, then the ID token is
+       acquired using those service account credentials.
+    3. If the metadata server doesn't exist and no valid service account
+       credentials are found, :class:`~google.auth.exceptions.DefaultCredentialsError`
+       will be raised.
+
+ Example::
+
+ import google.oauth2._id_token_async
+ import google.auth.transport.aiohttp_requests
+
+ request = google.auth.transport.aiohttp_requests.Request()
+ target_audience = "https://pubsub.googleapis.com"
+
+ id_token = await google.oauth2._id_token_async.fetch_id_token(request, target_audience)
+
+ Args:
+ request (google.auth.transport.aiohttp_requests.Request): A callable used to make
+ HTTP requests.
+ audience (str): The audience that this ID token is intended for.
+
+ Returns:
+ str: The ID token.
+
+ Raises:
+ ~google.auth.exceptions.DefaultCredentialsError:
+ If metadata server doesn't exist and no valid service account
+ credentials are found.
+ """
+ # 1. First try to fetch ID token from metadata server if it exists. The code
+ # works for GAE and Cloud Run metadata server as well.
+ try:
+ from google.auth import compute_engine
+
+ request_new = requests.Request()
+ credentials = compute_engine.IDTokenCredentials(
+ request_new, audience, use_metadata_identity_endpoint=True
+ )
+ credentials.refresh(request_new)
+
+ return credentials.token
+
+ except (ImportError, exceptions.TransportError, exceptions.RefreshError):
+ pass
+
+ # 2. Try to use service account credentials to get ID token.
+
+ # Try to get credentials from the GOOGLE_APPLICATION_CREDENTIALS environment
+ # variable.
+ credentials_filename = os.environ.get(environment_vars.CREDENTIALS)
+ if not (
+ credentials_filename
+ and os.path.exists(credentials_filename)
+ and os.path.isfile(credentials_filename)
+ ):
+ raise exceptions.DefaultCredentialsError(
+ "Neither metadata server or valid service account credentials are found."
+ )
+
+ try:
+ with open(credentials_filename, "r") as f:
+ info = json.load(f)
+            credentials_content = (
+                info if info.get("type") == "service_account" else None
+            )
+
+ from google.oauth2 import _service_account_async as service_account
+
+ credentials = service_account.IDTokenCredentials.from_service_account_info(
+ credentials_content, target_audience=audience
+ )
+ except ValueError as caught_exc:
+ new_exc = exceptions.DefaultCredentialsError(
+ "Neither metadata server or valid service account credentials are found.",
+ caught_exc,
+ )
+ six.raise_from(new_exc, caught_exc)
+
+ await credentials.refresh(request)
+ return credentials.token
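+
+
+# A usage sketch (the audience URL is a placeholder; requires either a
+# metadata server or GOOGLE_APPLICATION_CREDENTIALS pointing at a service
+# account file, as described in the docstring above):
+#
+#   import asyncio
+#   from google.auth.transport import aiohttp_requests
+#
+#   async def main():
+#       request = aiohttp_requests.Request()
+#       token = await fetch_id_token(request, "https://pubsub.googleapis.com")
+#       print(token)
+#
+#   asyncio.get_event_loop().run_until_complete(main())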
diff --git a/venv/Lib/site-packages/google/oauth2/_service_account_async.py b/venv/Lib/site-packages/google/oauth2/_service_account_async.py
new file mode 100644
index 000000000..0a4e724a4
--- /dev/null
+++ b/venv/Lib/site-packages/google/oauth2/_service_account_async.py
@@ -0,0 +1,132 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Service Accounts: JSON Web Token (JWT) Profile for OAuth 2.0
+
+NOTE: This file adds asynchronous refresh methods to both credentials
+classes, so async/await syntax is required when calling these methods in
+asynchronous code. All other methods are inherited from the regular service
+account credentials module, google.oauth2.service_account.
+
+"""
+
+from google.auth import _credentials_async as credentials_async
+from google.auth import _helpers
+from google.oauth2 import _client_async
+from google.oauth2 import service_account
+
+
+class Credentials(
+ service_account.Credentials, credentials_async.Scoped, credentials_async.Credentials
+):
+ """Service account credentials
+
+ Usually, you'll create these credentials with one of the helper
+ constructors. To create credentials using a Google service account
+ private key JSON file::
+
+ credentials = _service_account_async.Credentials.from_service_account_file(
+ 'service-account.json')
+
+ Or if you already have the service account file loaded::
+
+ service_account_info = json.load(open('service_account.json'))
+ credentials = _service_account_async.Credentials.from_service_account_info(
+ service_account_info)
+
+ Both helper methods pass on arguments to the constructor, so you can
+ specify additional scopes and a subject if necessary::
+
+ credentials = _service_account_async.Credentials.from_service_account_file(
+ 'service-account.json',
+ scopes=['email'],
+ subject='user@example.com')
+
+ The credentials are considered immutable. If you want to modify the scopes
+ or the subject used for delegation, use :meth:`with_scopes` or
+ :meth:`with_subject`::
+
+ scoped_credentials = credentials.with_scopes(['email'])
+ delegated_credentials = credentials.with_subject(subject)
+
+ To add a quota project, use :meth:`with_quota_project`::
+
+ credentials = credentials.with_quota_project('myproject-123')
+ """
+
+ @_helpers.copy_docstring(credentials_async.Credentials)
+ async def refresh(self, request):
+ assertion = self._make_authorization_grant_assertion()
+ access_token, expiry, _ = await _client_async.jwt_grant(
+ request, self._token_uri, assertion
+ )
+ self.token = access_token
+ self.expiry = expiry
+
+
+class IDTokenCredentials(
+ service_account.IDTokenCredentials,
+ credentials_async.Signing,
+ credentials_async.Credentials,
+):
+ """Open ID Connect ID Token-based service account credentials.
+
+ These credentials are largely similar to :class:`.Credentials`, but instead
+ of using an OAuth 2.0 Access Token as the bearer token, they use an Open
+ ID Connect ID Token as the bearer token. These credentials are useful when
+ communicating to services that require ID Tokens and can not accept access
+ tokens.
+
+ Usually, you'll create these credentials with one of the helper
+ constructors. To create credentials using a Google service account
+ private key JSON file::
+
+ credentials = (
+ _service_account_async.IDTokenCredentials.from_service_account_file(
+ 'service-account.json'))
+
+ Or if you already have the service account file loaded::
+
+ service_account_info = json.load(open('service_account.json'))
+ credentials = (
+ _service_account_async.IDTokenCredentials.from_service_account_info(
+ service_account_info))
+
+ Both helper methods pass on arguments to the constructor, so you can
+ specify additional scopes and a subject if necessary::
+
+ credentials = (
+ _service_account_async.IDTokenCredentials.from_service_account_file(
+ 'service-account.json',
+ scopes=['email'],
+ subject='user@example.com'))
+`
+ The credentials are considered immutable. If you want to modify the scopes
+ or the subject used for delegation, use :meth:`with_scopes` or
+ :meth:`with_subject`::
+
+ scoped_credentials = credentials.with_scopes(['email'])
+ delegated_credentials = credentials.with_subject(subject)
+
+ """
+
+ @_helpers.copy_docstring(credentials_async.Credentials)
+ async def refresh(self, request):
+ assertion = self._make_authorization_grant_assertion()
+ access_token, expiry, _ = await _client_async.id_token_jwt_grant(
+ request, self._token_uri, assertion
+ )
+ self.token = access_token
+ self.expiry = expiry
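+
+
+# A usage sketch (the key file path and scope are placeholders; refresh must
+# be awaited because this module overrides it as a coroutine):
+#
+#   import asyncio
+#   from google.auth.transport import aiohttp_requests
+#
+#   async def main():
+#       creds = Credentials.from_service_account_file(
+#           "service-account.json",
+#           scopes=["https://www.googleapis.com/auth/cloud-platform"],
+#       )
+#       await creds.refresh(aiohttp_requests.Request())
+#       print(creds.token)
+#
+#   asyncio.get_event_loop().run_until_complete(main())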
diff --git a/venv/Lib/site-packages/google/oauth2/credentials.py b/venv/Lib/site-packages/google/oauth2/credentials.py
new file mode 100644
index 000000000..36b8f0cb7
--- /dev/null
+++ b/venv/Lib/site-packages/google/oauth2/credentials.py
@@ -0,0 +1,383 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""OAuth 2.0 Credentials.
+
+This module provides credentials based on OAuth 2.0 access and refresh tokens.
+These credentials usually access resources on behalf of a user (resource
+owner).
+
+Specifically, this is intended to use access tokens acquired using the
+`Authorization Code grant`_ and can refresh those tokens using an
+optional `refresh token`_.
+
+Obtaining the initial access and refresh token is outside of the scope of this
+module. Consult `rfc6749 section 4.1`_ for complete details on the
+Authorization Code grant flow.
+
+.. _Authorization Code grant: https://tools.ietf.org/html/rfc6749#section-1.3.1
+.. _refresh token: https://tools.ietf.org/html/rfc6749#section-6
+.. _rfc6749 section 4.1: https://tools.ietf.org/html/rfc6749#section-4.1
+"""
+
+from datetime import datetime
+import io
+import json
+
+import six
+
+from google.auth import _cloud_sdk
+from google.auth import _helpers
+from google.auth import credentials
+from google.auth import exceptions
+from google.oauth2 import _client
+
+
+# The Google OAuth 2.0 token endpoint. Used for authorized user credentials.
+_GOOGLE_OAUTH2_TOKEN_ENDPOINT = "https://oauth2.googleapis.com/token"
+
+
+class Credentials(credentials.ReadOnlyScoped, credentials.CredentialsWithQuotaProject):
+ """Credentials using OAuth 2.0 access and refresh tokens.
+
+ The credentials are considered immutable. If you want to modify the
+ quota project, use :meth:`with_quota_project` or ::
+
+        credentials = credentials.with_quota_project('myproject-123')
+ """
+
+ def __init__(
+ self,
+ token,
+ refresh_token=None,
+ id_token=None,
+ token_uri=None,
+ client_id=None,
+ client_secret=None,
+ scopes=None,
+ quota_project_id=None,
+ expiry=None,
+ ):
+ """
+ Args:
+ token (Optional(str)): The OAuth 2.0 access token. Can be None
+ if refresh information is provided.
+ refresh_token (str): The OAuth 2.0 refresh token. If specified,
+ credentials can be refreshed.
+ id_token (str): The Open ID Connect ID Token.
+ token_uri (str): The OAuth 2.0 authorization server's token
+ endpoint URI. Must be specified for refresh, can be left as
+ None if the token can not be refreshed.
+ client_id (str): The OAuth 2.0 client ID. Must be specified for
+ refresh, can be left as None if the token can not be refreshed.
+ client_secret(str): The OAuth 2.0 client secret. Must be specified
+ for refresh, can be left as None if the token can not be
+ refreshed.
+ scopes (Sequence[str]): The scopes used to obtain authorization.
+ This parameter is used by :meth:`has_scopes`. OAuth 2.0
+ credentials can not request additional scopes after
+ authorization. The scopes must be derivable from the refresh
+ token if refresh information is provided (e.g. The refresh
+ token scopes are a superset of this or contain a wild card
+ scope like 'https://www.googleapis.com/auth/any-api').
+ quota_project_id (Optional[str]): The project ID used for quota and billing.
+ This project may be different from the project used to
+ create the credentials.
+ """
+ super(Credentials, self).__init__()
+ self.token = token
+ self.expiry = expiry
+ self._refresh_token = refresh_token
+ self._id_token = id_token
+ self._scopes = scopes
+ self._token_uri = token_uri
+ self._client_id = client_id
+ self._client_secret = client_secret
+ self._quota_project_id = quota_project_id
+
+ def __getstate__(self):
+ """A __getstate__ method must exist for the __setstate__ to be called
+ This is identical to the default implementation.
+ See https://docs.python.org/3.7/library/pickle.html#object.__setstate__
+ """
+ return self.__dict__
+
+ def __setstate__(self, d):
+ """Credentials pickled with older versions of the class do not have
+ all the attributes."""
+ self.token = d.get("token")
+ self.expiry = d.get("expiry")
+ self._refresh_token = d.get("_refresh_token")
+ self._id_token = d.get("_id_token")
+ self._scopes = d.get("_scopes")
+ self._token_uri = d.get("_token_uri")
+ self._client_id = d.get("_client_id")
+ self._client_secret = d.get("_client_secret")
+ self._quota_project_id = d.get("_quota_project_id")
+
+ @property
+ def refresh_token(self):
+ """Optional[str]: The OAuth 2.0 refresh token."""
+ return self._refresh_token
+
+ @property
+ def scopes(self):
+ """Optional[str]: The OAuth 2.0 permission scopes."""
+ return self._scopes
+
+ @property
+ def token_uri(self):
+ """Optional[str]: The OAuth 2.0 authorization server's token endpoint
+ URI."""
+ return self._token_uri
+
+ @property
+ def id_token(self):
+ """Optional[str]: The Open ID Connect ID Token.
+
+ Depending on the authorization server and the scopes requested, this
+ may be populated when credentials are obtained and updated when
+ :meth:`refresh` is called. This token is a JWT. It can be verified
+ and decoded using :func:`google.oauth2.id_token.verify_oauth2_token`.
+ """
+ return self._id_token
+
+ @property
+ def client_id(self):
+ """Optional[str]: The OAuth 2.0 client ID."""
+ return self._client_id
+
+ @property
+ def client_secret(self):
+ """Optional[str]: The OAuth 2.0 client secret."""
+ return self._client_secret
+
+ @property
+ def requires_scopes(self):
+ """False: OAuth 2.0 credentials have their scopes set when
+ the initial token is requested and can not be changed."""
+ return False
+
+ @_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
+ def with_quota_project(self, quota_project_id):
+
+ return self.__class__(
+ self.token,
+ refresh_token=self.refresh_token,
+ id_token=self.id_token,
+ token_uri=self.token_uri,
+ client_id=self.client_id,
+ client_secret=self.client_secret,
+ scopes=self.scopes,
+ quota_project_id=quota_project_id,
+ )
+
+ @_helpers.copy_docstring(credentials.Credentials)
+ def refresh(self, request):
+ if (
+ self._refresh_token is None
+ or self._token_uri is None
+ or self._client_id is None
+ or self._client_secret is None
+ ):
+ raise exceptions.RefreshError(
+ "The credentials do not contain the necessary fields need to "
+ "refresh the access token. You must specify refresh_token, "
+ "token_uri, client_id, and client_secret."
+ )
+
+ access_token, refresh_token, expiry, grant_response = _client.refresh_grant(
+ request,
+ self._token_uri,
+ self._refresh_token,
+ self._client_id,
+ self._client_secret,
+ self._scopes,
+ )
+
+ self.token = access_token
+ self.expiry = expiry
+ self._refresh_token = refresh_token
+ self._id_token = grant_response.get("id_token")
+
+ if self._scopes and "scopes" in grant_response:
+ requested_scopes = frozenset(self._scopes)
+ granted_scopes = frozenset(grant_response["scopes"].split())
+ scopes_requested_but_not_granted = requested_scopes - granted_scopes
+ if scopes_requested_but_not_granted:
+ raise exceptions.RefreshError(
+ "Not all requested scopes were granted by the "
+ "authorization server, missing scopes {}.".format(
+ ", ".join(scopes_requested_but_not_granted)
+ )
+ )
+
+ @classmethod
+ def from_authorized_user_info(cls, info, scopes=None):
+ """Creates a Credentials instance from parsed authorized user info.
+
+ Args:
+ info (Mapping[str, str]): The authorized user info in Google
+ format.
+ scopes (Sequence[str]): Optional list of scopes to include in the
+ credentials.
+
+ Returns:
+ google.oauth2.credentials.Credentials: The constructed
+ credentials.
+
+ Raises:
+ ValueError: If the info is not in the expected format.
+ """
+ keys_needed = set(("refresh_token", "client_id", "client_secret"))
+ missing = keys_needed.difference(six.iterkeys(info))
+
+ if missing:
+ raise ValueError(
+ "Authorized user info was not in the expected format, missing "
+ "fields {}.".format(", ".join(missing))
+ )
+
+ # access token expiry (datetime obj); auto-expire if not saved
+ expiry = info.get("expiry")
+ if expiry:
+ expiry = datetime.strptime(
+ expiry.rstrip("Z").split(".")[0], "%Y-%m-%dT%H:%M:%S"
+ )
+ else:
+ expiry = _helpers.utcnow() - _helpers.CLOCK_SKEW
+
+ # process scopes, which needs to be a seq
+ if scopes is None and "scopes" in info:
+ scopes = info.get("scopes")
+ if isinstance(scopes, str):
+ scopes = scopes.split(" ")
+
+ return cls(
+ token=info.get("token"),
+ refresh_token=info.get("refresh_token"),
+ token_uri=_GOOGLE_OAUTH2_TOKEN_ENDPOINT, # always overrides
+ scopes=scopes,
+ client_id=info.get("client_id"),
+ client_secret=info.get("client_secret"),
+ quota_project_id=info.get("quota_project_id"), # may not exist
+ expiry=expiry,
+ )
+
+ @classmethod
+ def from_authorized_user_file(cls, filename, scopes=None):
+ """Creates a Credentials instance from an authorized user json file.
+
+ Args:
+ filename (str): The path to the authorized user json file.
+ scopes (Sequence[str]): Optional list of scopes to include in the
+ credentials.
+
+ Returns:
+ google.oauth2.credentials.Credentials: The constructed
+ credentials.
+
+ Raises:
+ ValueError: If the file is not in the expected format.
+ """
+ with io.open(filename, "r", encoding="utf-8") as json_file:
+ data = json.load(json_file)
+ return cls.from_authorized_user_info(data, scopes)
+
+ def to_json(self, strip=None):
+ """Utility function that creates a JSON representation of a Credentials
+ object.
+
+ Args:
+ strip (Sequence[str]): Optional list of members to exclude from the
+ generated JSON.
+
+ Returns:
+ str: A JSON representation of this instance. When converted into
+ a dictionary, it can be passed to from_authorized_user_info()
+ to create a new credential instance.
+ """
+ prep = {
+ "token": self.token,
+ "refresh_token": self.refresh_token,
+ "token_uri": self.token_uri,
+ "client_id": self.client_id,
+ "client_secret": self.client_secret,
+ "scopes": self.scopes,
+ }
+ if self.expiry: # flatten expiry timestamp
+ prep["expiry"] = self.expiry.isoformat() + "Z"
+
+ # Remove empty entries (those which are None)
+ prep = {k: v for k, v in prep.items() if v is not None}
+
+        # Remove entries that explicitly need to be removed
+ if strip is not None:
+ prep = {k: v for k, v in prep.items() if k not in strip}
+
+ return json.dumps(prep)
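+
+    # A round-trip sketch (all literal values are placeholders):
+    # json.loads(to_json()) yields a dict accepted by
+    # from_authorized_user_info(), so credentials can be persisted and
+    # restored.
+    #
+    #   creds = Credentials(
+    #       token=None,
+    #       refresh_token="1//refresh-token-placeholder",
+    #       token_uri="https://oauth2.googleapis.com/token",
+    #       client_id="client-id-placeholder",
+    #       client_secret="client-secret-placeholder",
+    #   )
+    #   restored = Credentials.from_authorized_user_info(
+    #       json.loads(creds.to_json())
+    #   )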
+
+
+class UserAccessTokenCredentials(credentials.CredentialsWithQuotaProject):
+ """Access token credentials for user account.
+
+ Obtain the access token for a given user account or the current active
+ user account with the ``gcloud auth print-access-token`` command.
+
+ Args:
+ account (Optional[str]): Account to get the access token for. If not
+ specified, the current active account will be used.
+ quota_project_id (Optional[str]): The project ID used for quota
+ and billing.
+ """
+
+ def __init__(self, account=None, quota_project_id=None):
+ super(UserAccessTokenCredentials, self).__init__()
+ self._account = account
+ self._quota_project_id = quota_project_id
+
+ def with_account(self, account):
+ """Create a new instance with the given account.
+
+ Args:
+ account (str): Account to get the access token for.
+
+ Returns:
+ google.oauth2.credentials.UserAccessTokenCredentials: The created
+ credentials with the given account.
+ """
+ return self.__class__(account=account, quota_project_id=self._quota_project_id)
+
+ @_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
+ def with_quota_project(self, quota_project_id):
+ return self.__class__(account=self._account, quota_project_id=quota_project_id)
+
+ def refresh(self, request):
+ """Refreshes the access token.
+
+ Args:
+ request (google.auth.transport.Request): This argument is required
+ by the base class interface but not used in this implementation,
+ so just set it to `None`.
+
+ Raises:
+ google.auth.exceptions.UserAccessTokenError: If the access token
+ refresh failed.
+ """
+ self.token = _cloud_sdk.get_auth_access_token(self._account)
+
+ @_helpers.copy_docstring(credentials.Credentials)
+ def before_request(self, request, method, url, headers):
+ self.refresh(request)
+ self.apply(headers)
diff --git a/venv/Lib/site-packages/google/oauth2/id_token.py b/venv/Lib/site-packages/google/oauth2/id_token.py
new file mode 100644
index 000000000..bf6bf2c70
--- /dev/null
+++ b/venv/Lib/site-packages/google/oauth2/id_token.py
@@ -0,0 +1,266 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Google ID Token helpers.
+
+Provides support for verifying `OpenID Connect ID Tokens`_, especially ones
+generated by Google infrastructure.
+
+To parse and verify an ID Token issued by Google's OAuth 2.0 authorization
+server use :func:`verify_oauth2_token`. To verify an ID Token issued by
+Firebase, use :func:`verify_firebase_token`.
+
+A general purpose ID Token verifier is available as :func:`verify_token`.
+
+Example::
+
+ from google.oauth2 import id_token
+ from google.auth.transport import requests
+
+ request = requests.Request()
+
+ id_info = id_token.verify_oauth2_token(
+ token, request, 'my-client-id.example.com')
+
+ if id_info['iss'] != 'https://accounts.google.com':
+ raise ValueError('Wrong issuer.')
+
+ userid = id_info['sub']
+
+By default, this will re-fetch certificates for each verification. Because
+Google's public keys are only changed infrequently (on the order of once per
+day), you may wish to take advantage of caching to reduce latency and the
+potential for network errors. This can be accomplished using an external
+library like `CacheControl`_ to create a cache-aware
+:class:`google.auth.transport.Request`::
+
+ import cachecontrol
+ import google.auth.transport.requests
+ import requests
+
+ session = requests.session()
+ cached_session = cachecontrol.CacheControl(session)
+ request = google.auth.transport.requests.Request(session=cached_session)
+
+.. _OpenID Connect ID Tokens:
+ http://openid.net/specs/openid-connect-core-1_0.html#IDToken
+.. _CacheControl: https://cachecontrol.readthedocs.io
+"""
+
+import json
+import os
+
+import six
+from six.moves import http_client
+
+from google.auth import environment_vars
+from google.auth import exceptions
+from google.auth import jwt
+
+
+# The URL that provides public certificates for verifying ID tokens issued
+# by Google's OAuth 2.0 authorization server.
+_GOOGLE_OAUTH2_CERTS_URL = "https://www.googleapis.com/oauth2/v1/certs"
+
+# The URL that provides public certificates for verifying ID tokens issued
+# by Firebase and the Google APIs infrastructure
+_GOOGLE_APIS_CERTS_URL = (
+ "https://www.googleapis.com/robot/v1/metadata/x509"
+ "/securetoken@system.gserviceaccount.com"
+)
+
+_GOOGLE_ISSUERS = ["accounts.google.com", "https://accounts.google.com"]
+
+
+def _fetch_certs(request, certs_url):
+ """Fetches certificates.
+
+    Google-style certificate endpoints return JSON in the format of
+ ``{'key id': 'x509 certificate'}``.
+
+ Args:
+ request (google.auth.transport.Request): The object used to make
+ HTTP requests.
+ certs_url (str): The certificate endpoint URL.
+
+ Returns:
+ Mapping[str, str]: A mapping of public key ID to x.509 certificate
+ data.
+ """
+ response = request(certs_url, method="GET")
+
+ if response.status != http_client.OK:
+ raise exceptions.TransportError(
+ "Could not fetch certificates at {}".format(certs_url)
+ )
+
+ return json.loads(response.data.decode("utf-8"))
+
+
+def verify_token(id_token, request, audience=None, certs_url=_GOOGLE_OAUTH2_CERTS_URL):
+ """Verifies an ID token and returns the decoded token.
+
+ Args:
+ id_token (Union[str, bytes]): The encoded token.
+ request (google.auth.transport.Request): The object used to make
+ HTTP requests.
+ audience (str): The audience that this token is intended for. If None
+ then the audience is not verified.
+ certs_url (str): The URL that specifies the certificates to use to
+ verify the token. This URL should return JSON in the format of
+ ``{'key id': 'x509 certificate'}``.
+
+ Returns:
+ Mapping[str, Any]: The decoded token.
+ """
+ certs = _fetch_certs(request, certs_url)
+
+ return jwt.decode(id_token, certs=certs, audience=audience)
+
+
+def verify_oauth2_token(id_token, request, audience=None):
+ """Verifies an ID Token issued by Google's OAuth 2.0 authorization server.
+
+ Args:
+ id_token (Union[str, bytes]): The encoded token.
+ request (google.auth.transport.Request): The object used to make
+ HTTP requests.
+ audience (str): The audience that this token is intended for. This is
+ typically your application's OAuth 2.0 client ID. If None then the
+ audience is not verified.
+
+ Returns:
+ Mapping[str, Any]: The decoded token.
+
+ Raises:
+ exceptions.GoogleAuthError: If the issuer is invalid.
+ """
+ idinfo = verify_token(
+ id_token, request, audience=audience, certs_url=_GOOGLE_OAUTH2_CERTS_URL
+ )
+
+ if idinfo["iss"] not in _GOOGLE_ISSUERS:
+ raise exceptions.GoogleAuthError(
+ "Wrong issuer. 'iss' should be one of the following: {}".format(
+ _GOOGLE_ISSUERS
+ )
+ )
+
+ return idinfo
+
+
+def verify_firebase_token(id_token, request, audience=None):
+ """Verifies an ID Token issued by Firebase Authentication.
+
+ Args:
+ id_token (Union[str, bytes]): The encoded token.
+ request (google.auth.transport.Request): The object used to make
+ HTTP requests.
+ audience (str): The audience that this token is intended for. This is
+ typically your Firebase application ID. If None then the audience
+ is not verified.
+
+ Returns:
+ Mapping[str, Any]: The decoded token.
+ """
+ return verify_token(
+ id_token, request, audience=audience, certs_url=_GOOGLE_APIS_CERTS_URL
+ )
+
+
+def fetch_id_token(request, audience):
+ """Fetch the ID Token from the current environment.
+
+    This function acquires an ID token from the environment in the following
+    order:
+
+    1. If the application is running in Compute Engine, App Engine or Cloud Run,
+       then the ID token is obtained from the metadata server.
+    2. If the environment variable ``GOOGLE_APPLICATION_CREDENTIALS`` is set
+       to the path of a valid service account JSON file, then the ID token is
+       acquired using those service account credentials.
+    3. If the metadata server doesn't exist and no valid service account
+       credentials are found, :class:`~google.auth.exceptions.DefaultCredentialsError`
+       will be raised.
+
+ Example::
+
+ import google.oauth2.id_token
+ import google.auth.transport.requests
+
+ request = google.auth.transport.requests.Request()
+ target_audience = "https://pubsub.googleapis.com"
+
+ id_token = google.oauth2.id_token.fetch_id_token(request, target_audience)
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+ audience (str): The audience that this ID token is intended for.
+
+ Returns:
+ str: The ID token.
+
+ Raises:
+ ~google.auth.exceptions.DefaultCredentialsError:
+ If metadata server doesn't exist and no valid service account
+ credentials are found.
+ """
+    # 1. First try to fetch ID token from metadata server if it exists. The code
+ # works for GAE and Cloud Run metadata server as well.
+ try:
+ from google.auth import compute_engine
+
+ credentials = compute_engine.IDTokenCredentials(
+ request, audience, use_metadata_identity_endpoint=True
+ )
+ credentials.refresh(request)
+ return credentials.token
+ except (ImportError, exceptions.TransportError, exceptions.RefreshError):
+ pass
+
+ # 2. Try to use service account credentials to get ID token.
+
+ # Try to get credentials from the GOOGLE_APPLICATION_CREDENTIALS environment
+ # variable.
+ credentials_filename = os.environ.get(environment_vars.CREDENTIALS)
+ if not (
+ credentials_filename
+ and os.path.exists(credentials_filename)
+ and os.path.isfile(credentials_filename)
+ ):
+ raise exceptions.DefaultCredentialsError(
+ "Neither metadata server or valid service account credentials are found."
+ )
+
+ try:
+ with open(credentials_filename, "r") as f:
+ info = json.load(f)
+            credentials_content = (
+                info if info.get("type") == "service_account" else None
+            )
+
+ from google.oauth2 import service_account
+
+ credentials = service_account.IDTokenCredentials.from_service_account_info(
+ credentials_content, target_audience=audience
+ )
+ except ValueError as caught_exc:
+ new_exc = exceptions.DefaultCredentialsError(
+ "Neither metadata server or valid service account credentials are found.",
+ caught_exc,
+ )
+ six.raise_from(new_exc, caught_exc)
+
+ credentials.refresh(request)
+ return credentials.token
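+
+
+# A usage sketch (the audience is a placeholder): the fetched ID token is
+# typically sent to the target service as a bearer token.
+#
+#   import google.auth.transport.requests
+#
+#   request = google.auth.transport.requests.Request()
+#   token = fetch_id_token(request, "https://example-service.a.run.app")
+#   headers = {"Authorization": "Bearer {}".format(token)}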
diff --git a/venv/Lib/site-packages/google/oauth2/service_account.py b/venv/Lib/site-packages/google/oauth2/service_account.py
new file mode 100644
index 000000000..c4898a247
--- /dev/null
+++ b/venv/Lib/site-packages/google/oauth2/service_account.py
@@ -0,0 +1,606 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Service Accounts: JSON Web Token (JWT) Profile for OAuth 2.0
+
+This module implements the JWT Profile for OAuth 2.0 Authorization Grants
+as defined by `RFC 7523`_ with particular support for how this RFC is
+implemented in Google's infrastructure. Google refers to these credentials
+as *Service Accounts*.
+
+Service accounts are used for server-to-server communication, such as
+interactions between a web application server and a Google service. The
+service account belongs to your application instead of to an individual end
+user. In contrast to other OAuth 2.0 profiles, no users are involved and your
+application "acts" as the service account.
+
+Typically an application uses a service account when the application uses
+Google APIs to work with its own data rather than a user's data. For example,
+an application that uses Google Cloud Datastore for data persistence would use
+a service account to authenticate its calls to the Google Cloud Datastore API.
+However, an application that needs to access a user's Drive documents would
+use the normal OAuth 2.0 profile.
+
+Additionally, Google Apps domain administrators can grant service accounts
+`domain-wide delegation`_ authority to access user data on behalf of users in
+the domain.
+
+This profile uses a JWT to acquire an OAuth 2.0 access token. The JWT is used
+in place of the usual authorization token returned during the standard
+OAuth 2.0 Authorization Code grant. The JWT is only used for this purpose, as
+the acquired access token is used as the bearer token when making requests
+using these credentials.
+
+This profile differs from normal OAuth 2.0 profile because no user consent
+step is required. The use of the private key allows this profile to assert
+identity directly.
+
+This profile also differs from the :mod:`google.auth.jwt` authentication
+because the JWT credentials use the JWT directly as the bearer token. This
+profile instead only uses the JWT to obtain an OAuth 2.0 access token. The
+obtained OAuth 2.0 access token is used as the bearer token.
+
+Domain-wide delegation
+----------------------
+
+Domain-wide delegation allows a service account to access user data on
+behalf of any user in a Google Apps domain without consent from the user.
+For example, an application that uses the Google Calendar API to add events to
+the calendars of all users in a Google Apps domain would use a service account
+to access the Google Calendar API on behalf of users.
+
+The Google Apps administrator must explicitly authorize the service account to
+do this. This authorization step is referred to as "delegating domain-wide
+authority" to a service account.
+
+You can use domain-wide delegation by creating a set of credentials with a
+specific subject using :meth:`~Credentials.with_subject`.
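+
+For example, assuming the domain administrator has authorized delegation for
+these credentials (the subject address is a placeholder)::
+
+    delegated_credentials = credentials.with_subject('admin@example.com')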
+
+.. _RFC 7523: https://tools.ietf.org/html/rfc7523
+"""
+
+import copy
+import datetime
+
+from google.auth import _helpers
+from google.auth import _service_account_info
+from google.auth import credentials
+from google.auth import jwt
+from google.oauth2 import _client
+
+_DEFAULT_TOKEN_LIFETIME_SECS = 3600 # 1 hour in seconds
+
+
+class Credentials(
+ credentials.Signing, credentials.Scoped, credentials.CredentialsWithQuotaProject
+):
+ """Service account credentials
+
+ Usually, you'll create these credentials with one of the helper
+ constructors. To create credentials using a Google service account
+ private key JSON file::
+
+ credentials = service_account.Credentials.from_service_account_file(
+ 'service-account.json')
+
+ Or if you already have the service account file loaded::
+
+ service_account_info = json.load(open('service_account.json'))
+ credentials = service_account.Credentials.from_service_account_info(
+ service_account_info)
+
+ Both helper methods pass on arguments to the constructor, so you can
+ specify additional scopes and a subject if necessary::
+
+ credentials = service_account.Credentials.from_service_account_file(
+ 'service-account.json',
+ scopes=['email'],
+ subject='user@example.com')
+
+ The credentials are considered immutable. If you want to modify the scopes
+ or the subject used for delegation, use :meth:`with_scopes` or
+ :meth:`with_subject`::
+
+ scoped_credentials = credentials.with_scopes(['email'])
+ delegated_credentials = credentials.with_subject(subject)
+
+ To add a quota project, use :meth:`with_quota_project`::
+
+ credentials = credentials.with_quota_project('myproject-123')
+ """
+
+ def __init__(
+ self,
+ signer,
+ service_account_email,
+ token_uri,
+ scopes=None,
+ subject=None,
+ project_id=None,
+ quota_project_id=None,
+ additional_claims=None,
+ ):
+ """
+ Args:
+ signer (google.auth.crypt.Signer): The signer used to sign JWTs.
+ service_account_email (str): The service account's email.
+ scopes (Sequence[str]): Scopes to request during the authorization
+ grant.
+ token_uri (str): The OAuth 2.0 Token URI.
+ subject (str): For domain-wide delegation, the email address of the
+                user for whom to request delegated access.
+ project_id (str): Project ID associated with the service account
+ credential.
+ quota_project_id (Optional[str]): The project ID used for quota and
+ billing.
+ additional_claims (Mapping[str, str]): Any additional claims for
+ the JWT assertion used in the authorization grant.
+
+ .. note:: Typically one of the helper constructors
+ :meth:`from_service_account_file` or
+ :meth:`from_service_account_info` are used instead of calling the
+ constructor directly.
+ """
+ super(Credentials, self).__init__()
+
+ self._scopes = scopes
+ self._signer = signer
+ self._service_account_email = service_account_email
+ self._subject = subject
+ self._project_id = project_id
+ self._quota_project_id = quota_project_id
+ self._token_uri = token_uri
+
+ if additional_claims is not None:
+ self._additional_claims = additional_claims
+ else:
+ self._additional_claims = {}
+
+ @classmethod
+ def _from_signer_and_info(cls, signer, info, **kwargs):
+ """Creates a Credentials instance from a signer and service account
+ info.
+
+ Args:
+ signer (google.auth.crypt.Signer): The signer used to sign JWTs.
+ info (Mapping[str, str]): The service account info.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.jwt.Credentials: The constructed credentials.
+
+ Raises:
+ ValueError: If the info is not in the expected format.
+ """
+ return cls(
+ signer,
+ service_account_email=info["client_email"],
+ token_uri=info["token_uri"],
+ project_id=info.get("project_id"),
+ **kwargs
+ )
+
+ @classmethod
+ def from_service_account_info(cls, info, **kwargs):
+ """Creates a Credentials instance from parsed service account info.
+
+ Args:
+ info (Mapping[str, str]): The service account info in Google
+ format.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.service_account.Credentials: The constructed
+ credentials.
+
+ Raises:
+ ValueError: If the info is not in the expected format.
+ """
+ signer = _service_account_info.from_dict(
+ info, require=["client_email", "token_uri"]
+ )
+ return cls._from_signer_and_info(signer, info, **kwargs)
+
+ @classmethod
+ def from_service_account_file(cls, filename, **kwargs):
+ """Creates a Credentials instance from a service account json file.
+
+ Args:
+ filename (str): The path to the service account json file.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.service_account.Credentials: The constructed
+ credentials.
+ """
+ info, signer = _service_account_info.from_filename(
+ filename, require=["client_email", "token_uri"]
+ )
+ return cls._from_signer_and_info(signer, info, **kwargs)
+
+ @property
+ def service_account_email(self):
+ """The service account email."""
+ return self._service_account_email
+
+ @property
+ def project_id(self):
+ """Project ID associated with this credential."""
+ return self._project_id
+
+ @property
+ def requires_scopes(self):
+ """Checks if the credentials requires scopes.
+
+ Returns:
+ bool: True if there are no scopes set otherwise False.
+ """
+        return not self._scopes
+
+ @_helpers.copy_docstring(credentials.Scoped)
+ def with_scopes(self, scopes):
+ return self.__class__(
+ self._signer,
+ service_account_email=self._service_account_email,
+ scopes=scopes,
+ token_uri=self._token_uri,
+ subject=self._subject,
+ project_id=self._project_id,
+ quota_project_id=self._quota_project_id,
+ additional_claims=self._additional_claims.copy(),
+ )
+
+ def with_subject(self, subject):
+ """Create a copy of these credentials with the specified subject.
+
+ Args:
+ subject (str): The subject claim.
+
+ Returns:
+ google.auth.service_account.Credentials: A new credentials
+ instance.
+ """
+ return self.__class__(
+ self._signer,
+ service_account_email=self._service_account_email,
+ scopes=self._scopes,
+ token_uri=self._token_uri,
+ subject=subject,
+ project_id=self._project_id,
+ quota_project_id=self._quota_project_id,
+ additional_claims=self._additional_claims.copy(),
+ )
+
+ def with_claims(self, additional_claims):
+ """Returns a copy of these credentials with modified claims.
+
+ Args:
+ additional_claims (Mapping[str, str]): Any additional claims for
+ the JWT payload. This will be merged with the current
+ additional claims.
+
+ Returns:
+ google.auth.service_account.Credentials: A new credentials
+ instance.
+ """
+ new_additional_claims = copy.deepcopy(self._additional_claims)
+ new_additional_claims.update(additional_claims or {})
+
+ return self.__class__(
+ self._signer,
+ service_account_email=self._service_account_email,
+ scopes=self._scopes,
+ token_uri=self._token_uri,
+ subject=self._subject,
+ project_id=self._project_id,
+ quota_project_id=self._quota_project_id,
+ additional_claims=new_additional_claims,
+ )
+
+ @_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
+ def with_quota_project(self, quota_project_id):
+
+ return self.__class__(
+ self._signer,
+ service_account_email=self._service_account_email,
+ scopes=self._scopes,
+ token_uri=self._token_uri,
+ subject=self._subject,
+ project_id=self._project_id,
+ quota_project_id=quota_project_id,
+ additional_claims=self._additional_claims.copy(),
+ )
+
+ def _make_authorization_grant_assertion(self):
+ """Create the OAuth 2.0 assertion.
+
+ This assertion is used during the OAuth 2.0 grant to acquire an
+ access token.
+
+ Returns:
+ bytes: The authorization grant assertion.
+ """
+ now = _helpers.utcnow()
+ lifetime = datetime.timedelta(seconds=_DEFAULT_TOKEN_LIFETIME_SECS)
+ expiry = now + lifetime
+
+ payload = {
+ "iat": _helpers.datetime_to_secs(now),
+ "exp": _helpers.datetime_to_secs(expiry),
+ # The issuer must be the service account email.
+ "iss": self._service_account_email,
+ # The audience must be the auth token endpoint's URI
+ "aud": self._token_uri,
+ "scope": _helpers.scopes_to_string(self._scopes or ()),
+ }
+
+ payload.update(self._additional_claims)
+
+ # The subject can be a user email for domain-wide delegation.
+ if self._subject:
+ payload.setdefault("sub", self._subject)
+
+ token = jwt.encode(self._signer, payload)
+
+ return token
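+
+    # A debugging sketch (assumes ``creds`` is an instance of this class):
+    # the assertion is a signed JWT whose claims can be inspected without
+    # verification.
+    #
+    #   from google.auth import jwt as jwt_helpers
+    #   claims = jwt_helpers.decode(
+    #       creds._make_authorization_grant_assertion(), verify=False
+    #   )
+    #   print(claims["iss"], claims["aud"], claims["scope"])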
+
+ @_helpers.copy_docstring(credentials.Credentials)
+ def refresh(self, request):
+ assertion = self._make_authorization_grant_assertion()
+ access_token, expiry, _ = _client.jwt_grant(request, self._token_uri, assertion)
+ self.token = access_token
+ self.expiry = expiry
+
+ @_helpers.copy_docstring(credentials.Signing)
+ def sign_bytes(self, message):
+ return self._signer.sign(message)
+
+ @property
+ @_helpers.copy_docstring(credentials.Signing)
+ def signer(self):
+ return self._signer
+
+ @property
+ @_helpers.copy_docstring(credentials.Signing)
+ def signer_email(self):
+ return self._service_account_email
+
+
+class IDTokenCredentials(credentials.Signing, credentials.CredentialsWithQuotaProject):
+ """Open ID Connect ID Token-based service account credentials.
+
+ These credentials are largely similar to :class:`.Credentials`, but instead
+ of using an OAuth 2.0 Access Token as the bearer token, they use an Open
+ ID Connect ID Token as the bearer token. These credentials are useful when
+ communicating to services that require ID Tokens and can not accept access
+ tokens.
+
+ Usually, you'll create these credentials with one of the helper
+ constructors. To create credentials using a Google service account
+ private key JSON file::
+
+ credentials = (
+ service_account.IDTokenCredentials.from_service_account_file(
+ 'service-account.json'))
+
+ Or if you already have the service account file loaded::
+
+ service_account_info = json.load(open('service_account.json'))
+ credentials = (
+ service_account.IDTokenCredentials.from_service_account_info(
+ service_account_info))
+
+ Both helper methods pass on arguments to the constructor, so you can
+ specify additional scopes and a subject if necessary::
+
+ credentials = (
+ service_account.IDTokenCredentials.from_service_account_file(
+ 'service-account.json',
+ scopes=['email'],
+ subject='user@example.com'))
+`
+ The credentials are considered immutable. If you want to modify the scopes
+ or the subject used for delegation, use :meth:`with_scopes` or
+ :meth:`with_subject`::
+
+ scoped_credentials = credentials.with_scopes(['email'])
+ delegated_credentials = credentials.with_subject(subject)
+
+ """
+
+ def __init__(
+ self,
+ signer,
+ service_account_email,
+ token_uri,
+ target_audience,
+ additional_claims=None,
+ quota_project_id=None,
+ ):
+ """
+ Args:
+ signer (google.auth.crypt.Signer): The signer used to sign JWTs.
+ service_account_email (str): The service account's email.
+ token_uri (str): The OAuth 2.0 Token URI.
+ target_audience (str): The intended audience for these credentials,
+ used when requesting the ID Token. The ID Token's ``aud`` claim
+ will be set to this string.
+ additional_claims (Mapping[str, str]): Any additional claims for
+ the JWT assertion used in the authorization grant.
+ quota_project_id (Optional[str]): The project ID used for quota and billing.
+ .. note:: Typically one of the helper constructors
+ :meth:`from_service_account_file` or
+ :meth:`from_service_account_info` are used instead of calling the
+ constructor directly.
+ """
+ super(IDTokenCredentials, self).__init__()
+ self._signer = signer
+ self._service_account_email = service_account_email
+ self._token_uri = token_uri
+ self._target_audience = target_audience
+ self._quota_project_id = quota_project_id
+
+ if additional_claims is not None:
+ self._additional_claims = additional_claims
+ else:
+ self._additional_claims = {}
+
+ @classmethod
+ def _from_signer_and_info(cls, signer, info, **kwargs):
+ """Creates a credentials instance from a signer and service account
+ info.
+
+ Args:
+ signer (google.auth.crypt.Signer): The signer used to sign JWTs.
+ info (Mapping[str, str]): The service account info.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.jwt.IDTokenCredentials: The constructed credentials.
+
+ Raises:
+ ValueError: If the info is not in the expected format.
+ """
+ kwargs.setdefault("service_account_email", info["client_email"])
+ kwargs.setdefault("token_uri", info["token_uri"])
+ return cls(signer, **kwargs)
+
+ @classmethod
+ def from_service_account_info(cls, info, **kwargs):
+ """Creates a credentials instance from parsed service account info.
+
+ Args:
+ info (Mapping[str, str]): The service account info in Google
+ format.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.service_account.IDTokenCredentials: The constructed
+ credentials.
+
+ Raises:
+ ValueError: If the info is not in the expected format.
+ """
+ signer = _service_account_info.from_dict(
+ info, require=["client_email", "token_uri"]
+ )
+ return cls._from_signer_and_info(signer, info, **kwargs)
+
+ @classmethod
+ def from_service_account_file(cls, filename, **kwargs):
+ """Creates a credentials instance from a service account json file.
+
+ Args:
+ filename (str): The path to the service account json file.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.service_account.IDTokenCredentials: The constructed
+ credentials.
+ """
+ info, signer = _service_account_info.from_filename(
+ filename, require=["client_email", "token_uri"]
+ )
+ return cls._from_signer_and_info(signer, info, **kwargs)
+
+ def with_target_audience(self, target_audience):
+ """Create a copy of these credentials with the specified target
+ audience.
+
+ Args:
+ target_audience (str): The intended audience for these credentials,
+ used when requesting the ID Token.
+
+ Returns:
+ google.auth.service_account.IDTokenCredentials: A new credentials
+ instance.
+ """
+ return self.__class__(
+ self._signer,
+ service_account_email=self._service_account_email,
+ token_uri=self._token_uri,
+ target_audience=target_audience,
+ additional_claims=self._additional_claims.copy(),
+ quota_project_id=self.quota_project_id,
+ )
+
+ @_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
+ def with_quota_project(self, quota_project_id):
+ return self.__class__(
+ self._signer,
+ service_account_email=self._service_account_email,
+ token_uri=self._token_uri,
+ target_audience=self._target_audience,
+ additional_claims=self._additional_claims.copy(),
+ quota_project_id=quota_project_id,
+ )
+
+ def _make_authorization_grant_assertion(self):
+ """Create the OAuth 2.0 assertion.
+
+ This assertion is used during the OAuth 2.0 grant to acquire an
+ ID token.
+
+ Returns:
+ bytes: The authorization grant assertion.
+ """
+ now = _helpers.utcnow()
+ lifetime = datetime.timedelta(seconds=_DEFAULT_TOKEN_LIFETIME_SECS)
+ expiry = now + lifetime
+
+ payload = {
+ "iat": _helpers.datetime_to_secs(now),
+ "exp": _helpers.datetime_to_secs(expiry),
+ # The issuer must be the service account email.
+ "iss": self.service_account_email,
+ # The audience must be the auth token endpoint's URI
+ "aud": self._token_uri,
+ # The target audience specifies which service the ID token is
+ # intended for.
+ "target_audience": self._target_audience,
+ }
+
+ payload.update(self._additional_claims)
+
+ token = jwt.encode(self._signer, payload)
+
+ return token
+
+ @_helpers.copy_docstring(credentials.Credentials)
+ def refresh(self, request):
+ assertion = self._make_authorization_grant_assertion()
+ access_token, expiry, _ = _client.id_token_jwt_grant(
+ request, self._token_uri, assertion
+ )
+ self.token = access_token
+ self.expiry = expiry
+
+ @property
+ def service_account_email(self):
+ """The service account email."""
+ return self._service_account_email
+
+ @_helpers.copy_docstring(credentials.Signing)
+ def sign_bytes(self, message):
+ return self._signer.sign(message)
+
+ @property
+ @_helpers.copy_docstring(credentials.Signing)
+ def signer(self):
+ return self._signer
+
+ @property
+ @_helpers.copy_docstring(credentials.Signing)
+ def signer_email(self):
+ return self._service_account_email
diff --git a/venv/Lib/site-packages/google/resumable_media/__init__.py b/venv/Lib/site-packages/google/resumable_media/__init__.py
new file mode 100644
index 000000000..8c3da244e
--- /dev/null
+++ b/venv/Lib/site-packages/google/resumable_media/__init__.py
@@ -0,0 +1,61 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Utilities for Google Media Downloads and Resumable Uploads.
+
+This package has some general-purpose modules, e.g.
+:mod:`~google.resumable_media.common`, but the majority of the
+public interface will be contained in subpackages.
+
+===========
+Subpackages
+===========
+
+Each subpackage is tailored to a specific transport library:
+
+* the :mod:`~google.resumable_media.requests` subpackage uses the ``requests``
+ transport library.
+
+.. _requests: http://docs.python-requests.org/
+
+==========
+Installing
+==========
+
+To install with `pip`_:
+
+.. code-block:: console
+
+ $ pip install --upgrade google-resumable-media
+
+.. _pip: https://pip.pypa.io/
+"""
+
+
+from google.resumable_media.common import DataCorruption
+from google.resumable_media.common import InvalidResponse
+from google.resumable_media.common import PERMANENT_REDIRECT
+from google.resumable_media.common import RetryStrategy
+from google.resumable_media.common import TOO_MANY_REQUESTS
+from google.resumable_media.common import UPLOAD_CHUNK_SIZE
+
+
+__all__ = [
+ u"DataCorruption",
+ u"InvalidResponse",
+ u"PERMANENT_REDIRECT",
+ u"RetryStrategy",
+ u"TOO_MANY_REQUESTS",
+ u"UPLOAD_CHUNK_SIZE",
+]
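+
+
+# The concrete helpers live in a transport-specific subpackage; a typical
+# import (illustrative) is:
+#
+#     from google.resumable_media.requests import ChunkedDownload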
diff --git a/venv/Lib/site-packages/google/resumable_media/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/google/resumable_media/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..6cc2d2e47
Binary files /dev/null and b/venv/Lib/site-packages/google/resumable_media/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/resumable_media/__pycache__/_download.cpython-36.pyc b/venv/Lib/site-packages/google/resumable_media/__pycache__/_download.cpython-36.pyc
new file mode 100644
index 000000000..b6c469084
Binary files /dev/null and b/venv/Lib/site-packages/google/resumable_media/__pycache__/_download.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/resumable_media/__pycache__/_helpers.cpython-36.pyc b/venv/Lib/site-packages/google/resumable_media/__pycache__/_helpers.cpython-36.pyc
new file mode 100644
index 000000000..0f407bccd
Binary files /dev/null and b/venv/Lib/site-packages/google/resumable_media/__pycache__/_helpers.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/resumable_media/__pycache__/_upload.cpython-36.pyc b/venv/Lib/site-packages/google/resumable_media/__pycache__/_upload.cpython-36.pyc
new file mode 100644
index 000000000..b6969d193
Binary files /dev/null and b/venv/Lib/site-packages/google/resumable_media/__pycache__/_upload.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/resumable_media/__pycache__/common.cpython-36.pyc b/venv/Lib/site-packages/google/resumable_media/__pycache__/common.cpython-36.pyc
new file mode 100644
index 000000000..624cadfcd
Binary files /dev/null and b/venv/Lib/site-packages/google/resumable_media/__pycache__/common.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/resumable_media/_download.py b/venv/Lib/site-packages/google/resumable_media/_download.py
new file mode 100644
index 000000000..1b06d068a
--- /dev/null
+++ b/venv/Lib/site-packages/google/resumable_media/_download.py
@@ -0,0 +1,556 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Virtual bases classes for downloading media from Google APIs."""
+
+
+import re
+
+from six.moves import http_client
+
+from google.resumable_media import _helpers
+from google.resumable_media import common
+
+
+_CONTENT_RANGE_RE = re.compile(
+ r"bytes (?P\d+)-(?P\d+)/(?P\d+)",
+ flags=re.IGNORECASE,
+)
+_ACCEPTABLE_STATUS_CODES = (http_client.OK, http_client.PARTIAL_CONTENT)
+_GET = u"GET"
+_ZERO_CONTENT_RANGE_HEADER = u"bytes */0"
+
+
+class DownloadBase(object):
+ """Base class for download helpers.
+
+ Defines core shared behavior across different download types.
+
+ Args:
+ media_url (str): The URL containing the media to be downloaded.
+ stream (IO[bytes]): A write-able stream (i.e. file-like object) that
+ the downloaded resource can be written to.
+ start (int): The first byte in a range to be downloaded.
+ end (int): The last byte in a range to be downloaded.
+ headers (Optional[Mapping[str, str]]): Extra headers that should
+ be sent with the request, e.g. headers for encrypted data.
+
+ Attributes:
+ media_url (str): The URL containing the media to be downloaded.
+ start (Optional[int]): The first byte in a range to be downloaded.
+ end (Optional[int]): The last byte in a range to be downloaded.
+ """
+
+ def __init__(self, media_url, stream=None, start=None, end=None, headers=None):
+ self.media_url = media_url
+ self._stream = stream
+ self.start = start
+ self.end = end
+ if headers is None:
+ headers = {}
+ self._headers = headers
+ self._finished = False
+ self._retry_strategy = common.RetryStrategy()
+
+ @property
+ def finished(self):
+ """bool: Flag indicating if the download has completed."""
+ return self._finished
+
+ @staticmethod
+ def _get_status_code(response):
+ """Access the status code from an HTTP response.
+
+ Args:
+ response (object): The HTTP response object.
+
+ Raises:
+ NotImplementedError: Always, since virtual.
+ """
+ raise NotImplementedError(u"This implementation is virtual.")
+
+ @staticmethod
+ def _get_headers(response):
+ """Access the headers from an HTTP response.
+
+ Args:
+ response (object): The HTTP response object.
+
+ Raises:
+ NotImplementedError: Always, since virtual.
+ """
+ raise NotImplementedError(u"This implementation is virtual.")
+
+ @staticmethod
+ def _get_body(response):
+ """Access the response body from an HTTP response.
+
+ Args:
+ response (object): The HTTP response object.
+
+ Raises:
+ NotImplementedError: Always, since virtual.
+ """
+ raise NotImplementedError(u"This implementation is virtual.")
+
+
+class Download(DownloadBase):
+ """Helper to manage downloading a resource from a Google API.
+
+ "Slices" of the resource can be retrieved by specifying a range
+ with ``start`` and / or ``end``. However, in typical usage, neither
+ ``start`` nor ``end`` is expected to be provided.
+
+ Args:
+ media_url (str): The URL containing the media to be downloaded.
+ stream (IO[bytes]): A write-able stream (i.e. file-like object) that
+ the downloaded resource can be written to.
+ start (int): The first byte in a range to be downloaded. If not
+ provided, but ``end`` is provided, will download from the
+ beginning to ``end`` of the media.
+ end (int): The last byte in a range to be downloaded. If not
+ provided, but ``start`` is provided, will download from the
+ ``start`` to the end of the media.
+ headers (Optional[Mapping[str, str]]): Extra headers that should
+ be sent with the request, e.g. headers for encrypted data.
+ checksum (Optional[str]): The type of checksum to compute to verify
+ the integrity of the object. The response headers must contain
+ a checksum of the requested type. If the headers lack an
+ appropriate checksum (for instance in the case of transcoded or
+ ranged downloads where the remote service does not know the
+ correct checksum) an INFO-level log will be emitted. Supported
+ values are "md5", "crc32c" and None.
+ """
+
+ def __init__(
+ self, media_url, stream=None, start=None, end=None, headers=None, checksum="md5"
+ ):
+ super(Download, self).__init__(
+ media_url, stream=stream, start=start, end=end, headers=headers
+ )
+ self.checksum = checksum
+
+ def _prepare_request(self):
+ """Prepare the contents of an HTTP request.
+
+ This is everything that must be done before a request that doesn't
+ require network I/O (or other I/O). This is based on the `sans-I/O`_
+ philosophy.
+
+ Returns:
+ Tuple[str, str, NoneType, Mapping[str, str]]: The quadruple
+
+ * HTTP verb for the request (always GET)
+ * the URL for the request
+ * the body of the request (always :data:`None`)
+ * headers for the request
+
+ Raises:
+ ValueError: If the current :class:`Download` has already
+ finished.
+
+ .. _sans-I/O: https://sans-io.readthedocs.io/
+ """
+ if self.finished:
+ raise ValueError(u"A download can only be used once.")
+
+ add_bytes_range(self.start, self.end, self._headers)
+ return _GET, self.media_url, None, self._headers
+
+ def _process_response(self, response):
+ """Process the response from an HTTP request.
+
+ This is everything that must be done after a request that doesn't
+ require network I/O (or other I/O). This is based on the `sans-I/O`_
+ philosophy.
+
+ Args:
+ response (object): The HTTP response object.
+
+ .. _sans-I/O: https://sans-io.readthedocs.io/
+ """
+ # Tombstone the current Download so it cannot be used again.
+ self._finished = True
+ _helpers.require_status_code(
+ response, _ACCEPTABLE_STATUS_CODES, self._get_status_code
+ )
+
+ def consume(self, transport, timeout=None):
+ """Consume the resource to be downloaded.
+
+ If a ``stream`` is attached to this download, then the downloaded
+ resource will be written to the stream.
+
+ Args:
+ transport (object): An object which can make authenticated
+ requests.
+ timeout (Optional[Union[float, Tuple[float, float]]]):
+ The number of seconds to wait for the server response.
+ Depending on the retry strategy, a request may be repeated
+ several times using the same timeout each time.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ Raises:
+ NotImplementedError: Always, since virtual.
+ """
+ raise NotImplementedError(u"This implementation is virtual.")
+
+
+class ChunkedDownload(DownloadBase):
+ """Download a resource in chunks from a Google API.
+
+ Args:
+ media_url (str): The URL containing the media to be downloaded.
+ chunk_size (int): The number of bytes to be retrieved in each
+ request.
+ stream (IO[bytes]): A write-able stream (i.e. file-like object) that
+ will be used to concatenate chunks of the resource as they are
+ downloaded.
+ start (int): The first byte in a range to be downloaded. If not
+ provided, defaults to ``0``.
+ end (int): The last byte in a range to be downloaded. If not
+ provided, will download to the end of the media.
+ headers (Optional[Mapping[str, str]]): Extra headers that should
+ be sent with each request, e.g. headers for data encryption
+ key headers.
+
+ Attributes:
+ media_url (str): The URL containing the media to be downloaded.
+ start (Optional[int]): The first byte in a range to be downloaded.
+ end (Optional[int]): The last byte in a range to be downloaded.
+ chunk_size (int): The number of bytes to be retrieved in each request.
+
+ Raises:
+ ValueError: If ``start`` is negative.
+ """
+
+ def __init__(self, media_url, chunk_size, stream, start=0, end=None, headers=None):
+ if start < 0:
+ raise ValueError(
+ u"On a chunked download the starting " u"value cannot be negative."
+ )
+ super(ChunkedDownload, self).__init__(
+ media_url, stream=stream, start=start, end=end, headers=headers
+ )
+ self.chunk_size = chunk_size
+ self._bytes_downloaded = 0
+ self._total_bytes = None
+ self._invalid = False
+
+ @property
+ def bytes_downloaded(self):
+ """int: Number of bytes that have been downloaded."""
+ return self._bytes_downloaded
+
+ @property
+ def total_bytes(self):
+ """Optional[int]: The total number of bytes to be downloaded."""
+ return self._total_bytes
+
+ @property
+ def invalid(self):
+ """bool: Indicates if the download is in an invalid state.
+
+ This will occur if a call to :meth:`consume_next_chunk` fails.
+ """
+ return self._invalid
+
+ def _get_byte_range(self):
+ """Determines the byte range for the next request.
+
+ Returns:
+ Tuple[int, int]: The pair of begin and end byte for the next
+ chunked request.
+ """
+ curr_start = self.start + self.bytes_downloaded
+ curr_end = curr_start + self.chunk_size - 1
+ # Make sure ``curr_end`` does not exceed ``end``.
+ if self.end is not None:
+ curr_end = min(curr_end, self.end)
+ # Make sure ``curr_end`` does not exceed ``total_bytes - 1``.
+ if self.total_bytes is not None:
+ curr_end = min(curr_end, self.total_bytes - 1)
+ return curr_start, curr_end
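+
+ # Worked example (illustrative numbers): with ``start=0``,
+ # ``chunk_size=100`` and ``total_bytes=250``, successive calls return
+ # (0, 99), (100, 199) and (200, 249) as ``bytes_downloaded`` grows.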
+
+ def _prepare_request(self):
+ """Prepare the contents of an HTTP request.
+
+ This is everything that must be done before a request that doesn't
+ require network I/O (or other I/O). This is based on the `sans-I/O`_
+ philosophy.
+
+ .. note::
+
+ This method will be used multiple times, so ``headers`` will
+ be mutated in between requests. However, we don't make a copy
+ since the same keys are being updated.
+
+ Returns:
+ Tuple[str, str, NoneType, Mapping[str, str]]: The quadruple
+
+ * HTTP verb for the request (always GET)
+ * the URL for the request
+ * the body of the request (always :data:`None`)
+ * headers for the request
+
+ Raises:
+ ValueError: If the current download has finished.
+ ValueError: If the current download is invalid.
+
+ .. _sans-I/O: https://sans-io.readthedocs.io/
+ """
+ if self.finished:
+ raise ValueError(u"Download has finished.")
+ if self.invalid:
+ raise ValueError(u"Download is invalid and cannot be re-used.")
+
+ curr_start, curr_end = self._get_byte_range()
+ add_bytes_range(curr_start, curr_end, self._headers)
+ return _GET, self.media_url, None, self._headers
+
+ def _make_invalid(self):
+ """Simple setter for ``invalid``.
+
+ This is intended to be passed along as a callback to helpers that
+ raise an exception so they can mark this instance as invalid before
+ raising.
+ """
+ self._invalid = True
+
+ def _process_response(self, response):
+ """Process the response from an HTTP request.
+
+ This is everything that must be done after a request that doesn't
+ require network I/O. This is based on the `sans-I/O`_ philosophy.
+
+ For the time being, this **does require** some form of I/O to write
+ a chunk to ``stream``. However, this will (almost) certainly not be
+ network I/O.
+
+ Updates the current state after consuming a chunk. First,
+ increments ``bytes_downloaded`` by the number of bytes in the
+ ``content-length`` header.
+
+ If ``total_bytes`` is already set, this assumes (but does not check)
+ that we already have the correct value and doesn't bother to check
+ that it agrees with the headers.
+
+ We expect the **total** length to be in the ``content-range`` header,
+ but this header is only present on requests which sent the ``range``
+ header. This response header should be of the form
+ ``bytes {start}-{end}/{total}`` and ``{end} - {start} + 1``
+ should be the same as the ``Content-Length``.
+
+ Args:
+ response (object): The HTTP response object (need headers).
+
+ Raises:
+ ~google.resumable_media.common.InvalidResponse: If the number
+ of bytes in the body doesn't match the content length header.
+
+ .. _sans-I/O: https://sans-io.readthedocs.io/
+ """
+ # Verify the response before updating the current instance.
+ if _check_for_zero_content_range(
+ response, self._get_status_code, self._get_headers
+ ):
+ self._finished = True
+ return
+
+ _helpers.require_status_code(
+ response,
+ _ACCEPTABLE_STATUS_CODES,
+ self._get_status_code,
+ callback=self._make_invalid,
+ )
+ headers = self._get_headers(response)
+ response_body = self._get_body(response)
+
+ start_byte, end_byte, total_bytes = get_range_info(
+ response, self._get_headers, callback=self._make_invalid
+ )
+
+ transfer_encoding = headers.get(u"transfer-encoding")
+
+ if transfer_encoding is None:
+ content_length = _helpers.header_required(
+ response,
+ u"content-length",
+ self._get_headers,
+ callback=self._make_invalid,
+ )
+ num_bytes = int(content_length)
+ if len(response_body) != num_bytes:
+ self._make_invalid()
+ raise common.InvalidResponse(
+ response,
+ u"Response is different size than content-length",
+ u"Expected",
+ num_bytes,
+ u"Received",
+ len(response_body),
+ )
+ else:
+ # 'content-length' header not allowed with chunked encoding.
+ num_bytes = end_byte - start_byte + 1
+
+ # First update ``bytes_downloaded``.
+ self._bytes_downloaded += num_bytes
+ # If the end byte is past ``end`` or ``total_bytes - 1`` we are done.
+ if self.end is not None and end_byte >= self.end:
+ self._finished = True
+ elif end_byte >= total_bytes - 1:
+ self._finished = True
+ # NOTE: We only use ``total_bytes`` if not already known.
+ if self.total_bytes is None:
+ self._total_bytes = total_bytes
+ # Write the response body to the stream.
+ self._stream.write(response_body)
+
+ def consume_next_chunk(self, transport, timeout=None):
+ """Consume the next chunk of the resource to be downloaded.
+
+ Args:
+ transport (object): An object which can make authenticated
+ requests.
+ timeout (Optional[Union[float, Tuple[float, float]]]):
+ The number of seconds to wait for the server response.
+ Depending on the retry strategy, a request may be repeated
+ several times using the same timeout each time.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ Raises:
+ NotImplementedError: Always, since virtual.
+ """
+ raise NotImplementedError(u"This implementation is virtual.")
+
+
+def add_bytes_range(start, end, headers):
+ """Add a bytes range to a header dictionary.
+
+ Some possible inputs and the corresponding bytes ranges::
+
+ >>> headers = {}
+ >>> add_bytes_range(None, None, headers)
+ >>> headers
+ {}
+ >>> add_bytes_range(500, 999, headers)
+ >>> headers['range']
+ 'bytes=500-999'
+ >>> add_bytes_range(None, 499, headers)
+ >>> headers['range']
+ 'bytes=0-499'
+ >>> add_bytes_range(-500, None, headers)
+ >>> headers['range']
+ 'bytes=-500'
+ >>> add_bytes_range(9500, None, headers)
+ >>> headers['range']
+ 'bytes=9500-'
+
+ Args:
+ start (Optional[int]): The first byte in a range. Can be zero,
+ positive, negative or :data:`None`.
+ end (Optional[int]): The last byte in a range. Assumed to be
+ positive.
+ headers (Mapping[str, str]): A headers mapping which can have the
+ bytes range added if at least one of ``start`` or ``end``
+ is not :data:`None`.
+ """
+ if start is None:
+ if end is None:
+ # No range to add.
+ return
+ else:
+ # NOTE: This assumes ``end`` is non-negative.
+ bytes_range = u"0-{:d}".format(end)
+ else:
+ if end is None:
+ if start < 0:
+ bytes_range = u"{:d}".format(start)
+ else:
+ bytes_range = u"{:d}-".format(start)
+ else:
+ # NOTE: This is invalid if ``start < 0``.
+ bytes_range = u"{:d}-{:d}".format(start, end)
+
+ headers[_helpers.RANGE_HEADER] = u"bytes=" + bytes_range
+
+
+def get_range_info(response, get_headers, callback=_helpers.do_nothing):
+ """Get the start, end and total bytes from a content range header.
+
+ Args:
+ response (object): An HTTP response object.
+ get_headers (Callable[Any, Mapping[str, str]]): Helper to get headers
+ from an HTTP response.
+ callback (Optional[Callable]): A callback that takes no arguments,
+ to be executed when an exception is being raised.
+
+ Returns:
+ Tuple[int, int, int]: The start byte, end byte and total bytes.
+
+ Raises:
+ ~google.resumable_media.common.InvalidResponse: If the
+ ``Content-Range`` header is not of the form
+ ``bytes {start}-{end}/{total}``.
+ """
+ content_range = _helpers.header_required(
+ response, _helpers.CONTENT_RANGE_HEADER, get_headers, callback=callback
+ )
+ match = _CONTENT_RANGE_RE.match(content_range)
+ if match is None:
+ callback()
+ raise common.InvalidResponse(
+ response,
+ u"Unexpected content-range header",
+ content_range,
+ u'Expected to be of the form "bytes {start}-{end}/{total}"',
+ )
+
+ return (
+ int(match.group(u"start_byte")),
+ int(match.group(u"end_byte")),
+ int(match.group(u"total_bytes")),
+ )
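+
+# For instance (illustrative header), a response carrying
+# ``Content-Range: bytes 0-99/5000`` parses to ``(0, 99, 5000)``.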
+
+
+def _check_for_zero_content_range(response, get_status_code, get_headers):
+ """Validate if response status code is 416 and content range is zero.
+
+ This is the special case for handling zero bytes files.
+
+ Args:
+ response (object): An HTTP response object.
+ get_status_code (Callable[Any, int]): Helper to get a status code
+ from a response.
+ get_headers (Callable[Any, Mapping[str, str]]): Helper to get headers
+ from an HTTP response.
+
+ Returns:
+ bool: True if the content range total is zero bytes, False otherwise.
+ """
+ if get_status_code(response) == http_client.REQUESTED_RANGE_NOT_SATISFIABLE:
+ content_range = _helpers.header_required(
+ response,
+ _helpers.CONTENT_RANGE_HEADER,
+ get_headers,
+ callback=_helpers.do_nothing,
+ )
+ if content_range == _ZERO_CONTENT_RANGE_HEADER:
+ return True
+ return False
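+
+
+# A download-loop sketch (``media_url`` and ``transport`` are assumed to
+# exist; the concrete subclasses in ``google.resumable_media.requests``
+# implement ``consume_next_chunk``):
+#
+#     import io
+#
+#     stream = io.BytesIO()
+#     download = ChunkedDownload(media_url, 256 * 1024, stream)
+#     while not download.finished:
+#         download.consume_next_chunk(transport)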
diff --git a/venv/Lib/site-packages/google/resumable_media/_helpers.py b/venv/Lib/site-packages/google/resumable_media/_helpers.py
new file mode 100644
index 000000000..3e4acf2e8
--- /dev/null
+++ b/venv/Lib/site-packages/google/resumable_media/_helpers.py
@@ -0,0 +1,366 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Shared utilities used by both downloads and uploads."""
+
+import base64
+import hashlib
+import logging
+import random
+import time
+import warnings
+
+from six.moves import http_client
+
+from google.resumable_media import common
+
+
+RANGE_HEADER = u"range"
+CONTENT_RANGE_HEADER = u"content-range"
+RETRYABLE = (
+ common.TOO_MANY_REQUESTS,
+ http_client.INTERNAL_SERVER_ERROR,
+ http_client.BAD_GATEWAY,
+ http_client.SERVICE_UNAVAILABLE,
+ http_client.GATEWAY_TIMEOUT,
+)
+
+_SLOW_CRC32C_WARNING = (
+ "Currently using crcmod in pure python form. This is a slow "
+ "implementation. Python 3 has a faster implementation, `google-crc32c`, "
+ "which will be used if it is installed."
+)
+_HASH_HEADER = u"x-goog-hash"
+_MISSING_CHECKSUM = u"""\
+No {checksum_type} checksum was returned from the service while downloading {}
+(which happens for composite objects), so client-side content integrity
+checking is not being performed."""
+_LOGGER = logging.getLogger(__name__)
+
+
+def do_nothing():
+ """Simple default callback."""
+
+
+def header_required(response, name, get_headers, callback=do_nothing):
+ """Checks that a specific header is in a headers dictionary.
+
+ Args:
+ response (object): An HTTP response object, expected to have a
+ ``headers`` attribute that is a ``Mapping[str, str]``.
+ name (str): The name of a required header.
+ get_headers (Callable[Any, Mapping[str, str]]): Helper to get headers
+ from an HTTP response.
+ callback (Optional[Callable]): A callback that takes no arguments,
+ to be executed when an exception is being raised.
+
+ Returns:
+ str: The desired header.
+
+ Raises:
+ ~google.resumable_media.common.InvalidResponse: If the header
+ is missing.
+ """
+ headers = get_headers(response)
+ if name not in headers:
+ callback()
+ raise common.InvalidResponse(
+ response, u"Response headers must contain header", name
+ )
+
+ return headers[name]
+
+
+def require_status_code(response, status_codes, get_status_code, callback=do_nothing):
+ """Require a response has a status code among a list.
+
+ Args:
+ response (object): The HTTP response object.
+ status_codes (tuple): The acceptable status codes.
+ get_status_code (Callable[Any, int]): Helper to get a status code
+ from a response.
+ callback (Optional[Callable]): A callback that takes no arguments,
+ to be executed when an exception is being raised.
+
+ Returns:
+ int: The status code.
+
+ Raises:
+ ~google.resumable_media.common.InvalidResponse: If the status code
+ is not one of the values in ``status_codes``.
+ """
+ status_code = get_status_code(response)
+ if status_code not in status_codes:
+ callback()
+ raise common.InvalidResponse(
+ response,
+ u"Request failed with status code",
+ status_code,
+ u"Expected one of",
+ *status_codes
+ )
+ return status_code
+
+
+def calculate_retry_wait(base_wait, max_sleep):
+ """Calculate the amount of time to wait before a retry attempt.
+
+ Wait time grows exponentially with the number of attempts, until
+ it hits ``max_sleep``.
+
+ A random amount of jitter (between 0 and 1 seconds) is added to spread out
+ retry attempts from different clients.
+
+ Args:
+ base_wait (float): The "base" wait time (i.e. without any jitter)
+ that will be doubled until it reaches the maximum sleep.
+ max_sleep (float): Maximum value that a sleep time is allowed to be.
+
+ Returns:
+ Tuple[float, float]: The new base wait time as well as the wait time
+ to be applied (with a random amount of jitter between 0 and 1 seconds
+ added).
+ """
+ new_base_wait = 2.0 * base_wait
+ if new_base_wait > max_sleep:
+ new_base_wait = max_sleep
+
+ jitter_ms = random.randint(0, 1000)
+ return new_base_wait, new_base_wait + 0.001 * jitter_ms
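+
+# For example (illustrative values, jitter aside): starting from
+# ``base_wait=1.0`` with ``max_sleep=10.0``, successive calls produce base
+# waits of 2.0, 4.0, 8.0, 10.0, 10.0, ..., each plus up to one second of
+# jitter.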
+
+
+def wait_and_retry(func, get_status_code, retry_strategy):
+ """Attempts to retry a call to ``func`` until success.
+
+ Expects ``func`` to return an HTTP response and uses ``get_status_code``
+ to check if the response is retry-able.
+
+ Will retry until :meth:`~.RetryStrategy.retry_allowed` (on the current
+ ``retry_strategy``) returns :data:`False`. Uses
+ :func:`calculate_retry_wait` to double the wait time (with jitter) after
+ each attempt.
+
+ Args:
+ func (Callable): A callable that takes no arguments and produces
+ an HTTP response which will be checked as retry-able.
+ get_status_code (Callable[Any, int]): Helper to get a status code
+ from a response.
+ retry_strategy (~google.resumable_media.common.RetryStrategy): The
+ strategy to use if the request fails and must be retried.
+
+ Returns:
+ object: The return value of ``func``.
+ """
+ response = func()
+ if get_status_code(response) not in RETRYABLE:
+ return response
+
+ total_sleep = 0.0
+ num_retries = 0
+ base_wait = 0.5 # When doubled will give 1.0
+ while retry_strategy.retry_allowed(total_sleep, num_retries):
+ base_wait, wait_time = calculate_retry_wait(base_wait, retry_strategy.max_sleep)
+ num_retries += 1
+ total_sleep += wait_time
+ time.sleep(wait_time)
+ response = func()
+ if get_status_code(response) not in RETRYABLE:
+ return response
+
+ return response
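+
+# A minimal usage sketch (names are illustrative): callers typically wrap
+# an authenticated request in ``functools.partial`` so it can be invoked
+# repeatedly:
+#
+#     import functools
+#
+#     func = functools.partial(transport.request, u"GET", media_url)
+#     response = wait_and_retry(func, get_status_code, common.RetryStrategy())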
+
+
+def _get_crc32c_object():
+ """Get crc32c object
+ Attempt to use the Google-CRC32c package. If it isn't available, try
+ to use CRCMod. CRCMod might be using a 'slow' varietal. If so, warn...
+ """
+ try:
+ import google_crc32c
+
+ crc_obj = google_crc32c.Checksum()
+ except ImportError:
+ try:
+ import crcmod
+
+ crc_obj = crcmod.predefined.Crc("crc-32c")
+ _is_fast_crcmod()
+
+ except ImportError:
+ raise ImportError("Failed to import either `google-crc32c` or `crcmod`")
+
+ return crc_obj
+
+
+def _is_fast_crcmod():
+ # Determine if this is using the slow form of crcmod.
+ nested_crcmod = __import__(
+ "crcmod.crcmod",
+ globals(),
+ locals(),
+ ["_usingExtension"],
+ 0,
+ )
+ fast_crc = getattr(nested_crcmod, "_usingExtension", False)
+ if not fast_crc:
+ warnings.warn(_SLOW_CRC32C_WARNING, RuntimeWarning, stacklevel=2)
+ return fast_crc
+
+
+def _get_metadata_key(checksum_type):
+ if checksum_type == "md5":
+ return "md5Hash"
+ else:
+ return checksum_type
+
+
+def prepare_checksum_digest(digest_bytestring):
+ """Convert a checksum object into a digest encoded for an HTTP header.
+
+ Args:
+ bytes: A checksum digest bytestring.
+
+ Returns:
+ str: A base64 string representation of the input.
+ """
+ encoded_digest = base64.b64encode(digest_bytestring)
+ # NOTE: ``b64encode`` returns ``bytes``, but HTTP headers expect ``str``.
+ return encoded_digest.decode(u"utf-8")
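+
+# For example (illustrative input):
+#
+#     prepare_checksum_digest(hashlib.md5(b"abc").digest())
+#
+# returns ``u"kAFQmDzST7DWlj99KOF/cg=="``.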
+
+
+def _get_expected_checksum(response, get_headers, media_url, checksum_type):
+ """Get the expected checksum and checksum object for the download response.
+
+ Args:
+ response (~requests.Response): The HTTP response object.
+ get_headers (callable: response->dict): returns response headers.
+ media_url (str): The URL containing the media to be downloaded.
+ checksum_type (Optional[str]): The checksum type to read from the headers,
+ exactly as it will appear in the headers (case-sensitive). Must be
+ "md5", "crc32c" or None.
+
+ Returns:
+ Tuple (Optional[str], object): The expected checksum of the response,
+ if it can be detected from the ``X-Goog-Hash`` header, and the
+ appropriate checksum object for the expected checksum.
+ """
+ if checksum_type not in ["md5", "crc32c", None]:
+ raise ValueError("checksum must be ``'md5'``, ``'crc32c'`` or ``None``")
+ elif checksum_type in ["md5", "crc32c"]:
+ headers = get_headers(response)
+ expected_checksum = _parse_checksum_header(
+ headers.get(_HASH_HEADER), response, checksum_label=checksum_type
+ )
+
+ if expected_checksum is None:
+ msg = _MISSING_CHECKSUM.format(
+ media_url, checksum_type=checksum_type.upper()
+ )
+ _LOGGER.info(msg)
+ checksum_object = _DoNothingHash()
+ else:
+ if checksum_type == "md5":
+ checksum_object = hashlib.md5()
+ else:
+ checksum_object = _get_crc32c_object()
+ else:
+ expected_checksum = None
+ checksum_object = _DoNothingHash()
+
+ return (expected_checksum, checksum_object)
+
+
+def _parse_checksum_header(header_value, response, checksum_label):
+ """Parses the checksum header from an ``X-Goog-Hash`` value.
+
+ .. _header reference: https://cloud.google.com/storage/docs/\
+ xml-api/reference-headers#xgooghash
+
+ Expects ``header_value`` (if not :data:`None`) to be in one of the three
+ following formats:
+
+ * ``crc32c=n03x6A==``
+ * ``md5=Ojk9c3dhfxgoKVVHYwFbHQ==``
+ * ``crc32c=n03x6A==,md5=Ojk9c3dhfxgoKVVHYwFbHQ==``
+
+ See the `header reference`_ for more information.
+
+ Args:
+ header_value (Optional[str]): The ``X-Goog-Hash`` header from
+ a download response.
+ response (~requests.Response): The HTTP response object.
+ checksum_label (str): The label of the header value to read, as in the
+ examples above. Typically "md5" or "crc32c"
+
+ Returns:
+ Optional[str]: The expected checksum of the response, if it
+ can be detected from the ``X-Goog-Hash`` header; otherwise, None.
+
+ Raises:
+ ~google.resumable_media.common.InvalidResponse: If there are
+ multiple checksums of the requested type in ``header_value``.
+ """
+ if header_value is None:
+ return None
+
+ matches = []
+ for checksum in header_value.split(u","):
+ name, value = checksum.split(u"=", 1)
+ # Official docs say "," is the separator, but ", " appears in
+ # real-world responses, hence the lstrip() below.
+ if name.lstrip() == checksum_label:
+ matches.append(value)
+
+ if len(matches) == 0:
+ return None
+ elif len(matches) == 1:
+ return matches[0]
+ else:
+ raise common.InvalidResponse(
+ response,
+ u"X-Goog-Hash header had multiple ``{}`` values.".format(checksum_label),
+ header_value,
+ matches,
+ )
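+
+# For example, with the header value from the docstring above:
+#
+#     _parse_checksum_header(
+#         u"crc32c=n03x6A==,md5=Ojk9c3dhfxgoKVVHYwFbHQ==",
+#         response,
+#         checksum_label="md5",
+#     )
+#
+# returns ``u"Ojk9c3dhfxgoKVVHYwFbHQ=="``.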
+
+
+def _get_checksum_object(checksum_type):
+ """Respond with a checksum object for a supported type, if not None.
+
+ Raises ValueError if checksum_type is unsupported.
+ """
+ if checksum_type == "md5":
+ return hashlib.md5()
+ elif checksum_type == "crc32c":
+ return _get_crc32c_object()
+ elif checksum_type is None:
+ return None
+ else:
+ raise ValueError("checksum must be ``'md5'``, ``'crc32c'`` or ``None``")
+
+
+class _DoNothingHash(object):
+ """Do-nothing hash object.
+
+ Intended as a stand-in for ``hashlib.md5`` or a crc32c checksum
+ implementation in cases where it isn't necessary to compute the hash.
+ """
+
+ def update(self, unused_chunk):
+ """Do-nothing ``update`` method.
+
+ Intended to match the interface of ``hashlib.md5`` and other checksums.
+
+ Args:
+ unused_chunk (bytes): A chunk of data.
+ """
diff --git a/venv/Lib/site-packages/google/resumable_media/_upload.py b/venv/Lib/site-packages/google/resumable_media/_upload.py
new file mode 100644
index 000000000..3a98464f7
--- /dev/null
+++ b/venv/Lib/site-packages/google/resumable_media/_upload.py
@@ -0,0 +1,1016 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Virtual bases classes for uploading media via Google APIs.
+
+Supported here are:
+
+* simple (media) uploads
+* multipart uploads that contain both metadata and a small file as payload
+* resumable uploads (with metadata as well)
+"""
+
+
+import json
+import os
+import random
+import re
+import sys
+
+import six
+from six.moves import http_client
+
+from google import resumable_media
+from google.resumable_media import _helpers
+from google.resumable_media import common
+
+
+_CONTENT_TYPE_HEADER = u"content-type"
+_CONTENT_RANGE_TEMPLATE = u"bytes {:d}-{:d}/{:d}"
+_RANGE_UNKNOWN_TEMPLATE = u"bytes {:d}-{:d}/*"
+_EMPTY_RANGE_TEMPLATE = u"bytes */{:d}"
+_BOUNDARY_WIDTH = len(str(sys.maxsize - 1))
+_BOUNDARY_FORMAT = u"==============={{:0{:d}d}}==".format(_BOUNDARY_WIDTH)
+_MULTIPART_SEP = b"--"
+_CRLF = b"\r\n"
+_MULTIPART_BEGIN = b"\r\ncontent-type: application/json; charset=UTF-8\r\n\r\n"
+_RELATED_HEADER = b'multipart/related; boundary="'
+_BYTES_RANGE_RE = re.compile(r"bytes=0-(?P<end_byte>\d+)", flags=re.IGNORECASE)
+_STREAM_ERROR_TEMPLATE = (
+ u"Bytes stream is in unexpected state. "
+ u"The local stream has had {:d} bytes read from it while "
+ u"{:d} bytes have already been updated (they should match)."
+)
+_STREAM_READ_PAST_TEMPLATE = (
+ u"{:d} bytes have been read from the stream, which exceeds "
+ u"the expected total {:d}."
+)
+_POST = u"POST"
+_PUT = u"PUT"
+_UPLOAD_CHECKSUM_MISMATCH_MESSAGE = (
+ "The computed ``{}`` checksum, ``{}``, and the checksum reported by the "
+ "remote host, ``{}``, did not match."
+)
+_UPLOAD_METADATA_NO_APPROPRIATE_CHECKSUM_MESSAGE = (
+ "Response metadata had no ``{}`` value; checksum could not be validated."
+)
+
+
+class UploadBase(object):
+ """Base class for upload helpers.
+
+ Defines core shared behavior across different upload types.
+
+ Args:
+ upload_url (str): The URL where the content will be uploaded.
+ headers (Optional[Mapping[str, str]]): Extra headers that should
+ be sent with the request, e.g. headers for encrypted data.
+
+ Attributes:
+ upload_url (str): The URL where the content will be uploaded.
+ """
+
+ def __init__(self, upload_url, headers=None):
+ self.upload_url = upload_url
+ if headers is None:
+ headers = {}
+ self._headers = headers
+ self._finished = False
+ self._retry_strategy = common.RetryStrategy()
+
+ @property
+ def finished(self):
+ """bool: Flag indicating if the upload has completed."""
+ return self._finished
+
+ def _process_response(self, response):
+ """Process the response from an HTTP request.
+
+ This is everything that must be done after a request that doesn't
+ require network I/O (or other I/O). This is based on the `sans-I/O`_
+ philosophy.
+
+ Args:
+ response (object): The HTTP response object.
+
+ Raises:
+ ~google.resumable_media.common.InvalidResponse: If the status
+ code is not 200.
+
+ .. _sans-I/O: https://sans-io.readthedocs.io/
+ """
+ # Tombstone the current upload so it cannot be used again (in either
+ # failure or success).
+ self._finished = True
+ _helpers.require_status_code(response, (http_client.OK,), self._get_status_code)
+
+ @staticmethod
+ def _get_status_code(response):
+ """Access the status code from an HTTP response.
+
+ Args:
+ response (object): The HTTP response object.
+
+ Raises:
+ NotImplementedError: Always, since virtual.
+ """
+ raise NotImplementedError(u"This implementation is virtual.")
+
+ @staticmethod
+ def _get_headers(response):
+ """Access the headers from an HTTP response.
+
+ Args:
+ response (object): The HTTP response object.
+
+ Raises:
+ NotImplementedError: Always, since virtual.
+ """
+ raise NotImplementedError(u"This implementation is virtual.")
+
+ @staticmethod
+ def _get_body(response):
+ """Access the response body from an HTTP response.
+
+ Args:
+ response (object): The HTTP response object.
+
+ Raises:
+ NotImplementedError: Always, since virtual.
+ """
+ raise NotImplementedError(u"This implementation is virtual.")
+
+
+class SimpleUpload(UploadBase):
+ """Upload a resource to a Google API.
+
+ A **simple** media upload sends no metadata and completes the upload
+ in a single request.
+
+ Args:
+ upload_url (str): The URL where the content will be uploaded.
+ headers (Optional[Mapping[str, str]]): Extra headers that should
+ be sent with the request, e.g. headers for encrypted data.
+
+ Attributes:
+ upload_url (str): The URL where the content will be uploaded.
+ """
+
+ def _prepare_request(self, data, content_type):
+ """Prepare the contents of an HTTP request.
+
+ This is everything that must be done before a request that doesn't
+ require network I/O (or other I/O). This is based on the `sans-I/O`_
+ philosophy.
+
+ .. note::
+
+ This method will be used only once, so ``headers`` will be
+ mutated by having a new key added to it.
+
+ Args:
+ data (bytes): The resource content to be uploaded.
+ content_type (str): The content type for the request.
+
+ Returns:
+ Tuple[str, str, bytes, Mapping[str, str]]: The quadruple
+
+ * HTTP verb for the request (always POST)
+ * the URL for the request
+ * the body of the request
+ * headers for the request
+
+ Raises:
+ ValueError: If the current upload has already finished.
+ TypeError: If ``data`` isn't bytes.
+
+ .. _sans-I/O: https://sans-io.readthedocs.io/
+ """
+ if self.finished:
+ raise ValueError(u"An upload can only be used once.")
+
+ if not isinstance(data, six.binary_type):
+ raise TypeError(u"`data` must be bytes, received", type(data))
+ self._headers[_CONTENT_TYPE_HEADER] = content_type
+ return _POST, self.upload_url, data, self._headers
+
+ def transmit(self, transport, data, content_type, timeout=None):
+ """Transmit the resource to be uploaded.
+
+ Args:
+ transport (object): An object which can make authenticated
+ requests.
+ data (bytes): The resource content to be uploaded.
+ content_type (str): The content type of the resource, e.g. a JPEG
+ image has content type ``image/jpeg``.
+ timeout (Optional[Union[float, Tuple[float, float]]]):
+ The number of seconds to wait for the server response.
+ Depending on the retry strategy, a request may be repeated
+ several times using the same timeout each time.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ Raises:
+ NotImplementedError: Always, since virtual.
+ """
+ raise NotImplementedError(u"This implementation is virtual.")
+
+
+class MultipartUpload(UploadBase):
+ """Upload a resource with metadata to a Google API.
+
+ A **multipart** upload sends both metadata and the resource in a single
+ (multipart) request.
+
+ Args:
+ upload_url (str): The URL where the content will be uploaded.
+ headers (Optional[Mapping[str, str]]): Extra headers that should
+ be sent with the request, e.g. headers for encrypted data.
+ checksum (Optional[str]): The type of checksum to compute to verify
+ the integrity of the object. The request metadata will be amended
+ to include the computed value. Using this option will override a
+ manually-set checksum value. Supported values are "md5", "crc32c"
+ and None. The default is None.
+
+ Attributes:
+ upload_url (str): The URL where the content will be uploaded.
+ """
+
+ def __init__(self, upload_url, headers=None, checksum=None):
+ super(MultipartUpload, self).__init__(upload_url, headers=headers)
+ self._checksum_type = checksum
+
+ def _prepare_request(self, data, metadata, content_type):
+ """Prepare the contents of an HTTP request.
+
+ This is everything that must be done before a request that doesn't
+ require network I/O (or other I/O). This is based on the `sans-I/O`_
+ philosophy.
+
+ .. note::
+
+ This method will be used only once, so ``headers`` will be
+ mutated by having a new key added to it.
+
+ Args:
+ data (bytes): The resource content to be uploaded.
+ metadata (Mapping[str, str]): The resource metadata, such as an
+ ACL list.
+ content_type (str): The content type of the resource, e.g. a JPEG
+ image has content type ``image/jpeg``.
+
+ Returns:
+ Tuple[str, str, bytes, Mapping[str, str]]: The quadruple
+
+ * HTTP verb for the request (always POST)
+ * the URL for the request
+ * the body of the request
+ * headers for the request
+
+ Raises:
+ ValueError: If the current upload has already finished.
+ TypeError: If ``data`` isn't bytes.
+
+ .. _sans-I/O: https://sans-io.readthedocs.io/
+ """
+ if self.finished:
+ raise ValueError(u"An upload can only be used once.")
+
+ if not isinstance(data, six.binary_type):
+ raise TypeError(u"`data` must be bytes, received", type(data))
+
+ checksum_object = _helpers._get_checksum_object(self._checksum_type)
+ if checksum_object:
+ checksum_object.update(data)
+ actual_checksum = _helpers.prepare_checksum_digest(checksum_object.digest())
+ metadata_key = _helpers._get_metadata_key(self._checksum_type)
+ metadata[metadata_key] = actual_checksum
+
+ content, multipart_boundary = construct_multipart_request(
+ data, metadata, content_type
+ )
+ multipart_content_type = _RELATED_HEADER + multipart_boundary + b'"'
+ self._headers[_CONTENT_TYPE_HEADER] = multipart_content_type
+
+ return _POST, self.upload_url, content, self._headers
+
+ def transmit(self, transport, data, metadata, content_type, timeout=None):
+ """Transmit the resource to be uploaded.
+
+ Args:
+ transport (object): An object which can make authenticated
+ requests.
+ data (bytes): The resource content to be uploaded.
+ metadata (Mapping[str, str]): The resource metadata, such as an
+ ACL list.
+ content_type (str): The content type of the resource, e.g. a JPEG
+ image has content type ``image/jpeg``.
+ timeout (Optional[Union[float, Tuple[float, float]]]):
+ The number of seconds to wait for the server response.
+ Depending on the retry strategy, a request may be repeated
+ several times using the same timeout each time.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ Raises:
+ NotImplementedError: Always, since virtual.
+ """
+ raise NotImplementedError(u"This implementation is virtual.")
+
+
+class ResumableUpload(UploadBase):
+ """Initiate and fulfill a resumable upload to a Google API.
+
+ A **resumable** upload sends an initial request with the resource metadata
+ and then gets assigned an upload ID / upload URL to send bytes to.
+ Using the upload URL, the upload is then done in chunks (determined by
+ the user) until all bytes have been uploaded.
+
+ Args:
+ upload_url (str): The URL where the resumable upload will be initiated.
+ chunk_size (int): The size of each chunk used to upload the resource.
+ headers (Optional[Mapping[str, str]]): Extra headers that should
+ be sent with the :meth:`initiate` request, e.g. headers for
+ encrypted data. These **will not** be sent with
+ :meth:`transmit_next_chunk` or :meth:`recover` requests.
+ checksum (Optional[str]): The type of checksum to compute to verify
+ the integrity of the object. After the upload is complete, the
+ server-computed checksum of the resulting object will be read
+ and google.resumable_media.common.DataCorruption will be raised on
+ a mismatch. The corrupted file will not be deleted from the remote
+ host automatically. Supported values are "md5", "crc32c" and None.
+ The default is None.
+
+ Attributes:
+ upload_url (str): The URL where the content will be uploaded.
+
+ Raises:
+ ValueError: If ``chunk_size`` is not a multiple of
+ :data:`.UPLOAD_CHUNK_SIZE`.
+ """
+
+ def __init__(self, upload_url, chunk_size, checksum=None, headers=None):
+ super(ResumableUpload, self).__init__(upload_url, headers=headers)
+ if chunk_size % resumable_media.UPLOAD_CHUNK_SIZE != 0:
+ raise ValueError(
+ u"{} KB must divide chunk size".format(
+ resumable_media.UPLOAD_CHUNK_SIZE / 1024
+ )
+ )
+ self._chunk_size = chunk_size
+ self._stream = None
+ self._content_type = None
+ self._bytes_uploaded = 0
+ self._bytes_checksummed = 0
+ self._checksum_type = checksum
+ self._checksum_object = None
+ self._total_bytes = None
+ self._resumable_url = None
+ self._invalid = False
+
+ @property
+ def invalid(self):
+ """bool: Indicates if the upload is in an invalid state.
+
+ This will occur if a call to :meth:`transmit_next_chunk` fails.
+ To recover from such a failure, call :meth:`recover`.
+ """
+ return self._invalid
+
+ @property
+ def chunk_size(self):
+ """int: The size of each chunk used to upload the resource."""
+ return self._chunk_size
+
+ @property
+ def resumable_url(self):
+ """Optional[str]: The URL of the in-progress resumable upload."""
+ return self._resumable_url
+
+ @property
+ def bytes_uploaded(self):
+ """int: Number of bytes that have been uploaded."""
+ return self._bytes_uploaded
+
+ @property
+ def total_bytes(self):
+ """Optional[int]: The total number of bytes to be uploaded.
+
+ If this upload is initiated (via :meth:`initiate`) with
+ ``stream_final=True``, this value will be populated based on the size
+ of the ``stream`` being uploaded. (By default ``stream_final=True``.)
+
+ If this upload is initiated with ``stream_final=False``,
+ :attr:`total_bytes` will be :data:`None` since it cannot be
+ determined from the stream.
+ """
+ return self._total_bytes
+
+ def _prepare_initiate_request(
+ self, stream, metadata, content_type, total_bytes=None, stream_final=True
+ ):
+ """Prepare the contents of HTTP request to initiate upload.
+
+ This is everything that must be done before a request that doesn't
+ require network I/O (or other I/O). This is based on the `sans-I/O`_
+ philosophy.
+
+ Args:
+ stream (IO[bytes]): The stream (i.e. file-like object) that will
+ be uploaded. The stream **must** be at the beginning (i.e.
+ ``stream.tell() == 0``).
+ metadata (Mapping[str, str]): The resource metadata, such as an
+ ACL list.
+ content_type (str): The content type of the resource, e.g. a JPEG
+ image has content type ``image/jpeg``.
+ total_bytes (Optional[int]): The total number of bytes to be
+ uploaded. If specified, the upload size **will not** be
+ determined from the stream (even if ``stream_final=True``).
+ stream_final (Optional[bool]): Indicates if the ``stream`` is
+ "final" (i.e. no more bytes will be added to it). In this case
+ we determine the upload size from the size of the stream. If
+ ``total_bytes`` is passed, this argument will be ignored.
+
+ Returns:
+ Tuple[str, str, bytes, Mapping[str, str]]: The quadruple
+
+ * HTTP verb for the request (always POST)
+ * the URL for the request
+ * the body of the request
+ * headers for the request
+
+ Raises:
+ ValueError: If the current upload has already been initiated.
+ ValueError: If ``stream`` is not at the beginning.
+
+ .. _sans-I/O: https://sans-io.readthedocs.io/
+ """
+ if self.resumable_url is not None:
+ raise ValueError(u"This upload has already been initiated.")
+ if stream.tell() != 0:
+ raise ValueError(u"Stream must be at beginning.")
+
+ self._stream = stream
+ self._content_type = content_type
+ headers = {
+ _CONTENT_TYPE_HEADER: u"application/json; charset=UTF-8",
+ u"x-upload-content-type": content_type,
+ }
+ # Set the total bytes if possible.
+ if total_bytes is not None:
+ self._total_bytes = total_bytes
+ elif stream_final:
+ self._total_bytes = get_total_bytes(stream)
+ # Add the total bytes to the headers if set.
+ if self._total_bytes is not None:
+ content_length = u"{:d}".format(self._total_bytes)
+ headers[u"x-upload-content-length"] = content_length
+
+ headers.update(self._headers)
+ payload = json.dumps(metadata).encode(u"utf-8")
+ return _POST, self.upload_url, payload, headers
+
+ def _process_initiate_response(self, response):
+ """Process the response from an HTTP request that initiated upload.
+
+ This is everything that must be done after a request that doesn't
+ require network I/O (or other I/O). This is based on the `sans-I/O`_
+ philosophy.
+
+ This method takes the URL from the ``Location`` header and stores it
+ for future use. Within that URL, we assume the ``upload_id`` query
+ parameter has been included, but we do not check.
+
+ Args:
+ response (object): The HTTP response object (need headers).
+
+ .. _sans-I/O: https://sans-io.readthedocs.io/
+ """
+ _helpers.require_status_code(
+ response,
+ (http_client.OK, http_client.CREATED),
+ self._get_status_code,
+ callback=self._make_invalid,
+ )
+ self._resumable_url = _helpers.header_required(
+ response, u"location", self._get_headers
+ )
+
+ def initiate(
+ self,
+ transport,
+ stream,
+ metadata,
+ content_type,
+ total_bytes=None,
+ stream_final=True,
+ timeout=None,
+ ):
+ """Initiate a resumable upload.
+
+ By default, this method assumes your ``stream`` is in a "final"
+ state ready to transmit. However, ``stream_final=False`` can be used
+ to indicate that the size of the resource is not known. This can happen
+ if bytes are being dynamically fed into ``stream``, e.g. if the stream
+ is attached to application logs.
+
+ If ``stream_final=False`` is used, :attr:`chunk_size` bytes will be
+ read from the stream every time :meth:`transmit_next_chunk` is called.
+ If one of those reads produces strictly fewer bytes than the chunk
+ size, the upload will be concluded.
+
+ Args:
+ transport (object): An object which can make authenticated
+ requests.
+ stream (IO[bytes]): The stream (i.e. file-like object) that will
+ be uploaded. The stream **must** be at the beginning (i.e.
+ ``stream.tell() == 0``).
+ metadata (Mapping[str, str]): The resource metadata, such as an
+ ACL list.
+ content_type (str): The content type of the resource, e.g. a JPEG
+ image has content type ``image/jpeg``.
+ total_bytes (Optional[int]): The total number of bytes to be
+ uploaded. If specified, the upload size **will not** be
+ determined from the stream (even if ``stream_final=True``).
+ stream_final (Optional[bool]): Indicates if the ``stream`` is
+ "final" (i.e. no more bytes will be added to it). In this case
+ we determine the upload size from the size of the stream. If
+ ``total_bytes`` is passed, this argument will be ignored.
+ timeout (Optional[Union[float, Tuple[float, float]]]):
+ The number of seconds to wait for the server response.
+ Depending on the retry strategy, a request may be repeated
+ several times using the same timeout each time.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ Raises:
+ NotImplementedError: Always, since virtual.
+ """
+ raise NotImplementedError(u"This implementation is virtual.")
+
+ def _prepare_request(self):
+ """Prepare the contents of HTTP request to upload a chunk.
+
+ This is everything that must be done before a request that doesn't
+ require network I/O. This is based on the `sans-I/O`_ philosophy.
+
+ For the time being, this **does require** some form of I/O to read
+ a chunk from ``stream`` (via :func:`get_next_chunk`). However, this
+ will (almost) certainly not be network I/O.
+
+ Returns:
+ Tuple[str, str, bytes, Mapping[str, str]]: The quadruple
+
+ * HTTP verb for the request (always PUT)
+ * the URL for the request
+ * the body of the request
+ * headers for the request
+
+ The headers **do not** incorporate the ``_headers`` on the
+ current instance.
+
+ Raises:
+ ValueError: If the current upload has finished.
+ ValueError: If the current upload is in an invalid state.
+ ValueError: If the current upload has not been initiated.
+ ValueError: If the location in the stream (i.e. ``stream.tell()``)
+ does not agree with ``bytes_uploaded``.
+
+ .. _sans-I/O: https://sans-io.readthedocs.io/
+ """
+ if self.finished:
+ raise ValueError(u"Upload has finished.")
+ if self.invalid:
+ raise ValueError(
+ u"Upload is in an invalid state. To recover call `recover()`."
+ )
+ if self.resumable_url is None:
+ raise ValueError(
+ u"This upload has not been initiated. Please call "
+ u"initiate() before beginning to transmit chunks."
+ )
+
+ start_byte, payload, content_range = get_next_chunk(
+ self._stream, self._chunk_size, self._total_bytes
+ )
+ if start_byte != self.bytes_uploaded:
+ msg = _STREAM_ERROR_TEMPLATE.format(start_byte, self.bytes_uploaded)
+ raise ValueError(msg)
+
+ self._update_checksum(start_byte, payload)
+
+ headers = {
+ _CONTENT_TYPE_HEADER: self._content_type,
+ _helpers.CONTENT_RANGE_HEADER: content_range,
+ }
+ return _PUT, self.resumable_url, payload, headers
+
+ def _update_checksum(self, start_byte, payload):
+ """Update the checksum with the payload if not already updated.
+
+ Because error recovery can result in bytes being transmitted more than
+ once, the checksum tracks the number of bytes checked in
+ self._bytes_checksummed and skips bytes that have already been summed.
+ """
+ if not self._checksum_type:
+ return
+
+ if not self._checksum_object:
+ self._checksum_object = _helpers._get_checksum_object(self._checksum_type)
+
+ if start_byte < self._bytes_checksummed:
+ offset = self._bytes_checksummed - start_byte
+ data = payload[offset:]
+ else:
+ data = payload
+
+ self._checksum_object.update(data)
+ self._bytes_checksummed += len(data)
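+
+ # Example (illustrative numbers): if 1,000 bytes have already been
+ # checksummed and a retry re-sends a chunk starting at byte 800, only
+ # ``payload[200:]`` is fed to the checksum, so no byte is counted twice.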
+
+ def _make_invalid(self):
+ """Simple setter for ``invalid``.
+
+ This is intended to be passed along as a callback to helpers that
+ raise an exception so they can mark this instance as invalid before
+ raising.
+ """
+ self._invalid = True
+
+ def _process_response(self, response, bytes_sent):
+ """Process the response from an HTTP request.
+
+ This is everything that must be done after a request that doesn't
+ require network I/O (or other I/O). This is based on the `sans-I/O`_
+ philosophy.
+
+ Args:
+ response (object): The HTTP response object.
+ bytes_sent (int): The number of bytes sent in the request that
+ ``response`` was returned for.
+
+ Raises:
+ ~google.resumable_media.common.InvalidResponse: If the status
+ code is 308 and the ``range`` header is not of the form
+ ``bytes 0-{end}``.
+ ~google.resumable_media.common.InvalidResponse: If the status
+ code is not 200 or 308.
+
+ .. _sans-I/O: https://sans-io.readthedocs.io/
+ """
+ status_code = _helpers.require_status_code(
+ response,
+ (http_client.OK, resumable_media.PERMANENT_REDIRECT),
+ self._get_status_code,
+ callback=self._make_invalid,
+ )
+ if status_code == http_client.OK:
+ # NOTE: We use the "local" information of ``bytes_sent`` to update
+ # ``bytes_uploaded``, but do not verify this against other
+ # state. However, there may be some other information:
+ #
+ # * a ``size`` key in JSON response body
+ # * the ``total_bytes`` attribute (if set)
+ # * ``stream.tell()`` (relying on fact that ``initiate()``
+ # requires stream to be at the beginning)
+ self._bytes_uploaded = self._bytes_uploaded + bytes_sent
+ # Tombstone the current upload so it cannot be used again.
+ self._finished = True
+ # Validate the checksum. This can raise an exception on failure.
+ self._validate_checksum(response)
+ else:
+ bytes_range = _helpers.header_required(
+ response,
+ _helpers.RANGE_HEADER,
+ self._get_headers,
+ callback=self._make_invalid,
+ )
+ match = _BYTES_RANGE_RE.match(bytes_range)
+ if match is None:
+ self._make_invalid()
+ raise common.InvalidResponse(
+ response,
+ u'Unexpected "range" header',
+ bytes_range,
+ u'Expected to be of the form "bytes=0-{end}"',
+ )
+ self._bytes_uploaded = int(match.group(u"end_byte")) + 1
+
+ def _validate_checksum(self, response):
+ """Check the computed checksum, if any, against the response headers.
+
+ Args:
+ response (object): The HTTP response object.
+
+ Raises:
+ ~google.resumable_media.common.DataCorruption: If the checksum
+ computed locally and the checksum reported by the remote host do
+ not match.
+ """
+ if self._checksum_type is None:
+ return
+ metadata_key = _helpers._get_metadata_key(self._checksum_type)
+ metadata = response.json()
+ remote_checksum = metadata.get(metadata_key)
+ if remote_checksum is None:
+ raise common.InvalidResponse(
+ response,
+ _UPLOAD_METADATA_NO_APPROPRIATE_CHECKSUM_MESSAGE.format(metadata_key),
+ self._get_headers(response),
+ )
+ local_checksum = _helpers.prepare_checksum_digest(
+ self._checksum_object.digest()
+ )
+ if local_checksum != remote_checksum:
+ raise common.DataCorruption(
+ response,
+ _UPLOAD_CHECKSUM_MISMATCH_MESSAGE.format(
+ self._checksum_type.upper(), local_checksum, remote_checksum
+ ),
+ )
+
+ def transmit_next_chunk(self, transport, timeout=None):
+ """Transmit the next chunk of the resource to be uploaded.
+
+ If the current upload was initiated with ``stream_final=False``,
+ this method will dynamically determine if the upload has completed.
+ The upload will be considered complete if the stream produces
+ fewer than :attr:`chunk_size` bytes when a chunk is read from it.
+
+ Args:
+ transport (object): An object which can make authenticated
+ requests.
+ timeout (Optional[Union[float, Tuple[float, float]]]):
+ The number of seconds to wait for the server response.
+ Depending on the retry strategy, a request may be repeated
+ several times using the same timeout each time.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ Raises:
+ NotImplementedError: Always, since virtual.
+ """
+ raise NotImplementedError(u"This implementation is virtual.")
+
+ def _prepare_recover_request(self):
+ """Prepare the contents of HTTP request to recover from failure.
+
+ This is everything that must be done before a request that doesn't
+ require network I/O. This is based on the `sans-I/O`_ philosophy.
+
+ We assume that the :attr:`resumable_url` is set (i.e. the only way
+ the upload can end up :attr:`invalid` is if it has been initiated).
+
+ Returns:
+ Tuple[str, str, NoneType, Mapping[str, str]]: The quadruple
+
+ * HTTP verb for the request (always PUT)
+ * the URL for the request
+ * the body of the request (always :data:`None`)
+ * headers for the request
+
+ The headers **do not** incorporate the ``_headers`` on the
+ current instance.
+
+ Raises:
+ ValueError: If the current upload is not in an invalid state.
+
+ .. _sans-I/O: https://sans-io.readthedocs.io/
+ """
+ if not self.invalid:
+ raise ValueError(u"Upload is not in invalid state, no need to recover.")
+
+ headers = {_helpers.CONTENT_RANGE_HEADER: u"bytes */*"}
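+ # i.e. the prepared recovery request is always of the shape
+ # (URL hypothetical):
+ #   ('PUT', 'http://test.invalid?upload_id=mocked', None,
+ #    {'content-range': 'bytes */*'})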
+ return _PUT, self.resumable_url, None, headers
+
+ def _process_recover_response(self, response):
+ """Process the response from an HTTP request to recover from failure.
+
+ This is everything that must be done after a request that doesn't
+ require network I/O (or other I/O). This is based on the `sans-I/O`_
+ philosophy.
+
+ Args:
+ response (object): The HTTP response object.
+
+ Raises:
+ ~google.resumable_media.common.InvalidResponse: If the status
+ code is not 308.
+ ~google.resumable_media.common.InvalidResponse: If the status
+ code is 308 and the ``range`` header is not of the form
+ ``bytes=0-{end}``.
+
+ .. _sans-I/O: https://sans-io.readthedocs.io/
+ """
+ _helpers.require_status_code(
+ response, (resumable_media.PERMANENT_REDIRECT,), self._get_status_code
+ )
+ headers = self._get_headers(response)
+ if _helpers.RANGE_HEADER in headers:
+ bytes_range = headers[_helpers.RANGE_HEADER]
+ match = _BYTES_RANGE_RE.match(bytes_range)
+ if match is None:
+ raise common.InvalidResponse(
+ response,
+ u'Unexpected "range" header',
+ bytes_range,
+ u'Expected to be of the form "bytes=0-{end}"',
+ )
+ self._bytes_uploaded = int(match.group(u"end_byte")) + 1
+ else:
+ # In this case, the upload has not "begun".
+ self._bytes_uploaded = 0
+
+ self._stream.seek(self._bytes_uploaded)
+ self._invalid = False
+
+ def recover(self, transport):
+ """Recover from a failure.
+
+ This method should be used when a :class:`ResumableUpload` is in an
+ :attr:`~ResumableUpload.invalid` state due to a request failure.
+
+ This will verify the progress with the server and make sure the
+ current upload is in a valid state before :meth:`transmit_next_chunk`
+ can be used again.
+
+ Args:
+ transport (object): An object which can make authenticated
+ requests.
+
+ Raises:
+ NotImplementedError: Always, since virtual.
+ """
+ raise NotImplementedError(u"This implementation is virtual.")
+
+
+def get_boundary():
+ """Get a random boundary for a multipart request.
+
+ Returns:
+ bytes: The boundary used to separate parts of a multipart request.
+ """
+ random_int = random.randrange(sys.maxsize)
+ boundary = _BOUNDARY_FORMAT.format(random_int)
+ # NOTE: Neither % formatting nor .format() is available for byte strings
+ # in Python 3.4, so we must use unicode strings as templates.
+ return boundary.encode(u"utf-8")
+
+
+def construct_multipart_request(data, metadata, content_type):
+ """Construct a multipart request body.
+
+ Args:
+ data (bytes): The resource content (UTF-8 encoded as bytes)
+ to be uploaded.
+ metadata (Mapping[str, str]): The resource metadata, such as an
+ ACL list.
+ content_type (str): The content type of the resource, e.g. a JPEG
+ image has content type ``image/jpeg``.
+
+ Returns:
+ Tuple[bytes, bytes]: The multipart request body and the boundary used
+ between each part.
+ """
+ multipart_boundary = get_boundary()
+ json_bytes = json.dumps(metadata).encode(u"utf-8")
+ content_type = content_type.encode(u"utf-8")
+ # Combine the two parts into a multipart payload.
+ # NOTE: We'd prefer a bytes template but are restricted by Python 3.4.
+ boundary_sep = _MULTIPART_SEP + multipart_boundary
+ content = (
+ boundary_sep
+ + _MULTIPART_BEGIN
+ + json_bytes
+ + _CRLF
+ + boundary_sep
+ + _CRLF
+ + b"content-type: "
+ + content_type
+ + _CRLF
+ + _CRLF
+ + data # Empty line between headers and body.
+ + _CRLF
+ + boundary_sep
+ + _MULTIPART_SEP
+ )
+
+ return content, multipart_boundary
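+
+
+# Rough shape of the assembled body (illustrative sketch; the boundary
+# digits are random and each CRLF is shown as a line break):
+#
+#   --==============={boundary}==
+#   content-type: application/json; charset=UTF-8
+#
+#   {"name": "file.txt"}
+#   --==============={boundary}==
+#   content-type: text/plain
+#
+#   <data bytes>
+#   --==============={boundary}==--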
+
+
+def get_total_bytes(stream):
+ """Determine the total number of bytes in a stream.
+
+ Args:
+ stream (IO[bytes]): The stream (i.e. file-like object).
+
+ Returns:
+ int: The number of bytes.
+ """
+ current_position = stream.tell()
+ # NOTE: ``.seek()`` **should** return the same value that ``.tell()``
+ # returns, but in Python 2, ``file`` objects do not.
+ stream.seek(0, os.SEEK_END)
+ end_position = stream.tell()
+ # Go back to the initial position.
+ stream.seek(current_position)
+
+ return end_position
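+
+
+# For example (sketch): ``get_total_bytes(io.BytesIO(b'abcde'))`` returns 5
+# and leaves the stream position where it found it.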
+
+
+def get_next_chunk(stream, chunk_size, total_bytes):
+ """Get a chunk from an I/O stream.
+
+ The ``stream`` may have fewer bytes remaining than ``chunk_size``
+ so it may not always be the case that
+ ``end_byte == start_byte + chunk_size - 1``.
+
+ Args:
+ stream (IO[bytes]): The stream (i.e. file-like object).
+ chunk_size (int): The size of the chunk to be read from the ``stream``.
+ total_bytes (Optional[int]): The (expected) total number of bytes
+ in the ``stream``.
+
+ Returns:
+ Tuple[int, bytes, str]: Triple of:
+
+ * the start byte index
+ * the content in between the start and end bytes (inclusive)
+ * content range header for the chunk (slice) that has been read
+
+ Raises:
+ ValueError: If ``total_bytes == 0`` but ``stream.read()`` yields
+ non-empty content.
+ ValueError: If there is no data left to consume. This corresponds
+ exactly to the case ``end_byte < start_byte``, which can only
+ occur if ``end_byte == start_byte - 1``.
+ """
+ start_byte = stream.tell()
+ if total_bytes is not None and start_byte + chunk_size >= total_bytes > 0:
+ payload = stream.read(total_bytes - start_byte)
+ else:
+ payload = stream.read(chunk_size)
+ end_byte = stream.tell() - 1
+
+ num_bytes_read = len(payload)
+ if total_bytes is None:
+ if num_bytes_read < chunk_size:
+ # We now **KNOW** the total number of bytes.
+ total_bytes = end_byte + 1
+ elif total_bytes == 0:
+ # NOTE: We also expect ``start_byte == 0`` here but don't check
+ # because ``_prepare_initiate_request()`` requires the
+ # stream to be at the beginning.
+ if num_bytes_read != 0:
+ raise ValueError(
+ u"Stream specified as empty, but produced non-empty content."
+ )
+ else:
+ if num_bytes_read == 0:
+ raise ValueError(
+ u"Stream is already exhausted. There is no content remaining."
+ )
+
+ content_range = get_content_range(start_byte, end_byte, total_bytes)
+ return start_byte, payload, content_range
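+
+
+# Illustrative calls (hypothetical stream contents; assumes ``import io``):
+#
+#   stream = io.BytesIO(b'abcdefgh')  # 8 bytes, position 0
+#   get_next_chunk(stream, 5, 8)      # -> (0, b'abcde', 'bytes 0-4/8')
+#   get_next_chunk(stream, 5, 8)      # -> (5, b'fgh', 'bytes 5-7/8')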
+
+
+def get_content_range(start_byte, end_byte, total_bytes):
+ """Convert start, end and total into content range header.
+
+ If ``total_bytes`` is not known, uses "bytes {start}-{end}/*".
+ If we are dealing with an empty range (i.e. ``end_byte < start_byte``)
+ then "bytes */{total}" is used.
+
+ This function **ASSUMES** that if the size is not known, the caller will
+ not also pass an empty range.
+
+ Args:
+ start_byte (int): The start (inclusive) of the byte range.
+ end_byte (int): The end (inclusive) of the byte range.
+ total_bytes (Optional[int]): The number of bytes in the byte
+ range (if known).
+
+ Returns:
+ str: The content range header.
+ """
+ if total_bytes is None:
+ return _RANGE_UNKNOWN_TEMPLATE.format(start_byte, end_byte)
+ elif end_byte < start_byte:
+ return _EMPTY_RANGE_TEMPLATE.format(total_bytes)
+ else:
+ return _CONTENT_RANGE_TEMPLATE.format(start_byte, end_byte, total_bytes)
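+
+
+# Sketch of the three cases (values are hypothetical):
+#
+#   get_content_range(0, 99, 100)      # -> 'bytes 0-99/100'   (known size)
+#   get_content_range(100, 199, None)  # -> 'bytes 100-199/*'  (unknown size)
+#   get_content_range(0, -1, 0)        # -> 'bytes */0'        (empty range)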
diff --git a/venv/Lib/site-packages/google/resumable_media/common.py b/venv/Lib/site-packages/google/resumable_media/common.py
new file mode 100644
index 000000000..a30d8eb1e
--- /dev/null
+++ b/venv/Lib/site-packages/google/resumable_media/common.py
@@ -0,0 +1,148 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Common utilities for Google Media Downloads and Resumable Uploads.
+
+Includes custom exception types, useful constants and shared helpers.
+"""
+
+
+_SLEEP_RETRY_ERROR_MSG = (
+ u"At most one of `max_cumulative_retry` and `max_retries` " u"can be specified."
+)
+
+UPLOAD_CHUNK_SIZE = 262144 # 256 * 1024
+"""int: Chunks in a resumable upload must come in multiples of 256 KB."""
+PERMANENT_REDIRECT = 308
+"""int: Permanent redirect status code.
+
+It is used by Google services to indicate that some (but not all) of
+a resumable upload has been completed.
+
+``http.client.PERMANENT_REDIRECT`` was added in Python 3.5, so it
+can't be used in a "general" code base.
+
+For more information, see `RFC 7238`_.
+
+.. _RFC 7238: https://tools.ietf.org/html/rfc7238
+"""
+TOO_MANY_REQUESTS = 429
+"""int: Status code indicating rate-limiting.
+
+``http.client.TOO_MANY_REQUESTS`` was added in Python 3.3, so it
+can't be used in a "general" code base.
+
+For more information, see `RFC 6585`_.
+
+.. _RFC 6585: https://tools.ietf.org/html/rfc6585#section-4
+"""
+MAX_SLEEP = 64.0
+"""float: Maximum amount of time allowed between requests.
+
+Used during the retry process for sleep after a failed request.
+Chosen since it is the power of two nearest to one minute.
+"""
+MAX_CUMULATIVE_RETRY = 600.0
+"""float: Maximum total sleep time allowed during retry process.
+
+This default is 10 minutes: once the cumulative sleep
+exceeds this limit, no more retries will occur.
+"""
+
+
+class InvalidResponse(Exception):
+ """Error class for responses which are not in the correct state.
+
+ Args:
+ response (object): The HTTP response which caused the failure.
+ args (tuple): The positional arguments typically passed to an
+ exception class.
+ """
+
+ def __init__(self, response, *args):
+ super(InvalidResponse, self).__init__(*args)
+ self.response = response
+ """object: The HTTP response object that caused the failure."""
+
+
+class DataCorruption(Exception):
+ """Error class for corrupt media transfers.
+
+ Args:
+ response (object): The HTTP response which caused the failure.
+ args (tuple): The positional arguments typically passed to an
+ exception class.
+ """
+
+ def __init__(self, response, *args):
+ super(DataCorruption, self).__init__(*args)
+ self.response = response
+ """object: The HTTP response object that caused the failure."""
+
+
+class RetryStrategy(object):
+ """Configuration class for retrying failed requests.
+
+ At most one of ``max_cumulative_retry`` and ``max_retries`` can be
+ specified (each caps the retry process: the former by total sleep time,
+ the latter by the number of retries). If neither is specified, then
+ ``max_cumulative_retry`` is set to :data:`MAX_CUMULATIVE_RETRY`.
+
+ Args:
+ max_sleep (Optional[float]): The maximum amount of time to sleep after
+ a failed request. Default is :attr:`MAX_SLEEP`.
+ max_cumulative_retry (Optional[float]): The maximum **total** amount of
+ time to sleep during retry process.
+ max_retries (Optional[int]): The number of retries to attempt.
+
+ Attributes:
+ max_sleep (float): Maximum amount of time allowed between requests.
+ max_cumulative_retry (Optional[float]): Maximum total sleep time
+ allowed during retry process.
+ max_retries (Optional[int]): The number of retries to attempt.
+
+ Raises:
+ ValueError: If both of ``max_cumulative_retry`` and ``max_retries``
+ are passed.
+ """
+
+ def __init__(
+ self, max_sleep=MAX_SLEEP, max_cumulative_retry=None, max_retries=None
+ ):
+ if max_cumulative_retry is not None and max_retries is not None:
+ raise ValueError(_SLEEP_RETRY_ERROR_MSG)
+ if max_cumulative_retry is None and max_retries is None:
+ max_cumulative_retry = MAX_CUMULATIVE_RETRY
+
+ self.max_sleep = max_sleep
+ self.max_cumulative_retry = max_cumulative_retry
+ self.max_retries = max_retries
+
+ def retry_allowed(self, total_sleep, num_retries):
+ """Check if another retry is allowed.
+
+ Args:
+ total_sleep (float): The amount of sleep accumulated by the caller.
+ num_retries (int): The number of retries already attempted by
+ the caller.
+
+ Returns:
+ bool: Indicating if another retry is allowed (depending on either
+ the cumulative sleep allowed or the maximum number of retries
+ allowed).
+ """
+ if self.max_cumulative_retry is None:
+ return num_retries <= self.max_retries
+ else:
+ return total_sleep <= self.max_cumulative_retry
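+
+
+# Minimal usage sketch (hypothetical numbers):
+#
+#   RetryStrategy().retry_allowed(120.0, 5)               # True: 120s <= 600s
+#   RetryStrategy(max_retries=3).retry_allowed(120.0, 5)  # False: 5 > 3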
diff --git a/venv/Lib/site-packages/google/resumable_media/requests/__init__.py b/venv/Lib/site-packages/google/resumable_media/requests/__init__.py
new file mode 100644
index 000000000..11ac95847
--- /dev/null
+++ b/venv/Lib/site-packages/google/resumable_media/requests/__init__.py
@@ -0,0 +1,678 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""``requests`` utilities for Google Media Downloads and Resumable Uploads.
+
+This sub-package assumes callers will use the `requests`_ library
+as transport and `google-auth`_ for sending authenticated HTTP traffic
+with ``requests``.
+
+.. _requests: http://docs.python-requests.org/
+.. _google-auth: https://google-auth.readthedocs.io/
+
+====================
+Authorized Transport
+====================
+
+To use ``google-auth`` and ``requests`` to create an authorized transport
+that has read-only access to Google Cloud Storage (GCS):
+
+.. testsetup:: get-credentials
+
+ import google.auth
+ import google.auth.credentials as creds_mod
+ import mock
+
+ def mock_default(scopes=None):
+ credentials = mock.Mock(spec=creds_mod.Credentials)
+ return credentials, u'mock-project'
+
+ # Patch the ``default`` function on the module.
+ original_default = google.auth.default
+ google.auth.default = mock_default
+
+.. doctest:: get-credentials
+
+ >>> import google.auth
+ >>> import google.auth.transport.requests as tr_requests
+ >>>
+ >>> ro_scope = u'https://www.googleapis.com/auth/devstorage.read_only'
+ >>> credentials, _ = google.auth.default(scopes=(ro_scope,))
+ >>> transport = tr_requests.AuthorizedSession(credentials)
+ >>> transport
+ <google.auth.transport.requests.AuthorizedSession object at 0x...>
+
+.. testcleanup:: get-credentials
+
+ # Put back the correct ``default`` function on the module.
+ google.auth.default = original_default
+
+================
+Simple Downloads
+================
+
+To download an object from Google Cloud Storage, construct the media URL
+for the GCS object and download it with an authorized transport that has
+access to the resource:
+
+.. testsetup:: basic-download
+
+ import mock
+ import requests
+ from six.moves import http_client
+
+ bucket = u'bucket-foo'
+ blob_name = u'file.txt'
+
+ fake_response = requests.Response()
+ fake_response.status_code = int(http_client.OK)
+ fake_response.headers[u'Content-Length'] = u'1364156'
+ fake_content = mock.MagicMock(spec=['__len__'])
+ fake_content.__len__.return_value = 1364156
+ fake_response._content = fake_content
+
+ get_method = mock.Mock(return_value=fake_response, spec=[])
+ transport = mock.Mock(request=get_method, spec=['request'])
+
+.. doctest:: basic-download
+
+ >>> from google.resumable_media.requests import Download
+ >>>
+ >>> url_template = (
+ ... u'https://www.googleapis.com/download/storage/v1/b/'
+ ... u'{bucket}/o/{blob_name}?alt=media')
+ >>> media_url = url_template.format(
+ ... bucket=bucket, blob_name=blob_name)
+ >>>
+ >>> download = Download(media_url)
+ >>> response = download.consume(transport)
+ >>> download.finished
+ True
+ >>> response
+ <Response [200]>
+ >>> response.headers[u'Content-Length']
+ '1364156'
+ >>> len(response.content)
+ 1364156
+
+To download only a portion of the bytes in the object,
+specify ``start`` and ``end`` byte positions (both optional):
+
+.. testsetup:: basic-download-with-slice
+
+ import mock
+ import requests
+ from six.moves import http_client
+
+ from google.resumable_media.requests import Download
+
+ media_url = u'http://test.invalid'
+ start = 4096
+ end = 8191
+ slice_size = end - start + 1
+
+ fake_response = requests.Response()
+ fake_response.status_code = int(http_client.PARTIAL_CONTENT)
+ fake_response.headers[u'Content-Length'] = u'{:d}'.format(slice_size)
+ content_range = u'bytes {:d}-{:d}/1364156'.format(start, end)
+ fake_response.headers[u'Content-Range'] = content_range
+ fake_content = mock.MagicMock(spec=['__len__'])
+ fake_content.__len__.return_value = slice_size
+ fake_response._content = fake_content
+
+ get_method = mock.Mock(return_value=fake_response, spec=[])
+ transport = mock.Mock(request=get_method, spec=['request'])
+
+.. doctest:: basic-download-with-slice
+
+ >>> download = Download(media_url, start=4096, end=8191)
+ >>> response = download.consume(transport)
+ >>> download.finished
+ True
+ >>> response
+ <Response [206]>
+ >>> response.headers[u'Content-Length']
+ '4096'
+ >>> response.headers[u'Content-Range']
+ 'bytes 4096-8191/1364156'
+ >>> len(response.content)
+ 4096
+
+=================
+Chunked Downloads
+=================
+
+For very large objects or objects of unknown size, it may make more sense
+to download the object in chunks rather than all at once. This can be done
+to avoid dropped connections with a poor internet connection or can allow
+multiple chunks to be downloaded in parallel to speed up the total
+download.
+
+A :class:`.ChunkedDownload` uses the same media URL and authorized
+transport that a basic :class:`.Download` would use, but also
+requires a chunk size and a write-able byte ``stream``. The chunk size is used
+ to determine how much of the resource to consume with each request and the
+stream is to allow the resource to be written out (e.g. to disk) without
+having to fit in memory all at once.
+
+.. testsetup:: chunked-download
+
+ import io
+
+ import mock
+ import requests
+ from six.moves import http_client
+
+ media_url = u'http://test.invalid'
+
+ fifty_mb = 50 * 1024 * 1024
+ one_gb = 1024 * 1024 * 1024
+ fake_response = requests.Response()
+ fake_response.status_code = int(http_client.PARTIAL_CONTENT)
+ fake_response.headers[u'Content-Length'] = u'{:d}'.format(fifty_mb)
+ content_range = u'bytes 0-{:d}/{:d}'.format(fifty_mb - 1, one_gb)
+ fake_response.headers[u'Content-Range'] = content_range
+ fake_content_begin = b'The beginning of the chunk...'
+ fake_content = fake_content_begin + b'1' * (fifty_mb - 29)
+ fake_response._content = fake_content
+
+ get_method = mock.Mock(return_value=fake_response, spec=[])
+ transport = mock.Mock(request=get_method, spec=['request'])
+
+.. doctest:: chunked-download
+
+ >>> from google.resumable_media.requests import ChunkedDownload
+ >>>
+ >>> chunk_size = 50 * 1024 * 1024 # 50MB
+ >>> stream = io.BytesIO()
+ >>> download = ChunkedDownload(
+ ... media_url, chunk_size, stream)
+ >>> # Check the state of the download before starting.
+ >>> download.bytes_downloaded
+ 0
+ >>> download.total_bytes is None
+ True
+ >>> response = download.consume_next_chunk(transport)
+ >>> # Check the state of the download after consuming one chunk.
+ >>> download.finished
+ False
+ >>> download.bytes_downloaded # chunk_size
+ 52428800
+ >>> download.total_bytes # 1GB
+ 1073741824
+ >>> response
+ <Response [206]>
+ >>> response.headers[u'Content-Length']
+ '52428800'
+ >>> response.headers[u'Content-Range']
+ 'bytes 0-52428799/1073741824'
+ >>> len(response.content) == chunk_size
+ True
+ >>> stream.seek(0)
+ 0
+ >>> stream.read(29)
+ b'The beginning of the chunk...'
+
+The download will change its ``finished`` status to :data:`True`
+once the final chunk is consumed. In some cases, the final chunk may
+not be the same size as the other chunks:
+
+.. testsetup:: chunked-download-end
+
+ import mock
+ import requests
+ from six.moves import http_client
+
+ from google.resumable_media.requests import ChunkedDownload
+
+ media_url = u'http://test.invalid'
+
+ fifty_mb = 50 * 1024 * 1024
+ one_gb = 1024 * 1024 * 1024
+ stream = mock.Mock(spec=['write'])
+ download = ChunkedDownload(media_url, fifty_mb, stream)
+ download._bytes_downloaded = 20 * fifty_mb
+ download._total_bytes = one_gb
+
+ fake_response = requests.Response()
+ fake_response.status_code = int(http_client.PARTIAL_CONTENT)
+ slice_size = one_gb - 20 * fifty_mb
+ fake_response.headers[u'Content-Length'] = u'{:d}'.format(slice_size)
+ content_range = u'bytes {:d}-{:d}/{:d}'.format(
+ 20 * fifty_mb, one_gb - 1, one_gb)
+ fake_response.headers[u'Content-Range'] = content_range
+ fake_content = mock.MagicMock(spec=['__len__'])
+ fake_content.__len__.return_value = slice_size
+ fake_response._content = fake_content
+
+ get_method = mock.Mock(return_value=fake_response, spec=[])
+ transport = mock.Mock(request=get_method, spec=['request'])
+
+.. doctest:: chunked-download-end
+
+ >>> # The state of the download in progress.
+ >>> download.finished
+ False
+ >>> download.bytes_downloaded # 20 chunks at 50MB
+ 1048576000
+ >>> download.total_bytes # 1GB
+ 1073741824
+ >>> response = download.consume_next_chunk(transport)
+ >>> # The state of the download after consuming the final chunk.
+ >>> download.finished
+ True
+ >>> download.bytes_downloaded == download.total_bytes
+ True
+ >>> response
+ <Response [206]>
+ >>> response.headers[u'Content-Length']
+ '25165824'
+ >>> response.headers[u'Content-Range']
+ 'bytes 1048576000-1073741823/1073741824'
+ >>> len(response.content) < download.chunk_size
+ True
+
+In addition, a :class:`.ChunkedDownload` can take optional
+``start`` and ``end`` byte positions.
+
+==============
+Simple Uploads
+==============
+
+Among the three supported upload classes, the simplest is
+:class:`.SimpleUpload`. A simple upload should be used when the resource
+being uploaded is small and when there is no metadata (other than the name)
+associated with the resource.
+
+.. testsetup:: simple-upload
+
+ import json
+
+ import mock
+ import requests
+ from six.moves import http_client
+
+ bucket = u'some-bucket'
+ blob_name = u'file.txt'
+
+ fake_response = requests.Response()
+ fake_response.status_code = int(http_client.OK)
+ payload = {
+ u'bucket': bucket,
+ u'contentType': u'text/plain',
+ u'md5Hash': u'M0XLEsX9/sMdiI+4pB4CAQ==',
+ u'name': blob_name,
+ u'size': u'27',
+ }
+ fake_response._content = json.dumps(payload).encode(u'utf-8')
+
+ post_method = mock.Mock(return_value=fake_response, spec=[])
+ transport = mock.Mock(request=post_method, spec=['request'])
+
+.. doctest:: simple-upload
+ :options: +NORMALIZE_WHITESPACE
+
+ >>> from google.resumable_media.requests import SimpleUpload
+ >>>
+ >>> url_template = (
+ ... u'https://www.googleapis.com/upload/storage/v1/b/{bucket}/o?'
+ ... u'uploadType=media&'
+ ... u'name={blob_name}')
+ >>> upload_url = url_template.format(
+ ... bucket=bucket, blob_name=blob_name)
+ >>>
+ >>> upload = SimpleUpload(upload_url)
+ >>> data = b'Some not too large content.'
+ >>> content_type = u'text/plain'
+ >>> response = upload.transmit(transport, data, content_type)
+ >>> upload.finished
+ True
+ >>> response
+ <Response [200]>
+ >>> json_response = response.json()
+ >>> json_response[u'bucket'] == bucket
+ True
+ >>> json_response[u'name'] == blob_name
+ True
+ >>> json_response[u'contentType'] == content_type
+ True
+ >>> json_response[u'md5Hash']
+ 'M0XLEsX9/sMdiI+4pB4CAQ=='
+ >>> int(json_response[u'size']) == len(data)
+ True
+
+In the rare case that an upload fails, an :exc:`.InvalidResponse`
+will be raised:
+
+.. testsetup:: simple-upload-fail
+
+ import time
+
+ import mock
+ import requests
+ from six.moves import http_client
+
+ from google import resumable_media
+ from google.resumable_media import _helpers
+ from google.resumable_media.requests import SimpleUpload as constructor
+
+ upload_url = u'http://test.invalid'
+ data = b'Some not too large content.'
+ content_type = u'text/plain'
+
+ fake_response = requests.Response()
+ fake_response.status_code = int(http_client.SERVICE_UNAVAILABLE)
+
+ post_method = mock.Mock(return_value=fake_response, spec=[])
+ transport = mock.Mock(request=post_method, spec=['request'])
+
+ time_sleep = time.sleep
+ def dont_sleep(seconds):
+ raise RuntimeError(u'No sleep', seconds)
+
+ def SimpleUpload(*args, **kwargs):
+ upload = constructor(*args, **kwargs)
+ # Mock the cumulative sleep to avoid retries (and `time.sleep()`).
+ upload._retry_strategy = resumable_media.RetryStrategy(
+ max_cumulative_retry=-1.0)
+ return upload
+
+ time.sleep = dont_sleep
+
+.. doctest:: simple-upload-fail
+ :options: +NORMALIZE_WHITESPACE
+
+ >>> upload = SimpleUpload(upload_url)
+ >>> error = None
+ >>> try:
+ ... upload.transmit(transport, data, content_type)
+ ... except resumable_media.InvalidResponse as caught_exc:
+ ... error = caught_exc
+ ...
+ >>> error
+ InvalidResponse('Request failed with status code', 503,
+ 'Expected one of', <HTTPStatus.OK: 200>)
+ >>> error.response
+ <Response [503]>
+ >>>
+ >>> upload.finished
+ True
+
+.. testcleanup:: simple-upload-fail
+
+ # Put back the correct ``sleep`` function on the ``time`` module.
+ time.sleep = time_sleep
+
+Even in the case of failure, we see that the upload is
+:attr:`~.SimpleUpload.finished`, i.e. it cannot be re-used.
+
+=================
+Multipart Uploads
+=================
+
+After the simple upload, the :class:`.MultipartUpload` can be used to
+achieve essentially the same task. However, a multipart upload allows some
+metadata about the resource to be sent along as well. (This is the "multi":
+we send a first part with the metadata and a second part with the actual
+bytes in the resource.)
+
+Usage is similar to the simple upload, but :meth:`~.MultipartUpload.transmit`
+accepts an extra required argument: ``metadata``.
+
+.. testsetup:: multipart-upload
+
+ import json
+
+ import mock
+ import requests
+ from six.moves import http_client
+
+ bucket = u'some-bucket'
+ blob_name = u'file.txt'
+ data = b'Some not too large content.'
+ content_type = u'text/plain'
+
+ fake_response = requests.Response()
+ fake_response.status_code = int(http_client.OK)
+ payload = {
+ u'bucket': bucket,
+ u'name': blob_name,
+ u'metadata': {u'color': u'grurple'},
+ }
+ fake_response._content = json.dumps(payload).encode(u'utf-8')
+
+ post_method = mock.Mock(return_value=fake_response, spec=[])
+ transport = mock.Mock(request=post_method, spec=['request'])
+
+.. doctest:: multipart-upload
+
+ >>> from google.resumable_media.requests import MultipartUpload
+ >>>
+ >>> url_template = (
+ ... u'https://www.googleapis.com/upload/storage/v1/b/{bucket}/o?'
+ ... u'uploadType=multipart')
+ >>> upload_url = url_template.format(bucket=bucket)
+ >>>
+ >>> upload = MultipartUpload(upload_url)
+ >>> metadata = {
+ ... u'name': blob_name,
+ ... u'metadata': {
+ ... u'color': u'grurple',
+ ... },
+ ... }
+ >>> response = upload.transmit(transport, data, metadata, content_type)
+ >>> upload.finished
+ True
+ >>> response
+ <Response [200]>
+ >>> json_response = response.json()
+ >>> json_response[u'bucket'] == bucket
+ True
+ >>> json_response[u'name'] == blob_name
+ True
+ >>> json_response[u'metadata'] == metadata[u'metadata']
+ True
+
+As with the simple upload, in the case of failure an :exc:`.InvalidResponse`
+is raised, enclosing the :attr:`~.InvalidResponse.response` that caused
+the failure and the ``upload`` object cannot be re-used after a failure.
+
+=================
+Resumable Uploads
+=================
+
+A :class:`.ResumableUpload` deviates from the other two upload classes:
+it transmits a resource over the course of multiple requests. This
+is intended to be used in cases where:
+
+* the size of the resource is not known (i.e. it is generated on the fly)
+* requests must be short-lived
+* the client has request **size** limitations
+* the resource is too large to fit into memory
+
+In general, a resource should be sent in a **single** request to avoid
+latency and reduce QPS. See `GCS best practices`_ for more things to
+consider when using a resumable upload.
+
+.. _GCS best practices: https://cloud.google.com/storage/docs/\
+ best-practices#uploading
+
+After creating a :class:`.ResumableUpload` instance, a
+**resumable upload session** must be initiated to let the server know that
+a series of chunked upload requests will be coming and to obtain an
+``upload_id`` for the session. In contrast to the other two upload classes,
+:meth:`~.ResumableUpload.initiate` takes a byte ``stream`` as input rather
+than raw bytes as ``data``. This can be a file object, a :class:`~io.BytesIO`
+object or any other stream implementing the same interface.
+
+.. testsetup:: resumable-initiate
+
+ import io
+
+ import mock
+ import requests
+ from six.moves import http_client
+
+ bucket = u'some-bucket'
+ blob_name = u'file.txt'
+ data = b'Some resumable bytes.'
+ content_type = u'text/plain'
+
+ fake_response = requests.Response()
+ fake_response.status_code = int(http_client.OK)
+ fake_response._content = b''
+ upload_id = u'ABCdef189XY_super_serious'
+ resumable_url_template = (
+ u'https://www.googleapis.com/upload/storage/v1/b/{bucket}'
+ u'/o?uploadType=resumable&upload_id={upload_id}')
+ resumable_url = resumable_url_template.format(
+ bucket=bucket, upload_id=upload_id)
+ fake_response.headers[u'location'] = resumable_url
+ fake_response.headers[u'x-guploader-uploadid'] = upload_id
+
+ post_method = mock.Mock(return_value=fake_response, spec=[])
+ transport = mock.Mock(request=post_method, spec=['request'])
+
+.. doctest:: resumable-initiate
+
+ >>> from google.resumable_media.requests import ResumableUpload
+ >>>
+ >>> url_template = (
+ ... u'https://www.googleapis.com/upload/storage/v1/b/{bucket}/o?'
+ ... u'uploadType=resumable')
+ >>> upload_url = url_template.format(bucket=bucket)
+ >>>
+ >>> chunk_size = 1024 * 1024 # 1MB
+ >>> upload = ResumableUpload(upload_url, chunk_size)
+ >>> stream = io.BytesIO(data)
+ >>> # The upload doesn't know how "big" it is until seeing a stream.
+ >>> upload.total_bytes is None
+ True
+ >>> metadata = {u'name': blob_name}
+ >>> response = upload.initiate(transport, stream, metadata, content_type)
+ >>> response
+ <Response [200]>
+ >>> upload.resumable_url == response.headers[u'Location']
+ True
+ >>> upload.total_bytes == len(data)
+ True
+ >>> upload_id = response.headers[u'X-GUploader-UploadID']
+ >>> upload_id
+ 'ABCdef189XY_super_serious'
+ >>> upload.resumable_url == upload_url + u'&upload_id=' + upload_id
+ True
+
+Once a :class:`.ResumableUpload` has been initiated, the resource is
+transmitted in chunks until completion:
+
+.. testsetup:: resumable-transmit
+
+ import io
+ import json
+
+ import mock
+ import requests
+ from six.moves import http_client
+
+ from google import resumable_media
+ import google.resumable_media.requests.upload as upload_mod
+
+ data = b'01234567891'
+ stream = io.BytesIO(data)
+ # Create an "already initiated" upload.
+ upload_url = u'http://test.invalid'
+ chunk_size = 256 * 1024 # 256KB
+ upload = upload_mod.ResumableUpload(upload_url, chunk_size)
+ upload._resumable_url = u'http://test.invalid?upload_id=mocked'
+ upload._stream = stream
+ upload._content_type = u'text/plain'
+ upload._total_bytes = len(data)
+
+ # After-the-fact update the chunk size so that len(data)
+ # is split into three.
+ upload._chunk_size = 4
+ # Make three fake responses.
+ fake_response0 = requests.Response()
+ fake_response0.status_code = resumable_media.PERMANENT_REDIRECT
+ fake_response0.headers[u'range'] = u'bytes=0-3'
+
+ fake_response1 = requests.Response()
+ fake_response1.status_code = resumable_media.PERMANENT_REDIRECT
+ fake_response1.headers[u'range'] = u'bytes=0-7'
+
+ fake_response2 = requests.Response()
+ fake_response2.status_code = int(http_client.OK)
+ bucket = u'some-bucket'
+ blob_name = u'file.txt'
+ payload = {
+ u'bucket': bucket,
+ u'name': blob_name,
+ u'size': u'{:d}'.format(len(data)),
+ }
+ fake_response2._content = json.dumps(payload).encode(u'utf-8')
+
+ # Use the fake responses to mock a transport.
+ responses = [fake_response0, fake_response1, fake_response2]
+ put_method = mock.Mock(side_effect=responses, spec=[])
+ transport = mock.Mock(request=put_method, spec=['request'])
+
+.. doctest:: resumable-transmit
+
+ >>> response0 = upload.transmit_next_chunk(transport)
+ >>> response0
+ <Response [308]>
+ >>> upload.finished
+ False
+ >>> upload.bytes_uploaded == upload.chunk_size
+ True
+ >>>
+ >>> response1 = upload.transmit_next_chunk(transport)
+ >>> response1
+ <Response [308]>
+ >>> upload.finished
+ False
+ >>> upload.bytes_uploaded == 2 * upload.chunk_size
+ True
+ >>>
+ >>> response2 = upload.transmit_next_chunk(transport)
+ >>> response2
+ <Response [200]>
+ >>> upload.finished
+ True
+ >>> upload.bytes_uploaded == upload.total_bytes
+ True
+ >>> json_response = response2.json()
+ >>> json_response[u'bucket'] == bucket
+ True
+ >>> json_response[u'name'] == blob_name
+ True
+"""
+from google.resumable_media.requests.download import ChunkedDownload
+from google.resumable_media.requests.download import Download
+from google.resumable_media.requests.download import RawChunkedDownload
+from google.resumable_media.requests.download import RawDownload
+from google.resumable_media.requests.upload import MultipartUpload
+from google.resumable_media.requests.upload import ResumableUpload
+from google.resumable_media.requests.upload import SimpleUpload
+
+
+__all__ = [
+ u"ChunkedDownload",
+ u"Download",
+ u"MultipartUpload",
+ u"RawChunkedDownload",
+ u"RawDownload",
+ u"ResumableUpload",
+ u"SimpleUpload",
+]
diff --git a/venv/Lib/site-packages/google/resumable_media/requests/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/google/resumable_media/requests/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..d99b16eb7
Binary files /dev/null and b/venv/Lib/site-packages/google/resumable_media/requests/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/resumable_media/requests/__pycache__/_request_helpers.cpython-36.pyc b/venv/Lib/site-packages/google/resumable_media/requests/__pycache__/_request_helpers.cpython-36.pyc
new file mode 100644
index 000000000..9d4ac5a2c
Binary files /dev/null and b/venv/Lib/site-packages/google/resumable_media/requests/__pycache__/_request_helpers.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/resumable_media/requests/__pycache__/download.cpython-36.pyc b/venv/Lib/site-packages/google/resumable_media/requests/__pycache__/download.cpython-36.pyc
new file mode 100644
index 000000000..aee793209
Binary files /dev/null and b/venv/Lib/site-packages/google/resumable_media/requests/__pycache__/download.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/resumable_media/requests/__pycache__/upload.cpython-36.pyc b/venv/Lib/site-packages/google/resumable_media/requests/__pycache__/upload.cpython-36.pyc
new file mode 100644
index 000000000..611fa3e8d
Binary files /dev/null and b/venv/Lib/site-packages/google/resumable_media/requests/__pycache__/upload.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google/resumable_media/requests/_request_helpers.py b/venv/Lib/site-packages/google/resumable_media/requests/_request_helpers.py
new file mode 100644
index 000000000..2f0fab3ac
--- /dev/null
+++ b/venv/Lib/site-packages/google/resumable_media/requests/_request_helpers.py
@@ -0,0 +1,136 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Shared utilities used by both downloads and uploads.
+
+These utilities are explicitly catered to ``requests``-like transports.
+"""
+
+
+import functools
+
+from google.resumable_media import _helpers
+from google.resumable_media import common
+
+
+_DEFAULT_RETRY_STRATEGY = common.RetryStrategy()
+_SINGLE_GET_CHUNK_SIZE = 8192
+# The number of seconds to wait to establish a connection
+# (connect() call on socket). Avoid setting this to a multiple of 3 so as
+# not to align with TCP retransmission timing (typically 2.5-3s).
+_DEFAULT_CONNECT_TIMEOUT = 61
+# The number of seconds to wait between bytes sent from the server.
+_DEFAULT_READ_TIMEOUT = 60
+
+
+class RequestsMixin(object):
+ """Mix-in class implementing ``requests``-specific behavior.
+
+ These are methods that are more general purpose, with implementations
+ specific to the types defined in ``requests``.
+ """
+
+ @staticmethod
+ def _get_status_code(response):
+ """Access the status code from an HTTP response.
+
+ Args:
+ response (~requests.Response): The HTTP response object.
+
+ Returns:
+ int: The status code.
+ """
+ return response.status_code
+
+ @staticmethod
+ def _get_headers(response):
+ """Access the headers from an HTTP response.
+
+ Args:
+ response (~requests.Response): The HTTP response object.
+
+ Returns:
+ ~requests.structures.CaseInsensitiveDict: The header mapping (keys
+ are case-insensitive).
+ """
+ return response.headers
+
+ @staticmethod
+ def _get_body(response):
+ """Access the response body from an HTTP response.
+
+ Args:
+ response (~requests.Response): The HTTP response object.
+
+ Returns:
+ bytes: The body of the ``response``.
+ """
+ return response.content
+
+
+class RawRequestsMixin(RequestsMixin):
+ @staticmethod
+ def _get_body(response):
+ """Access the response body from an HTTP response.
+
+ Args:
+ response (~requests.Response): The HTTP response object.
+
+ Returns:
+ bytes: The body of the ``response``.
+ """
+ if response._content is False:
+ response._content = b"".join(
+ response.raw.stream(_SINGLE_GET_CHUNK_SIZE, decode_content=False)
+ )
+ response._content_consumed = True
+ return response._content
+
+
+def http_request(
+ transport,
+ method,
+ url,
+ data=None,
+ headers=None,
+ retry_strategy=_DEFAULT_RETRY_STRATEGY,
+ **transport_kwargs
+):
+ """Make an HTTP request.
+
+ Args:
+ transport (~requests.Session): A ``requests`` object which can make
+ authenticated requests via a ``request()`` method. This method
+ must accept an HTTP method, an upload URL, a ``data`` keyword
+ argument and a ``headers`` keyword argument.
+ method (str): The HTTP method for the request.
+ url (str): The URL for the request.
+ data (Optional[bytes]): The body of the request.
+ headers (Mapping[str, str]): The headers for the request (``transport``
+ may also add additional headers).
+ retry_strategy (~google.resumable_media.common.RetryStrategy): The
+ strategy to use if the request fails and must be retried.
+ transport_kwargs (Dict[str, str]): Extra keyword arguments to be
+ passed along to ``transport.request``.
+
+ Returns:
+ ~requests.Response: The return value of ``transport.request()``.
+ """
+ if "timeout" not in transport_kwargs:
+ transport_kwargs["timeout"] = (_DEFAULT_CONNECT_TIMEOUT, _DEFAULT_READ_TIMEOUT)
+
+ func = functools.partial(
+ transport.request, method, url, data=data, headers=headers, **transport_kwargs
+ )
+ return _helpers.wait_and_retry(func, RequestsMixin._get_status_code, retry_strategy)
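+
+
+# Minimal usage sketch (hypothetical URL; assumes ``credentials`` were
+# obtained via ``google.auth.default()``):
+#
+#   import google.auth.transport.requests as tr_requests
+#   transport = tr_requests.AuthorizedSession(credentials)
+#   response = http_request(transport, u'GET', u'http://test.invalid')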
diff --git a/venv/Lib/site-packages/google/resumable_media/requests/download.py b/venv/Lib/site-packages/google/resumable_media/requests/download.py
new file mode 100644
index 000000000..d44fb93a9
--- /dev/null
+++ b/venv/Lib/site-packages/google/resumable_media/requests/download.py
@@ -0,0 +1,501 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Support for downloading media from Google APIs."""
+
+import urllib3.response
+
+from google.resumable_media import _download
+from google.resumable_media import common
+from google.resumable_media import _helpers
+from google.resumable_media.requests import _request_helpers
+
+
+_CHECKSUM_MISMATCH = u"""\
+Checksum mismatch while downloading:
+
+ {}
+
+The X-Goog-Hash header indicated an {checksum_type} checksum of:
+
+ {}
+
+but the actual {checksum_type} checksum of the downloaded contents was:
+
+ {}
+"""
+
+
+class Download(_request_helpers.RequestsMixin, _download.Download):
+ """Helper to manage downloading a resource from a Google API.
+
+ "Slices" of the resource can be retrieved by specifying a range
+ with ``start`` and / or ``end``. However, in typical usage, neither
+ ``start`` nor ``end`` is expected to be provided.
+
+ Args:
+ media_url (str): The URL containing the media to be downloaded.
+ stream (IO[bytes]): A write-able stream (i.e. file-like object) that
+ the downloaded resource can be written to.
+ start (int): The first byte in a range to be downloaded. If not
+ provided, but ``end`` is provided, will download from the
+ beginning to ``end`` of the media.
+ end (int): The last byte in a range to be downloaded. If not
+ provided, but ``start`` is provided, will download from the
+ ``start`` to the end of the media.
+ headers (Optional[Mapping[str, str]]): Extra headers that should
+ be sent with the request, e.g. headers for encrypted data.
+ checksum (Optional[str]): The type of checksum to compute to verify
+ the integrity of the object. The response headers must contain
+ a checksum of the requested type. If the headers lack an
+ appropriate checksum (for instance in the case of transcoded or
+ ranged downloads where the remote service does not know the
+ correct checksum) an INFO-level log will be emitted. Supported
+ values are "md5", "crc32c" and None. The default is "md5".
+
+ Attributes:
+ media_url (str): The URL containing the media to be downloaded.
+ start (Optional[int]): The first byte in a range to be downloaded.
+ end (Optional[int]): The last byte in a range to be downloaded.
+ """
+
+ def _write_to_stream(self, response):
+ """Write response body to a write-able stream.
+
+ .. note:
+
+ This method assumes that the ``_stream`` attribute is set on the
+ current download.
+
+ Args:
+ response (~requests.Response): The HTTP response object.
+
+ Raises:
+ ~google.resumable_media.common.DataCorruption: If the download's
+ checksum doesn't agree with server-computed checksum.
+ """
+
+ # `_get_expected_checksum()` may return None even if a checksum was
+ # requested, in which case it will emit an info log _MISSING_CHECKSUM.
+ # If an invalid checksum type is specified, this will raise ValueError.
+ expected_checksum, checksum_object = _helpers._get_expected_checksum(
+ response, self._get_headers, self.media_url, checksum_type=self.checksum
+ )
+
+ with response:
+ # NOTE: In order to handle compressed streams gracefully, we try
+ # to insert our checksum object into the decompression stream. If
+ # the stream is indeed compressed, this will delegate the checksum
+ # object to the decoder and return a _DoNothingHash here.
+ local_checksum_object = _add_decoder(response.raw, checksum_object)
+ body_iter = response.iter_content(
+ chunk_size=_request_helpers._SINGLE_GET_CHUNK_SIZE, decode_unicode=False
+ )
+ for chunk in body_iter:
+ self._stream.write(chunk)
+ local_checksum_object.update(chunk)
+
+ if expected_checksum is None:
+ return
+ else:
+ actual_checksum = _helpers.prepare_checksum_digest(checksum_object.digest())
+ if actual_checksum != expected_checksum:
+ msg = _CHECKSUM_MISMATCH.format(
+ self.media_url,
+ expected_checksum,
+ actual_checksum,
+ checksum_type=self.checksum.upper(),
+ )
+ raise common.DataCorruption(response, msg)
+
+ def consume(
+ self,
+ transport,
+ timeout=(
+ _request_helpers._DEFAULT_CONNECT_TIMEOUT,
+ _request_helpers._DEFAULT_READ_TIMEOUT,
+ ),
+ ):
+ """Consume the resource to be downloaded.
+
+ If a ``stream`` is attached to this download, then the downloaded
+ resource will be written to the stream.
+
+ Args:
+ transport (~requests.Session): A ``requests`` object which can
+ make authenticated requests.
+ timeout (Optional[Union[float, Tuple[float, float]]]):
+ The number of seconds to wait for the server response.
+ Depending on the retry strategy, a request may be repeated
+ several times using the same timeout each time.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ Returns:
+ ~requests.Response: The HTTP response returned by ``transport``.
+
+ Raises:
+ ~google.resumable_media.common.DataCorruption: If the download's
+ checksum doesn't agree with server-computed checksum.
+ ValueError: If the current :class:`Download` has already
+ finished.
+ """
+ method, url, payload, headers = self._prepare_request()
+ # NOTE: We assume "payload is None" but pass it along anyway.
+ request_kwargs = {
+ u"data": payload,
+ u"headers": headers,
+ u"retry_strategy": self._retry_strategy,
+ u"timeout": timeout,
+ }
+ if self._stream is not None:
+ request_kwargs[u"stream"] = True
+
+ result = _request_helpers.http_request(transport, method, url, **request_kwargs)
+
+ self._process_response(result)
+
+ if self._stream is not None:
+ self._write_to_stream(result)
+
+ return result
+
+
+class RawDownload(_request_helpers.RawRequestsMixin, _download.Download):
+ """Helper to manage downloading a raw resource from a Google API.
+
+ "Slices" of the resource can be retrieved by specifying a range
+ with ``start`` and / or ``end``. However, in typical usage, neither
+ ``start`` nor ``end`` is expected to be provided.
+
+ Args:
+ media_url (str): The URL containing the media to be downloaded.
+ stream (IO[bytes]): A write-able stream (i.e. file-like object) that
+ the downloaded resource can be written to.
+ start (int): The first byte in a range to be downloaded. If not
+ provided, but ``end`` is provided, will download from the
+ beginning to ``end`` of the media.
+ end (int): The last byte in a range to be downloaded. If not
+ provided, but ``start`` is provided, will download from the
+ ``start`` to the end of the media.
+ headers (Optional[Mapping[str, str]]): Extra headers that should
+ be sent with the request, e.g. headers for encrypted data.
+ checksum (Optional[str]): The type of checksum to compute to verify
+ the integrity of the object. The response headers must contain
+ a checksum of the requested type. If the headers lack an
+ appropriate checksum (for instance in the case of transcoded or
+ ranged downloads where the remote service does not know the
+ correct checksum) an INFO-level log will be emitted. Supported
+ values are "md5", "crc32c" and None. The default is "md5".
+
+ Attributes:
+ media_url (str): The URL containing the media to be downloaded.
+ start (Optional[int]): The first byte in a range to be downloaded.
+ end (Optional[int]): The last byte in a range to be downloaded.
+ """
+
+ def _write_to_stream(self, response):
+ """Write response body to a write-able stream.
+
+ .. note:
+
+ This method assumes that the ``_stream`` attribute is set on the
+ current download.
+
+ Args:
+ response (~requests.Response): The HTTP response object.
+
+ Raises:
+ ~google.resumable_media.common.DataCorruption: If the download's
+ checksum doesn't agree with server-computed checksum.
+ """
+
+ # `_get_expected_checksum()` may return None even if a checksum was
+ # requested, in which case it will emit an info log _MISSING_CHECKSUM.
+ # If an invalid checksum type is specified, this will raise ValueError.
+ expected_checksum, checksum_object = _helpers._get_expected_checksum(
+ response, self._get_headers, self.media_url, checksum_type=self.checksum
+ )
+
+ with response:
+ body_iter = response.raw.stream(
+ _request_helpers._SINGLE_GET_CHUNK_SIZE, decode_content=False
+ )
+ for chunk in body_iter:
+ self._stream.write(chunk)
+ checksum_object.update(chunk)
+ response._content_consumed = True
+
+ if expected_checksum is None:
+ return
+ else:
+ actual_checksum = _helpers.prepare_checksum_digest(checksum_object.digest())
+
+ if actual_checksum != expected_checksum:
+ msg = _CHECKSUM_MISMATCH.format(
+ self.media_url,
+ expected_checksum,
+ actual_checksum,
+ checksum_type=self.checksum.upper(),
+ )
+ raise common.DataCorruption(response, msg)
+
+ def consume(
+ self,
+ transport,
+ timeout=(
+ _request_helpers._DEFAULT_CONNECT_TIMEOUT,
+ _request_helpers._DEFAULT_READ_TIMEOUT,
+ ),
+ ):
+ """Consume the resource to be downloaded.
+
+ If a ``stream`` is attached to this download, then the downloaded
+ resource will be written to the stream.
+
+ Args:
+ transport (~requests.Session): A ``requests`` object which can
+ make authenticated requests.
+ timeout (Optional[Union[float, Tuple[float, float]]]):
+ The number of seconds to wait for the server response.
+ Depending on the retry strategy, a request may be repeated
+ several times using the same timeout each time.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ Returns:
+ ~requests.Response: The HTTP response returned by ``transport``.
+
+ Raises:
+ ~google.resumable_media.common.DataCorruption: If the download's
+ checksum doesn't agree with server-computed checksum.
+ ValueError: If the current :class:`Download` has already
+ finished.
+ """
+ method, url, payload, headers = self._prepare_request()
+ # NOTE: We assume "payload is None" but pass it along anyway.
+ result = _request_helpers.http_request(
+ transport,
+ method,
+ url,
+ data=payload,
+ headers=headers,
+ retry_strategy=self._retry_strategy,
+ stream=True,
+ timeout=timeout,
+ )
+
+ self._process_response(result)
+
+ if self._stream is not None:
+ self._write_to_stream(result)
+
+ return result
+
+
+class ChunkedDownload(_request_helpers.RequestsMixin, _download.ChunkedDownload):
+ """Download a resource in chunks from a Google API.
+
+ Args:
+ media_url (str): The URL containing the media to be downloaded.
+ chunk_size (int): The number of bytes to be retrieved in each
+ request.
+ stream (IO[bytes]): A write-able stream (i.e. file-like object) that
+ will be used to concatenate chunks of the resource as they are
+ downloaded.
+ start (int): The first byte in a range to be downloaded. If not
+ provided, defaults to ``0``.
+ end (int): The last byte in a range to be downloaded. If not
+ provided, will download to the end of the media.
+ headers (Optional[Mapping[str, str]]): Extra headers that should
+ be sent with each request, e.g. data encryption key headers.
+
+ Attributes:
+ media_url (str): The URL containing the media to be downloaded.
+ start (Optional[int]): The first byte in a range to be downloaded.
+ end (Optional[int]): The last byte in a range to be downloaded.
+ chunk_size (int): The number of bytes to be retrieved in each request.
+
+ Raises:
+ ValueError: If ``start`` is negative.
+ """
+
+ def consume_next_chunk(
+ self,
+ transport,
+ timeout=(
+ _request_helpers._DEFAULT_CONNECT_TIMEOUT,
+ _request_helpers._DEFAULT_READ_TIMEOUT,
+ ),
+ ):
+ """Consume the next chunk of the resource to be downloaded.
+
+ Args:
+ transport (~requests.Session): A ``requests`` object which can
+ make authenticated requests.
+ timeout (Optional[Union[float, Tuple[float, float]]]):
+ The number of seconds to wait for the server response.
+ Depending on the retry strategy, a request may be repeated
+ several times using the same timeout each time.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ Returns:
+ ~requests.Response: The HTTP response returned by ``transport``.
+
+ Raises:
+ ValueError: If the current download has finished.
+ """
+ method, url, payload, headers = self._prepare_request()
+ # NOTE: We assume "payload is None" but pass it along anyway.
+ result = _request_helpers.http_request(
+ transport,
+ method,
+ url,
+ data=payload,
+ headers=headers,
+ retry_strategy=self._retry_strategy,
+ timeout=timeout,
+ )
+ self._process_response(result)
+ return result
+
+
+class RawChunkedDownload(_request_helpers.RawRequestsMixin, _download.ChunkedDownload):
+ """Download a raw resource in chunks from a Google API.
+
+ Args:
+ media_url (str): The URL containing the media to be downloaded.
+ chunk_size (int): The number of bytes to be retrieved in each
+ request.
+ stream (IO[bytes]): A write-able stream (i.e. file-like object) that
+ will be used to concatenate chunks of the resource as they are
+ downloaded.
+ start (int): The first byte in a range to be downloaded. If not
+ provided, defaults to ``0``.
+ end (int): The last byte in a range to be downloaded. If not
+ provided, will download to the end of the media.
+ headers (Optional[Mapping[str, str]]): Extra headers that should
+ be sent with each request, e.g. data encryption key headers.
+
+ Attributes:
+ media_url (str): The URL containing the media to be downloaded.
+ start (Optional[int]): The first byte in a range to be downloaded.
+ end (Optional[int]): The last byte in a range to be downloaded.
+ chunk_size (int): The number of bytes to be retrieved in each request.
+
+ Raises:
+ ValueError: If ``start`` is negative.
+ """
+
+ def consume_next_chunk(
+ self,
+ transport,
+ timeout=(
+ _request_helpers._DEFAULT_CONNECT_TIMEOUT,
+ _request_helpers._DEFAULT_READ_TIMEOUT,
+ ),
+ ):
+ """Consume the next chunk of the resource to be downloaded.
+
+ Args:
+ transport (~requests.Session): A ``requests`` object which can
+ make authenticated requests.
+ timeout (Optional[Union[float, Tuple[float, float]]]):
+ The number of seconds to wait for the server response.
+ Depending on the retry strategy, a request may be repeated
+ several times using the same timeout each time.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ Returns:
+ ~requests.Response: The HTTP response returned by ``transport``.
+
+ Raises:
+ ValueError: If the current download has finished.
+ """
+ method, url, payload, headers = self._prepare_request()
+ # NOTE: We assume "payload is None" but pass it along anyway.
+ result = _request_helpers.http_request(
+ transport,
+ method,
+ url,
+ data=payload,
+ headers=headers,
+ stream=True,
+ retry_strategy=self._retry_strategy,
+ timeout=timeout,
+ )
+ self._process_response(result)
+ return result
+
+
+def _add_decoder(response_raw, checksum):
+ """Patch the ``_decoder`` on a ``urllib3`` response.
+
+ This is so that we can intercept the compressed bytes before they are
+ decoded.
+
+ Only patches if the content encoding is ``gzip``.
+
+ Args:
+ response_raw (urllib3.response.HTTPResponse): The raw response for
+ an HTTP request.
+ checksum (object):
+ A checksum which will be updated with compressed bytes.
+
+ Returns:
+ object: Either the original ``checksum`` if ``_decoder`` is not
+ patched, or a ``_DoNothingHash`` if the decoder is patched, since the
+ caller will no longer need to hash the decoded bytes.
+ """
+ encoding = response_raw.headers.get(u"content-encoding", u"").lower()
+ if encoding != u"gzip":
+ return checksum
+
+ response_raw._decoder = _GzipDecoder(checksum)
+ return _helpers._DoNothingHash()
+
+
+class _GzipDecoder(urllib3.response.GzipDecoder):
+ """Custom subclass of ``urllib3`` decoder for ``gzip``-ed bytes.
+
+ Allows a checksum function to see the compressed bytes before they are
+ decoded. This way the checksum of the compressed value can be computed.
+
+ Args:
+ checksum (object):
+ A checksum which will be updated with compressed bytes.
+ """
+
+ def __init__(self, checksum):
+ super(_GzipDecoder, self).__init__()
+ self._checksum = checksum
+
+ def decompress(self, data):
+ """Decompress the bytes.
+
+ Args:
+ data (bytes): The compressed bytes to be decompressed.
+
+ Returns:
+ bytes: The decompressed bytes from ``data``.
+ """
+ self._checksum.update(data)
+ return super(_GzipDecoder, self).decompress(data)
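+
+
+# --- Editor's sketch, not upstream code: what _add_decoder buys us. When a
+# response is gzip-encoded, the checksum must see the *compressed* bytes, so
+# the patched _GzipDecoder updates it during decompression and the caller is
+# handed a do-nothing stub instead. ``response_raw`` is assumed to be a
+# ``urllib3.response.HTTPResponse`` supplied by the caller.
+def _example_checksum_selection(response_raw):
+    import hashlib
+
+    checksum = hashlib.md5()
+    # Either the original checksum (identity encoding) or a _DoNothingHash
+    # (gzip), because the patched decoder now feeds ``checksum`` internally.
+    return _add_decoder(response_raw, checksum)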
diff --git a/venv/Lib/site-packages/google/resumable_media/requests/upload.py b/venv/Lib/site-packages/google/resumable_media/requests/upload.py
new file mode 100644
index 000000000..f440d680e
--- /dev/null
+++ b/venv/Lib/site-packages/google/resumable_media/requests/upload.py
@@ -0,0 +1,536 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Support for resumable uploads.
+
+Also supported here are simple (media) uploads and multipart
+uploads that contain both metadata and a small file as payload.
+"""
+
+
+from google.resumable_media import _upload
+from google.resumable_media.requests import _request_helpers
+
+
+class SimpleUpload(_request_helpers.RequestsMixin, _upload.SimpleUpload):
+ """Upload a resource to a Google API.
+
+ A **simple** media upload sends no metadata and completes the upload
+ in a single request.
+
+ Args:
+ upload_url (str): The URL where the content will be uploaded.
+ headers (Optional[Mapping[str, str]]): Extra headers that should
+ be sent with the request, e.g. headers for encrypted data.
+
+ Attributes:
+ upload_url (str): The URL where the content will be uploaded.
+ """
+
+ def transmit(
+ self,
+ transport,
+ data,
+ content_type,
+ timeout=(
+ _request_helpers._DEFAULT_CONNECT_TIMEOUT,
+ _request_helpers._DEFAULT_READ_TIMEOUT,
+ ),
+ ):
+ """Transmit the resource to be uploaded.
+
+ Args:
+ transport (~requests.Session): A ``requests`` object which can
+ make authenticated requests.
+ data (bytes): The resource content to be uploaded.
+ content_type (str): The content type of the resource, e.g. a JPEG
+ image has content type ``image/jpeg``.
+ timeout (Optional[Union[float, Tuple[float, float]]]):
+ The number of seconds to wait for the server response.
+ Depending on the retry strategy, a request may be repeated
+ several times using the same timeout each time.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ Returns:
+ ~requests.Response: The HTTP response returned by ``transport``.
+ """
+ method, url, payload, headers = self._prepare_request(data, content_type)
+ response = _request_helpers.http_request(
+ transport,
+ method,
+ url,
+ data=payload,
+ headers=headers,
+ retry_strategy=self._retry_strategy,
+ timeout=timeout,
+ )
+ self._process_response(response)
+ return response
+
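+
+# --- Editor's sketch, not upstream code: a simple upload in one call. The
+# ``upload_url`` and authenticated ``transport`` are assumed inputs.
+def _example_simple_upload(transport, upload_url):
+    upload = SimpleUpload(upload_url)
+    # One request carries the whole payload; no metadata is sent.
+    return upload.transmit(transport, b'some bytes', u'text/plain')
+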
+
+class MultipartUpload(_request_helpers.RequestsMixin, _upload.MultipartUpload):
+ """Upload a resource with metadata to a Google API.
+
+ A **multipart** upload sends both metadata and the resource in a single
+ (multipart) request.
+
+ Args:
+ upload_url (str): The URL where the content will be uploaded.
+ headers (Optional[Mapping[str, str]]): Extra headers that should
+ be sent with the request, e.g. headers for encrypted data.
+ checksum (Optional[str]): The type of checksum to compute to verify
+ the integrity of the object. The request metadata will be amended
+ to include the computed value. Using this option will override a
+ manually-set checksum value. Supported values are "md5",
+ "crc32c" and None. The default is None.
+
+ Attributes:
+ upload_url (str): The URL where the content will be uploaded.
+ """
+
+ def transmit(
+ self,
+ transport,
+ data,
+ metadata,
+ content_type,
+ timeout=(
+ _request_helpers._DEFAULT_CONNECT_TIMEOUT,
+ _request_helpers._DEFAULT_READ_TIMEOUT,
+ ),
+ ):
+ """Transmit the resource to be uploaded.
+
+ Args:
+ transport (~requests.Session): A ``requests`` object which can
+ make authenticated requests.
+ data (bytes): The resource content to be uploaded.
+ metadata (Mapping[str, str]): The resource metadata, such as an
+ ACL list.
+ content_type (str): The content type of the resource, e.g. a JPEG
+ image has content type ``image/jpeg``.
+ timeout (Optional[Union[float, Tuple[float, float]]]):
+ The number of seconds to wait for the server response.
+ Depending on the retry strategy, a request may be repeated
+ several times using the same timeout each time.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ Returns:
+ ~requests.Response: The HTTP response returned by ``transport``.
+ """
+ method, url, payload, headers = self._prepare_request(
+ data, metadata, content_type
+ )
+ response = _request_helpers.http_request(
+ transport,
+ method,
+ url,
+ data=payload,
+ headers=headers,
+ retry_strategy=self._retry_strategy,
+ timeout=timeout,
+ )
+
+ self._process_response(response)
+ return response
+
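+
+# --- Editor's sketch, not upstream code: a multipart upload pairs a small
+# payload with its metadata in a single request; the optional ``checksum``
+# argument asks the library to amend the metadata with an md5 value.
+def _example_multipart_upload(transport, upload_url):
+    upload = MultipartUpload(upload_url, checksum=u'md5')
+    metadata = {u'name': u'some-file.txt'}
+    return upload.transmit(transport, b'some bytes', metadata, u'text/plain')
+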
+
+class ResumableUpload(_request_helpers.RequestsMixin, _upload.ResumableUpload):
+ """Initiate and fulfill a resumable upload to a Google API.
+
+ A **resumable** upload sends an initial request with the resource metadata
+ and then gets assigned an upload ID / upload URL to send bytes to.
+ Using the upload URL, the upload is then done in chunks (determined by
+ the user) until all bytes have been uploaded.
+
+ When constructing a resumable upload, only the resumable upload URL and
+ the chunk size are required:
+
+ .. testsetup:: resumable-constructor
+
+ bucket = u'bucket-foo'
+
+ .. doctest:: resumable-constructor
+
+ >>> from google.resumable_media.requests import ResumableUpload
+ >>>
+ >>> url_template = (
+ ... u'https://www.googleapis.com/upload/storage/v1/b/{bucket}/o?'
+ ... u'uploadType=resumable')
+ >>> upload_url = url_template.format(bucket=bucket)
+ >>>
+ >>> chunk_size = 3 * 1024 * 1024 # 3MB
+ >>> upload = ResumableUpload(upload_url, chunk_size)
+
+ When initiating an upload (via :meth:`initiate`), the caller is expected
+ to pass the resource being uploaded as a file-like ``stream``. If the size
+ of the resource is explicitly known, it can be passed in directly:
+
+ .. testsetup:: resumable-explicit-size
+
+ import os
+ import tempfile
+
+ import mock
+ import requests
+ from six.moves import http_client
+
+ from google.resumable_media.requests import ResumableUpload
+
+ upload_url = u'http://test.invalid'
+ chunk_size = 3 * 1024 * 1024 # 3MB
+ upload = ResumableUpload(upload_url, chunk_size)
+
+ file_desc, filename = tempfile.mkstemp()
+ os.close(file_desc)
+
+ data = b'some bytes!'
+ with open(filename, u'wb') as file_obj:
+ file_obj.write(data)
+
+ fake_response = requests.Response()
+ fake_response.status_code = int(http_client.OK)
+ fake_response._content = b''
+ resumable_url = u'http://test.invalid?upload_id=7up'
+ fake_response.headers[u'location'] = resumable_url
+
+ post_method = mock.Mock(return_value=fake_response, spec=[])
+ transport = mock.Mock(request=post_method, spec=['request'])
+
+ .. doctest:: resumable-explicit-size
+
+ >>> import os
+ >>>
+ >>> upload.total_bytes is None
+ True
+ >>>
+ >>> stream = open(filename, u'rb')
+ >>> total_bytes = os.path.getsize(filename)
+ >>> metadata = {u'name': filename}
+ >>> response = upload.initiate(
+ ... transport, stream, metadata, u'text/plain',
+ ... total_bytes=total_bytes)
+ >>> response
+ <Response [200]>
+ >>>
+ >>> upload.total_bytes == total_bytes
+ True
+
+ .. testcleanup:: resumable-explicit-size
+
+ os.remove(filename)
+
+ If the stream is in a "final" state (i.e. it won't have any more bytes
+ written to it), the total number of bytes can be determined implicitly
+ from the ``stream`` itself:
+
+ .. testsetup:: resumable-implicit-size
+
+ import io
+
+ import mock
+ import requests
+ from six.moves import http_client
+
+ from google.resumable_media.requests import ResumableUpload
+
+ upload_url = u'http://test.invalid'
+ chunk_size = 3 * 1024 * 1024 # 3MB
+ upload = ResumableUpload(upload_url, chunk_size)
+
+ fake_response = requests.Response()
+ fake_response.status_code = int(http_client.OK)
+ fake_response._content = b''
+ resumable_url = u'http://test.invalid?upload_id=7up'
+ fake_response.headers[u'location'] = resumable_url
+
+ post_method = mock.Mock(return_value=fake_response, spec=[])
+ transport = mock.Mock(request=post_method, spec=['request'])
+
+ data = b'some MOAR bytes!'
+ metadata = {u'name': u'some-file.jpg'}
+ content_type = u'image/jpeg'
+
+ .. doctest:: resumable-implicit-size
+
+ >>> stream = io.BytesIO(data)
+ >>> response = upload.initiate(
+ ... transport, stream, metadata, content_type)
+ >>>
+ >>> upload.total_bytes == len(data)
+ True
+
+ If the size of the resource is **unknown** when the upload is initiated,
+ the ``stream_final`` argument can be used. This might occur if the
+ resource is being dynamically created on the client (e.g. application
+ logs). To use this argument:
+
+ .. testsetup:: resumable-unknown-size
+
+ import io
+
+ import mock
+ import requests
+ from six.moves import http_client
+
+ from google.resumable_media.requests import ResumableUpload
+
+ upload_url = u'http://test.invalid'
+ chunk_size = 3 * 1024 * 1024 # 3MB
+ upload = ResumableUpload(upload_url, chunk_size)
+
+ fake_response = requests.Response()
+ fake_response.status_code = int(http_client.OK)
+ fake_response._content = b''
+ resumable_url = u'http://test.invalid?upload_id=7up'
+ fake_response.headers[u'location'] = resumable_url
+
+ post_method = mock.Mock(return_value=fake_response, spec=[])
+ transport = mock.Mock(request=post_method, spec=['request'])
+
+ metadata = {u'name': u'some-file.jpg'}
+ content_type = u'application/octet-stream'
+
+ stream = io.BytesIO(b'data')
+
+ .. doctest:: resumable-unknown-size
+
+ >>> response = upload.initiate(
+ ... transport, stream, metadata, content_type,
+ ... stream_final=False)
+ >>>
+ >>> upload.total_bytes is None
+ True
+
+ Args:
+ upload_url (str): The URL where the resumable upload will be initiated.
+ chunk_size (int): The size of each chunk used to upload the resource.
+ headers (Optional[Mapping[str, str]]): Extra headers that should
+ be sent with the :meth:`initiate` request, e.g. headers for
+ encrypted data. These **will not** be sent with
+ :meth:`transmit_next_chunk` or :meth:`recover` requests.
+ checksum (Optional[str]): The type of checksum to compute to verify
+ the integrity of the object. After the upload is complete, the
+ server-computed checksum of the resulting object will be checked
+ and google.resumable_media.common.DataCorruption will be raised on
+ a mismatch. The corrupted file will not be deleted from the remote
+ host automatically. Supported values are "md5", "crc32c" and None.
+ The default is None.
+
+ Attributes:
+ upload_url (str): The URL where the content will be uploaded.
+
+ Raises:
+ ValueError: If ``chunk_size`` is not a multiple of
+ :data:`.UPLOAD_CHUNK_SIZE`.
+ """
+
+ def initiate(
+ self,
+ transport,
+ stream,
+ metadata,
+ content_type,
+ total_bytes=None,
+ stream_final=True,
+ timeout=(
+ _request_helpers._DEFAULT_CONNECT_TIMEOUT,
+ _request_helpers._DEFAULT_READ_TIMEOUT,
+ ),
+ ):
+ """Initiate a resumable upload.
+
+ By default, this method assumes your ``stream`` is in a "final"
+ state ready to transmit. However, ``stream_final=False`` can be used
+ to indicate that the size of the resource is not known. This can happen
+ if bytes are being dynamically fed into ``stream``, e.g. if the stream
+ is attached to application logs.
+
+ If ``stream_final=False`` is used, :attr:`chunk_size` bytes will be
+ read from the stream every time :meth:`transmit_next_chunk` is called.
+ If one of those reads produces strictly fewer bytes than the chunk
+ size, the upload will be concluded.
+
+ Args:
+ transport (~requests.Session): A ``requests`` object which can
+ make authenticated requests.
+ stream (IO[bytes]): The stream (i.e. file-like object) that will
+ be uploaded. The stream **must** be at the beginning (i.e.
+ ``stream.tell() == 0``).
+ metadata (Mapping[str, str]): The resource metadata, such as an
+ ACL list.
+ content_type (str): The content type of the resource, e.g. a JPEG
+ image has content type ``image/jpeg``.
+ total_bytes (Optional[int]): The total number of bytes to be
+ uploaded. If specified, the upload size **will not** be
+ determined from the stream (even if ``stream_final=True``).
+ stream_final (Optional[bool]): Indicates if the ``stream`` is
+ "final" (i.e. no more bytes will be added to it). In this case
+ we determine the upload size from the size of the stream. If
+ ``total_bytes`` is passed, this argument will be ignored.
+ timeout (Optional[Union[float, Tuple[float, float]]]):
+ The number of seconds to wait for the server response.
+ Depending on the retry strategy, a request may be repeated
+ several times using the same timeout each time.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ Returns:
+ ~requests.Response: The HTTP response returned by ``transport``.
+ """
+ method, url, payload, headers = self._prepare_initiate_request(
+ stream,
+ metadata,
+ content_type,
+ total_bytes=total_bytes,
+ stream_final=stream_final,
+ )
+ response = _request_helpers.http_request(
+ transport,
+ method,
+ url,
+ data=payload,
+ headers=headers,
+ retry_strategy=self._retry_strategy,
+ timeout=timeout,
+ )
+ self._process_initiate_response(response)
+ return response
+
+ def transmit_next_chunk(
+ self,
+ transport,
+ timeout=(
+ _request_helpers._DEFAULT_CONNECT_TIMEOUT,
+ _request_helpers._DEFAULT_READ_TIMEOUT,
+ ),
+ ):
+ """Transmit the next chunk of the resource to be uploaded.
+
+ If the current upload was initiated with ``stream_final=False``,
+ this method will dynamically determine if the upload has completed.
+ The upload will be considered complete if the stream produces
+ fewer than :attr:`chunk_size` bytes when a chunk is read from it.
+
+ In the case of failure, an exception is thrown that preserves the
+ failed response:
+
+ .. testsetup:: bad-response
+
+ import io
+
+ import mock
+ import requests
+ from six.moves import http_client
+
+ from google import resumable_media
+ import google.resumable_media.requests.upload as upload_mod
+
+ transport = mock.Mock(spec=['request'])
+ fake_response = requests.Response()
+ fake_response.status_code = int(http_client.BAD_REQUEST)
+ transport.request.return_value = fake_response
+
+ upload_url = u'http://test.invalid'
+ upload = upload_mod.ResumableUpload(
+ upload_url, resumable_media.UPLOAD_CHUNK_SIZE)
+ # Fake that the upload has been initiate()-d
+ data = b'data is here'
+ upload._stream = io.BytesIO(data)
+ upload._total_bytes = len(data)
+ upload._resumable_url = u'http://test.invalid?upload_id=nope'
+
+ .. doctest:: bad-response
+ :options: +NORMALIZE_WHITESPACE
+
+ >>> error = None
+ >>> try:
+ ... upload.transmit_next_chunk(transport)
+ ... except resumable_media.InvalidResponse as caught_exc:
+ ... error = caught_exc
+ ...
+ >>> error
+ InvalidResponse('Request failed with status code', 400,
+ 'Expected one of', <HTTPStatus.OK: 200>, 308)
+ >>> error.response
+ <Response [400]>
+
+ Args:
+ transport (~requests.Session): A ``requests`` object which can
+ make authenticated requests.
+ timeout (Optional[Union[float, Tuple[float, float]]]):
+ The number of seconds to wait for the server response.
+ Depending on the retry strategy, a request may be repeated
+ several times using the same timeout each time.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
+ Returns:
+ ~requests.Response: The HTTP response returned by ``transport``.
+
+ Raises:
+ ~google.resumable_media.common.InvalidResponse: If the status
+ code is not 200 or 308.
+ ~google.resumable_media.common.DataCorruption: If this is the final
+ chunk, a checksum validation was requested, and the checksum
+ does not match or is not available.
+ """
+ method, url, payload, headers = self._prepare_request()
+ response = _request_helpers.http_request(
+ transport,
+ method,
+ url,
+ data=payload,
+ headers=headers,
+ retry_strategy=self._retry_strategy,
+ timeout=timeout,
+ )
+ self._process_response(response, len(payload))
+ return response
+
+ def recover(self, transport):
+ """Recover from a failure.
+
+ This method should be used when a :class:`ResumableUpload` is in an
+ :attr:`~ResumableUpload.invalid` state due to a request failure.
+
+ This will verify the progress with the server and make sure the
+ current upload is in a valid state before :meth:`transmit_next_chunk`
+ can be used again.
+
+ Args:
+ transport (~requests.Session): A ``requests`` object which can
+ make authenticated requests.
+
+ Returns:
+ ~requests.Response: The HTTP response returned by ``transport``.
+ """
+ method, url, payload, headers = self._prepare_recover_request()
+ # NOTE: We assume "payload is None" but pass it along anyway.
+ response = _request_helpers.http_request(
+ transport,
+ method,
+ url,
+ data=payload,
+ headers=headers,
+ retry_strategy=self._retry_strategy,
+ )
+ self._process_recover_response(response)
+ return response
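+
+
+# --- Editor's sketch, not upstream code: the usual loop once an upload has
+# been initiate()-d. ``upload`` is a ResumableUpload and ``transport`` an
+# authenticated session; recover() re-syncs progress with the server after a
+# failed request leaves the upload in an invalid state.
+def _example_drive_resumable_upload(upload, transport):
+    while not upload.finished:
+        if upload.invalid:
+            upload.recover(transport)
+        upload.transmit_next_chunk(transport)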
diff --git a/venv/Lib/site-packages/google_api_core-1.22.4-py3.8-nspkg.pth b/venv/Lib/site-packages/google_api_core-1.22.4-py3.8-nspkg.pth
new file mode 100644
index 000000000..baef7a0f4
--- /dev/null
+++ b/venv/Lib/site-packages/google_api_core-1.22.4-py3.8-nspkg.pth
@@ -0,0 +1 @@
+import sys, types, os;has_mfs = sys.version_info > (3, 5);p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('google',));importlib = has_mfs and __import__('importlib.util');has_mfs and __import__('importlib.machinery');m = has_mfs and sys.modules.setdefault('google', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('google', [os.path.dirname(p)])));m = m or sys.modules.setdefault('google', types.ModuleType('google'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p)
diff --git a/venv/Lib/site-packages/google_api_core-1.22.4.dist-info/INSTALLER b/venv/Lib/site-packages/google_api_core-1.22.4.dist-info/INSTALLER
new file mode 100644
index 000000000..a1b589e38
--- /dev/null
+++ b/venv/Lib/site-packages/google_api_core-1.22.4.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/Lib/site-packages/google_api_core-1.22.4.dist-info/LICENSE b/venv/Lib/site-packages/google_api_core-1.22.4.dist-info/LICENSE
new file mode 100644
index 000000000..a8ee855de
--- /dev/null
+++ b/venv/Lib/site-packages/google_api_core-1.22.4.dist-info/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ https://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ https://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/venv/Lib/site-packages/google_api_core-1.22.4.dist-info/METADATA b/venv/Lib/site-packages/google_api_core-1.22.4.dist-info/METADATA
new file mode 100644
index 000000000..c6af4f60c
--- /dev/null
+++ b/venv/Lib/site-packages/google_api_core-1.22.4.dist-info/METADATA
@@ -0,0 +1,62 @@
+Metadata-Version: 2.1
+Name: google-api-core
+Version: 1.22.4
+Summary: Google API client core library
+Home-page: https://github.com/googleapis/python-api-core
+Author: Google LLC
+Author-email: googleapis-packages@google.com
+License: Apache 2.0
+Platform: Posix; MacOS X; Windows
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Operating System :: OS Independent
+Classifier: Topic :: Internet
+Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*
+Requires-Dist: googleapis-common-protos (<2.0dev,>=1.6.0)
+Requires-Dist: protobuf (>=3.12.0)
+Requires-Dist: google-auth (<2.0dev,>=1.21.1)
+Requires-Dist: requests (<3.0.0dev,>=2.18.0)
+Requires-Dist: setuptools (>=34.0.0)
+Requires-Dist: six (>=1.13.0)
+Requires-Dist: pytz
+Requires-Dist: futures (>=3.2.0) ; python_version < "3.2"
+Provides-Extra: grpc
+Requires-Dist: grpcio (<2.0dev,>=1.29.0) ; extra == 'grpc'
+Provides-Extra: grpcgcp
+Requires-Dist: grpcio-gcp (>=0.2.2) ; extra == 'grpcgcp'
+Provides-Extra: grpcio-gcp
+Requires-Dist: grpcio-gcp (>=0.2.2) ; extra == 'grpcio-gcp'
+
+Core Library for Google Client Libraries
+========================================
+
+|pypi| |versions|
+
+This library is not meant to stand alone. Instead it defines
+common helpers used by all Google API clients. For more information, see the
+`documentation`_.
+
+.. |pypi| image:: https://img.shields.io/pypi/v/google-api_core.svg
+ :target: https://pypi.org/project/google-api_core/
+.. |versions| image:: https://img.shields.io/pypi/pyversions/google-api_core.svg
+ :target: https://pypi.org/project/google-api_core/
+.. _documentation: https://googleapis.dev/python/google-api-core/latest
+
+
+Supported Python Versions
+-------------------------
+Python >= 3.5
+
+Deprecated Python Versions
+--------------------------
+Python == 2.7. Python 2.7 support will be removed on January 1, 2020.
+
+
diff --git a/venv/Lib/site-packages/google_api_core-1.22.4.dist-info/RECORD b/venv/Lib/site-packages/google_api_core-1.22.4.dist-info/RECORD
new file mode 100644
index 000000000..7ff3a4cd4
--- /dev/null
+++ b/venv/Lib/site-packages/google_api_core-1.22.4.dist-info/RECORD
@@ -0,0 +1,80 @@
+google/api_core/__init__.py,sha256=-U7y29Wu9zFZkscKJkoAJcEkKycIzKU0v_PYQ-GXwWo,781
+google/api_core/__pycache__/__init__.cpython-36.pyc,,
+google/api_core/__pycache__/bidi.cpython-36.pyc,,
+google/api_core/__pycache__/client_info.cpython-36.pyc,,
+google/api_core/__pycache__/client_options.cpython-36.pyc,,
+google/api_core/__pycache__/datetime_helpers.cpython-36.pyc,,
+google/api_core/__pycache__/exceptions.cpython-36.pyc,,
+google/api_core/__pycache__/general_helpers.cpython-36.pyc,,
+google/api_core/__pycache__/grpc_helpers.cpython-36.pyc,,
+google/api_core/__pycache__/grpc_helpers_async.cpython-36.pyc,,
+google/api_core/__pycache__/iam.cpython-36.pyc,,
+google/api_core/__pycache__/operation.cpython-36.pyc,,
+google/api_core/__pycache__/operation_async.cpython-36.pyc,,
+google/api_core/__pycache__/page_iterator.cpython-36.pyc,,
+google/api_core/__pycache__/page_iterator_async.cpython-36.pyc,,
+google/api_core/__pycache__/path_template.cpython-36.pyc,,
+google/api_core/__pycache__/protobuf_helpers.cpython-36.pyc,,
+google/api_core/__pycache__/retry.cpython-36.pyc,,
+google/api_core/__pycache__/retry_async.cpython-36.pyc,,
+google/api_core/__pycache__/timeout.cpython-36.pyc,,
+google/api_core/__pycache__/version.cpython-36.pyc,,
+google/api_core/bidi.py,sha256=pH5rxa39L20XlIKHtCXymZqD06NKhm5_5KsfXurMC5g,27209
+google/api_core/client_info.py,sha256=QCwQkULNJds-i1vVIK7Y2l06FxNI8CjHV9tdFjOSHbY,3528
+google/api_core/client_options.py,sha256=vqVJXuwdgXFIAM2VaZJUXGPUIiVK2HJ-EPht3w4QqGk,4073
+google/api_core/datetime_helpers.py,sha256=KBS7qP6-7o7loccrar9_nVcOPAi-QdcAJ_0uMGDemcM,8885
+google/api_core/exceptions.py,sha256=6eMGSLe_iQUc7oGomGc1kVuHY2T-RYMrGjBAJa0IKFU,14103
+google/api_core/future/__init__.py,sha256=7sToxNNu9c_xqcpmO8dbrcSLOOxplnYOOSXjOX9QIXw,702
+google/api_core/future/__pycache__/__init__.cpython-36.pyc,,
+google/api_core/future/__pycache__/_helpers.cpython-36.pyc,,
+google/api_core/future/__pycache__/async_future.cpython-36.pyc,,
+google/api_core/future/__pycache__/base.cpython-36.pyc,,
+google/api_core/future/__pycache__/polling.cpython-36.pyc,,
+google/api_core/future/_helpers.py,sha256=jA6m2L1aqlOJA-9NdC1BDosPksZQ7FmLLYWDOrsQOPc,1248
+google/api_core/future/async_future.py,sha256=Lr9633-W4MJgI9w3GzjK2S4Nhy3y4E6WxYfIw2E1Rm0,5313
+google/api_core/future/base.py,sha256=8dVqsSNT02bSzCQ0qAmgeEdd-wXlUlhnZR4fEBpKYCo,1784
+google/api_core/future/polling.py,sha256=x0_YKL3tT0WaesPDhcV5bYGT-bQUkvwnUfgVacc3aug,6179
+google/api_core/gapic_v1/__init__.py,sha256=uVG23fJ1f8Ievfc9XOBDq9PG3neJNtWRv3ZTj1CYPSQ,1079
+google/api_core/gapic_v1/__pycache__/__init__.cpython-36.pyc,,
+google/api_core/gapic_v1/__pycache__/client_info.cpython-36.pyc,,
+google/api_core/gapic_v1/__pycache__/config.cpython-36.pyc,,
+google/api_core/gapic_v1/__pycache__/config_async.cpython-36.pyc,,
+google/api_core/gapic_v1/__pycache__/method.cpython-36.pyc,,
+google/api_core/gapic_v1/__pycache__/method_async.cpython-36.pyc,,
+google/api_core/gapic_v1/__pycache__/routing_header.cpython-36.pyc,,
+google/api_core/gapic_v1/client_info.py,sha256=wv4iYrtcqHsORB3rKfSHi5LFeVYtsvHhujKha5jfPEk,2217
+google/api_core/gapic_v1/config.py,sha256=Le3Q9Gq_curWkRuYKYcEjbmz-puWPgwOszpeg81UI2E,6019
+google/api_core/gapic_v1/config_async.py,sha256=wKtUvu3iLK97UXGITSdKV71kV7zj5Fu4haV9kJCd11g,1731
+google/api_core/gapic_v1/method.py,sha256=4gGoEOCfJRyvbhGsb2W9XgitFQA8ua8uxN3GCcmCTXY,9468
+google/api_core/gapic_v1/method_async.py,sha256=AQl2nUacg4AO2ePI7NoIjO0ETatmFcLsHELBhZ6f35I,1794
+google/api_core/gapic_v1/routing_header.py,sha256=d4D6y8rNszIocpil-JzwxdmPtfmq-TdOIwkrDi0xMX0,1938
+google/api_core/general_helpers.py,sha256=K1PJRO7ei2FPxcNg2otvGtBUq85MtADuHAFb6ESIKfo,1196
+google/api_core/grpc_helpers.py,sha256=PruU8672MfZcfXlBjo5LN76F6StMAzCkMIz5b2atOHk,16673
+google/api_core/grpc_helpers_async.py,sha256=gUfLJ6JIAJmld2eV7GuBURn1y8keO5K5Dqlyt-KIXkM,9748
+google/api_core/iam.py,sha256=Yitfudyp2tx5dOkd1oLRrYQ53mq-_-o4ZOWvCuPaU-w,14256
+google/api_core/operation.py,sha256=CEq_DtOeh2aZZWQu1-D44WqjVHGX4_UZ0W7zDCtn_zM,12111
+google/api_core/operation_async.py,sha256=CXFe9OVqt_uzgbqlgXZDO3Ab1C4KWdJTcORrn4Hx-98,7807
+google/api_core/operations_v1/__init__.py,sha256=LsuZFqJOf-TpProB4CAiz33BCWj2ddYeme6Dbn1cuTM,960
+google/api_core/operations_v1/__pycache__/__init__.cpython-36.pyc,,
+google/api_core/operations_v1/__pycache__/operations_async_client.cpython-36.pyc,,
+google/api_core/operations_v1/__pycache__/operations_client.cpython-36.pyc,,
+google/api_core/operations_v1/__pycache__/operations_client_config.cpython-36.pyc,,
+google/api_core/operations_v1/operations_async_client.py,sha256=GqfBu_bMT1jJMhzomc4fFk9liWt15soXIEN3lygaWs8,12315
+google/api_core/operations_v1/operations_client.py,sha256=AYcZdC-sak1W3Cc977tCo3NUONrziGgKTuFfppFe754,12824
+google/api_core/operations_v1/operations_client_config.py,sha256=T5bHWWHGZ2GZr2tuSXQCo1mb4JvWMaigThIs-eK_NMI,2218
+google/api_core/page_iterator.py,sha256=mZLvhZ8mXGxwExITOFe-dmhCejkYrD3Oyb49nebA9Dk,19811
+google/api_core/page_iterator_async.py,sha256=CTWcV_GflnTozu0D09ynqjsr8VYOGS5c0kF7510aryk,10066
+google/api_core/path_template.py,sha256=rBhDOnyonbodMgvY2XRHs56UJGa1HTLPJG_UXUxVYdg,6549
+google/api_core/protobuf_helpers.py,sha256=GEj2LFQspH-mDFMx666c7P8Gx9ZuIepQxPvwHLeUzDI,12140
+google/api_core/retry.py,sha256=Z5W0sWUZ5_TBqTkOrYuYi1-c_pjbT4NwJh-rv36r6uU,12561
+google/api_core/retry_async.py,sha256=BACfyYCL5pn7bcNfT5ZG4v4fEwx9avqicoBd11xIixA,10401
+google/api_core/timeout.py,sha256=tiyTh0oeq_EoCHtmwtrMEZrW2jGRvpSEo0jESfpjpKk,7411
+google/api_core/version.py,sha256=ofHktM1eSOJeb94tku8LjsJVk1Spcb984GjmxawZiBQ,598
+google_api_core-1.22.4-py3.8-nspkg.pth,sha256=xH5gTxc4UipYP3qrbP-4CCHNGBV97eBR4QqhheCvBl4,539
+google_api_core-1.22.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+google_api_core-1.22.4.dist-info/LICENSE,sha256=wj8Xz9NbPUIIxjKTeDELp0xe1uoa3ruCdyltMOOzSyk,11354
+google_api_core-1.22.4.dist-info/METADATA,sha256=P1gCA67CrB4cHwHsTwHlVlvuE_Gs8QWPtkH7F22NwKk,2271
+google_api_core-1.22.4.dist-info/RECORD,,
+google_api_core-1.22.4.dist-info/WHEEL,sha256=ADKeyaGyKF5DwBNE0sRE5pvW-bSkFMJfBuhzZ3rceP4,110
+google_api_core-1.22.4.dist-info/namespace_packages.txt,sha256=_1QvSJIhFAGfxb79D6DhB7SUw2X6T4rwnz_LLrbcD3c,7
+google_api_core-1.22.4.dist-info/top_level.txt,sha256=_1QvSJIhFAGfxb79D6DhB7SUw2X6T4rwnz_LLrbcD3c,7
diff --git a/venv/Lib/site-packages/google_api_core-1.22.4.dist-info/WHEEL b/venv/Lib/site-packages/google_api_core-1.22.4.dist-info/WHEEL
new file mode 100644
index 000000000..6d38aa060
--- /dev/null
+++ b/venv/Lib/site-packages/google_api_core-1.22.4.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.35.1)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/venv/Lib/site-packages/google_api_core-1.22.4.dist-info/namespace_packages.txt b/venv/Lib/site-packages/google_api_core-1.22.4.dist-info/namespace_packages.txt
new file mode 100644
index 000000000..cb429113e
--- /dev/null
+++ b/venv/Lib/site-packages/google_api_core-1.22.4.dist-info/namespace_packages.txt
@@ -0,0 +1 @@
+google
diff --git a/venv/Lib/site-packages/google_api_core-1.22.4.dist-info/top_level.txt b/venv/Lib/site-packages/google_api_core-1.22.4.dist-info/top_level.txt
new file mode 100644
index 000000000..cb429113e
--- /dev/null
+++ b/venv/Lib/site-packages/google_api_core-1.22.4.dist-info/top_level.txt
@@ -0,0 +1 @@
+google
diff --git a/venv/Lib/site-packages/google_api_python_client-1.12.3.dist-info/INSTALLER b/venv/Lib/site-packages/google_api_python_client-1.12.3.dist-info/INSTALLER
new file mode 100644
index 000000000..a1b589e38
--- /dev/null
+++ b/venv/Lib/site-packages/google_api_python_client-1.12.3.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/Lib/site-packages/google_api_python_client-1.12.3.dist-info/LICENSE b/venv/Lib/site-packages/google_api_python_client-1.12.3.dist-info/LICENSE
new file mode 100644
index 000000000..2987b3b95
--- /dev/null
+++ b/venv/Lib/site-packages/google_api_python_client-1.12.3.dist-info/LICENSE
@@ -0,0 +1,22 @@
+ Copyright 2014 Google Inc. All Rights Reserved.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+Dependent Modules
+=================
+
+This code has the following dependencies
+above and beyond the Python standard library:
+
+uritemplates - Apache License 2.0
+httplib2 - MIT License
diff --git a/venv/Lib/site-packages/google_api_python_client-1.12.3.dist-info/METADATA b/venv/Lib/site-packages/google_api_python_client-1.12.3.dist-info/METADATA
new file mode 100644
index 000000000..82bfcc33a
--- /dev/null
+++ b/venv/Lib/site-packages/google_api_python_client-1.12.3.dist-info/METADATA
@@ -0,0 +1,103 @@
+Metadata-Version: 2.1
+Name: google-api-python-client
+Version: 1.12.3
+Summary: Google API Client Library for Python
+Home-page: https://github.com/googleapis/google-api-python-client/
+Author: Google LLC
+Author-email: googleapis-packages@google.com
+License: Apache 2.0
+Keywords: google api client
+Platform: UNKNOWN
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Operating System :: OS Independent
+Classifier: Topic :: Internet :: WWW/HTTP
+Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*
+Description-Content-Type: text/markdown
+Requires-Dist: httplib2 (<1dev,>=0.15.0)
+Requires-Dist: google-auth (>=1.16.0)
+Requires-Dist: google-auth-httplib2 (>=0.0.3)
+Requires-Dist: google-api-core (<2dev,>=1.21.0)
+Requires-Dist: six (<2dev,>=1.13.0)
+Requires-Dist: uritemplate (<4dev,>=3.0.0)
+
+# Google API Client
+
+[![PyPI version](https://badge.fury.io/py/google-api-python-client.svg)](https://badge.fury.io/py/google-api-python-client)
+
+This is the Python client library for Google's discovery based APIs. To get started, please see the [docs folder](docs/README.md).
+
+These client libraries are officially supported by Google. However, the libraries are considered complete and are in maintenance mode. This means that we will address critical bugs and security issues but will not add any new features.
+
+## Documentation
+
+See the [docs folder](docs/README.md) for more detailed instructions and additional documentation.
+
+## Other Google API libraries
+
+For Google Cloud Platform APIs such as Datastore, Cloud Storage or Pub/Sub, we recommend using [Cloud Client Libraries for Python](https://github.com/GoogleCloudPlatform/google-cloud-python).
+
+For Google Ads API, we recommend using [Google Ads API Client Library for Python](https://github.com/googleads/google-ads-python/).
+
+For Google Firebase Admin API, we recommend using [Firebase Admin Python SDK](https://github.com/firebase/firebase-admin-python).
+
+## Installation
+
+Install this library in a [virtualenv](https://virtualenv.pypa.io/en/latest/) using pip. virtualenv is a tool to
+create isolated Python environments. The basic problem it addresses is one of
+dependencies and versions, and indirectly permissions.
+
+With virtualenv, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+### Mac/Linux
+
+```
+pip install virtualenv
+virtualenv <your-env>
+source <your-env>/bin/activate
+<your-env>/bin/pip install google-api-python-client
+```
+
+### Windows
+
+```
+pip install virtualenv
+virtualenv <your-env>
+<your-env>\Scripts\activate
+<your-env>\Scripts\pip.exe install google-api-python-client
+```
+
+## Supported Python Versions
+
+Python 3.5, 3.6, 3.7, and 3.8 are fully supported and tested. This library may work on later versions of Python 3, but we do not currently run tests against those versions.
+
+## Deprecated Python Versions
+
+Python == 2.7
+
+## Third Party Libraries and Dependencies
+
+The following libraries will be installed when you install the client library:
+* [httplib2](https://github.com/httplib2/httplib2)
+* [uritemplate](https://github.com/sigmavirus24/uritemplate)
+
+For development you will also need the following libraries:
+* [WebTest](http://webtest.pythonpaste.org/en/latest/index.html)
+* [pyopenssl](https://pypi.python.org/pypi/pyOpenSSL)
+
+## Contributing
+
+Please see our [Contribution Guide](CONTRIBUTING.rst).
+In particular, we love pull requests - but please make sure to sign
+the contributor license agreement.
+
+
diff --git a/venv/Lib/site-packages/google_api_python_client-1.12.3.dist-info/RECORD b/venv/Lib/site-packages/google_api_python_client-1.12.3.dist-info/RECORD
new file mode 100644
index 000000000..d93a1df84
--- /dev/null
+++ b/venv/Lib/site-packages/google_api_python_client-1.12.3.dist-info/RECORD
@@ -0,0 +1,38 @@
+apiclient/__init__.py,sha256=qOtjyK2_tgEZijr5ooTnulTuDJCJgFujbhj1cd-qpLM,935
+apiclient/__pycache__/__init__.cpython-36.pyc,,
+google_api_python_client-1.12.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+google_api_python_client-1.12.3.dist-info/LICENSE,sha256=PQ49F4Z1BG23H8qaDuYeWay052oV-TTkCOdd-5N1iiQ,767
+google_api_python_client-1.12.3.dist-info/METADATA,sha256=4y4PAYWklwupOdr4V0T0Go7VWeazxIXPTocs1IZDaCI,3901
+google_api_python_client-1.12.3.dist-info/RECORD,,
+google_api_python_client-1.12.3.dist-info/WHEEL,sha256=ADKeyaGyKF5DwBNE0sRE5pvW-bSkFMJfBuhzZ3rceP4,110
+google_api_python_client-1.12.3.dist-info/top_level.txt,sha256=lbWgdDwQ3oww0ub0pmZ3fmIYAOTHf18Two4U9CxRlnw,58
+googleapiclient/__init__.py,sha256=kFWxKShJalbnrCuV0klL7mjZaiXfPpjlqmyKyI8yfTU,904
+googleapiclient/__pycache__/__init__.cpython-36.pyc,,
+googleapiclient/__pycache__/_auth.cpython-36.pyc,,
+googleapiclient/__pycache__/_helpers.cpython-36.pyc,,
+googleapiclient/__pycache__/channel.cpython-36.pyc,,
+googleapiclient/__pycache__/discovery.cpython-36.pyc,,
+googleapiclient/__pycache__/errors.cpython-36.pyc,,
+googleapiclient/__pycache__/http.cpython-36.pyc,,
+googleapiclient/__pycache__/mimeparse.cpython-36.pyc,,
+googleapiclient/__pycache__/model.cpython-36.pyc,,
+googleapiclient/__pycache__/sample_tools.cpython-36.pyc,,
+googleapiclient/__pycache__/schema.cpython-36.pyc,,
+googleapiclient/_auth.py,sha256=7RLtEh5QtqgP1GXkmH7qSLrxIUdm-4ie93GGinQZmyU,5679
+googleapiclient/_helpers.py,sha256=bTwuQ1sJNPZayyWoZcxt_2X_hrOcYxKaVawFzEfOMPU,6780
+googleapiclient/channel.py,sha256=Gf3pUHcfLpmXTsTJ61SdvpRPFgzQ0ifb4MAtyJEEK5g,10858
+googleapiclient/discovery.py,sha256=CngUDTQNW5vcipmMu2yyHV1PMTO3og2gyJwILmwzfaA,59078
+googleapiclient/discovery_cache/__init__.py,sha256=KPDtHfDZFWKIGMjHlu7n7zmTQl489n7J27_m7V9B__g,1449
+googleapiclient/discovery_cache/__pycache__/__init__.cpython-36.pyc,,
+googleapiclient/discovery_cache/__pycache__/appengine_memcache.cpython-36.pyc,,
+googleapiclient/discovery_cache/__pycache__/base.cpython-36.pyc,,
+googleapiclient/discovery_cache/__pycache__/file_cache.cpython-36.pyc,,
+googleapiclient/discovery_cache/appengine_memcache.py,sha256=pTD4YYEN2kjLWmoZR9SuHN2_iltdpLfO5grSiTqwfI8,1652
+googleapiclient/discovery_cache/base.py,sha256=V1UQcU2lMvAq_wybuqy-F3b94Q1j3H1kFYW_b7h3L4c,1349
+googleapiclient/discovery_cache/file_cache.py,sha256=1jCvMHcS8_PrDH9LYMWU6injErkMXKkv31bkdox4gaM,4796
+googleapiclient/errors.py,sha256=j8om5vzw8ohn3ng1NzXVGUNEo4EgOmo-aO_ueAtjw70,4939
+googleapiclient/http.py,sha256=kXQS0RRJjRA8eOrnlZeIUzGUsOUjQ-KqDher3ODVOhg,64810
+googleapiclient/mimeparse.py,sha256=V4drTRrogD6yOUJ2ljLswSF1Yz_Bn7e-4f7aIbzQ1Yw,6550
+googleapiclient/model.py,sha256=U4g8Xt7oMTCuMV7TUyaHQ_bLNqW6BNJESxib8gWyNXA,12948
+googleapiclient/sample_tools.py,sha256=6JKgn1S4fd77xwzuBaJRsh4_yYtyj2-SDL9WikyJvDk,4341
+googleapiclient/schema.py,sha256=_z8o5HsfDJ4TCkXhiMMZwLKsnGwZ5ZcIzCJhMadaOdA,10026
diff --git a/venv/Lib/site-packages/google_api_python_client-1.12.3.dist-info/WHEEL b/venv/Lib/site-packages/google_api_python_client-1.12.3.dist-info/WHEEL
new file mode 100644
index 000000000..6d38aa060
--- /dev/null
+++ b/venv/Lib/site-packages/google_api_python_client-1.12.3.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.35.1)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/venv/Lib/site-packages/google_api_python_client-1.12.3.dist-info/top_level.txt b/venv/Lib/site-packages/google_api_python_client-1.12.3.dist-info/top_level.txt
new file mode 100644
index 000000000..f907e7e17
--- /dev/null
+++ b/venv/Lib/site-packages/google_api_python_client-1.12.3.dist-info/top_level.txt
@@ -0,0 +1,3 @@
+apiclient
+googleapiclient
+googleapiclient/discovery_cache
diff --git a/venv/Lib/site-packages/google_auth-1.22.1-py3.8-nspkg.pth b/venv/Lib/site-packages/google_auth-1.22.1-py3.8-nspkg.pth
new file mode 100644
index 000000000..baef7a0f4
--- /dev/null
+++ b/venv/Lib/site-packages/google_auth-1.22.1-py3.8-nspkg.pth
@@ -0,0 +1 @@
+import sys, types, os;has_mfs = sys.version_info > (3, 5);p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('google',));importlib = has_mfs and __import__('importlib.util');has_mfs and __import__('importlib.machinery');m = has_mfs and sys.modules.setdefault('google', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('google', [os.path.dirname(p)])));m = m or sys.modules.setdefault('google', types.ModuleType('google'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p)
diff --git a/venv/Lib/site-packages/google_auth-1.22.1.dist-info/INSTALLER b/venv/Lib/site-packages/google_auth-1.22.1.dist-info/INSTALLER
new file mode 100644
index 000000000..a1b589e38
--- /dev/null
+++ b/venv/Lib/site-packages/google_auth-1.22.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/Lib/site-packages/google_auth-1.22.1.dist-info/LICENSE b/venv/Lib/site-packages/google_auth-1.22.1.dist-info/LICENSE
new file mode 100644
index 000000000..261eeb9e9
--- /dev/null
+++ b/venv/Lib/site-packages/google_auth-1.22.1.dist-info/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/venv/Lib/site-packages/google_auth-1.22.1.dist-info/METADATA b/venv/Lib/site-packages/google_auth-1.22.1.dist-info/METADATA
new file mode 100644
index 000000000..4de7e6aaa
--- /dev/null
+++ b/venv/Lib/site-packages/google_auth-1.22.1.dist-info/METADATA
@@ -0,0 +1,100 @@
+Metadata-Version: 2.1
+Name: google-auth
+Version: 1.22.1
+Summary: Google Authentication Library
+Home-page: https://github.com/googleapis/google-auth-library-python
+Author: Google Cloud Platform
+Author-email: googleapis-packages@google.com
+License: Apache 2.0
+Keywords: google auth oauth client
+Platform: UNKNOWN
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Operating System :: POSIX
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Operating System :: OS Independent
+Classifier: Topic :: Internet :: WWW/HTTP
+Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*
+Requires-Dist: cachetools (<5.0,>=2.0.0)
+Requires-Dist: pyasn1-modules (>=0.2.1)
+Requires-Dist: setuptools (>=40.3.0)
+Requires-Dist: six (>=1.9.0)
+Requires-Dist: rsa (<4.6) ; python_version < "3.5"
+Requires-Dist: rsa (<5,>=3.1.4) ; python_version >= "3.5"
+Provides-Extra: aiohttp
+Requires-Dist: aiohttp (<4.0.0dev,>=3.6.2) ; (python_version >= "3.6") and extra == 'aiohttp'
+
+Google Auth Python Library
+==========================
+
+|pypi|
+
+This library simplifies using Google's various server-to-server authentication
+mechanisms to access Google APIs.
+
+.. |pypi| image:: https://img.shields.io/pypi/v/google-auth.svg
+ :target: https://pypi.python.org/pypi/google-auth
+
+Installing
+----------
+
+You can install using `pip`_::
+
+ $ pip install google-auth
+
+.. _pip: https://pip.pypa.io/en/stable/
+
+For more information on setting up your Python development environment, please refer to `Python Development Environment Setup Guide`_ for Google Cloud Platform.
+
+.. _`Python Development Environment Setup Guide`: https://cloud.google.com/python/setup
+
+Supported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^
+Python >= 3.5
+
+Deprecated Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+Python == 2.7. Python 2.7 support will be removed on January 1, 2020.
+
+Documentation
+-------------
+
+Google Auth Python Library has usage and reference documentation at https://googleapis.dev/python/google-auth/latest/index.html.
+
+Current Maintainers
+-------------------
+- `@busunkim96 <https://github.com/busunkim96>`_ (Bu Sun Kim)
+
+Authors
+-------
+
+- `@theacodes <https://github.com/theacodes>`_ (Thea Flowers)
+- `@dhermes <https://github.com/dhermes>`_ (Danny Hermes)
+- `@lukesneeringer <https://github.com/lukesneeringer>`_ (Luke Sneeringer)
+
+Contributing
+------------
+
+Contributions to this library are always welcome and highly encouraged.
+
+See `CONTRIBUTING.rst`_ for more information on how to get started.
+
+.. _CONTRIBUTING.rst: https://github.com/googleapis/google-auth-library-python/blob/master/CONTRIBUTING.rst
+
+License
+-------
+
+Apache 2.0 - See `the LICENSE`_ for more information.
+
+.. _the LICENSE: https://github.com/googleapis/google-auth-library-python/blob/master/LICENSE
+
+
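The metadata above only hints at usage, so here is a minimal sketch of the two common ways to obtain credentials with ``google-auth``; the key-file path and scope below are illustrative placeholders, not values taken from this repository::

    import google.auth
    from google.oauth2 import service_account

    # Explicit service-account credentials from a key file (placeholder path).
    credentials = service_account.Credentials.from_service_account_file(
        'service-account.json',
        scopes=['https://www.googleapis.com/auth/cloud-platform'])

    # Or Application Default Credentials resolved from the environment
    # (GOOGLE_APPLICATION_CREDENTIALS, gcloud config, or the GCE metadata
    # server); this also returns the detected project ID.
    default_credentials, project_id = google.auth.default()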
diff --git a/venv/Lib/site-packages/google_auth-1.22.1.dist-info/RECORD b/venv/Lib/site-packages/google_auth-1.22.1.dist-info/RECORD
new file mode 100644
index 000000000..6205b7dac
--- /dev/null
+++ b/venv/Lib/site-packages/google_auth-1.22.1.dist-info/RECORD
@@ -0,0 +1,94 @@
+google/auth/__init__.py,sha256=O7d3aroTbHQEWHVjBRcdcUfWBPcmgOQEEB37Me-2CmU,884
+google/auth/__pycache__/__init__.cpython-36.pyc,,
+google/auth/__pycache__/_cloud_sdk.cpython-36.pyc,,
+google/auth/__pycache__/_credentials_async.cpython-36.pyc,,
+google/auth/__pycache__/_default.cpython-36.pyc,,
+google/auth/__pycache__/_default_async.cpython-36.pyc,,
+google/auth/__pycache__/_helpers.cpython-36.pyc,,
+google/auth/__pycache__/_jwt_async.cpython-36.pyc,,
+google/auth/__pycache__/_oauth2client.cpython-36.pyc,,
+google/auth/__pycache__/_service_account_info.cpython-36.pyc,,
+google/auth/__pycache__/app_engine.cpython-36.pyc,,
+google/auth/__pycache__/credentials.cpython-36.pyc,,
+google/auth/__pycache__/environment_vars.cpython-36.pyc,,
+google/auth/__pycache__/exceptions.cpython-36.pyc,,
+google/auth/__pycache__/iam.cpython-36.pyc,,
+google/auth/__pycache__/impersonated_credentials.cpython-36.pyc,,
+google/auth/__pycache__/jwt.cpython-36.pyc,,
+google/auth/_cloud_sdk.py,sha256=iAkj8tteiEs-fTH21RKNWw5H2cnfls_6OJK92_o2_DQ,4892
+google/auth/_credentials_async.py,sha256=YfF4VPD7zMCciRxHIT91VofYH-A6whU2d4ih8LdEeuY,6841
+google/auth/_default.py,sha256=WnkmlCylo1wYlWVT6P4h7iaaQSI_-zWxoUXkQZqTxxs,13720
+google/auth/_default_async.py,sha256=ZCtkHLCINwABke-uqojKkSt7PKRF-bTFVx0FZPnJnEg,10461
+google/auth/_helpers.py,sha256=5uJpJiw8zbyvXRf_LTrZ2Ruxx1THRBNTaotgEMp8gIk,6643
+google/auth/_jwt_async.py,sha256=Rf33IpeatY-kdhBAW5QQZiz8FRte2doZ8nsMgGoObHM,6014
+google/auth/_oauth2client.py,sha256=NuTWfwMelOp3EKzXPu79P1qkVVb31eeASNZvBFIhvq4,5847
+google/auth/_service_account_info.py,sha256=YAYIQUy8YfPN3_zd0lJ_ak3RSxnoFCRgkqnoRTCIs1U,2358
+google/auth/app_engine.py,sha256=rbNpkVQqxxCU5tLJX1gTChRg_iwLb4iKWMgSOotebnw,5551
+google/auth/compute_engine/__init__.py,sha256=bv5BKOb55ai4fGii1AGJI_SHrFQ2y4mdc06-iS1IBV8,804
+google/auth/compute_engine/__pycache__/__init__.cpython-36.pyc,,
+google/auth/compute_engine/__pycache__/_metadata.cpython-36.pyc,,
+google/auth/compute_engine/__pycache__/credentials.cpython-36.pyc,,
+google/auth/compute_engine/_metadata.py,sha256=R2LS5Z4Eplx54y63B-aLqAIMA3lLZE0tDlEsocU4uJY,8840
+google/auth/compute_engine/credentials.py,sha256=FfvhWdOxcMlrnb_NmCRvpRm5VaWV_RNals26qhLdYsk,14914
+google/auth/credentials.py,sha256=0D37t8SIW1JaxpcbqZmVEWHcIUO8ltlUf_w-aPFtRwo,12539
+google/auth/crypt/__init__.py,sha256=w5UwuYwD2OtPwzo9z_9pl1q_ePyuqI1DKarClqVqi7M,3340
+google/auth/crypt/__pycache__/__init__.cpython-36.pyc,,
+google/auth/crypt/__pycache__/_cryptography_rsa.cpython-36.pyc,,
+google/auth/crypt/__pycache__/_helpers.cpython-36.pyc,,
+google/auth/crypt/__pycache__/_python_rsa.cpython-36.pyc,,
+google/auth/crypt/__pycache__/base.cpython-36.pyc,,
+google/auth/crypt/__pycache__/es256.cpython-36.pyc,,
+google/auth/crypt/__pycache__/rsa.cpython-36.pyc,,
+google/auth/crypt/_cryptography_rsa.py,sha256=TxekhUL5CELSxLkYa6ububkQq1d_1HRpE6gGUalW2zs,4988
+google/auth/crypt/_helpers.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+google/auth/crypt/_python_rsa.py,sha256=8ZCFK3-sI3PDJ97uSUZvLc823FchLVcp7fcAbAbTSgc,5940
+google/auth/crypt/base.py,sha256=I7h2p_DY-dhiVZsv81nvBB6k6ElBco4Xxtfw5kTtra8,4203
+google/auth/crypt/es256.py,sha256=hDswFBXNb2Zp1p1t299VFz2prLBQ2iBbG21lTZ7G0Io,5658
+google/auth/crypt/rsa.py,sha256=gCFMLy7amlfVaJVlJSEwAJGEdJ8C9yJp1xW9pPs0mmo,1077
+google/auth/environment_vars.py,sha256=RHQZF9CL6jmRj4DDaRUFSYbFk-ei0f3EBs1qDWjszkU,2354
+google/auth/exceptions.py,sha256=SmEsVwaE3fNj2AY0MwjDcnlqAWcS4JbMNtoc-ePsN4w,1489
+google/auth/iam.py,sha256=P1FinUEw5VeqnuQbhdpZJwTL3NoWJ_cF6Z_EjBfG7z4,3653
+google/auth/impersonated_credentials.py,sha256=znhx36Zc2q3FYob_ozquW5XMP_b0M2Wf0lAfy5rL9-M,14121
+google/auth/jwt.py,sha256=oJYlFeLqwLAt2L8ohcRQvZ58L6ML22PTw4-76e1hd_M,29339
+google/auth/transport/__init__.py,sha256=eD8Iua8Sjc5TBFzBqrCP4s_9PC9IouWiIRdqDlz2AXM,3438
+google/auth/transport/__pycache__/__init__.cpython-36.pyc,,
+google/auth/transport/__pycache__/_aiohttp_requests.cpython-36.pyc,,
+google/auth/transport/__pycache__/_http_client.cpython-36.pyc,,
+google/auth/transport/__pycache__/_mtls_helper.cpython-36.pyc,,
+google/auth/transport/__pycache__/grpc.cpython-36.pyc,,
+google/auth/transport/__pycache__/mtls.cpython-36.pyc,,
+google/auth/transport/__pycache__/requests.cpython-36.pyc,,
+google/auth/transport/__pycache__/urllib3.cpython-36.pyc,,
+google/auth/transport/_aiohttp_requests.py,sha256=HEvYDCFvURJ1DXpE-oAokXfLbZgFUM3Z49z1IfiSpwg,14061
+google/auth/transport/_http_client.py,sha256=yy-cYYr87NWbMnRkIrS0wXvtyVkCa8G1zz23utqubPQ,3739
+google/auth/transport/_mtls_helper.py,sha256=w5SuZvU1jy4M7UEoW72nXx5HZGluiJ3Fye2p0KWH92g,8932
+google/auth/transport/grpc.py,sha256=DyFEheOE63rB__-J5MBg7jrBIPkgQwkaVijqkI2t2vA,13218
+google/auth/transport/mtls.py,sha256=c6-0hJZ5xZA9lm_hy921UH30q07etKSh2hMan9YO5oI,3817
+google/auth/transport/requests.py,sha256=guL4UpxH5MeJeSBcQcF_Xr4CAzAZ4pYWKxr3N4-R2u0,19227
+google/auth/transport/urllib3.py,sha256=mFqcT8IynvLMXFF3cN_HAzbMIkegEpiVfWr6wNcOhn0,15046
+google/oauth2/__init__.py,sha256=iDyTpxuh864rLf4YyINgeO4wMJLQ53EUMPJpiw1GA3U,619
+google/oauth2/__pycache__/__init__.cpython-36.pyc,,
+google/oauth2/__pycache__/_client.cpython-36.pyc,,
+google/oauth2/__pycache__/_client_async.cpython-36.pyc,,
+google/oauth2/__pycache__/_credentials_async.cpython-36.pyc,,
+google/oauth2/__pycache__/_id_token_async.cpython-36.pyc,,
+google/oauth2/__pycache__/_service_account_async.cpython-36.pyc,,
+google/oauth2/__pycache__/credentials.cpython-36.pyc,,
+google/oauth2/__pycache__/id_token.cpython-36.pyc,,
+google/oauth2/__pycache__/service_account.cpython-36.pyc,,
+google/oauth2/_client.py,sha256=BYAC95jAoFH8dVOZq-ykBn-DovV68ITs0wgGsQmWrrw,8774
+google/oauth2/_client_async.py,sha256=MR8McmVSretAL9TXTDYsT_lleXw9RJjQMK7vKgitjB8,8919
+google/oauth2/_credentials_async.py,sha256=ia5PQNk1pr_oHHIxh6KvzBpGRJ5Q94m8eZwMEQeY8Sg,4088
+google/oauth2/_id_token_async.py,sha256=pG8iZgnF6CX0opISiFgAG6yAIHOhQELcRPyIyPfGBn8,9319
+google/oauth2/_service_account_async.py,sha256=iua-EwEywwWfagYaRrSZ7rFccHt8IL_ICSWG3kGny3E,5132
+google/oauth2/credentials.py,sha256=QvL3SvewjaNtY5OmR0PaJqOU9x1DVS8kGLozx2fzi0c,14396
+google/oauth2/id_token.py,sha256=ajEka52lOVBVcR1noUtJHHogFG_zMe__JgAc_a2wzCU,9237
+google/oauth2/service_account.py,sha256=1KBlujfI47GiSBkjxcGyUO96vlQnH1pgStsfH9qtQnk,22368
+google_auth-1.22.1-py3.8-nspkg.pth,sha256=xH5gTxc4UipYP3qrbP-4CCHNGBV97eBR4QqhheCvBl4,539
+google_auth-1.22.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+google_auth-1.22.1.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+google_auth-1.22.1.dist-info/METADATA,sha256=WJ2uI4fO3_1cI8IdS6wYc23nv76Xi9puN2A5Gzf0-WY,3265
+google_auth-1.22.1.dist-info/RECORD,,
+google_auth-1.22.1.dist-info/WHEEL,sha256=ADKeyaGyKF5DwBNE0sRE5pvW-bSkFMJfBuhzZ3rceP4,110
+google_auth-1.22.1.dist-info/namespace_packages.txt,sha256=_1QvSJIhFAGfxb79D6DhB7SUw2X6T4rwnz_LLrbcD3c,7
+google_auth-1.22.1.dist-info/top_level.txt,sha256=_1QvSJIhFAGfxb79D6DhB7SUw2X6T4rwnz_LLrbcD3c,7
diff --git a/venv/Lib/site-packages/google_auth-1.22.1.dist-info/WHEEL b/venv/Lib/site-packages/google_auth-1.22.1.dist-info/WHEEL
new file mode 100644
index 000000000..6d38aa060
--- /dev/null
+++ b/venv/Lib/site-packages/google_auth-1.22.1.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.35.1)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/venv/Lib/site-packages/google_auth-1.22.1.dist-info/namespace_packages.txt b/venv/Lib/site-packages/google_auth-1.22.1.dist-info/namespace_packages.txt
new file mode 100644
index 000000000..cb429113e
--- /dev/null
+++ b/venv/Lib/site-packages/google_auth-1.22.1.dist-info/namespace_packages.txt
@@ -0,0 +1 @@
+google
diff --git a/venv/Lib/site-packages/google_auth-1.22.1.dist-info/top_level.txt b/venv/Lib/site-packages/google_auth-1.22.1.dist-info/top_level.txt
new file mode 100644
index 000000000..cb429113e
--- /dev/null
+++ b/venv/Lib/site-packages/google_auth-1.22.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+google
diff --git a/venv/Lib/site-packages/google_auth_httplib2-0.0.4.dist-info/INSTALLER b/venv/Lib/site-packages/google_auth_httplib2-0.0.4.dist-info/INSTALLER
new file mode 100644
index 000000000..a1b589e38
--- /dev/null
+++ b/venv/Lib/site-packages/google_auth_httplib2-0.0.4.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/Lib/site-packages/google_auth_httplib2-0.0.4.dist-info/LICENSE b/venv/Lib/site-packages/google_auth_httplib2-0.0.4.dist-info/LICENSE
new file mode 100644
index 000000000..261eeb9e9
--- /dev/null
+++ b/venv/Lib/site-packages/google_auth_httplib2-0.0.4.dist-info/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/venv/Lib/site-packages/google_auth_httplib2-0.0.4.dist-info/METADATA b/venv/Lib/site-packages/google_auth_httplib2-0.0.4.dist-info/METADATA
new file mode 100644
index 000000000..058d0c3ca
--- /dev/null
+++ b/venv/Lib/site-packages/google_auth_httplib2-0.0.4.dist-info/METADATA
@@ -0,0 +1,63 @@
+Metadata-Version: 2.1
+Name: google-auth-httplib2
+Version: 0.0.4
+Summary: Google Authentication Library: httplib2 transport
+Home-page: https://github.com/GoogleCloudPlatform/google-auth-library-python-httplib2
+Author: Google Cloud Platform
+Author-email: jonwayne+google-auth@google.com
+License: Apache 2.0
+Keywords: google auth oauth client
+Platform: UNKNOWN
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Development Status :: 3 - Alpha
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Operating System :: POSIX
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Operating System :: OS Independent
+Classifier: Topic :: Internet :: WWW/HTTP
+Requires-Dist: google-auth
+Requires-Dist: httplib2 (>=0.9.1)
+Requires-Dist: six
+
+``httplib2`` Transport for Google Auth
+======================================
+
+|pypi|
+
+This library provides an `httplib2`_ transport for `google-auth`_.
+
+.. note:: ``httplib2`` has lots of problems such as lack of threadsafety
+ and insecure usage of TLS. Using it is highly discouraged. This
+ library is intended to help existing users of ``oauth2client`` migrate to
+ ``google-auth``.
+
+.. |pypi| image:: https://img.shields.io/pypi/v/google-auth-httplib2.svg
+ :target: https://pypi.python.org/pypi/google-auth-httplib2
+
+.. _httplib2: https://github.com/httplib2/httplib2
+.. _google-auth: https://github.com/GoogleCloudPlatform/google-auth-library-python/
+
+Installing
+----------
+
+You can install using `pip`_::
+
+ $ pip install google-auth-httplib2
+
+.. _pip: https://pip.pypa.io/en/stable/
+
+License
+-------
+
+Apache 2.0 - See `the LICENSE`_ for more information.
+
+.. _the LICENSE: https://github.com/GoogleCloudPlatform/google-auth-library-python/blob/master/LICENSE
+
+
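As a concrete illustration of the transport this package provides — a minimal sketch, assuming Application Default Credentials are configured and using a placeholder Cloud Storage URL::

    import google.auth
    import google_auth_httplib2
    import httplib2

    credentials, _ = google.auth.default()

    # AuthorizedHttp signs each request with the credentials and retries
    # after refreshing them when the response status indicates expiry.
    authed_http = google_auth_httplib2.AuthorizedHttp(
        credentials, http=httplib2.Http())
    response, content = authed_http.request(
        'https://www.googleapis.com/storage/v1/b?project=example-project')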
diff --git a/venv/Lib/site-packages/google_auth_httplib2-0.0.4.dist-info/RECORD b/venv/Lib/site-packages/google_auth_httplib2-0.0.4.dist-info/RECORD
new file mode 100644
index 000000000..f7a410327
--- /dev/null
+++ b/venv/Lib/site-packages/google_auth_httplib2-0.0.4.dist-info/RECORD
@@ -0,0 +1,8 @@
+__pycache__/google_auth_httplib2.cpython-36.pyc,,
+google_auth_httplib2-0.0.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+google_auth_httplib2-0.0.4.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+google_auth_httplib2-0.0.4.dist-info/METADATA,sha256=O9R15inoHDTAoFR-4cd9x4kV_4V-mDcNaabzT7KeII4,2095
+google_auth_httplib2-0.0.4.dist-info/RECORD,,
+google_auth_httplib2-0.0.4.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110
+google_auth_httplib2-0.0.4.dist-info/top_level.txt,sha256=xQr4X91CsNWr1mw3rrOH8mKnYLOW_Uhr5U7moYxkq4E,21
+google_auth_httplib2.py,sha256=KJwYQ7q0lN2qwlMhopZnYO5gA3H6LKAwSPm7nhD5Itg,9391
diff --git a/venv/Lib/site-packages/google_auth_httplib2-0.0.4.dist-info/WHEEL b/venv/Lib/site-packages/google_auth_httplib2-0.0.4.dist-info/WHEEL
new file mode 100644
index 000000000..ef99c6cf3
--- /dev/null
+++ b/venv/Lib/site-packages/google_auth_httplib2-0.0.4.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.34.2)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/venv/Lib/site-packages/google_auth_httplib2-0.0.4.dist-info/top_level.txt b/venv/Lib/site-packages/google_auth_httplib2-0.0.4.dist-info/top_level.txt
new file mode 100644
index 000000000..f8b63c266
--- /dev/null
+++ b/venv/Lib/site-packages/google_auth_httplib2-0.0.4.dist-info/top_level.txt
@@ -0,0 +1 @@
+google_auth_httplib2
diff --git a/venv/Lib/site-packages/google_auth_httplib2.py b/venv/Lib/site-packages/google_auth_httplib2.py
new file mode 100644
index 000000000..d481c1b89
--- /dev/null
+++ b/venv/Lib/site-packages/google_auth_httplib2.py
@@ -0,0 +1,262 @@
+# Copyright 2016 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Transport adapter for httplib2."""
+
+from __future__ import absolute_import
+
+import logging
+
+from google.auth import exceptions
+from google.auth import transport
+import httplib2
+from six.moves import http_client
+
+
+_LOGGER = logging.getLogger(__name__)
+# Properties present in file-like streams / buffers.
+_STREAM_PROPERTIES = ('read', 'seek', 'tell')
+
+
+class _Response(transport.Response):
+ """httplib2 transport response adapter.
+
+ Args:
+ response (httplib2.Response): The raw httplib2 response.
+ data (bytes): The response body.
+ """
+ def __init__(self, response, data):
+ self._response = response
+ self._data = data
+
+ @property
+ def status(self):
+ """int: The HTTP status code."""
+ return self._response.status
+
+ @property
+ def headers(self):
+ """Mapping[str, str]: The HTTP response headers."""
+ return dict(self._response)
+
+ @property
+ def data(self):
+ """bytes: The response body."""
+ return self._data
+
+
+class Request(transport.Request):
+ """httplib2 request adapter.
+
+ This class is used internally for making requests using various transports
+ in a consistent way. If you use :class:`AuthorizedHttp` you do not need
+ to construct or use this class directly.
+
+ This class can be useful if you want to manually refresh a
+ :class:`~google.auth.credentials.Credentials` instance::
+
+ import google_auth_httplib2
+ import httplib2
+
+ http = httplib2.Http()
+ request = google_auth_httplib2.Request(http)
+
+ credentials.refresh(request)
+
+ Args:
+ http (httplib2.Http): The underlying http object to use to make
+ requests.
+
+ .. automethod:: __call__
+ """
+ def __init__(self, http):
+ self.http = http
+
+ def __call__(self, url, method='GET', body=None, headers=None,
+ timeout=None, **kwargs):
+ """Make an HTTP request using httplib2.
+
+ Args:
+ url (str): The URI to be requested.
+ method (str): The HTTP method to use for the request. Defaults
+ to 'GET'.
+ body (bytes): The payload / body in HTTP request.
+ headers (Mapping[str, str]): Request headers.
+ timeout (Optional[int]): The number of seconds to wait for a
+ response from the server. This is ignored by httplib2 and will
+ issue a warning.
+            kwargs: Additional arguments passed through to the underlying
+ :meth:`httplib2.Http.request` method.
+
+ Returns:
+ google.auth.transport.Response: The HTTP response.
+
+ Raises:
+ google.auth.exceptions.TransportError: If any exception occurred.
+ """
+ if timeout is not None:
+ _LOGGER.warning(
+ 'httplib2 transport does not support per-request timeout. '
+ 'Set the timeout when constructing the httplib2.Http instance.'
+ )
+
+ try:
+ _LOGGER.debug('Making request: %s %s', method, url)
+ response, data = self.http.request(
+ url, method=method, body=body, headers=headers, **kwargs)
+ return _Response(response, data)
+ # httplib2 should catch the lower http error, this is a bug and
+ # needs to be fixed there. Catch the error for the meanwhile.
+ except (httplib2.HttpLib2Error, http_client.HTTPException) as exc:
+ raise exceptions.TransportError(exc)
+
+
+def _make_default_http():
+ """Returns a default httplib2.Http instance."""
+ return httplib2.Http()
+
+
+class AuthorizedHttp(object):
+ """A httplib2 HTTP class with credentials.
+
+ This class is used to perform requests to API endpoints that require
+ authorization::
+
+        from google_auth_httplib2 import AuthorizedHttp
+
+ authed_http = AuthorizedHttp(credentials)
+
+ response = authed_http.request(
+ 'https://www.googleapis.com/storage/v1/b')
+
+ This class implements :meth:`request` in the same way as
+ :class:`httplib2.Http` and can usually be used just like any other
+    instance of :class:`httplib2.Http`.
+
+ The underlying :meth:`request` implementation handles adding the
+ credentials' headers to the request and refreshing credentials as needed.
+ """
+ def __init__(self, credentials, http=None,
+ refresh_status_codes=transport.DEFAULT_REFRESH_STATUS_CODES,
+ max_refresh_attempts=transport.DEFAULT_MAX_REFRESH_ATTEMPTS):
+ """
+ Args:
+ credentials (google.auth.credentials.Credentials): The credentials
+ to add to the request.
+ http (httplib2.Http): The underlying HTTP object to
+ use to make requests. If not specified, a
+ :class:`httplib2.Http` instance will be constructed.
+ refresh_status_codes (Sequence[int]): Which HTTP status codes
+ indicate that credentials should be refreshed and the request
+ should be retried.
+ max_refresh_attempts (int): The maximum number of times to attempt
+ to refresh the credentials and retry the request.
+ """
+
+ if http is None:
+ http = _make_default_http()
+
+ self.http = http
+ self.credentials = credentials
+ self._refresh_status_codes = refresh_status_codes
+ self._max_refresh_attempts = max_refresh_attempts
+ # Request instance used by internal methods (for example,
+ # credentials.refresh).
+ self._request = Request(self.http)
+
+ def request(self, uri, method='GET', body=None, headers=None,
+ **kwargs):
+ """Implementation of httplib2's Http.request."""
+
+ _credential_refresh_attempt = kwargs.pop(
+ '_credential_refresh_attempt', 0)
+
+ # Make a copy of the headers. They will be modified by the credentials
+ # and we want to pass the original headers if we recurse.
+ request_headers = headers.copy() if headers is not None else {}
+
+ self.credentials.before_request(
+ self._request, method, uri, request_headers)
+
+ # Check if the body is a file-like stream, and if so, save the body
+ # stream position so that it can be restored in case of refresh.
+ body_stream_position = None
+ if all(getattr(body, stream_prop, None) for stream_prop in
+ _STREAM_PROPERTIES):
+ body_stream_position = body.tell()
+
+ # Make the request.
+ response, content = self.http.request(
+ uri, method, body=body, headers=request_headers, **kwargs)
+
+ # If the response indicated that the credentials needed to be
+ # refreshed, then refresh the credentials and re-attempt the
+ # request.
+ # A stored token may expire between the time it is retrieved and
+ # the time the request is made, so we may need to try twice.
+ if (response.status in self._refresh_status_codes
+ and _credential_refresh_attempt < self._max_refresh_attempts):
+
+ _LOGGER.info(
+ 'Refreshing credentials due to a %s response. Attempt %s/%s.',
+ response.status, _credential_refresh_attempt + 1,
+ self._max_refresh_attempts)
+
+ self.credentials.refresh(self._request)
+
+ # Restore the body's stream position if needed.
+ if body_stream_position is not None:
+ body.seek(body_stream_position)
+
+ # Recurse. Pass in the original headers, not our modified set.
+ return self.request(
+ uri, method, body=body, headers=headers,
+ _credential_refresh_attempt=_credential_refresh_attempt + 1,
+ **kwargs)
+
+ return response, content
+
+ def add_certificate(self, key, cert, domain, password=None):
+ """Proxy to httplib2.Http.add_certificate."""
+ self.http.add_certificate(key, cert, domain, password=password)
+
+ @property
+ def connections(self):
+ """Proxy to httplib2.Http.connections."""
+ return self.http.connections
+
+ @connections.setter
+ def connections(self, value):
+ """Proxy to httplib2.Http.connections."""
+ self.http.connections = value
+
+ @property
+ def follow_redirects(self):
+ """Proxy to httplib2.Http.follow_redirects."""
+ return self.http.follow_redirects
+
+ @follow_redirects.setter
+ def follow_redirects(self, value):
+ """Proxy to httplib2.Http.follow_redirects."""
+ self.http.follow_redirects = value
+
+ @property
+ def timeout(self):
+ """Proxy to httplib2.Http.timeout."""
+ return self.http.timeout
+
+ @timeout.setter
+ def timeout(self, value):
+ """Proxy to httplib2.Http.timeout."""
+ self.http.timeout = value
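Two details of the module above are worth calling out. First, ``Request`` is the adapter you hand to ``credentials.refresh`` when refreshing manually — a minimal sketch, again assuming default credentials are available in the environment::

    import google.auth
    import google_auth_httplib2
    import httplib2

    credentials, _ = google.auth.default()
    request = google_auth_httplib2.Request(httplib2.Http())

    # Force a token refresh; afterwards credentials.token is populated.
    credentials.refresh(request)

Second, the stream-position bookkeeping in ``AuthorizedHttp.request`` exists so that a file-like request body can be rewound and replayed when the first attempt fails with an expired token; without the ``seek`` back to the saved position, the retried request would send an empty body.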
diff --git a/venv/Lib/site-packages/google_cloud_core-1.4.3-py3.8-nspkg.pth b/venv/Lib/site-packages/google_cloud_core-1.4.3-py3.8-nspkg.pth
new file mode 100644
index 000000000..2bf911880
--- /dev/null
+++ b/venv/Lib/site-packages/google_cloud_core-1.4.3-py3.8-nspkg.pth
@@ -0,0 +1,3 @@
+import sys, types, os;has_mfs = sys.version_info > (3, 5);p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('google',));importlib = has_mfs and __import__('importlib.util');has_mfs and __import__('importlib.machinery');m = has_mfs and sys.modules.setdefault('google', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('google', [os.path.dirname(p)])));m = m or sys.modules.setdefault('google', types.ModuleType('google'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p)
+import sys, types, os;has_mfs = sys.version_info > (3, 5);p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('google',));importlib = has_mfs and __import__('importlib.util');has_mfs and __import__('importlib.machinery');m = has_mfs and sys.modules.setdefault('google', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('google', [os.path.dirname(p)])));m = m or sys.modules.setdefault('google', types.ModuleType('google'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p)
+import sys, types, os;has_mfs = sys.version_info > (3, 5);p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('google', 'cloud'));importlib = has_mfs and __import__('importlib.util');has_mfs and __import__('importlib.machinery');m = has_mfs and sys.modules.setdefault('google.cloud', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('google.cloud', [os.path.dirname(p)])));m = m or sys.modules.setdefault('google.cloud', types.ModuleType('google.cloud'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p);m and setattr(sys.modules['google'], 'cloud', m)
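Each line of this ``.pth`` file runs at interpreter start-up and registers ``google`` (and ``google.cloud``) as namespace packages, so independently installed distributions such as ``google-auth`` and ``google-cloud-core`` can all contribute modules under the shared ``google`` prefix. The effect is easy to inspect (assuming the packages above are installed)::

    import google
    import google.cloud

    # Each contributing distribution adds one entry to the namespace path.
    print(google.__path__)
    print(google.cloud.__path__)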
diff --git a/venv/Lib/site-packages/google_cloud_core-1.4.3.dist-info/INSTALLER b/venv/Lib/site-packages/google_cloud_core-1.4.3.dist-info/INSTALLER
new file mode 100644
index 000000000..a1b589e38
--- /dev/null
+++ b/venv/Lib/site-packages/google_cloud_core-1.4.3.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/Lib/site-packages/google_cloud_core-1.4.3.dist-info/LICENSE b/venv/Lib/site-packages/google_cloud_core-1.4.3.dist-info/LICENSE
new file mode 100644
index 000000000..a8ee855de
--- /dev/null
+++ b/venv/Lib/site-packages/google_cloud_core-1.4.3.dist-info/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ https://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ https://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/venv/Lib/site-packages/google_cloud_core-1.4.3.dist-info/METADATA b/venv/Lib/site-packages/google_cloud_core-1.4.3.dist-info/METADATA
new file mode 100644
index 000000000..d20516925
--- /dev/null
+++ b/venv/Lib/site-packages/google_cloud_core-1.4.3.dist-info/METADATA
@@ -0,0 +1,68 @@
+Metadata-Version: 2.1
+Name: google-cloud-core
+Version: 1.4.3
+Summary: Google Cloud API client core library
+Home-page: https://github.com/googleapis/python-cloud-core
+Author: Google LLC
+Author-email: googleapis-packages@google.com
+License: Apache 2.0
+Platform: Posix; MacOS X; Windows
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Operating System :: OS Independent
+Classifier: Topic :: Internet
+Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*
+Requires-Dist: google-api-core (<2.0.0dev,>=1.19.0)
+Provides-Extra: grpc
+Requires-Dist: grpcio (<2.0dev,>=1.8.2) ; extra == 'grpc'
+
+Core Helpers for Google Cloud Python Client Library
+===================================================
+
+|pypi| |versions|
+
+This library is not meant to stand alone. Instead it defines
+common helpers (e.g. base ``Client`` classes) used by all of the
+``google-cloud-*`` packages.
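+
+As a rough illustration (ours, not part of the upstream description), a
+downstream package can build on the shared base class; the
+``MyServiceClient`` name is hypothetical:
+
+.. code-block:: python
+
+ from google.cloud.client import ClientWithProject
+
+ class MyServiceClient(ClientWithProject):
+     """Hypothetical client reusing the common base ``Client``."""
+
+     # OAuth2 scopes requested when ad-hoc credentials are created.
+     SCOPE = ("https://www.googleapis.com/auth/cloud-platform",)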
+
+
+- `Documentation`_
+
+.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-core.svg
+ :target: https://pypi.org/project/google-cloud-core/
+.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-core.svg
+ :target: https://pypi.org/project/google-cloud-core/
+.. _Documentation: https://googleapis.dev/python/google-cloud-core/latest
+
+Quick Start
+-----------
+
+.. code-block:: console
+
+ $ pip install --upgrade google-cloud-core
+
+For more information on setting up your Python development environment,
+such as installing ``pip`` and ``virtualenv`` on your system, please refer
+to `Python Development Environment Setup Guide`_ for Google Cloud Platform.
+
+.. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup
+
+
+Supported Python Versions
+-------------------------
+Python >= 3.5
+
+Deprecated Python Versions
+--------------------------
+Python == 2.7. Python 2.7 support will be removed on January 1, 2020.
+
+
diff --git a/venv/Lib/site-packages/google_cloud_core-1.4.3.dist-info/RECORD b/venv/Lib/site-packages/google_cloud_core-1.4.3.dist-info/RECORD
new file mode 100644
index 000000000..45e7ae5b6
--- /dev/null
+++ b/venv/Lib/site-packages/google_cloud_core-1.4.3.dist-info/RECORD
@@ -0,0 +1,26 @@
+google/cloud/__pycache__/_helpers.cpython-36.pyc,,
+google/cloud/__pycache__/_http.cpython-36.pyc,,
+google/cloud/__pycache__/_testing.cpython-36.pyc,,
+google/cloud/__pycache__/client.cpython-36.pyc,,
+google/cloud/__pycache__/environment_vars.cpython-36.pyc,,
+google/cloud/__pycache__/exceptions.cpython-36.pyc,,
+google/cloud/__pycache__/obsolete.cpython-36.pyc,,
+google/cloud/__pycache__/operation.cpython-36.pyc,,
+google/cloud/__pycache__/version.cpython-36.pyc,,
+google/cloud/_helpers.py,sha256=VbSTEB9ulWK68UxzSz5VKrnc2LHyDjjDVCPnaRfEBkk,19919
+google/cloud/_http.py,sha256=DES_MDhQYsvKphUVJFztPbgbARfKiTQe3lSYKfYp1_c,14322
+google/cloud/_testing.py,sha256=eyi3qivuEykA2AUw9SnWfqe5b2oAwu9B1VvTu2Xbdo4,3439
+google/cloud/client.py,sha256=H9HXV7yhX4gqyh4Djlli1iIDD2bUxuDdmAqLS70htYI,9815
+google/cloud/environment_vars.py,sha256=lCe2EyQjeJO2ImbrSR23Cj2XpwGKFFy1xCZuQo2DHEo,1318
+google/cloud/exceptions.py,sha256=pyCO5lyFO_99DTwUsJqolO0iPOoFDhKG_ebTIfsoI6M,2199
+google/cloud/obsolete.py,sha256=zF9bBwD1Y2X1yvl9TlalLoqM6C78rvCxv2i2igoA-Jg,1382
+google/cloud/operation.py,sha256=I_uwALTnNzF4FDzXX8z4uakS71dsZG5jWHGAHQgmTec,9041
+google/cloud/version.py,sha256=4uDCZTBI3XKst32E4deTbjYzFM3E-4PuI1G9CrsDyFg,597
+google_cloud_core-1.4.3-py3.8-nspkg.pth,sha256=b0D5dZk3RUzK54tZ9iZDvLm7u8ltc5EzYrGCmhsuoNw,1698
+google_cloud_core-1.4.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+google_cloud_core-1.4.3.dist-info/LICENSE,sha256=wj8Xz9NbPUIIxjKTeDELp0xe1uoa3ruCdyltMOOzSyk,11354
+google_cloud_core-1.4.3.dist-info/METADATA,sha256=44tWek2Rf3RUvSOE2ZoaK0DjnKNd5udqZwAh1OayJYw,2345
+google_cloud_core-1.4.3.dist-info/RECORD,,
+google_cloud_core-1.4.3.dist-info/WHEEL,sha256=ADKeyaGyKF5DwBNE0sRE5pvW-bSkFMJfBuhzZ3rceP4,110
+google_cloud_core-1.4.3.dist-info/namespace_packages.txt,sha256=v8IaYqRE2a0onAGJIpZeFkkH83wXSWZRR9eOyfMwoTc,20
+google_cloud_core-1.4.3.dist-info/top_level.txt,sha256=_1QvSJIhFAGfxb79D6DhB7SUw2X6T4rwnz_LLrbcD3c,7
diff --git a/venv/Lib/site-packages/google_cloud_core-1.4.3.dist-info/WHEEL b/venv/Lib/site-packages/google_cloud_core-1.4.3.dist-info/WHEEL
new file mode 100644
index 000000000..6d38aa060
--- /dev/null
+++ b/venv/Lib/site-packages/google_cloud_core-1.4.3.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.35.1)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/venv/Lib/site-packages/google_cloud_core-1.4.3.dist-info/namespace_packages.txt b/venv/Lib/site-packages/google_cloud_core-1.4.3.dist-info/namespace_packages.txt
new file mode 100644
index 000000000..5a9c12bb2
--- /dev/null
+++ b/venv/Lib/site-packages/google_cloud_core-1.4.3.dist-info/namespace_packages.txt
@@ -0,0 +1,2 @@
+google
+google.cloud
diff --git a/venv/Lib/site-packages/google_cloud_core-1.4.3.dist-info/top_level.txt b/venv/Lib/site-packages/google_cloud_core-1.4.3.dist-info/top_level.txt
new file mode 100644
index 000000000..cb429113e
--- /dev/null
+++ b/venv/Lib/site-packages/google_cloud_core-1.4.3.dist-info/top_level.txt
@@ -0,0 +1 @@
+google
diff --git a/venv/Lib/site-packages/google_cloud_firestore-1.9.0-py3.8-nspkg.pth b/venv/Lib/site-packages/google_cloud_firestore-1.9.0-py3.8-nspkg.pth
new file mode 100644
index 000000000..2bf911880
--- /dev/null
+++ b/venv/Lib/site-packages/google_cloud_firestore-1.9.0-py3.8-nspkg.pth
@@ -0,0 +1,3 @@
+import sys, types, os;has_mfs = sys.version_info > (3, 5);p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('google',));importlib = has_mfs and __import__('importlib.util');has_mfs and __import__('importlib.machinery');m = has_mfs and sys.modules.setdefault('google', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('google', [os.path.dirname(p)])));m = m or sys.modules.setdefault('google', types.ModuleType('google'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p)
+import sys, types, os;has_mfs = sys.version_info > (3, 5);p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('google',));importlib = has_mfs and __import__('importlib.util');has_mfs and __import__('importlib.machinery');m = has_mfs and sys.modules.setdefault('google', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('google', [os.path.dirname(p)])));m = m or sys.modules.setdefault('google', types.ModuleType('google'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p)
+import sys, types, os;has_mfs = sys.version_info > (3, 5);p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('google', 'cloud'));importlib = has_mfs and __import__('importlib.util');has_mfs and __import__('importlib.machinery');m = has_mfs and sys.modules.setdefault('google.cloud', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('google.cloud', [os.path.dirname(p)])));m = m or sys.modules.setdefault('google.cloud', types.ModuleType('google.cloud'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p);m and setattr(sys.modules['google'], 'cloud', m)
diff --git a/venv/Lib/site-packages/google_cloud_firestore-1.9.0.dist-info/INSTALLER b/venv/Lib/site-packages/google_cloud_firestore-1.9.0.dist-info/INSTALLER
new file mode 100644
index 000000000..a1b589e38
--- /dev/null
+++ b/venv/Lib/site-packages/google_cloud_firestore-1.9.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/Lib/site-packages/google_cloud_firestore-1.9.0.dist-info/LICENSE b/venv/Lib/site-packages/google_cloud_firestore-1.9.0.dist-info/LICENSE
new file mode 100644
index 000000000..a8ee855de
--- /dev/null
+++ b/venv/Lib/site-packages/google_cloud_firestore-1.9.0.dist-info/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ https://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ https://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/venv/Lib/site-packages/google_cloud_firestore-1.9.0.dist-info/METADATA b/venv/Lib/site-packages/google_cloud_firestore-1.9.0.dist-info/METADATA
new file mode 100644
index 000000000..a20d1bf12
--- /dev/null
+++ b/venv/Lib/site-packages/google_cloud_firestore-1.9.0.dist-info/METADATA
@@ -0,0 +1,143 @@
+Metadata-Version: 2.1
+Name: google-cloud-firestore
+Version: 1.9.0
+Summary: Google Cloud Firestore API client library
+Home-page: https://github.com/googleapis/python-firestore
+Author: Google LLC
+Author-email: googleapis-packages@google.com
+License: Apache 2.0
+Platform: Posix; MacOS X; Windows
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Operating System :: OS Independent
+Classifier: Topic :: Internet
+Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*
+Requires-Dist: google-api-core[grpc] (<2.0.0dev,>=1.14.0)
+Requires-Dist: google-cloud-core (<2.0dev,>=1.4.1)
+Requires-Dist: pytz
+
+Python Client for Google Cloud Firestore
+========================================
+
+|GA| |pypi| |versions|
+
+The `Google Cloud Firestore`_ API is a flexible, scalable
+database for mobile, web, and server development from Firebase and Google
+Cloud Platform. Like Firebase Realtime Database, it keeps your data in
+sync across client apps through realtime listeners and offers offline support
+for mobile and web so you can build responsive apps that work regardless of
+network latency or Internet connectivity. Cloud Firestore also offers seamless
+integration with other Firebase and Google Cloud Platform products,
+including Cloud Functions.
+
+- `Product Documentation`_
+- `Client Library Documentation`_
+
+.. |GA| image:: https://img.shields.io/badge/support-GA-gold.svg
+ :target: https://github.com/googleapis/google-cloud-python/blob/master/README.rst#general-availability
+.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-firestore.svg
+ :target: https://pypi.org/project/google-cloud-firestore/
+.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-firestore.svg
+.. _Google Cloud Firestore: https://cloud.google.com/firestore/
+.. _Product Documentation: https://cloud.google.com/firestore/docs/
+.. _Client Library Documentation: https://googleapis.dev/python/firestore/latest
+
+Quick Start
+-----------
+
+In order to use this library, you first need to go through the following steps:
+
+1. `Select or create a Cloud Platform project.`_
+2. `Enable billing for your project.`_
+3. `Enable the Google Cloud Firestore API.`_
+4. `Setup Authentication.`_
+
+.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
+.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
+.. _Enable the Google Cloud Firestore API.: https://cloud.google.com/firestore
+.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
+
+Installation
+~~~~~~~~~~~~
+
+Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
+create isolated Python environments. The basic problem it addresses is one of
+dependencies and versions, and indirectly permissions.
+
+With `virtualenv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
+
+
+Supported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^
+Python >= 3.5
+
+Deprecated Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+Python == 2.7. Python 2.7 support will be removed on January 1, 2020.
+
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+ pip install virtualenv
+ virtualenv <your-env>
+ source <your-env>/bin/activate
+ <your-env>/bin/pip install google-cloud-firestore
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+ pip install virtualenv
+ virtualenv <your-env>
+ <your-env>\Scripts\activate
+ <your-env>\Scripts\pip.exe install google-cloud-firestore
+
+
+Example Usage
+~~~~~~~~~~~~~
+
+.. code:: python
+
+ from google.cloud import firestore
+
+ # Add a new document
+ db = firestore.Client()
+ doc_ref = db.collection(u'users').document(u'alovelace')
+ doc_ref.set({
+     u'first': u'Ada',
+     u'last': u'Lovelace',
+     u'born': 1815
+ })
+
+ # Then query for documents
+ users_ref = db.collection(u'users')
+
+ for doc in users_ref.stream():
+     print(u'{} => {}'.format(doc.id, doc.to_dict()))
+
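+A filtered read follows the same pattern; this extra sketch is ours (not
+from the upstream METADATA) and uses the 1.x ``where()`` API:
+
+.. code:: python
+
+ # ``db`` is the firestore.Client() created above
+ born_before_1900 = db.collection(u'users').where(u'born', u'<', 1900)
+
+ for doc in born_before_1900.stream():
+     print(u'{} => {}'.format(doc.id, doc.to_dict()))
+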
+Next Steps
+~~~~~~~~~~
+
+- Read the `Client Library Documentation`_ for Google Cloud Firestore API
+ to see other available methods on the client.
+- Read the `Product Documentation`_ to learn
+ more about the product and see How-to Guides.
+
+
diff --git a/venv/Lib/site-packages/google_cloud_firestore-1.9.0.dist-info/RECORD b/venv/Lib/site-packages/google_cloud_firestore-1.9.0.dist-info/RECORD
new file mode 100644
index 000000000..683741703
--- /dev/null
+++ b/venv/Lib/site-packages/google_cloud_firestore-1.9.0.dist-info/RECORD
@@ -0,0 +1,208 @@
+google/cloud/__pycache__/firestore.cpython-36.pyc,,
+google/cloud/firestore.py,sha256=Z3GkxCM_A3n1HZlR99QYgEyNh7CSORIXsZfGcbXuuNQ,2322
+google/cloud/firestore_admin_v1/__init__.py,sha256=ef5ahHfzuIMBSxwsEnz2JPgffBMqc1BI30LD4CLuYAg,1422
+google/cloud/firestore_admin_v1/__pycache__/__init__.cpython-36.pyc,,
+google/cloud/firestore_admin_v1/__pycache__/types.cpython-36.pyc,,
+google/cloud/firestore_admin_v1/gapic/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+google/cloud/firestore_admin_v1/gapic/__pycache__/__init__.cpython-36.pyc,,
+google/cloud/firestore_admin_v1/gapic/__pycache__/enums.cpython-36.pyc,,
+google/cloud/firestore_admin_v1/gapic/__pycache__/firestore_admin_client.cpython-36.pyc,,
+google/cloud/firestore_admin_v1/gapic/__pycache__/firestore_admin_client_config.cpython-36.pyc,,
+google/cloud/firestore_admin_v1/gapic/enums.py,sha256=1Re5fScM8TXcbiT7HEhguIfdH65H8dMHXVLzWqgBzxg,5365
+google/cloud/firestore_admin_v1/gapic/firestore_admin_client.py,sha256=6JtNcbiTs60mQ-BPfpWQ8-X7JbpBMrlb4pEVsqozSlw,46167
+google/cloud/firestore_admin_v1/gapic/firestore_admin_client_config.py,sha256=QlXa7vgxFhrqGHMf2eRHYUP5iBmxIH0873hZymX4TkI,2686
+google/cloud/firestore_admin_v1/gapic/transports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+google/cloud/firestore_admin_v1/gapic/transports/__pycache__/__init__.cpython-36.pyc,,
+google/cloud/firestore_admin_v1/gapic/transports/__pycache__/firestore_admin_grpc_transport.cpython-36.pyc,,
+google/cloud/firestore_admin_v1/gapic/transports/firestore_admin_grpc_transport.py,sha256=QTpc4Nc0jPBocxyzBKWMbOYxcXhT0euQIJUjr1HtCxE,10411
+google/cloud/firestore_admin_v1/proto/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+google/cloud/firestore_admin_v1/proto/__pycache__/__init__.cpython-36.pyc,,
+google/cloud/firestore_admin_v1/proto/__pycache__/field_pb2.cpython-36.pyc,,
+google/cloud/firestore_admin_v1/proto/__pycache__/field_pb2_grpc.cpython-36.pyc,,
+google/cloud/firestore_admin_v1/proto/__pycache__/firestore_admin_pb2.cpython-36.pyc,,
+google/cloud/firestore_admin_v1/proto/__pycache__/firestore_admin_pb2_grpc.cpython-36.pyc,,
+google/cloud/firestore_admin_v1/proto/__pycache__/index_pb2.cpython-36.pyc,,
+google/cloud/firestore_admin_v1/proto/__pycache__/index_pb2_grpc.cpython-36.pyc,,
+google/cloud/firestore_admin_v1/proto/__pycache__/location_pb2.cpython-36.pyc,,
+google/cloud/firestore_admin_v1/proto/__pycache__/location_pb2_grpc.cpython-36.pyc,,
+google/cloud/firestore_admin_v1/proto/__pycache__/operation_pb2.cpython-36.pyc,,
+google/cloud/firestore_admin_v1/proto/__pycache__/operation_pb2_grpc.cpython-36.pyc,,
+google/cloud/firestore_admin_v1/proto/field.proto,sha256=wrnMkIyX8Dy8aHpOfFmBx8FYvrbhO1L-3pYySLsOi6A,4345
+google/cloud/firestore_admin_v1/proto/field_pb2.py,sha256=98mAUdKlG_FUOR7A9mjsfjXd0yM8lWZDL1p1kCjtr8I,11869
+google/cloud/firestore_admin_v1/proto/field_pb2_grpc.py,sha256=xYOs94SXiNYAlFodACnsXW5QovLsHY5tCk3p76RH5Zc,158
+google/cloud/firestore_admin_v1/proto/firestore_admin.proto,sha256=Mwxfc36K2m-QyZBbh0zJtRL9RsMHwE8V491KVMgK5_k,14490
+google/cloud/firestore_admin_v1/proto/firestore_admin_pb2.py,sha256=gq7p_Htbx49Xzp8ASRuIUxisQmBxifj9d4fox1qwr9s,47890
+google/cloud/firestore_admin_v1/proto/firestore_admin_pb2_grpc.py,sha256=hqBzlxwxsorj0nMyLxk6KhJrULyRWG17lmmmUZiRFa4,20842
+google/cloud/firestore_admin_v1/proto/index.proto,sha256=riDJJ3GVuxq4zV0j9JOfeMJ1rrh-Gctj6em2as1qjDc,6063
+google/cloud/firestore_admin_v1/proto/index_pb2.py,sha256=JNizkuBWVdNZAIzOs1ftoi3tMsFRJ42Wx9VnOnEfsqk,17689
+google/cloud/firestore_admin_v1/proto/index_pb2_grpc.py,sha256=xYOs94SXiNYAlFodACnsXW5QovLsHY5tCk3p76RH5Zc,158
+google/cloud/firestore_admin_v1/proto/location.proto,sha256=N-7nWVWEWfYR2d-jFxzTjhlhkyx-hfZd8XUpJyXW2ls,1308
+google/cloud/firestore_admin_v1/proto/location_pb2.py,sha256=AjxOkORTjeEGTSxFzGnii-w4vH-YtTQkZdA-mJuUqs4,2973
+google/cloud/firestore_admin_v1/proto/location_pb2_grpc.py,sha256=xYOs94SXiNYAlFodACnsXW5QovLsHY5tCk3p76RH5Zc,158
+google/cloud/firestore_admin_v1/proto/operation.proto,sha256=wAB09rBuy3qtiIVTdihRF4mm1qcUM2KPOJ9TN7Po-ig,6783
+google/cloud/firestore_admin_v1/proto/operation_pb2.py,sha256=5aqDeQrf-IWbmZGMP23N4ZdCtuAoU1oi8blKiSybDh8,43346
+google/cloud/firestore_admin_v1/proto/operation_pb2_grpc.py,sha256=xYOs94SXiNYAlFodACnsXW5QovLsHY5tCk3p76RH5Zc,158
+google/cloud/firestore_admin_v1/types.py,sha256=C84M1COr_X_4CejED2K5Iq4EZWPiqyC-PMxo-3h6JD8,1978
+google/cloud/firestore_v1/__init__.py,sha256=BtCkBwCqsUHlIEX6qENhJth4BoWyK_lF06LZljZL_H0,2587
+google/cloud/firestore_v1/__pycache__/__init__.cpython-36.pyc,,
+google/cloud/firestore_v1/__pycache__/_helpers.cpython-36.pyc,,
+google/cloud/firestore_v1/__pycache__/batch.cpython-36.pyc,,
+google/cloud/firestore_v1/__pycache__/client.cpython-36.pyc,,
+google/cloud/firestore_v1/__pycache__/collection.cpython-36.pyc,,
+google/cloud/firestore_v1/__pycache__/document.cpython-36.pyc,,
+google/cloud/firestore_v1/__pycache__/field_path.cpython-36.pyc,,
+google/cloud/firestore_v1/__pycache__/order.cpython-36.pyc,,
+google/cloud/firestore_v1/__pycache__/query.cpython-36.pyc,,
+google/cloud/firestore_v1/__pycache__/transaction.cpython-36.pyc,,
+google/cloud/firestore_v1/__pycache__/transforms.cpython-36.pyc,,
+google/cloud/firestore_v1/__pycache__/types.cpython-36.pyc,,
+google/cloud/firestore_v1/__pycache__/watch.cpython-36.pyc,,
+google/cloud/firestore_v1/_helpers.py,sha256=ijriPIAT2behHcvGkUwyqID2RkSGrVN-NOOSSB2x62A,36613
+google/cloud/firestore_v1/batch.py,sha256=RUBzCAvo2vQOAIXhePclVxrrm8Wzf8Tj_tf2zRu7AL8,6296
+google/cloud/firestore_v1/client.py,sha256=3mvs2G-8zra7HDjpqc2uLpAAOCCVSGr5D2AddJki3Ks,22934
+google/cloud/firestore_v1/collection.py,sha256=4TROoVhBi969EPiXrtdkyDhCIJUIIHSRH070jyPPrQI,18620
+google/cloud/firestore_v1/document.py,sha256=SYRmdz9XFQrCe1250ggH8d5DZL8lsGwhdwo9FQP41zQ,26549
+google/cloud/firestore_v1/field_path.py,sha256=U-JngESnZPrr8gwvJVTk1bzbjQeIKd7Q_eodimWWYi0,12444
+google/cloud/firestore_v1/gapic/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+google/cloud/firestore_v1/gapic/__pycache__/__init__.cpython-36.pyc,,
+google/cloud/firestore_v1/gapic/__pycache__/enums.cpython-36.pyc,,
+google/cloud/firestore_v1/gapic/__pycache__/firestore_client.cpython-36.pyc,,
+google/cloud/firestore_v1/gapic/__pycache__/firestore_client_config.cpython-36.pyc,,
+google/cloud/firestore_v1/gapic/enums.py,sha256=nkP1CRF4tOw1eZgqNnEeoMF9LUqobQZe0tE-c56kvEM,5854
+google/cloud/firestore_v1/gapic/firestore_client.py,sha256=SBQQK6AHvQusWKSQBt--y8deJ9ox0AFJ-LX7DJeyIF0,71854
+google/cloud/firestore_v1/gapic/firestore_client_config.py,sha256=CkqztD34U22DRZgnmMPJCemYqXsg5bYI4oEzrZJ40jQ,4409
+google/cloud/firestore_v1/gapic/transports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+google/cloud/firestore_v1/gapic/transports/__pycache__/__init__.cpython-36.pyc,,
+google/cloud/firestore_v1/gapic/transports/__pycache__/firestore_grpc_transport.cpython-36.pyc,,
+google/cloud/firestore_v1/gapic/transports/firestore_grpc_transport.py,sha256=WsKDIwlzDsMI-EuOtgNPKI1rUOce7Ml9gk_vHCf1cSE,10988
+google/cloud/firestore_v1/order.py,sha256=WnsYhFzUs2fXJ8ysIb_tJxm81luD5-NrqImLWnLP1_I,6913
+google/cloud/firestore_v1/proto/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+google/cloud/firestore_v1/proto/__pycache__/__init__.cpython-36.pyc,,
+google/cloud/firestore_v1/proto/__pycache__/common_pb2.cpython-36.pyc,,
+google/cloud/firestore_v1/proto/__pycache__/common_pb2_grpc.cpython-36.pyc,,
+google/cloud/firestore_v1/proto/__pycache__/document_pb2.cpython-36.pyc,,
+google/cloud/firestore_v1/proto/__pycache__/document_pb2_grpc.cpython-36.pyc,,
+google/cloud/firestore_v1/proto/__pycache__/firestore_pb2.cpython-36.pyc,,
+google/cloud/firestore_v1/proto/__pycache__/firestore_pb2_grpc.cpython-36.pyc,,
+google/cloud/firestore_v1/proto/__pycache__/query_pb2.cpython-36.pyc,,
+google/cloud/firestore_v1/proto/__pycache__/query_pb2_grpc.cpython-36.pyc,,
+google/cloud/firestore_v1/proto/__pycache__/test_v1_pb2.cpython-36.pyc,,
+google/cloud/firestore_v1/proto/__pycache__/tests_pb2.cpython-36.pyc,,
+google/cloud/firestore_v1/proto/__pycache__/write_pb2.cpython-36.pyc,,
+google/cloud/firestore_v1/proto/__pycache__/write_pb2_grpc.cpython-36.pyc,,
+google/cloud/firestore_v1/proto/common.proto,sha256=zuQkGVnH48KOs2Sk2nd8BseX7ipiLPIVulb5CcfDJu0,2979
+google/cloud/firestore_v1/proto/common_pb2.py,sha256=DhcCpVwaazuAfAFOorJruG0boGSh6CXTvIZRgmV3C24,16078
+google/cloud/firestore_v1/proto/common_pb2_grpc.py,sha256=xYOs94SXiNYAlFodACnsXW5QovLsHY5tCk3p76RH5Zc,158
+google/cloud/firestore_v1/proto/document.proto,sha256=5GjXk4u_KEHDEfP2cXDnS7oZ8bcmw3KZnF1F3YIXktY,5197
+google/cloud/firestore_v1/proto/document_pb2.py,sha256=Bgle8i3yAzm1tCw_RJEbqEYr7j3QCm3nw4lElZ0gT50,29293
+google/cloud/firestore_v1/proto/document_pb2_grpc.py,sha256=xYOs94SXiNYAlFodACnsXW5QovLsHY5tCk3p76RH5Zc,158
+google/cloud/firestore_v1/proto/firestore.proto,sha256=BRf93NaI0X4WnK6ShqWNTslh2NNfmULQWfznfIqvceU,33444
+google/cloud/firestore_v1/proto/firestore_pb2.py,sha256=DYF2Gb9xiETmOFSx9OwlPkWQDyjLOCEiU-1o6UZJiUo,167173
+google/cloud/firestore_v1/proto/firestore_pb2_grpc.py,sha256=Xzcg4pSYwhculjSDXZnSgmG93Mb0VHuJ1eD3uph-wmg,30778
+google/cloud/firestore_v1/proto/query.proto,sha256=QBft1gmdkTWu_GiG8w07sg5WLxjPQJrkqxBoCYWRggY,7601
+google/cloud/firestore_v1/proto/query_pb2.py,sha256=OMlGM6JTQVheAJyOp9_w2msLXmcw36w6QQtBbwvj88Q,45843
+google/cloud/firestore_v1/proto/query_pb2_grpc.py,sha256=xYOs94SXiNYAlFodACnsXW5QovLsHY5tCk3p76RH5Zc,158
+google/cloud/firestore_v1/proto/test_v1_pb2.py,sha256=LOt2-dQEYfAfinzTfssGAj7Peu-FBXhjdRJCaRZ3uuU,67229
+google/cloud/firestore_v1/proto/tests_pb2.py,sha256=qdzV_WP_pL_ilU_8MOwQn-fcW3yGODJXUWCHLRCFViQ,71826
+google/cloud/firestore_v1/proto/write.proto,sha256=NuVXA1s3WlOn37W_kGsk21LHpPt9qCeuIG1Pn9MpccE,10709
+google/cloud/firestore_v1/proto/write_pb2.py,sha256=XAeB2h1sewwSmearYPJaUZQaLKTn3PYDB4UQ50SAYB0,45035
+google/cloud/firestore_v1/proto/write_pb2_grpc.py,sha256=xYOs94SXiNYAlFodACnsXW5QovLsHY5tCk3p76RH5Zc,158
+google/cloud/firestore_v1/query.py,sha256=IPpEsZ8k3IcfoZArERhrM2XFpTFzLVhK_6zZGNBS1_M,44442
+google/cloud/firestore_v1/transaction.py,sha256=syT8CDQzBLs6YylLBMWFqVKuk_466NPmTnW4wevwYb0,15984
+google/cloud/firestore_v1/transforms.py,sha256=nkr4fly6oC0xpQnO5BjGkHPdJQiZ_YhIF7fLfUsO6nY,4661
+google/cloud/firestore_v1/types.py,sha256=BvvGHZyK9RHVGeJ1D3sD6zka1u8W-cZvtdHvf_cHGUo,1999
+google/cloud/firestore_v1/watch.py,sha256=NgAG0LWoTVEZS1adqPYfrlXV-56zthb8K8NnOZSkAFI,26179
+google/cloud/firestore_v1beta1/__init__.py,sha256=WsdTGlJMRn-hKfzM9qYJ_sL3Z1jF4wvVTglHeFoHXXs,2711
+google/cloud/firestore_v1beta1/__pycache__/__init__.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/__pycache__/_helpers.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/__pycache__/batch.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/__pycache__/client.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/__pycache__/collection.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/__pycache__/document.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/__pycache__/field_path.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/__pycache__/order.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/__pycache__/query.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/__pycache__/transaction.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/__pycache__/transforms.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/__pycache__/types.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/__pycache__/watch.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/_helpers.py,sha256=lmX1I2Ng-n2kiWYRF3x-gs8vGduZdw0EcbC3LMPLZXs,34923
+google/cloud/firestore_v1beta1/batch.py,sha256=NsrqniJBCNEDiRISCYVzkUeNGEPTDp-9TbT80WCpBSU,6234
+google/cloud/firestore_v1beta1/client.py,sha256=vgj0Qw2UyRZNnIMZ6Ipm_mZf9yY5rvOrI9IUqzn4AWA,19504
+google/cloud/firestore_v1beta1/collection.py,sha256=Glwllhyh8MT4z6zY369pyvTVSNiTCCLvNkOrBrRtMgc,17595
+google/cloud/firestore_v1beta1/document.py,sha256=ektRIpBtWEE4sGmt-64-p3_tntYYY2LgMNIiAz0_VTk,26290
+google/cloud/firestore_v1beta1/field_path.py,sha256=IxeXzcEL80eA9xyvSBuDnJCDsY7TifSk2gRLKUvGOog,12151
+google/cloud/firestore_v1beta1/gapic/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+google/cloud/firestore_v1beta1/gapic/__pycache__/__init__.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/gapic/__pycache__/enums.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/gapic/__pycache__/firestore_client.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/gapic/__pycache__/firestore_client_config.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/gapic/enums.py,sha256=WGGSh-_CRM9JIoxo8uYJOCDrLs1pr6tRciGmfPELopc,5160
+google/cloud/firestore_v1beta1/gapic/firestore_client.py,sha256=NzmHXvxqUQTJ8Jb-rFN5GybAQIDtslkPgmyX0dOxi48,62928
+google/cloud/firestore_v1beta1/gapic/firestore_client_config.py,sha256=EL1mrNkwytBbUeqAJMQ9RlguOMq6h8WvY7qDWhhwU50,3845
+google/cloud/firestore_v1beta1/gapic/transports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+google/cloud/firestore_v1beta1/gapic/transports/__pycache__/__init__.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/gapic/transports/__pycache__/firestore_grpc_transport.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/gapic/transports/firestore_grpc_transport.py,sha256=qLPLHPIkSplAr0NXAPvYArYBw3s-pzsGbqihXTQDG3I,9562
+google/cloud/firestore_v1beta1/order.py,sha256=6D5LaUxePTO1YXXMyNzo47tl_4SUiU9ZBkOofhhXt-8,6918
+google/cloud/firestore_v1beta1/proto/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+google/cloud/firestore_v1beta1/proto/__pycache__/__init__.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/proto/__pycache__/common_pb2.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/proto/__pycache__/common_pb2_grpc.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/proto/__pycache__/document_pb2.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/proto/__pycache__/document_pb2_grpc.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/proto/__pycache__/event_flow_document_change_pb2.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/proto/__pycache__/event_flow_document_change_pb2_grpc.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/proto/__pycache__/firestore_pb2.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/proto/__pycache__/firestore_pb2_grpc.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/proto/__pycache__/query_pb2.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/proto/__pycache__/query_pb2_grpc.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/proto/__pycache__/test_v1beta1_pb2.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/proto/__pycache__/write_pb2.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/proto/__pycache__/write_pb2_grpc.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/proto/admin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+google/cloud/firestore_v1beta1/proto/admin/__pycache__/__init__.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/proto/admin/__pycache__/firestore_admin_pb2.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/proto/admin/__pycache__/firestore_admin_pb2_grpc.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/proto/admin/__pycache__/index_pb2.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/proto/admin/__pycache__/index_pb2_grpc.cpython-36.pyc,,
+google/cloud/firestore_v1beta1/proto/admin/firestore_admin_pb2.py,sha256=Xs0qf0ja9ft2XkVzwttZXVeYmpUef_mhNoVCuA9LAoE,49294
+google/cloud/firestore_v1beta1/proto/admin/firestore_admin_pb2_grpc.py,sha256=bPkuJ8Y5NX3dVK3okVVABUjVw-1gBtSsu2SJPszo3lc,8830
+google/cloud/firestore_v1beta1/proto/admin/index_pb2.py,sha256=fwNu1nRggVJ8rhAL6lWcY152Kx0JY7Sqki7iE4il5T4,10054
+google/cloud/firestore_v1beta1/proto/admin/index_pb2_grpc.py,sha256=DWMIAZgxdUkWnngE5BvewClEYej5jZ2CTkiww5AhtnE,82
+google/cloud/firestore_v1beta1/proto/common.proto,sha256=7V0IFA5KpNVZMbsFPS4kSRTjKM5sH_0u4l-dHA-OxmY,3028
+google/cloud/firestore_v1beta1/proto/common_pb2.py,sha256=K5FQV_XHi_shPDekh4tQutRZyxosl9WnRz4mp_0FaPE,16294
+google/cloud/firestore_v1beta1/proto/common_pb2_grpc.py,sha256=xYOs94SXiNYAlFodACnsXW5QovLsHY5tCk3p76RH5Zc,158
+google/cloud/firestore_v1beta1/proto/document.proto,sha256=oFTfMUBsP9YLgm1-g1HYl3yccToX5o6L-eHQzH89250,5231
+google/cloud/firestore_v1beta1/proto/document_pb2.py,sha256=qA86ecQ1VzDDB4k3utlI2iKyblwMtYn8DZdoqD-zLI8,29582
+google/cloud/firestore_v1beta1/proto/document_pb2_grpc.py,sha256=xYOs94SXiNYAlFodACnsXW5QovLsHY5tCk3p76RH5Zc,158
+google/cloud/firestore_v1beta1/proto/event_flow_document_change_pb2.py,sha256=w5HovWO8YLFDt8O22XVuX-GD9VpO8O0IKEt9339cZYI,2686
+google/cloud/firestore_v1beta1/proto/event_flow_document_change_pb2_grpc.py,sha256=DWMIAZgxdUkWnngE5BvewClEYej5jZ2CTkiww5AhtnE,82
+google/cloud/firestore_v1beta1/proto/field.proto,sha256=dEfCLBLp68TrMA1iOWWI6cj6IRNO5s7I0DDCfoCLlhg,4042
+google/cloud/firestore_v1beta1/proto/firestore.proto,sha256=ilGML_v7iFDpNArPOoZ5_6zKdwkPwLUIIczWsJvO-Wg,28339
+google/cloud/firestore_v1beta1/proto/firestore_admin.proto,sha256=gAg4db0BX205Q8F4tfHzer0HgfmU5vCSzgfYn9i3_L0,13306
+google/cloud/firestore_v1beta1/proto/firestore_pb2.py,sha256=BJrc4zaGTch6ZUufNar_LJha70SzZNVxb37Ev-bNZic,146748
+google/cloud/firestore_v1beta1/proto/firestore_pb2_grpc.py,sha256=2cQM5Trp49ZQxId-KXtMKhN1YL01tb3DrctXRRLhzUE,28181
+google/cloud/firestore_v1beta1/proto/index.proto,sha256=nNusrbWFFjeLsOB-w4F8DxmyWeNdHVZ2xOUXDxXJctw,3351
+google/cloud/firestore_v1beta1/proto/location.proto,sha256=gwX-oE4W1wxcAQALIirjAhy1npfMK0SW24XnfIGqCvs,1206
+google/cloud/firestore_v1beta1/proto/operation.proto,sha256=-hIpu9zJ5G4Agl6QcHeRs5VpmIi0Bjj_fvg46JUAx4A,6715
+google/cloud/firestore_v1beta1/proto/query.proto,sha256=HVjaN2Tny7hrFofCcIgb8fW02NemDv9m-vPkj4jZaxM,6978
+google/cloud/firestore_v1beta1/proto/query_pb2.py,sha256=9i0buT4OjBLtovl8RjZfc8lchnNRZplzymzz-BabU3U,46365
+google/cloud/firestore_v1beta1/proto/query_pb2_grpc.py,sha256=xYOs94SXiNYAlFodACnsXW5QovLsHY5tCk3p76RH5Zc,158
+google/cloud/firestore_v1beta1/proto/test_v1beta1_pb2.py,sha256=H3E7j8YBHPTdGrwh_uhrngYm2KHDpnGBQWgZWnHBF2o,68287
+google/cloud/firestore_v1beta1/proto/write.proto,sha256=xU_eqI3tq4eBEWrwjoEdaTjt0PjMKqa-J2isydFhctE,10689
+google/cloud/firestore_v1beta1/proto/write_pb2.py,sha256=0mHKWioNOLBm7Bg1PbPTsXAocYLF2a7NSg1ZmWvev-Q,44730
+google/cloud/firestore_v1beta1/proto/write_pb2_grpc.py,sha256=xYOs94SXiNYAlFodACnsXW5QovLsHY5tCk3p76RH5Zc,158
+google/cloud/firestore_v1beta1/query.py,sha256=fv61cE1fd0wXb7caACxjmNhsjcIWdQTO1VZDKuaZ2jc,37737
+google/cloud/firestore_v1beta1/transaction.py,sha256=TNPEUQJKRe_mlDfBN_2VNDyklrVmWZ9lFGyqL7ECjTw,14517
+google/cloud/firestore_v1beta1/transforms.py,sha256=EVpizxDQszb2WiQizv5x-PTAfCzP1Dn6AUM--DM7y10,2762
+google/cloud/firestore_v1beta1/types.py,sha256=rlZKDKb8GetV4wCsA4gieHkOBIhQuAf7V6I2I9_2g4I,2029
+google/cloud/firestore_v1beta1/watch.py,sha256=aZuRmwa4E6H5X1AsTGyhZRyt_1nqUwEVYL8w_NjiKHA,25592
+google_cloud_firestore-1.9.0-py3.8-nspkg.pth,sha256=b0D5dZk3RUzK54tZ9iZDvLm7u8ltc5EzYrGCmhsuoNw,1698
+google_cloud_firestore-1.9.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+google_cloud_firestore-1.9.0.dist-info/LICENSE,sha256=wj8Xz9NbPUIIxjKTeDELp0xe1uoa3ruCdyltMOOzSyk,11354
+google_cloud_firestore-1.9.0.dist-info/METADATA,sha256=Ym79qqm1cEjykQMRUsSdUTMUkvi2QaCAQ4cUcab4W8A,4774
+google_cloud_firestore-1.9.0.dist-info/RECORD,,
+google_cloud_firestore-1.9.0.dist-info/WHEEL,sha256=ADKeyaGyKF5DwBNE0sRE5pvW-bSkFMJfBuhzZ3rceP4,110
+google_cloud_firestore-1.9.0.dist-info/namespace_packages.txt,sha256=v8IaYqRE2a0onAGJIpZeFkkH83wXSWZRR9eOyfMwoTc,20
+google_cloud_firestore-1.9.0.dist-info/top_level.txt,sha256=_1QvSJIhFAGfxb79D6DhB7SUw2X6T4rwnz_LLrbcD3c,7
diff --git a/venv/Lib/site-packages/google_cloud_firestore-1.9.0.dist-info/WHEEL b/venv/Lib/site-packages/google_cloud_firestore-1.9.0.dist-info/WHEEL
new file mode 100644
index 000000000..6d38aa060
--- /dev/null
+++ b/venv/Lib/site-packages/google_cloud_firestore-1.9.0.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.35.1)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/venv/Lib/site-packages/google_cloud_firestore-1.9.0.dist-info/namespace_packages.txt b/venv/Lib/site-packages/google_cloud_firestore-1.9.0.dist-info/namespace_packages.txt
new file mode 100644
index 000000000..5a9c12bb2
--- /dev/null
+++ b/venv/Lib/site-packages/google_cloud_firestore-1.9.0.dist-info/namespace_packages.txt
@@ -0,0 +1,2 @@
+google
+google.cloud
diff --git a/venv/Lib/site-packages/google_cloud_firestore-1.9.0.dist-info/top_level.txt b/venv/Lib/site-packages/google_cloud_firestore-1.9.0.dist-info/top_level.txt
new file mode 100644
index 000000000..cb429113e
--- /dev/null
+++ b/venv/Lib/site-packages/google_cloud_firestore-1.9.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+google
diff --git a/venv/Lib/site-packages/google_cloud_storage-1.31.2-py3.8-nspkg.pth b/venv/Lib/site-packages/google_cloud_storage-1.31.2-py3.8-nspkg.pth
new file mode 100644
index 000000000..2bf911880
--- /dev/null
+++ b/venv/Lib/site-packages/google_cloud_storage-1.31.2-py3.8-nspkg.pth
@@ -0,0 +1,3 @@
+import sys, types, os;has_mfs = sys.version_info > (3, 5);p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('google',));importlib = has_mfs and __import__('importlib.util');has_mfs and __import__('importlib.machinery');m = has_mfs and sys.modules.setdefault('google', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('google', [os.path.dirname(p)])));m = m or sys.modules.setdefault('google', types.ModuleType('google'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p)
+import sys, types, os;has_mfs = sys.version_info > (3, 5);p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('google',));importlib = has_mfs and __import__('importlib.util');has_mfs and __import__('importlib.machinery');m = has_mfs and sys.modules.setdefault('google', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('google', [os.path.dirname(p)])));m = m or sys.modules.setdefault('google', types.ModuleType('google'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p)
+import sys, types, os;has_mfs = sys.version_info > (3, 5);p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('google', 'cloud'));importlib = has_mfs and __import__('importlib.util');has_mfs and __import__('importlib.machinery');m = has_mfs and sys.modules.setdefault('google.cloud', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('google.cloud', [os.path.dirname(p)])));m = m or sys.modules.setdefault('google.cloud', types.ModuleType('google.cloud'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p);m and setattr(sys.modules['google'], 'cloud', m)
diff --git a/venv/Lib/site-packages/google_cloud_storage-1.31.2.dist-info/INSTALLER b/venv/Lib/site-packages/google_cloud_storage-1.31.2.dist-info/INSTALLER
new file mode 100644
index 000000000..a1b589e38
--- /dev/null
+++ b/venv/Lib/site-packages/google_cloud_storage-1.31.2.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/Lib/site-packages/google_cloud_storage-1.31.2.dist-info/LICENSE b/venv/Lib/site-packages/google_cloud_storage-1.31.2.dist-info/LICENSE
new file mode 100644
index 000000000..a8ee855de
--- /dev/null
+++ b/venv/Lib/site-packages/google_cloud_storage-1.31.2.dist-info/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ https://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ https://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/venv/Lib/site-packages/google_cloud_storage-1.31.2.dist-info/METADATA b/venv/Lib/site-packages/google_cloud_storage-1.31.2.dist-info/METADATA
new file mode 100644
index 000000000..721881e68
--- /dev/null
+++ b/venv/Lib/site-packages/google_cloud_storage-1.31.2.dist-info/METADATA
@@ -0,0 +1,134 @@
+Metadata-Version: 2.1
+Name: google-cloud-storage
+Version: 1.31.2
+Summary: Google Cloud Storage API client library
+Home-page: https://github.com/googleapis/python-storage
+Author: Google LLC
+Author-email: googleapis-packages@google.com
+License: Apache 2.0
+Platform: Posix; MacOS X; Windows
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Operating System :: OS Independent
+Classifier: Topic :: Internet
+Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*
+Requires-Dist: google-auth (<2.0dev,>=1.11.0)
+Requires-Dist: google-cloud-core (<2.0dev,>=1.4.1)
+Requires-Dist: google-resumable-media (<2.0dev,>=1.0.0)
+Requires-Dist: requests (<3.0.0dev,>=2.18.0)
+
+Python Client for Google Cloud Storage
+======================================
+
+|GA| |pypi| |versions|
+
+`Google Cloud Storage`_ allows you to store data on
+Google infrastructure with very high reliability, performance and
+availability, and can be used to distribute large data objects to users
+via direct download.
+
+- `Client Library Documentation`_
+- `Storage API docs`_
+
+.. |GA| image:: https://img.shields.io/badge/support-GA-gold.svg
+ :target: https://github.com/googleapis/google-cloud-python/blob/master/README.rst#general-availability
+.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-storage.svg
+ :target: https://pypi.org/project/google-cloud-storage
+.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-storage.svg
+ :target: https://pypi.org/project/google-cloud-storage
+.. _Google Cloud Storage: https://cloud.google.com/storage/docs
+.. _Client Library Documentation: https://googleapis.dev/python/storage/latest
+.. _Storage API docs: https://cloud.google.com/storage/docs/json_api/v1
+
+Quick Start
+-----------
+
+In order to use this library, you first need to go through the following steps:
+
+1. `Select or create a Cloud Platform project.`_
+2. `Enable billing for your project.`_
+3. `Enable the Google Cloud Storage API.`_
+4. `Setup Authentication.`_
+
+.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
+.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
+.. _Enable the Google Cloud Storage API.: https://cloud.google.com/storage
+.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
+
+Installation
+~~~~~~~~~~~~
+
+Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
+create isolated Python environments. The basic problem it addresses is one of
+dependencies and versions, and indirectly permissions.
+
+With `virtualenv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
+
+
+Supported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^
+Python >= 3.5
+
+Deprecated Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+Python == 2.7. Python 2.7 support will be removed on January 1, 2020.
+
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+ pip install virtualenv
+ virtualenv <your-env>
+ source <your-env>/bin/activate
+ <your-env>/bin/pip install google-cloud-storage
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+ pip install virtualenv
+ virtualenv <your-env>
+ <your-env>\Scripts\activate
+ <your-env>\Scripts\pip.exe install google-cloud-storage
+
+
+Example Usage
+~~~~~~~~~~~~~
+
+You need to create a Google Cloud Storage bucket to use this client library.
+Follow along with the `official Google Cloud Storage documentation`_ to learn
+how to create a bucket.
+
+.. _official Google Cloud Storage documentation: https://cloud.google.com/storage/docs/cloud-console#_creatingbuckets
+
+.. code:: python
+
+ from google.cloud import storage
+ client = storage.Client()
+ # https://console.cloud.google.com/storage/browser/[bucket-id]/
+ bucket = client.get_bucket('bucket-id-here')
+ # Then do other things...
+ blob = bucket.get_blob('remote/path/to/file.txt')
+ print(blob.download_as_string())
+ blob.upload_from_string('New contents!')
+ blob2 = bucket.blob('remote/path/storage.txt')
+ blob2.upload_from_filename(filename='/local/path.txt')
+
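+A couple of follow-on operations, sketched under the same assumptions as the
+snippet above (the bucket id and local paths are placeholders, not real
+resources):
+
+.. code:: python
+
+ # Download the blob to a local file instead of reading it into memory.
+ blob.download_to_filename('/local/copy.txt')
+ # Enumerate the bucket's objects by name.
+ names = [b.name for b in client.list_blobs('bucket-id-here')]
+ # Remove the object uploaded above.
+ bucket.delete_blob('remote/path/storage.txt')
+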
+
diff --git a/venv/Lib/site-packages/google_cloud_storage-1.31.2.dist-info/RECORD b/venv/Lib/site-packages/google_cloud_storage-1.31.2.dist-info/RECORD
new file mode 100644
index 000000000..edfc75034
--- /dev/null
+++ b/venv/Lib/site-packages/google_cloud_storage-1.31.2.dist-info/RECORD
@@ -0,0 +1,34 @@
+google/cloud/storage/__init__.py,sha256=UQ9ttkUTz-VUaGL_xdIA3TDXIN9wICyIrkgWhdzHW7k,1519
+google/cloud/storage/__pycache__/__init__.cpython-36.pyc,,
+google/cloud/storage/__pycache__/_helpers.cpython-36.pyc,,
+google/cloud/storage/__pycache__/_http.cpython-36.pyc,,
+google/cloud/storage/__pycache__/_signing.cpython-36.pyc,,
+google/cloud/storage/__pycache__/acl.cpython-36.pyc,,
+google/cloud/storage/__pycache__/batch.cpython-36.pyc,,
+google/cloud/storage/__pycache__/blob.cpython-36.pyc,,
+google/cloud/storage/__pycache__/bucket.cpython-36.pyc,,
+google/cloud/storage/__pycache__/client.cpython-36.pyc,,
+google/cloud/storage/__pycache__/constants.cpython-36.pyc,,
+google/cloud/storage/__pycache__/hmac_key.cpython-36.pyc,,
+google/cloud/storage/__pycache__/iam.cpython-36.pyc,,
+google/cloud/storage/__pycache__/notification.cpython-36.pyc,,
+google/cloud/storage/_helpers.py,sha256=EhOtEZjK0K877UqPJk-V1UNIaFKOdPLkkHuOm4dwb_Q,19184
+google/cloud/storage/_http.py,sha256=OXWZRJJHV0ZRXIKeQu5Ukhc9ufzCAIyHVRGCTmJQbTM,1913
+google/cloud/storage/_signing.py,sha256=jzev7Ys7QGOVg6B_VdSr6J4E13tayDQfUucCcfhz7fc,25497
+google/cloud/storage/acl.py,sha256=21OphXnNVU6J9dFYbPGdxWivamxek7OktRTLKzv9sCI,22481
+google/cloud/storage/batch.py,sha256=kmYk3n_r0xc4m7SF0sQTtvRj2oDKe0OM0sAuqapLoS8,11817
+google/cloud/storage/blob.py,sha256=fGhNwaLJK-J7rI37cSTxO2p26MPkzjQ4w76xIBEUB20,152339
+google/cloud/storage/bucket.py,sha256=Ot7pFB-ZlvTRvMfCfzcaq7wvjw-QE8YivC4ugl1_YNc,124092
+google/cloud/storage/client.py,sha256=2BCTs0HoXbzFIPkNmEKGslISWJUnI3f902bPqwiGGKw,41522
+google/cloud/storage/constants.py,sha256=xa0gJon961CIZ74CrNtYsI8pNIcyouV-PvTgcYIQPBI,2853
+google/cloud/storage/hmac_key.py,sha256=BXwJ_RKmsMtFpm51jwtMlK_QyuHNDsD-k5TyhWUXsLY,9260
+google/cloud/storage/iam.py,sha256=zOGFHUG4mYQTcLrHMGmiTQHZcDOjcNh1NgX8V3L9y2w,2802
+google/cloud/storage/notification.py,sha256=rK125JNzWz7qYk3FZ7gnD8rRmBJ4WRHLXBJZ5mi3HMg,14750
+google_cloud_storage-1.31.2-py3.8-nspkg.pth,sha256=b0D5dZk3RUzK54tZ9iZDvLm7u8ltc5EzYrGCmhsuoNw,1698
+google_cloud_storage-1.31.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+google_cloud_storage-1.31.2.dist-info/LICENSE,sha256=wj8Xz9NbPUIIxjKTeDELp0xe1uoa3ruCdyltMOOzSyk,11354
+google_cloud_storage-1.31.2.dist-info/METADATA,sha256=tPvKAeKwxOg2pHwLsrOnjvNkHmtuMHUOkqoOmH16Kgc,4694
+google_cloud_storage-1.31.2.dist-info/RECORD,,
+google_cloud_storage-1.31.2.dist-info/WHEEL,sha256=ADKeyaGyKF5DwBNE0sRE5pvW-bSkFMJfBuhzZ3rceP4,110
+google_cloud_storage-1.31.2.dist-info/namespace_packages.txt,sha256=v8IaYqRE2a0onAGJIpZeFkkH83wXSWZRR9eOyfMwoTc,20
+google_cloud_storage-1.31.2.dist-info/top_level.txt,sha256=_1QvSJIhFAGfxb79D6DhB7SUw2X6T4rwnz_LLrbcD3c,7
diff --git a/venv/Lib/site-packages/google_cloud_storage-1.31.2.dist-info/WHEEL b/venv/Lib/site-packages/google_cloud_storage-1.31.2.dist-info/WHEEL
new file mode 100644
index 000000000..6d38aa060
--- /dev/null
+++ b/venv/Lib/site-packages/google_cloud_storage-1.31.2.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.35.1)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/venv/Lib/site-packages/google_cloud_storage-1.31.2.dist-info/namespace_packages.txt b/venv/Lib/site-packages/google_cloud_storage-1.31.2.dist-info/namespace_packages.txt
new file mode 100644
index 000000000..5a9c12bb2
--- /dev/null
+++ b/venv/Lib/site-packages/google_cloud_storage-1.31.2.dist-info/namespace_packages.txt
@@ -0,0 +1,2 @@
+google
+google.cloud
diff --git a/venv/Lib/site-packages/google_cloud_storage-1.31.2.dist-info/top_level.txt b/venv/Lib/site-packages/google_cloud_storage-1.31.2.dist-info/top_level.txt
new file mode 100644
index 000000000..cb429113e
--- /dev/null
+++ b/venv/Lib/site-packages/google_cloud_storage-1.31.2.dist-info/top_level.txt
@@ -0,0 +1 @@
+google
diff --git a/venv/Lib/site-packages/google_crc32c-1.0.0.dist-info/INSTALLER b/venv/Lib/site-packages/google_crc32c-1.0.0.dist-info/INSTALLER
new file mode 100644
index 000000000..a1b589e38
--- /dev/null
+++ b/venv/Lib/site-packages/google_crc32c-1.0.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/Lib/site-packages/google_crc32c-1.0.0.dist-info/LICENSE b/venv/Lib/site-packages/google_crc32c-1.0.0.dist-info/LICENSE
new file mode 100644
index 000000000..b44737601
--- /dev/null
+++ b/venv/Lib/site-packages/google_crc32c-1.0.0.dist-info/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ https://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/venv/Lib/site-packages/google_crc32c-1.0.0.dist-info/METADATA b/venv/Lib/site-packages/google_crc32c-1.0.0.dist-info/METADATA
new file mode 100644
index 000000000..c6b3c5b80
--- /dev/null
+++ b/venv/Lib/site-packages/google_crc32c-1.0.0.dist-info/METADATA
@@ -0,0 +1,176 @@
+Metadata-Version: 2.1
+Name: google-crc32c
+Version: 1.0.0
+Summary: A python wrapper of the C library 'Google CRC32C'
+Home-page: https://github.com/googleapis/python-crc32c
+Author: Google LLC
+Author-email: googleapis-packages@google.com
+License: Apache 2.0
+Platform: Posix
+Platform: MacOS X
+Platform: Windows
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Requires-Python: >=3.5
+Description-Content-Type: text/markdown
+Requires-Dist: cffi (>=1.0.0)
+Provides-Extra: testing
+Requires-Dist: pytest ; extra == 'testing'
+
+# `google-crc32c`
+
+This package wraps the [`google/crc32c`](https://github.com/google/crc32c)
+hardware-based implementation of the CRC32C hashing algorithm. Multiple wheels
+are distributed as well as source. If a wheel is not published for the python
+version and platform you are using, you will need to compile crc32c using a
+C toolchain.
+
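+A minimal usage sketch (assuming the wheel installed cleanly; `0xE3069283` is
+the well-known CRC32C check value for `b"123456789"`):
+
+```python
+import google_crc32c
+
+# One-shot checksum of a complete buffer.
+assert google_crc32c.value(b"123456789") == 0xE3069283
+# Incremental checksum over two chunks gives the same result.
+crc = google_crc32c.extend(0, b"12345")
+crc = google_crc32c.extend(crc, b"6789")
+assert crc == 0xE3069283
+```
+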
+
+# Building
+
+## Be sure to check out all submodules:
+
+```
+$ git clone --recursive https://github.com/googleapis/python-crc32c
+```
+
+## Prerequisites
+
+On Linux:
+
+- `docker`
+- `python3.7`
+
+On OS X:
+
+- `make`
+- [Official][1] `python.org` Python 2.7, 3.5, 3.6 and 3.7
+
+On Windows:
+
+- `cmake`
+- [Official][1] `python.org` Python 3.5, 3.6 and 3.7
+- Visual Studio 15 2017 (just the compiler toolchain)
+
+Unfortunately, `libcrc32c` relies on many C++11 features, so
+building a Python 2.7 extension with the
+[Visual C++ Compiler for Python 2.7][2] is infeasible.
+
+
+## Building Wheels
+
+On Linux:
+
+```
+./scripts/manylinux/build.sh
+```
+
+On OS X:
+
+```
+./scripts/osx/build.sh
+```
+
+On Windows: see `.appveyor.yml`.
+
+## Testing/Verify Wheels
+
+On Linux (i.e. a host OS, not a `docker` container):
+
+```
+$ ./scripts/manylinux/check-37.sh
+...
++ venv/bin/python check_cffi_crc32c.py
+_crc32c_cffi:
+_crc32c_cffi.lib:
+dir(_crc32c_cffi.lib): ['crc32c_extend', 'crc32c_value']
++ unzip -l wheels/google_crc32c-0.0.1-cp37-cp37m-manylinux1_x86_64.whl
+Archive: wheels/google_crc32c-0.0.1-cp37-cp37m-manylinux1_x86_64.whl
+ Length Date Time Name
+--------- ---------- ----- ----
+ 26120 2018-10-25 00:09 crc32c/_crc32c_cffi.abi3.so
+ 765 2018-10-24 23:57 crc32c/__init__.py
+ 29552 2018-10-25 00:09 crc32c/.libs/libcrc32c-f865a225.so
+ 109 2018-10-25 00:09 google_crc32c-0.0.1.dist-info/WHEEL
+ 766 2018-10-25 00:09 google_crc32c-0.0.1.dist-info/METADATA
+ 652 2018-10-25 00:09 google_crc32c-0.0.1.dist-info/RECORD
+ 1 2018-10-25 00:09 google_crc32c-0.0.1.dist-info/zip-safe
+ 7 2018-10-25 00:09 google_crc32c-0.0.1.dist-info/top_level.txt
+--------- -------
+ 57972 8 files
+...
+```
+
+On OS X:
+
+```
+$ ./scripts/osx/check.sh
+...
++ venv37/bin/python .../python-crc32c/check_cffi_crc32c.py
+_crc32c_cffi:
+_crc32c_cffi.lib:
+dir(_crc32c_cffi.lib): ['crc32c_extend', 'crc32c_value']
++ /Library/Frameworks/Python.framework/Versions/3.7/bin/delocate-listdeps --all --depending .../python-crc32c/wheels/google_crc32c-0.0.1-cp37-cp37m-macosx_10_6_intel.whl
+/usr/lib/libSystem.B.dylib:
+ google_crc32c/_crc32c_cffi.abi3.so
+ google_crc32c/.dylibs/libcrc32c.dylib
+/usr/lib/libc++.1.dylib:
+ google_crc32c/.dylibs/libcrc32c.dylib
+@loader_path/.dylibs/libcrc32c.dylib:
+ google_crc32c/_crc32c_cffi.abi3.so
+...
+```
+
+On Windows: TBD.
+
+[1]: https://www.python.org/downloads/
+[2]: https://aka.ms/vcpython27
+
+## Installing locally for testing
+
+Initialize the submodules and build the main `libcrc32c.so` shared
+library using `cmake` / `make`:
+
+```bash
+$ cd python-crc32c
+$ git submodule update --init --recursive
+$ python -m venv venv
+$ venv/bin/pip install --upgrade setuptools pip wheel
+$ venv/bin/pip install cmake
+$ mkdir usr
+$ export CRC32C_INSTALL_PREFIX=$(pwd)/usr
+$ mkdir google_crc32c/build
+$ cd google_crc32c/build
+$ ../../venv/bin/cmake \
+> -DCRC32C_BUILD_TESTS=no \
+> -DCRC32C_BUILD_BENCHMARKS=no \
+> -DBUILD_SHARED_LIBS=yes \
+> -DCMAKE_INSTALL_PREFIX:PATH=${CRC32C_INSTALL_PREFIX} \
+> ..
+$ make all install
+$ cd ../..
+```
+
+Now, run the tests:
+
+```bash
+$ venv/bin/pip install -e .[testing]
+$ venv/bin/py.test tests/
+============================= test session starts ==============================
+platform linux -- Python 3.6.7, pytest-3.10.0, py-1.7.0, pluggy-0.8.0
+rootdir: ..., inifile:
+collected 9 items
+
+tests/test___init__.py ......... [100%]
+
+=========================== 9 passed in 0.03 seconds ===========================
+```
+
+
diff --git a/venv/Lib/site-packages/google_crc32c-1.0.0.dist-info/RECORD b/venv/Lib/site-packages/google_crc32c-1.0.0.dist-info/RECORD
new file mode 100644
index 000000000..4b6862784
--- /dev/null
+++ b/venv/Lib/site-packages/google_crc32c-1.0.0.dist-info/RECORD
@@ -0,0 +1,19 @@
+google_crc32c-1.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+google_crc32c-1.0.0.dist-info/LICENSE,sha256=TEJ1CKTAgYeEzqO7D3E2Qn7clHAVAj5RNru9TEvY57w,11559
+google_crc32c-1.0.0.dist-info/METADATA,sha256=YI6LLOK-kY02aENeO2om7d8KWJo4P4Me5aTyQKOG-v0,5153
+google_crc32c-1.0.0.dist-info/RECORD,,
+google_crc32c-1.0.0.dist-info/WHEEL,sha256=ZFeOeZQCWkgYx9PG5WAxk1yIHroxd2erWFNpu0USMOg,102
+google_crc32c-1.0.0.dist-info/top_level.txt,sha256=r7PLPlKjfhMZLqeRsKXIQdIzbe3Frv_2_b8XmcvZ4FQ,14
+google_crc32c-1.0.0.dist-info/zip-safe,sha256=frcCV1k9oG9oKj3dpUqdJg1PxRT2RSN_XKdLCPjaYaY,2
+google_crc32c/__config__.py,sha256=PtG7CIVAqqcxuYQuGi7fH4eIGbXnMNB4HYnvkCCwxb0,1110
+google_crc32c/__init__.py,sha256=7dkqkjDtoCNKfWEJClGt81dzo5MEsR6sISEsg4I_1HA,1328
+google_crc32c/__pycache__/__config__.cpython-36.pyc,,
+google_crc32c/__pycache__/__init__.cpython-36.pyc,,
+google_crc32c/__pycache__/_checksum.cpython-36.pyc,,
+google_crc32c/__pycache__/cffi.cpython-36.pyc,,
+google_crc32c/__pycache__/python.cpython-36.pyc,,
+google_crc32c/_checksum.py,sha256=jIfqcDUBPuGpFgRjEKurFQHNEw49-rttsm8hA1k6xc4,2703
+google_crc32c/_crc32c_cffi.cp36-win32.pyd,sha256=huvzilEUz3auM05Z8TlnA9_GIWgdks1wA_Nyu4POaOY,9728
+google_crc32c/cffi.py,sha256=H7oKOdZ7DPcRR7GUsbS3mDvqwv0vM6Z4jFU-UzQt-m4,2353
+google_crc32c/extra-dll/crc32c.dll,sha256=RMwg1cKbBD4WvgYkQ8mkUp3onPDGBdZdqgXuf6f_cc4,31232
+google_crc32c/python.py,sha256=1KRCuv_GLTLPNiR1VBPuEPYB_Yfayw9FZ2iFhDrfIkw,5785
diff --git a/venv/Lib/site-packages/google_crc32c-1.0.0.dist-info/WHEEL b/venv/Lib/site-packages/google_crc32c-1.0.0.dist-info/WHEEL
new file mode 100644
index 000000000..f2456e30b
--- /dev/null
+++ b/venv/Lib/site-packages/google_crc32c-1.0.0.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.34.2)
+Root-Is-Purelib: false
+Tag: cp36-cp36m-win32
+
diff --git a/venv/Lib/site-packages/google_crc32c-1.0.0.dist-info/top_level.txt b/venv/Lib/site-packages/google_crc32c-1.0.0.dist-info/top_level.txt
new file mode 100644
index 000000000..10b44e607
--- /dev/null
+++ b/venv/Lib/site-packages/google_crc32c-1.0.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+google_crc32c
diff --git a/venv/Lib/site-packages/google_crc32c-1.0.0.dist-info/zip-safe b/venv/Lib/site-packages/google_crc32c-1.0.0.dist-info/zip-safe
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/venv/Lib/site-packages/google_crc32c-1.0.0.dist-info/zip-safe
@@ -0,0 +1 @@
+
diff --git a/venv/Lib/site-packages/google_crc32c/__config__.py b/venv/Lib/site-packages/google_crc32c/__config__.py
new file mode 100644
index 000000000..7a3642e7b
--- /dev/null
+++ b/venv/Lib/site-packages/google_crc32c/__config__.py
@@ -0,0 +1,38 @@
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+import pkg_resources
+
+
+def modify_path():
+ """Modify the module search path."""
+ # Only modify path on Windows.
+ if os.name != "nt":
+ return
+
+ path = os.environ.get("PATH")
+ if path is None:
+ return
+
+ try:
+ extra_dll_dir = pkg_resources.resource_filename("google_crc32c", "extra-dll")
+ if os.path.isdir(extra_dll_dir):
+ os.environ["PATH"] = path + os.pathsep + extra_dll_dir
+ except ImportError:
+ pass
+
+
+modify_path()
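+
+# Note: importing this module is sufficient to apply the tweak above;
+# ``google_crc32c.cffi`` deliberately imports ``__config__`` first so that,
+# on Windows, the bundled ``extra-dll/crc32c.dll`` is on ``PATH`` before the
+# compiled extension (which links against it) gets loaded.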
diff --git a/venv/Lib/site-packages/google_crc32c/__init__.py b/venv/Lib/site-packages/google_crc32c/__init__.py
new file mode 100644
index 000000000..1e1c0649e
--- /dev/null
+++ b/venv/Lib/site-packages/google_crc32c/__init__.py
@@ -0,0 +1,37 @@
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import warnings
+
+_SLOW_CRC32C_WARNING = (
+ "As the c extension couldn't be imported, `google-crc32c` is using a "
+ "pure python implementation that is significantly slower. If possible, "
+ "please configure a c build environment and compile the extension"
+)
+
+# If available, default to CFFI Implementation, otherwise, use pure python.
+try:
+ from google_crc32c import cffi as _crc32c
+ implementation = "cffi"
+except ImportError:
+ from google_crc32c import python as _crc32c
+ warnings.warn(_SLOW_CRC32C_WARNING, RuntimeWarning)
+ implementation = "python"
+
+extend = _crc32c.extend
+value = _crc32c.value
+
+Checksum = _crc32c.Checksum
+
+__all__ = ["extend", "value", "Checksum", "implementation"]
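+
+# Usage sketch (illustrative; assumes the package imports cleanly):
+#
+#   import google_crc32c
+#   print(google_crc32c.implementation)      # "cffi" or "python"
+#   checksum = google_crc32c.Checksum(b"12345")
+#   checksum.update(b"6789")
+#   print(checksum.hexdigest())              # b"e3069283"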
diff --git a/venv/Lib/site-packages/google_crc32c/__pycache__/__config__.cpython-36.pyc b/venv/Lib/site-packages/google_crc32c/__pycache__/__config__.cpython-36.pyc
new file mode 100644
index 000000000..df05108aa
Binary files /dev/null and b/venv/Lib/site-packages/google_crc32c/__pycache__/__config__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google_crc32c/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/google_crc32c/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..79f488e50
Binary files /dev/null and b/venv/Lib/site-packages/google_crc32c/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google_crc32c/__pycache__/_checksum.cpython-36.pyc b/venv/Lib/site-packages/google_crc32c/__pycache__/_checksum.cpython-36.pyc
new file mode 100644
index 000000000..5393265eb
Binary files /dev/null and b/venv/Lib/site-packages/google_crc32c/__pycache__/_checksum.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google_crc32c/__pycache__/cffi.cpython-36.pyc b/venv/Lib/site-packages/google_crc32c/__pycache__/cffi.cpython-36.pyc
new file mode 100644
index 000000000..da53dcd95
Binary files /dev/null and b/venv/Lib/site-packages/google_crc32c/__pycache__/cffi.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google_crc32c/__pycache__/python.cpython-36.pyc b/venv/Lib/site-packages/google_crc32c/__pycache__/python.cpython-36.pyc
new file mode 100644
index 000000000..298c4f20d
Binary files /dev/null and b/venv/Lib/site-packages/google_crc32c/__pycache__/python.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/google_crc32c/_checksum.py b/venv/Lib/site-packages/google_crc32c/_checksum.py
new file mode 100644
index 000000000..fa299c7cc
--- /dev/null
+++ b/venv/Lib/site-packages/google_crc32c/_checksum.py
@@ -0,0 +1,86 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import struct
+
+
+class CommonChecksum(object):
+ """Hashlib-alike helper for CRC32C operations.
+
+ This class should not be used directly; subclasses must provide an ``update`` implementation.
+
+ Args:
+ initial_value (Optional[bytes]): the initial chunk of data from
+ which the CRC32C checksum is computed. Defaults to b''.
+ """
+
+ def __init__(self, initial_value=b""):
+ self._crc = 0
+ if initial_value != b"":
+ self.update(initial_value)
+
+ def update(self, data):
+ """Update the checksum with a new chunk of data.
+
+ Args:
+ data (bytes): a chunk of data used to extend
+ the CRC32C checksum.
+ """
+ raise NotImplementedError()
+
+ def digest(self):
+ """Big-endian order, per RFC 4960.
+
+ See: https://cloud.google.com/storage/docs/json_api/v1/objects#crc32c
+
+ Returns:
+ bytes: A four-byte digest string.
+ """
+ return struct.pack(">L", self._crc)
+
+ def hexdigest(self):
+ """Like :meth:`digest` except returns as a bytestring of double length.
+
+ Returns
+ bytes: A sixteen byte digest string, contaiing only hex digits.
+ """
+ return "{:08x}".format(self._crc).encode("ascii")
+
+ def copy(self):
+ """Create another checksum with the same CRC32C value.
+
+ Returns:
+ Checksum: the new instance.
+ """
+ clone = self.__class__()
+ clone._crc = self._crc
+ return clone
+
+ def consume(self, stream, chunksize):
+ """Consume chunks from a stream, extending our CRC32 checksum.
+
+ Args:
+ stream (BinaryIO): the stream to consume.
+ chunksize (int): the size of the read to perform
+
+ Returns:
+ Generator[bytes, None, None]: Iterable of the chunks read from the
+ stream.
+ """
+ while True:
+ chunk = stream.read(chunksize)
+ if not chunk:
+ break
+ self.update(chunk)
+ yield chunk
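+
+# Sketch of how ``consume`` is typically driven (illustrative only; it is a
+# generator, so the stream is read lazily as the caller iterates):
+#
+#   import io
+#   import google_crc32c
+#   checksum = google_crc32c.Checksum()
+#   for chunk in checksum.consume(io.BytesIO(b"123456789"), chunksize=4):
+#       pass  # each chunk is yielded back so it can be forwarded elsewhere
+#   assert checksum.hexdigest() == b"e3069283"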
diff --git a/venv/Lib/site-packages/google_crc32c/_crc32c_cffi.cp36-win32.pyd b/venv/Lib/site-packages/google_crc32c/_crc32c_cffi.cp36-win32.pyd
new file mode 100644
index 000000000..425292616
Binary files /dev/null and b/venv/Lib/site-packages/google_crc32c/_crc32c_cffi.cp36-win32.pyd differ
diff --git a/venv/Lib/site-packages/google_crc32c/cffi.py b/venv/Lib/site-packages/google_crc32c/cffi.py
new file mode 100644
index 000000000..2de1dc39b
--- /dev/null
+++ b/venv/Lib/site-packages/google_crc32c/cffi.py
@@ -0,0 +1,72 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import struct
+
+# NOTE: ``__config__`` **must** be the first import because it (may)
+# modify the search path used to locate shared libraries.
+import google_crc32c.__config__
+import google_crc32c._crc32c_cffi
+from google_crc32c._checksum import CommonChecksum
+
+
+def extend(crc, chunk):
+ """Update an existing CRC checksum with new chunk of data.
+
+ Args:
+ crc (int): An existing CRC checksum.
+ chunk (Union[bytes, List[int], Tuple[int]]): A new chunk of data.
+ Intended to be a byte string or similar.
+
+ Returns:
+ int: New CRC checksum computed by extending existing CRC
+ with ``chunk``.
+ """
+ return google_crc32c._crc32c_cffi.lib.crc32c_extend(crc, chunk, len(chunk))
+
+
+def value(chunk):
+ """Compute a CRC checksum for a chunk of data.
+
+ Args:
+ chunk (Union[bytes, List[int], Tuple[int]]): A new chunk of data.
+ Intended to be a byte string or similar.
+
+ Returns:
+ int: New CRC checksum computed for ``chunk``.
+ """
+ return google_crc32c._crc32c_cffi.lib.crc32c_value(chunk, len(chunk))
+
+
+class Checksum(CommonChecksum):
+ """Hashlib-alike helper for CRC32C operations.
+
+ Args:
+ initial_value (Optional[bytes]): the initial chunk of data from
+ which the CRC32C checksum is computed. Defaults to b''.
+ """
+
+ __slots__ = ("_crc",)
+
+ def __init__(self, initial_value=b""):
+ self._crc = value(initial_value)
+
+ def update(self, chunk):
+ """Update the checksum with a new chunk of data.
+
+ Args:
+ chunk (Optional[bytes]): a chunk of data used to extend
+ the CRC32C checksum.
+ """
+ self._crc = extend(self._crc, chunk)
diff --git a/venv/Lib/site-packages/google_crc32c/extra-dll/crc32c.dll b/venv/Lib/site-packages/google_crc32c/extra-dll/crc32c.dll
new file mode 100644
index 000000000..2a679d84a
Binary files /dev/null and b/venv/Lib/site-packages/google_crc32c/extra-dll/crc32c.dll differ
diff --git a/venv/Lib/site-packages/google_crc32c/python.py b/venv/Lib/site-packages/google_crc32c/python.py
new file mode 100644
index 000000000..0ab5f353a
--- /dev/null
+++ b/venv/Lib/site-packages/google_crc32c/python.py
@@ -0,0 +1,124 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import array
+import struct
+
+from google_crc32c._checksum import CommonChecksum
+
+
+def extend(crc, chunk):
+ """Update an existing CRC checksum with new chunk of data.
+
+ Args:
+ crc (int): An existing CRC checksum.
+ chunk (Union[bytes, List[int], Tuple[int]]): A new chunk of data.
+ Intended to be a byte string or similar.
+
+ Returns:
+ int: New CRC checksum computed by extending existing CRC
+ with ``chunk``.
+ """
+ c = Checksum()
+ c._crc = crc
+ c.update(chunk)
+ return c._crc
+
+
+def value(chunk):
+ """Compute a CRC checksum for a chunk of data.
+
+ Args:
+ chunk (Union[bytes, List[int], Tuple[int]]): A new chunk of data.
+ Intended to be a byte string or similar.
+
+ Returns:
+ int: New CRC checksum computed for ``chunk``.
+ """
+ c = Checksum()
+ c.update(chunk)
+ return c._crc
+
+
+class Checksum(CommonChecksum):
+ """Hashlib-alike helper for CRC32C operations.
+
+ Args:
+ initial_value (Optional[bytes]): the initial chunk of data from
+ which the CRC32C checksum is computed. Defaults to b''.
+ """
+
+ def __init__(self, initial_value=b""):
+ self._crc = 0
+ if initial_value != b"":
+ self.update(initial_value)
+
+ def update(self, data):
+ """Update the checksum with a new chunk of data.
+
+ Args:
+ data (bytes): a chunk of data used to extend
+ the CRC32C checksum.
+ """
+ if type(data) != array.array or data.itemsize != 1:
+ buffer = array.array("B", data)
+ else:
+ buffer = data
+ self._crc = self._crc ^ 0xFFFFFFFF
+ for b in buffer:
+ table_poly = _TABLE[(b ^ self._crc) & 0xFF]
+ self._crc = table_poly ^ ((self._crc >> 8) & 0xFFFFFFFF)
+ self._crc = self._crc ^ 0xFFFFFFFF
+
+
+# fmt:off
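+# Byte-indexed lookup table for CRC32C (Castagnoli), i.e. the reflected
+# polynomial 0x82F63B78; ``update`` above consumes one byte per table lookup.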
+_TABLE = [
+ 0x00000000, 0xf26b8303, 0xe13b70f7, 0x1350f3f4, 0xc79a971f, 0x35f1141c, 0x26a1e7e8,
+ 0xd4ca64eb, 0x8ad958cf, 0x78b2dbcc, 0x6be22838, 0x9989ab3b, 0x4d43cfd0, 0xbf284cd3,
+ 0xac78bf27, 0x5e133c24, 0x105ec76f, 0xe235446c, 0xf165b798, 0x30e349b, 0xd7c45070,
+ 0x25afd373, 0x36ff2087, 0xc494a384, 0x9a879fa0, 0x68ec1ca3, 0x7bbcef57, 0x89d76c54,
+ 0x5d1d08bf, 0xaf768bbc, 0xbc267848, 0x4e4dfb4b, 0x20bd8ede, 0xd2d60ddd, 0xc186fe29,
+ 0x33ed7d2a, 0xe72719c1, 0x154c9ac2, 0x61c6936, 0xf477ea35, 0xaa64d611, 0x580f5512,
+ 0x4b5fa6e6, 0xb93425e5, 0x6dfe410e, 0x9f95c20d, 0x8cc531f9, 0x7eaeb2fa, 0x30e349b1,
+ 0xc288cab2, 0xd1d83946, 0x23b3ba45, 0xf779deae, 0x5125dad, 0x1642ae59, 0xe4292d5a,
+ 0xba3a117e, 0x4851927d, 0x5b016189, 0xa96ae28a, 0x7da08661, 0x8fcb0562, 0x9c9bf696,
+ 0x6ef07595, 0x417b1dbc, 0xb3109ebf, 0xa0406d4b, 0x522bee48, 0x86e18aa3, 0x748a09a0,
+ 0x67dafa54, 0x95b17957, 0xcba24573, 0x39c9c670, 0x2a993584, 0xd8f2b687, 0xc38d26c,
+ 0xfe53516f, 0xed03a29b, 0x1f682198, 0x5125dad3, 0xa34e59d0, 0xb01eaa24, 0x42752927,
+ 0x96bf4dcc, 0x64d4cecf, 0x77843d3b, 0x85efbe38, 0xdbfc821c, 0x2997011f, 0x3ac7f2eb,
+ 0xc8ac71e8, 0x1c661503, 0xee0d9600, 0xfd5d65f4, 0xf36e6f7, 0x61c69362, 0x93ad1061,
+ 0x80fde395, 0x72966096, 0xa65c047d, 0x5437877e, 0x4767748a, 0xb50cf789, 0xeb1fcbad,
+ 0x197448ae, 0xa24bb5a, 0xf84f3859, 0x2c855cb2, 0xdeeedfb1, 0xcdbe2c45, 0x3fd5af46,
+ 0x7198540d, 0x83f3d70e, 0x90a324fa, 0x62c8a7f9, 0xb602c312, 0x44694011, 0x5739b3e5,
+ 0xa55230e6, 0xfb410cc2, 0x92a8fc1, 0x1a7a7c35, 0xe811ff36, 0x3cdb9bdd, 0xceb018de,
+ 0xdde0eb2a, 0x2f8b6829, 0x82f63b78, 0x709db87b, 0x63cd4b8f, 0x91a6c88c, 0x456cac67,
+ 0xb7072f64, 0xa457dc90, 0x563c5f93, 0x82f63b7, 0xfa44e0b4, 0xe9141340, 0x1b7f9043,
+ 0xcfb5f4a8, 0x3dde77ab, 0x2e8e845f, 0xdce5075c, 0x92a8fc17, 0x60c37f14, 0x73938ce0,
+ 0x81f80fe3, 0x55326b08, 0xa759e80b, 0xb4091bff, 0x466298fc, 0x1871a4d8, 0xea1a27db,
+ 0xf94ad42f, 0xb21572c, 0xdfeb33c7, 0x2d80b0c4, 0x3ed04330, 0xccbbc033, 0xa24bb5a6,
+ 0x502036a5, 0x4370c551, 0xb11b4652, 0x65d122b9, 0x97baa1ba, 0x84ea524e, 0x7681d14d,
+ 0x2892ed69, 0xdaf96e6a, 0xc9a99d9e, 0x3bc21e9d, 0xef087a76, 0x1d63f975, 0xe330a81,
+ 0xfc588982, 0xb21572c9, 0x407ef1ca, 0x532e023e, 0xa145813d, 0x758fe5d6, 0x87e466d5,
+ 0x94b49521, 0x66df1622, 0x38cc2a06, 0xcaa7a905, 0xd9f75af1, 0x2b9cd9f2, 0xff56bd19,
+ 0xd3d3e1a, 0x1e6dcdee, 0xec064eed, 0xc38d26c4, 0x31e6a5c7, 0x22b65633, 0xd0ddd530,
+ 0x417b1db, 0xf67c32d8, 0xe52cc12c, 0x1747422f, 0x49547e0b, 0xbb3ffd08, 0xa86f0efc,
+ 0x5a048dff, 0x8ecee914, 0x7ca56a17, 0x6ff599e3, 0x9d9e1ae0, 0xd3d3e1ab, 0x21b862a8,
+ 0x32e8915c, 0xc083125f, 0x144976b4, 0xe622f5b7, 0xf5720643, 0x7198540, 0x590ab964,
+ 0xab613a67, 0xb831c993, 0x4a5a4a90, 0x9e902e7b, 0x6cfbad78, 0x7fab5e8c, 0x8dc0dd8f,
+ 0xe330a81a, 0x115b2b19, 0x20bd8ed, 0xf0605bee, 0x24aa3f05, 0xd6c1bc06, 0xc5914ff2,
+ 0x37faccf1, 0x69e9f0d5, 0x9b8273d6, 0x88d28022, 0x7ab90321, 0xae7367ca, 0x5c18e4c9,
+ 0x4f48173d, 0xbd23943e, 0xf36e6f75, 0x105ec76, 0x12551f82, 0xe03e9c81, 0x34f4f86a,
+ 0xc69f7b69, 0xd5cf889d, 0x27a40b9e, 0x79b737ba, 0x8bdcb4b9, 0x988c474d, 0x6ae7c44e,
+ 0xbe2da0a5, 0x4c4623a6, 0x5f16d052, 0xad7d5351
+]
diff --git a/venv/Lib/site-packages/google_resumable_media-1.1.0-py3.8-nspkg.pth b/venv/Lib/site-packages/google_resumable_media-1.1.0-py3.8-nspkg.pth
new file mode 100644
index 000000000..baef7a0f4
--- /dev/null
+++ b/venv/Lib/site-packages/google_resumable_media-1.1.0-py3.8-nspkg.pth
@@ -0,0 +1 @@
+import sys, types, os;has_mfs = sys.version_info > (3, 5);p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('google',));importlib = has_mfs and __import__('importlib.util');has_mfs and __import__('importlib.machinery');m = has_mfs and sys.modules.setdefault('google', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('google', [os.path.dirname(p)])));m = m or sys.modules.setdefault('google', types.ModuleType('google'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p)
diff --git a/venv/Lib/site-packages/google_resumable_media-1.1.0.dist-info/INSTALLER b/venv/Lib/site-packages/google_resumable_media-1.1.0.dist-info/INSTALLER
new file mode 100644
index 000000000..a1b589e38
--- /dev/null
+++ b/venv/Lib/site-packages/google_resumable_media-1.1.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/Lib/site-packages/google_resumable_media-1.1.0.dist-info/LICENSE b/venv/Lib/site-packages/google_resumable_media-1.1.0.dist-info/LICENSE
new file mode 100644
index 000000000..d64569567
--- /dev/null
+++ b/venv/Lib/site-packages/google_resumable_media-1.1.0.dist-info/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/venv/Lib/site-packages/google_resumable_media-1.1.0.dist-info/METADATA b/venv/Lib/site-packages/google_resumable_media-1.1.0.dist-info/METADATA
new file mode 100644
index 000000000..a78525a27
--- /dev/null
+++ b/venv/Lib/site-packages/google_resumable_media-1.1.0.dist-info/METADATA
@@ -0,0 +1,61 @@
+Metadata-Version: 2.1
+Name: google-resumable-media
+Version: 1.1.0
+Summary: Utilities for Google Media Downloads and Resumable Uploads
+Home-page: https://github.com/googleapis/google-resumable-media-python
+Author: Google Cloud Platform
+Author-email: googleapis-publisher@google.com
+License: Apache 2.0
+Platform: Posix; MacOS X; Windows
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Topic :: Internet
+Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*
+Requires-Dist: six
+Requires-Dist: crcmod (>=1.7) ; python_version == "2.7"
+Requires-Dist: google-crc32c (<2.0dev,>=1.0) ; python_version >= "3.5"
+Provides-Extra: aiohttp
+Requires-Dist: aiohttp (<4.0.0dev,>=3.6.2) ; (python_version >= "3.6") and extra == 'aiohttp'
+Provides-Extra: requests
+Requires-Dist: requests (<3.0.0dev,>=2.18.0) ; extra == 'requests'
+
+``google-resumable-media``
+==========================
+
+
+Utilities for Google Media Downloads and Resumable Uploads
+
+
+See the `docs`_ for examples and usage.
+
+.. _docs: https://googleapis.dev/python/google-resumable-media/latest/index.html
+
+Experimental `asyncio` Support
+------------------------------
+While still in development and subject to change, this library has `asyncio`
+support at `google._async_resumable_media`.
+
+Supported Python Versions
+-------------------------
+Python >= 3.5
+
+Deprecated Python Versions
+--------------------------
+Python == 2.7. Python 2.7 support will be removed on January 1, 2020.
+
+License
+-------
+
+Apache 2.0 - See `the LICENSE`_ for more information.
+
+.. _the LICENSE: https://github.com/googleapis/google-resumable-media-python/blob/master/LICENSE
+
diff --git a/venv/Lib/site-packages/google_resumable_media-1.1.0.dist-info/RECORD b/venv/Lib/site-packages/google_resumable_media-1.1.0.dist-info/RECORD
new file mode 100644
index 000000000..95bbee601
--- /dev/null
+++ b/venv/Lib/site-packages/google_resumable_media-1.1.0.dist-info/RECORD
@@ -0,0 +1,42 @@
+google/_async_resumable_media/__init__.py,sha256=tHyv2HJB1K4MNpM1OuMw0ibsboXc2COYlTHTy88y_Us,1742
+google/_async_resumable_media/__pycache__/__init__.cpython-36.pyc,,
+google/_async_resumable_media/__pycache__/_download.cpython-36.pyc,,
+google/_async_resumable_media/__pycache__/_helpers.cpython-36.pyc,,
+google/_async_resumable_media/__pycache__/_upload.cpython-36.pyc,,
+google/_async_resumable_media/_download.py,sha256=5zDkZLTjFVp-G4a6l-JpUQP-TSS-jS3Vn-DLtIRCxVs,20399
+google/_async_resumable_media/_helpers.py,sha256=uJdS_x4GkLoKMdWJ22c7SJVmDVOQAppXYgkTdnVFtyg,6501
+google/_async_resumable_media/_upload.py,sha256=xQ2tTUogYrd9MJayTzPk_RpHb1utSBYtXQlHZ4Yb7Gs,37423
+google/_async_resumable_media/requests/__init__.py,sha256=OdbTyerqKB9JtcAH9Pjxy_v9eMWEKRSGkmENfyYGezY,21663
+google/_async_resumable_media/requests/__pycache__/__init__.cpython-36.pyc,,
+google/_async_resumable_media/requests/__pycache__/_request_helpers.cpython-36.pyc,,
+google/_async_resumable_media/requests/__pycache__/download.cpython-36.pyc,,
+google/_async_resumable_media/requests/__pycache__/upload.cpython-36.pyc,,
+google/_async_resumable_media/requests/_request_helpers.py,sha256=36-t3Szo4jEsw4DRokgRUd8c9Nx39kRWYBvFaNToXpE,5080
+google/_async_resumable_media/requests/download.py,sha256=SZ-sMQ484Anx8RTFOH2wfGuvjzv12dRPMOvcOCKeWgo,18494
+google/_async_resumable_media/requests/upload.py,sha256=RQ9mL-gGXKae_KFt2TQpkyocUpqoMW9sr1kwtrhJNzY,19326
+google/resumable_media/__init__.py,sha256=tHyv2HJB1K4MNpM1OuMw0ibsboXc2COYlTHTy88y_Us,1742
+google/resumable_media/__pycache__/__init__.cpython-36.pyc,,
+google/resumable_media/__pycache__/_download.cpython-36.pyc,,
+google/resumable_media/__pycache__/_helpers.cpython-36.pyc,,
+google/resumable_media/__pycache__/_upload.cpython-36.pyc,,
+google/resumable_media/__pycache__/common.cpython-36.pyc,,
+google/resumable_media/_download.py,sha256=-48TD_OT1MbtlxaOFBrpmCL-UuBIZWUQP14PuXwhklk,20549
+google/resumable_media/_helpers.py,sha256=omB85j8D7LCpdwAcYHqiazs2UE3RYQS74tEydwIwjb8,12070
+google/resumable_media/_upload.py,sha256=D9fliWwiSNtHZ0YT1kFO1pBo7OTy3kVy8RXz6dlUvSI,39105
+google/resumable_media/common.py,sha256=rU9jV2D61M90jAn_FD9WDc5-m338XfTw5mUtEscBi9Y,5219
+google/resumable_media/requests/__init__.py,sha256=t_bZg3chx8tdVT6tPkBMdFLvECvAK51g9wPxRBoZoBg,21614
+google/resumable_media/requests/__pycache__/__init__.cpython-36.pyc,,
+google/resumable_media/requests/__pycache__/_request_helpers.cpython-36.pyc,,
+google/resumable_media/requests/__pycache__/download.cpython-36.pyc,,
+google/resumable_media/requests/__pycache__/upload.cpython-36.pyc,,
+google/resumable_media/requests/_request_helpers.py,sha256=Zn4H2XAsl3gyGjfAHZgzYqiRXIYSHvE3WRrN5LPc4Mc,4440
+google/resumable_media/requests/download.py,sha256=kgDKmxNWHJTWNTkU-QgyFn-9k8URDGA1E7QnBGGXEHw,19647
+google/resumable_media/requests/upload.py,sha256=k_CWvoIdjEHazFwhUqTSFicXwW-BqoVBLfW7TVbjqbw,19942
+google_resumable_media-1.1.0-py3.8-nspkg.pth,sha256=xH5gTxc4UipYP3qrbP-4CCHNGBV97eBR4QqhheCvBl4,539
+google_resumable_media-1.1.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+google_resumable_media-1.1.0.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
+google_resumable_media-1.1.0.dist-info/METADATA,sha256=zXELNBUzDK9yD4etxipb5RBOF_06zBBRY4Cvu_Zo0oM,2128
+google_resumable_media-1.1.0.dist-info/RECORD,,
+google_resumable_media-1.1.0.dist-info/WHEEL,sha256=ADKeyaGyKF5DwBNE0sRE5pvW-bSkFMJfBuhzZ3rceP4,110
+google_resumable_media-1.1.0.dist-info/namespace_packages.txt,sha256=_1QvSJIhFAGfxb79D6DhB7SUw2X6T4rwnz_LLrbcD3c,7
+google_resumable_media-1.1.0.dist-info/top_level.txt,sha256=_1QvSJIhFAGfxb79D6DhB7SUw2X6T4rwnz_LLrbcD3c,7
diff --git a/venv/Lib/site-packages/google_resumable_media-1.1.0.dist-info/WHEEL b/venv/Lib/site-packages/google_resumable_media-1.1.0.dist-info/WHEEL
new file mode 100644
index 000000000..6d38aa060
--- /dev/null
+++ b/venv/Lib/site-packages/google_resumable_media-1.1.0.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.35.1)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/venv/Lib/site-packages/google_resumable_media-1.1.0.dist-info/namespace_packages.txt b/venv/Lib/site-packages/google_resumable_media-1.1.0.dist-info/namespace_packages.txt
new file mode 100644
index 000000000..cb429113e
--- /dev/null
+++ b/venv/Lib/site-packages/google_resumable_media-1.1.0.dist-info/namespace_packages.txt
@@ -0,0 +1 @@
+google
diff --git a/venv/Lib/site-packages/google_resumable_media-1.1.0.dist-info/top_level.txt b/venv/Lib/site-packages/google_resumable_media-1.1.0.dist-info/top_level.txt
new file mode 100644
index 000000000..cb429113e
--- /dev/null
+++ b/venv/Lib/site-packages/google_resumable_media-1.1.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+google
diff --git a/venv/Lib/site-packages/googleapiclient/__init__.py b/venv/Lib/site-packages/googleapiclient/__init__.py
new file mode 100644
index 000000000..c9218dd85
--- /dev/null
+++ b/venv/Lib/site-packages/googleapiclient/__init__.py
@@ -0,0 +1,27 @@
+# Copyright 2014 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Set default logging handler to avoid "No handler found" warnings.
+import logging
+
+try: # Python 2.7+
+ from logging import NullHandler
+except ImportError:
+
+ class NullHandler(logging.Handler):
+ def emit(self, record):
+ pass
+
+
+logging.getLogger(__name__).addHandler(NullHandler())
diff --git a/venv/Lib/site-packages/googleapiclient/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/googleapiclient/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..afd7c6f43
Binary files /dev/null and b/venv/Lib/site-packages/googleapiclient/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/googleapiclient/__pycache__/_auth.cpython-36.pyc b/venv/Lib/site-packages/googleapiclient/__pycache__/_auth.cpython-36.pyc
new file mode 100644
index 000000000..d2a677f6d
Binary files /dev/null and b/venv/Lib/site-packages/googleapiclient/__pycache__/_auth.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/googleapiclient/__pycache__/_helpers.cpython-36.pyc b/venv/Lib/site-packages/googleapiclient/__pycache__/_helpers.cpython-36.pyc
new file mode 100644
index 000000000..67a9106d1
Binary files /dev/null and b/venv/Lib/site-packages/googleapiclient/__pycache__/_helpers.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/googleapiclient/__pycache__/channel.cpython-36.pyc b/venv/Lib/site-packages/googleapiclient/__pycache__/channel.cpython-36.pyc
new file mode 100644
index 000000000..d80e2f231
Binary files /dev/null and b/venv/Lib/site-packages/googleapiclient/__pycache__/channel.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/googleapiclient/__pycache__/discovery.cpython-36.pyc b/venv/Lib/site-packages/googleapiclient/__pycache__/discovery.cpython-36.pyc
new file mode 100644
index 000000000..abfccbca1
Binary files /dev/null and b/venv/Lib/site-packages/googleapiclient/__pycache__/discovery.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/googleapiclient/__pycache__/errors.cpython-36.pyc b/venv/Lib/site-packages/googleapiclient/__pycache__/errors.cpython-36.pyc
new file mode 100644
index 000000000..1f4ce69e5
Binary files /dev/null and b/venv/Lib/site-packages/googleapiclient/__pycache__/errors.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/googleapiclient/__pycache__/http.cpython-36.pyc b/venv/Lib/site-packages/googleapiclient/__pycache__/http.cpython-36.pyc
new file mode 100644
index 000000000..8e1631808
Binary files /dev/null and b/venv/Lib/site-packages/googleapiclient/__pycache__/http.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/googleapiclient/__pycache__/mimeparse.cpython-36.pyc b/venv/Lib/site-packages/googleapiclient/__pycache__/mimeparse.cpython-36.pyc
new file mode 100644
index 000000000..8cf34277f
Binary files /dev/null and b/venv/Lib/site-packages/googleapiclient/__pycache__/mimeparse.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/googleapiclient/__pycache__/model.cpython-36.pyc b/venv/Lib/site-packages/googleapiclient/__pycache__/model.cpython-36.pyc
new file mode 100644
index 000000000..8b1791e03
Binary files /dev/null and b/venv/Lib/site-packages/googleapiclient/__pycache__/model.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/googleapiclient/__pycache__/sample_tools.cpython-36.pyc b/venv/Lib/site-packages/googleapiclient/__pycache__/sample_tools.cpython-36.pyc
new file mode 100644
index 000000000..ee38d07b3
Binary files /dev/null and b/venv/Lib/site-packages/googleapiclient/__pycache__/sample_tools.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/googleapiclient/__pycache__/schema.cpython-36.pyc b/venv/Lib/site-packages/googleapiclient/__pycache__/schema.cpython-36.pyc
new file mode 100644
index 000000000..ceee5e9f3
Binary files /dev/null and b/venv/Lib/site-packages/googleapiclient/__pycache__/schema.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/googleapiclient/_auth.py b/venv/Lib/site-packages/googleapiclient/_auth.py
new file mode 100644
index 000000000..d045fc147
--- /dev/null
+++ b/venv/Lib/site-packages/googleapiclient/_auth.py
@@ -0,0 +1,162 @@
+# Copyright 2016 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for authentication using oauth2client or google-auth."""
+
+import httplib2
+
+try:
+ import google.auth
+ import google.auth.credentials
+
+ HAS_GOOGLE_AUTH = True
+except ImportError: # pragma: NO COVER
+ HAS_GOOGLE_AUTH = False
+
+try:
+ import google_auth_httplib2
+except ImportError: # pragma: NO COVER
+ google_auth_httplib2 = None
+
+try:
+ import oauth2client
+ import oauth2client.client
+
+ HAS_OAUTH2CLIENT = True
+except ImportError: # pragma: NO COVER
+ HAS_OAUTH2CLIENT = False
+
+
+def credentials_from_file(filename, scopes=None, quota_project_id=None):
+ """Returns credentials loaded from a file."""
+ if HAS_GOOGLE_AUTH:
+ credentials, _ = google.auth.load_credentials_from_file(filename, scopes=scopes, quota_project_id=quota_project_id)
+ return credentials
+ else:
+ raise EnvironmentError(
+ "client_options.credentials_file is only supported in google-auth.")
+
+
+def default_credentials(scopes=None, quota_project_id=None):
+ """Returns Application Default Credentials."""
+ if HAS_GOOGLE_AUTH:
+ credentials, _ = google.auth.default(scopes=scopes, quota_project_id=quota_project_id)
+ return credentials
+ elif HAS_OAUTH2CLIENT:
+ if scopes is not None or quota_project_id is not None:
+ raise EnvironmentError(
+            "client_options.scopes and client_options.quota_project_id "
+            "are not supported in oauth2client. Please install google-auth."
+ )
+ return oauth2client.client.GoogleCredentials.get_application_default()
+ else:
+ raise EnvironmentError(
+ "No authentication library is available. Please install either "
+ "google-auth or oauth2client."
+ )
+
+
+def with_scopes(credentials, scopes):
+ """Scopes the credentials if necessary.
+
+ Args:
+ credentials (Union[
+ google.auth.credentials.Credentials,
+ oauth2client.client.Credentials]): The credentials to scope.
+ scopes (Sequence[str]): The list of scopes.
+
+ Returns:
+ Union[google.auth.credentials.Credentials,
+ oauth2client.client.Credentials]: The scoped credentials.
+ """
+ if HAS_GOOGLE_AUTH and isinstance(credentials, google.auth.credentials.Credentials):
+ return google.auth.credentials.with_scopes_if_required(credentials, scopes)
+ else:
+ try:
+ if credentials.create_scoped_required():
+ return credentials.create_scoped(scopes)
+ else:
+ return credentials
+ except AttributeError:
+ return credentials
+
+
+def authorized_http(credentials):
+ """Returns an http client that is authorized with the given credentials.
+
+ Args:
+ credentials (Union[
+ google.auth.credentials.Credentials,
+ oauth2client.client.Credentials]): The credentials to use.
+
+ Returns:
+ Union[httplib2.Http, google_auth_httplib2.AuthorizedHttp]: An
+ authorized http client.
+ """
+ from googleapiclient.http import build_http
+
+ if HAS_GOOGLE_AUTH and isinstance(credentials, google.auth.credentials.Credentials):
+ if google_auth_httplib2 is None:
+ raise ValueError(
+ "Credentials from google.auth specified, but "
+ "google-api-python-client is unable to use these credentials "
+ "unless google-auth-httplib2 is installed. Please install "
+ "google-auth-httplib2."
+ )
+ return google_auth_httplib2.AuthorizedHttp(credentials, http=build_http())
+ else:
+ return credentials.authorize(build_http())
+
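+# A minimal usage sketch, assuming google-auth and google-auth-httplib2 are
+# installed: acquire Application Default Credentials, scope them, and wrap
+# them in an authorized http client. The scope URL here is only an example.
+#
+#   credentials = default_credentials()
+#   credentials = with_scopes(
+#       credentials, ["https://www.googleapis.com/auth/cloud-platform"])
+#   http = authorized_http(credentials)
+#   resp, content = http.request(
+#       "https://www.googleapis.com/discovery/v1/apis")
+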
+
+def refresh_credentials(credentials):
+ # Refresh must use a new http instance, as the one associated with the
+    # credentials could be an AuthorizedHttp or an oauth2client-decorated
+ # Http instance which would cause a weird recursive loop of refreshing
+ # and likely tear a hole in spacetime.
+ refresh_http = httplib2.Http()
+ if HAS_GOOGLE_AUTH and isinstance(credentials, google.auth.credentials.Credentials):
+ request = google_auth_httplib2.Request(refresh_http)
+ return credentials.refresh(request)
+ else:
+ return credentials.refresh(refresh_http)
+
+
+def apply_credentials(credentials, headers):
+ # oauth2client and google-auth have the same interface for this.
+ if not is_valid(credentials):
+ refresh_credentials(credentials)
+ return credentials.apply(headers)
+
+
+def is_valid(credentials):
+ if HAS_GOOGLE_AUTH and isinstance(credentials, google.auth.credentials.Credentials):
+ return credentials.valid
+ else:
+ return (
+ credentials.access_token is not None
+ and not credentials.access_token_expired
+ )
+
+
+def get_credentials_from_http(http):
+ if http is None:
+ return None
+ elif hasattr(http.request, "credentials"):
+ return http.request.credentials
+ elif hasattr(http, "credentials") and not isinstance(
+ http.credentials, httplib2.Credentials
+ ):
+ return http.credentials
+ else:
+ return None
diff --git a/venv/Lib/site-packages/googleapiclient/_helpers.py b/venv/Lib/site-packages/googleapiclient/_helpers.py
new file mode 100644
index 000000000..ddbd0e272
--- /dev/null
+++ b/venv/Lib/site-packages/googleapiclient/_helpers.py
@@ -0,0 +1,211 @@
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helper functions for commonly used utilities."""
+
+import functools
+import inspect
+import logging
+import warnings
+
+import six
+from six.moves import urllib
+
+
+logger = logging.getLogger(__name__)
+
+POSITIONAL_WARNING = "WARNING"
+POSITIONAL_EXCEPTION = "EXCEPTION"
+POSITIONAL_IGNORE = "IGNORE"
+POSITIONAL_SET = frozenset(
+ [POSITIONAL_WARNING, POSITIONAL_EXCEPTION, POSITIONAL_IGNORE]
+)
+
+positional_parameters_enforcement = POSITIONAL_WARNING
+
+_SYM_LINK_MESSAGE = "File: {0}: Is a symbolic link."
+_IS_DIR_MESSAGE = "{0}: Is a directory"
+_MISSING_FILE_MESSAGE = "Cannot access {0}: No such file or directory"
+
+
+def positional(max_positional_args):
+ """A decorator to declare that only the first N arguments may be positional.
+
+ This decorator makes it easy to support Python 3 style keyword-only
+ parameters. For example, in Python 3 it is possible to write::
+
+        def fn(pos1, *, kwonly1=None, kwonly2=None):
+ ...
+
+    All named parameters after ``*`` must be passed as keywords::
+
+ fn(10, 'kw1', 'kw2') # Raises exception.
+ fn(10, kwonly1='kw1') # Ok.
+
+ Example
+ ^^^^^^^
+
+ To define a function like above, do::
+
+ @positional(1)
+ def fn(pos1, kwonly1=None, kwonly2=None):
+ ...
+
+ If no default value is provided to a keyword argument, it becomes a
+ required keyword argument::
+
+ @positional(0)
+ def fn(required_kw):
+ ...
+
+ This must be called with the keyword parameter::
+
+ fn() # Raises exception.
+ fn(10) # Raises exception.
+ fn(required_kw=10) # Ok.
+
+ When defining instance or class methods always remember to account for
+ ``self`` and ``cls``::
+
+ class MyClass(object):
+
+ @positional(2)
+ def my_method(self, pos1, kwonly1=None):
+ ...
+
+ @classmethod
+ @positional(2)
+ def my_method(cls, pos1, kwonly1=None):
+ ...
+
+ The positional decorator behavior is controlled by
+ ``_helpers.positional_parameters_enforcement``, which may be set to
+ ``POSITIONAL_EXCEPTION``, ``POSITIONAL_WARNING`` or
+ ``POSITIONAL_IGNORE`` to raise an exception, log a warning, or do
+ nothing, respectively, if a declaration is violated.
+
+ Args:
+        max_positional_args: Maximum number of positional arguments. All
+                             parameters after this index must be
+                             keyword only.
+
+ Returns:
+ A decorator that prevents using arguments after max_positional_args
+ from being used as positional parameters.
+
+ Raises:
+        TypeError: if a keyword-only argument is provided as a positional
+ parameter, but only if
+ _helpers.positional_parameters_enforcement is set to
+ POSITIONAL_EXCEPTION.
+ """
+
+ def positional_decorator(wrapped):
+ @functools.wraps(wrapped)
+ def positional_wrapper(*args, **kwargs):
+ if len(args) > max_positional_args:
+ plural_s = ""
+ if max_positional_args != 1:
+ plural_s = "s"
+ message = (
+ "{function}() takes at most {args_max} positional "
+ "argument{plural} ({args_given} given)".format(
+ function=wrapped.__name__,
+ args_max=max_positional_args,
+ args_given=len(args),
+ plural=plural_s,
+ )
+ )
+ if positional_parameters_enforcement == POSITIONAL_EXCEPTION:
+ raise TypeError(message)
+ elif positional_parameters_enforcement == POSITIONAL_WARNING:
+ logger.warning(message)
+ return wrapped(*args, **kwargs)
+
+ return positional_wrapper
+
+ if isinstance(max_positional_args, six.integer_types):
+ return positional_decorator
+ else:
+ args, _, _, defaults = inspect.getargspec(max_positional_args)
+ return positional(len(args) - len(defaults))(max_positional_args)
+
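+# A short sketch of the enforcement behavior described above, using the
+# module-level names defined here:
+#
+#   @positional(1)
+#   def fn(pos1, kwonly1=None):
+#       return pos1, kwonly1
+#
+#   fn(10, kwonly1="kw")  # Ok.
+#   fn(10, "kw")          # Logs a warning under POSITIONAL_WARNING (the
+#                         # default), raises TypeError under
+#                         # POSITIONAL_EXCEPTION, and is ignored under
+#                         # POSITIONAL_IGNORE.
+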
+
+def parse_unique_urlencoded(content):
+ """Parses unique key-value parameters from urlencoded content.
+
+ Args:
+ content: string, URL-encoded key-value pairs.
+
+ Returns:
+ dict, The key-value pairs from ``content``.
+
+ Raises:
+ ValueError: if one of the keys is repeated.
+ """
+ urlencoded_params = urllib.parse.parse_qs(content)
+ params = {}
+ for key, value in six.iteritems(urlencoded_params):
+ if len(value) != 1:
+            msg = "URL-encoded content contains a repeated value: %s -> %s" % (
+ key,
+ ", ".join(value),
+ )
+ raise ValueError(msg)
+ params[key] = value[0]
+ return params
+
+
+def update_query_params(uri, params):
+ """Updates a URI with new query parameters.
+
+ If a given key from ``params`` is repeated in the ``uri``, then
+ the URI will be considered invalid and an error will occur.
+
+ If the URI is valid, then each value from ``params`` will
+ replace the corresponding value in the query parameters (if
+ it exists).
+
+ Args:
+ uri: string, A valid URI, with potential existing query parameters.
+ params: dict, A dictionary of query parameters.
+
+ Returns:
+ The same URI but with the new query parameters added.
+ """
+ parts = urllib.parse.urlparse(uri)
+ query_params = parse_unique_urlencoded(parts.query)
+ query_params.update(params)
+ new_query = urllib.parse.urlencode(query_params)
+ new_parts = parts._replace(query=new_query)
+ return urllib.parse.urlunparse(new_parts)
+
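+# For example, assuming a URI with unique query keys (a sketch):
+#
+#   update_query_params("http://example.com/path?a=1", {"b": "2"})
+#   # -> "http://example.com/path?a=1&b=2" (parameter order may vary)
+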
+
+def _add_query_parameter(url, name, value):
+ """Adds a query parameter to a url.
+
+ Replaces the current value if it already exists in the URL.
+
+ Args:
+ url: string, url to add the query parameter to.
+ name: string, query parameter name.
+ value: string, query parameter value.
+
+ Returns:
+ Updated query parameter. Does not update the url if value is None.
+ """
+ if value is None:
+ return url
+ else:
+ return update_query_params(url, {name: value})
diff --git a/venv/Lib/site-packages/googleapiclient/channel.py b/venv/Lib/site-packages/googleapiclient/channel.py
new file mode 100644
index 000000000..efff0f657
--- /dev/null
+++ b/venv/Lib/site-packages/googleapiclient/channel.py
@@ -0,0 +1,317 @@
+# Copyright 2014 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Channel notifications support.
+
+Classes and functions to support channel subscriptions and notifications
+on those channels.
+
+Notes:
+ - This code is based on experimental APIs and is subject to change.
+  - Notification does not deduplicate notification ids; that's up to
+    the receiver.
+ - Storing the Channel between calls is up to the caller.
+
+
+Example setting up a channel:
+
+ # Create a new channel that gets notifications via webhook.
+ channel = new_webhook_channel("https://example.com/my_web_hook")
+
+ # Store the channel, keyed by 'channel.id'. Store it before calling the
+ # watch method because notifications may start arriving before the watch
+ # method returns.
+ ...
+
+ resp = service.objects().watchAll(
+ bucket="some_bucket_id", body=channel.body()).execute()
+ channel.update(resp)
+
+ # Store the channel, keyed by 'channel.id'. Store it after being updated
+ # since the resource_id value will now be correct, and that's needed to
+ # stop a subscription.
+ ...
+
+
+An example Webhook implementation using webapp2. Note that webapp2 puts
+headers in a case-insensitive dictionary, since header names aren't
+guaranteed to always be upper case.
+
+ id = self.request.headers[X_GOOG_CHANNEL_ID]
+
+ # Retrieve the channel by id.
+ channel = ...
+
+ # Parse notification from the headers, including validating the id.
+ n = notification_from_headers(channel, self.request.headers)
+
+ # Do app specific stuff with the notification here.
+ if n.resource_state == 'sync':
+ # Code to handle sync state.
+ elif n.resource_state == 'exists':
+ # Code to handle the exists state.
+ elif n.resource_state == 'not_exists':
+ # Code to handle the not exists state.
+
+
+Example of unsubscribing.
+
+ service.channels().stop(channel.body()).execute()
+"""
+from __future__ import absolute_import
+
+import datetime
+import uuid
+
+from googleapiclient import errors
+from googleapiclient import _helpers as util
+import six
+
+
+# The unix time epoch starts at midnight 1970.
+EPOCH = datetime.datetime.utcfromtimestamp(0)
+
+# Map the names of the parameters in the JSON channel description to
+# the parameter names we use in the Channel class.
+CHANNEL_PARAMS = {
+ "address": "address",
+ "id": "id",
+ "expiration": "expiration",
+ "params": "params",
+ "resourceId": "resource_id",
+ "resourceUri": "resource_uri",
+ "type": "type",
+ "token": "token",
+}
+
+X_GOOG_CHANNEL_ID = "X-GOOG-CHANNEL-ID"
+X_GOOG_MESSAGE_NUMBER = "X-GOOG-MESSAGE-NUMBER"
+X_GOOG_RESOURCE_STATE = "X-GOOG-RESOURCE-STATE"
+X_GOOG_RESOURCE_URI = "X-GOOG-RESOURCE-URI"
+X_GOOG_RESOURCE_ID = "X-GOOG-RESOURCE-ID"
+
+
+def _upper_header_keys(headers):
+ new_headers = {}
+ for k, v in six.iteritems(headers):
+ new_headers[k.upper()] = v
+ return new_headers
+
+
+class Notification(object):
+ """A Notification from a Channel.
+
+ Notifications are not usually constructed directly, but are returned
+ from functions like notification_from_headers().
+
+ Attributes:
+ message_number: int, The unique id number of this notification.
+ state: str, The state of the resource being monitored.
+ uri: str, The address of the resource being monitored.
+ resource_id: str, The unique identifier of the version of the resource at
+ this event.
+ """
+
+ @util.positional(5)
+ def __init__(self, message_number, state, resource_uri, resource_id):
+ """Notification constructor.
+
+ Args:
+ message_number: int, The unique id number of this notification.
+ state: str, The state of the resource being monitored. Can be one
+ of "exists", "not_exists", or "sync".
+ resource_uri: str, The address of the resource being monitored.
+ resource_id: str, The identifier of the watched resource.
+ """
+ self.message_number = message_number
+ self.state = state
+ self.resource_uri = resource_uri
+ self.resource_id = resource_id
+
+
+class Channel(object):
+ """A Channel for notifications.
+
+ Usually not constructed directly, instead it is returned from helper
+ functions like new_webhook_channel().
+
+ Attributes:
+ type: str, The type of delivery mechanism used by this channel. For
+ example, 'web_hook'.
+ id: str, A UUID for the channel.
+ token: str, An arbitrary string associated with the channel that
+ is delivered to the target address with each event delivered
+ over this channel.
+ address: str, The address of the receiving entity where events are
+ delivered. Specific to the channel type.
+ expiration: int, The time, in milliseconds from the epoch, when this
+ channel will expire.
+ params: dict, A dictionary of string to string, with additional parameters
+ controlling delivery channel behavior.
+ resource_id: str, An opaque id that identifies the resource that is
+ being watched. Stable across different API versions.
+ resource_uri: str, The canonicalized ID of the watched resource.
+ """
+
+ @util.positional(5)
+ def __init__(
+ self,
+ type,
+ id,
+ token,
+ address,
+ expiration=None,
+ params=None,
+ resource_id="",
+ resource_uri="",
+ ):
+ """Create a new Channel.
+
+ In user code, this Channel constructor will not typically be called
+ manually since there are functions for creating channels for each specific
+ type with a more customized set of arguments to pass.
+
+ Args:
+ type: str, The type of delivery mechanism used by this channel. For
+ example, 'web_hook'.
+ id: str, A UUID for the channel.
+ token: str, An arbitrary string associated with the channel that
+ is delivered to the target address with each event delivered
+ over this channel.
+ address: str, The address of the receiving entity where events are
+ delivered. Specific to the channel type.
+ expiration: int, The time, in milliseconds from the epoch, when this
+ channel will expire.
+ params: dict, A dictionary of string to string, with additional parameters
+ controlling delivery channel behavior.
+ resource_id: str, An opaque id that identifies the resource that is
+ being watched. Stable across different API versions.
+ resource_uri: str, The canonicalized ID of the watched resource.
+ """
+ self.type = type
+ self.id = id
+ self.token = token
+ self.address = address
+ self.expiration = expiration
+ self.params = params
+ self.resource_id = resource_id
+ self.resource_uri = resource_uri
+
+ def body(self):
+ """Build a body from the Channel.
+
+ Constructs a dictionary that's appropriate for passing into watch()
+        methods as the value of the body argument.
+
+ Returns:
+ A dictionary representation of the channel.
+ """
+ result = {
+ "id": self.id,
+ "token": self.token,
+ "type": self.type,
+ "address": self.address,
+ }
+ if self.params:
+ result["params"] = self.params
+ if self.resource_id:
+ result["resourceId"] = self.resource_id
+ if self.resource_uri:
+ result["resourceUri"] = self.resource_uri
+ if self.expiration:
+ result["expiration"] = self.expiration
+
+ return result
+
+ def update(self, resp):
+ """Update a channel with information from the response of watch().
+
+ When a request is sent to watch() a resource, the response returned
+ from the watch() request is a dictionary with updated channel information,
+ such as the resource_id, which is needed when stopping a subscription.
+
+ Args:
+ resp: dict, The response from a watch() method.
+ """
+ for json_name, param_name in six.iteritems(CHANNEL_PARAMS):
+ value = resp.get(json_name)
+ if value is not None:
+ setattr(self, param_name, value)
+
+
+def notification_from_headers(channel, headers):
+ """Parse a notification from the webhook request headers, validate
+ the notification, and return a Notification object.
+
+ Args:
+ channel: Channel, The channel that the notification is associated with.
+      headers: dict, A dictionary-like object that contains the request headers
+ from the webhook HTTP request.
+
+ Returns:
+ A Notification object.
+
+ Raises:
+ errors.InvalidNotificationError if the notification is invalid.
+ ValueError if the X-GOOG-MESSAGE-NUMBER can't be converted to an int.
+ """
+ headers = _upper_header_keys(headers)
+ channel_id = headers[X_GOOG_CHANNEL_ID]
+ if channel.id != channel_id:
+ raise errors.InvalidNotificationError(
+ "Channel id mismatch: %s != %s" % (channel.id, channel_id)
+ )
+ else:
+ message_number = int(headers[X_GOOG_MESSAGE_NUMBER])
+ state = headers[X_GOOG_RESOURCE_STATE]
+ resource_uri = headers[X_GOOG_RESOURCE_URI]
+ resource_id = headers[X_GOOG_RESOURCE_ID]
+ return Notification(message_number, state, resource_uri, resource_id)
+
+
+@util.positional(2)
+def new_webhook_channel(url, token=None, expiration=None, params=None):
+ """Create a new webhook Channel.
+
+ Args:
+ url: str, URL to post notifications to.
+ token: str, An arbitrary string associated with the channel that
+ is delivered to the target address with each notification delivered
+ over this channel.
+ expiration: datetime.datetime, A time in the future when the channel
+ should expire. Can also be None if the subscription should use the
+ default expiration. Note that different services may have different
+ limits on how long a subscription lasts. Check the response from the
+ watch() method to see the value the service has set for an expiration
+ time.
+ params: dict, Extra parameters to pass on channel creation. Currently
+ not used for webhook channels.
+ """
+ expiration_ms = 0
+ if expiration:
+ delta = expiration - EPOCH
+ expiration_ms = (
+ delta.microseconds / 1000 + (delta.seconds + delta.days * 24 * 3600) * 1000
+ )
+ if expiration_ms < 0:
+ expiration_ms = 0
+
+ return Channel(
+ "web_hook",
+ str(uuid.uuid4()),
+ token,
+ url,
+ expiration=expiration_ms,
+ params=params,
+ )
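+
+
+# A short sketch of the expiration handling above: a channel that should
+# expire one hour from now is converted to absolute milliseconds since the
+# Unix epoch before being sent to the service.
+#
+#   expiration = datetime.datetime.utcnow() + datetime.timedelta(hours=1)
+#   channel = new_webhook_channel(
+#       "https://example.com/my_web_hook", expiration=expiration)
+#   # channel.body()["expiration"] is the expiry time in milliseconds
+#   # since the Unix epoch.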
diff --git a/venv/Lib/site-packages/googleapiclient/discovery.py b/venv/Lib/site-packages/googleapiclient/discovery.py
new file mode 100644
index 000000000..6363809f1
--- /dev/null
+++ b/venv/Lib/site-packages/googleapiclient/discovery.py
@@ -0,0 +1,1488 @@
+# Copyright 2014 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Client for discovery based APIs.
+
+A client library for Google's discovery based APIs.
+"""
+from __future__ import absolute_import
+import six
+from six.moves import zip
+
+__author__ = "jcgregorio@google.com (Joe Gregorio)"
+__all__ = ["build", "build_from_document", "fix_method_name", "key2param"]
+
+from six import BytesIO
+from six.moves import http_client
+from six.moves.urllib.parse import urlencode, urlparse, urljoin, urlunparse, parse_qsl
+
+# Standard library imports
+import copy
+from collections import OrderedDict
+
+try:
+ from email.generator import BytesGenerator
+except ImportError:
+ from email.generator import Generator as BytesGenerator
+from email.mime.multipart import MIMEMultipart
+from email.mime.nonmultipart import MIMENonMultipart
+import json
+import keyword
+import logging
+import mimetypes
+import os
+import re
+
+# Third-party imports
+import httplib2
+import uritemplate
+import google.api_core.client_options
+from google.auth.transport import mtls
+from google.auth.exceptions import MutualTLSChannelError
+
+try:
+ import google_auth_httplib2
+except ImportError: # pragma: NO COVER
+ google_auth_httplib2 = None
+
+# Local imports
+from googleapiclient import _auth
+from googleapiclient import mimeparse
+from googleapiclient.errors import HttpError
+from googleapiclient.errors import InvalidJsonError
+from googleapiclient.errors import MediaUploadSizeError
+from googleapiclient.errors import UnacceptableMimeTypeError
+from googleapiclient.errors import UnknownApiNameOrVersion
+from googleapiclient.errors import UnknownFileType
+from googleapiclient.http import build_http
+from googleapiclient.http import BatchHttpRequest
+from googleapiclient.http import HttpMock
+from googleapiclient.http import HttpMockSequence
+from googleapiclient.http import HttpRequest
+from googleapiclient.http import MediaFileUpload
+from googleapiclient.http import MediaUpload
+from googleapiclient.model import JsonModel
+from googleapiclient.model import MediaModel
+from googleapiclient.model import RawModel
+from googleapiclient.schema import Schemas
+
+from googleapiclient._helpers import _add_query_parameter
+from googleapiclient._helpers import positional
+
+
+# The client library requires a version of httplib2 that supports RETRIES.
+httplib2.RETRIES = 1
+
+logger = logging.getLogger(__name__)
+
+URITEMPLATE = re.compile("{[^}]*}")
+VARNAME = re.compile("[a-zA-Z0-9_-]+")
+DISCOVERY_URI = (
+ "https://www.googleapis.com/discovery/v1/apis/" "{api}/{apiVersion}/rest"
+)
+V1_DISCOVERY_URI = DISCOVERY_URI
+V2_DISCOVERY_URI = (
+ "https://{api}.googleapis.com/$discovery/rest?" "version={apiVersion}"
+)
+DEFAULT_METHOD_DOC = "A description of how to use this function"
+HTTP_PAYLOAD_METHODS = frozenset(["PUT", "POST", "PATCH"])
+
+_MEDIA_SIZE_BIT_SHIFTS = {"KB": 10, "MB": 20, "GB": 30, "TB": 40}
+BODY_PARAMETER_DEFAULT_VALUE = {"description": "The request body.", "type": "object"}
+MEDIA_BODY_PARAMETER_DEFAULT_VALUE = {
+ "description": (
+ "The filename of the media request body, or an instance "
+ "of a MediaUpload object."
+ ),
+ "type": "string",
+ "required": False,
+}
+MEDIA_MIME_TYPE_PARAMETER_DEFAULT_VALUE = {
+ "description": (
+ "The MIME type of the media request body, or an instance "
+ "of a MediaUpload object."
+ ),
+ "type": "string",
+ "required": False,
+}
+_PAGE_TOKEN_NAMES = ("pageToken", "nextPageToken")
+
+# Parameters controlling mTLS behavior. See https://google.aip.dev/auth/4114.
+GOOGLE_API_USE_CLIENT_CERTIFICATE = "GOOGLE_API_USE_CLIENT_CERTIFICATE"
+GOOGLE_API_USE_MTLS_ENDPOINT = "GOOGLE_API_USE_MTLS_ENDPOINT"
+
+# Parameters accepted by the stack, but not visible via discovery.
+# TODO(dhermes): Remove 'userip' in 'v2'.
+STACK_QUERY_PARAMETERS = frozenset(["trace", "pp", "userip", "strict"])
+STACK_QUERY_PARAMETER_DEFAULT_VALUE = {"type": "string", "location": "query"}
+
+# Library-specific reserved words beyond Python keywords.
+RESERVED_WORDS = frozenset(["body"])
+
+# patch _write_lines to avoid munging '\r' into '\n'
+# ( https://bugs.python.org/issue18886 https://bugs.python.org/issue19003 )
+class _BytesGenerator(BytesGenerator):
+ _write_lines = BytesGenerator.write
+
+
+def fix_method_name(name):
+ """Fix method names to avoid '$' characters and reserved word conflicts.
+
+ Args:
+ name: string, method name.
+
+ Returns:
+ The name with '_' appended if the name is a reserved word and '$' and '-'
+ replaced with '_'.
+ """
+ name = name.replace("$", "_").replace("-", "_")
+ if keyword.iskeyword(name) or name in RESERVED_WORDS:
+ return name + "_"
+ else:
+ return name
+
+
+def key2param(key):
+ """Converts key names into parameter names.
+
+ For example, converting "max-results" -> "max_results"
+
+ Args:
+ key: string, the method key name.
+
+ Returns:
+ A safe method name based on the key name.
+ """
+ result = []
+ key = list(key)
+ if not key[0].isalpha():
+ result.append("x")
+ for c in key:
+ if c.isalnum():
+ result.append(c)
+ else:
+ result.append("_")
+
+ return "".join(result)
+
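+# A few illustrative inputs for the two name-mangling helpers above:
+#
+#   fix_method_name("import")       # -> "import_" (Python keyword)
+#   fix_method_name("body")         # -> "body_" (library reserved word)
+#   fix_method_name("foo$bar-baz")  # -> "foo_bar_baz"
+#   key2param("max-results")        # -> "max_results"
+#   key2param("123abc")             # -> "x123abc" (non-alphabetic start)
+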
+
+@positional(2)
+def build(
+ serviceName,
+ version,
+ http=None,
+ discoveryServiceUrl=DISCOVERY_URI,
+ developerKey=None,
+ model=None,
+ requestBuilder=HttpRequest,
+ credentials=None,
+ cache_discovery=True,
+ cache=None,
+ client_options=None,
+ adc_cert_path=None,
+ adc_key_path=None,
+ num_retries=1,
+):
+ """Construct a Resource for interacting with an API.
+
+ Construct a Resource object for interacting with an API. The serviceName and
+ version are the names from the Discovery service.
+
+ Args:
+ serviceName: string, name of the service.
+ version: string, the version of the service.
+ http: httplib2.Http, An instance of httplib2.Http or something that acts
+      like it through which HTTP requests will be made.
+ discoveryServiceUrl: string, a URI Template that points to the location of
+ the discovery service. It should have two parameters {api} and
+ {apiVersion} that when filled in produce an absolute URI to the discovery
+ document for that service.
+ developerKey: string, key obtained from
+ https://code.google.com/apis/console.
+ model: googleapiclient.Model, converts to and from the wire format.
+ requestBuilder: googleapiclient.http.HttpRequest, encapsulator for an HTTP
+ request.
+ credentials: oauth2client.Credentials or
+ google.auth.credentials.Credentials, credentials to be used for
+ authentication.
+ cache_discovery: Boolean, whether or not to cache the discovery doc.
+ cache: googleapiclient.discovery_cache.base.CacheBase, an optional
+ cache object for the discovery documents.
+ client_options: Mapping object or google.api_core.client_options, client
+ options to set user options on the client.
+ (1) The API endpoint should be set through client_options. If API endpoint
+ is not set, `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable can be used
+ to control which endpoint to use.
+ (2) client_cert_source is not supported, client cert should be provided using
+ client_encrypted_cert_source instead. In order to use the provided client
+ cert, `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable must be
+ set to `true`.
+ More details on the environment variables are here:
+ https://google.aip.dev/auth/4114
+ adc_cert_path: str, client certificate file path to save the application
+ default client certificate for mTLS. This field is required if you want to
+ use the default client certificate. `GOOGLE_API_USE_CLIENT_CERTIFICATE`
+ environment variable must be set to `true` in order to use this field,
+      otherwise this field does nothing.
+ More details on the environment variables are here:
+ https://google.aip.dev/auth/4114
+ adc_key_path: str, client encrypted private key file path to save the
+ application default client encrypted private key for mTLS. This field is
+ required if you want to use the default client certificate.
+ `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable must be set to
+ `true` in order to use this field, otherwise this field doesn't nothing.
+ More details on the environment variables are here:
+ https://google.aip.dev/auth/4114
+ num_retries: Integer, number of times to retry discovery with
+ randomized exponential backoff in case of intermittent/connection issues.
+
+ Returns:
+ A Resource object with methods for interacting with the service.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: if there are any problems
+ setting up mutual TLS channel.
+ """
+ params = {"api": serviceName, "apiVersion": version}
+
+ if http is None:
+ discovery_http = build_http()
+ else:
+ discovery_http = http
+
+ service = None
+
+ for discovery_url in _discovery_service_uri_options(discoveryServiceUrl, version):
+ requested_url = uritemplate.expand(discovery_url, params)
+
+ try:
+ content = _retrieve_discovery_doc(
+ requested_url,
+ discovery_http,
+ cache_discovery,
+ cache,
+ developerKey,
+ num_retries=num_retries,
+ )
+ service = build_from_document(
+ content,
+ base=discovery_url,
+ http=http,
+ developerKey=developerKey,
+ model=model,
+ requestBuilder=requestBuilder,
+ credentials=credentials,
+ client_options=client_options,
+ adc_cert_path=adc_cert_path,
+ adc_key_path=adc_key_path,
+ )
+ break # exit if a service was created
+ except HttpError as e:
+ if e.resp.status == http_client.NOT_FOUND:
+ continue
+ else:
+ raise e
+
+ # If discovery_http was created by this function, we are done with it
+ # and can safely close it
+ if http is None:
+ discovery_http.close()
+
+ if service is None:
+ raise UnknownApiNameOrVersion("name: %s version: %s" % (serviceName, version))
+ else:
+ return service
+
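+# A typical call, assuming Application Default Credentials are available in
+# the environment; the service name, version, and method chain below are
+# examples only:
+#
+#   service = build("drive", "v3")
+#   files = service.files().list(pageSize=10).execute()
+#
+# Because of @positional(2), every argument after serviceName and version
+# must be passed by keyword.
+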
+
+def _discovery_service_uri_options(discoveryServiceUrl, version):
+ """
+    Returns Discovery URIs to be used for attempting to build the API Resource.
+
+ Args:
+ discoveryServiceUrl:
+        string, the original Discovery Service URL preferred by the customer.
+ version:
+ string, API Version requested
+
+ Returns:
+ A list of URIs to be tried for the Service Discovery, in order.
+ """
+
+ urls = [discoveryServiceUrl, V2_DISCOVERY_URI]
+ # V1 Discovery won't work if the requested version is None
+ if discoveryServiceUrl == V1_DISCOVERY_URI and version is None:
+ logger.warning(
+ "Discovery V1 does not support empty versions. Defaulting to V2..."
+ )
+ urls.pop(0)
+ return list(OrderedDict.fromkeys(urls))
+
+
+def _retrieve_discovery_doc(
+ url, http, cache_discovery, cache=None, developerKey=None, num_retries=1
+):
+ """Retrieves the discovery_doc from cache or the internet.
+
+ Args:
+ url: string, the URL of the discovery document.
+ http: httplib2.Http, An instance of httplib2.Http or something that acts
+ like it through which HTTP requests will be made.
+ cache_discovery: Boolean, whether or not to cache the discovery doc.
+ cache: googleapiclient.discovery_cache.base.Cache, an optional cache
+ object for the discovery documents.
+ developerKey: string, Key for controlling API usage, generated
+ from the API Console.
+ num_retries: Integer, number of times to retry discovery with
+ randomized exponential backoff in case of intermittent/connection issues.
+
+ Returns:
+ A unicode string representation of the discovery document.
+ """
+ if cache_discovery:
+ from . import discovery_cache
+
+ if cache is None:
+ cache = discovery_cache.autodetect()
+ if cache:
+ content = cache.get(url)
+ if content:
+ return content
+
+ actual_url = url
+ # REMOTE_ADDR is defined by the CGI spec [RFC3875] as the environment
+ # variable that contains the network address of the client sending the
+ # request. If it exists then add that to the request for the discovery
+ # document to avoid exceeding the quota on discovery requests.
+ if "REMOTE_ADDR" in os.environ:
+ actual_url = _add_query_parameter(url, "userIp", os.environ["REMOTE_ADDR"])
+ if developerKey:
+ actual_url = _add_query_parameter(url, "key", developerKey)
+ logger.debug("URL being requested: GET %s", actual_url)
+
+    # Execute this request with retries built into HttpRequest
+ # Note that it will already raise an error if we don't get a 2xx response
+ req = HttpRequest(http, HttpRequest.null_postproc, actual_url)
+ resp, content = req.execute(num_retries=num_retries)
+
+ try:
+ content = content.decode("utf-8")
+ except AttributeError:
+ pass
+
+ try:
+ service = json.loads(content)
+ except ValueError as e:
+ logger.error("Failed to parse as JSON: " + content)
+ raise InvalidJsonError()
+ if cache_discovery and cache:
+ cache.set(url, content)
+ return content
+
+
+@positional(1)
+def build_from_document(
+ service,
+ base=None,
+ future=None,
+ http=None,
+ developerKey=None,
+ model=None,
+ requestBuilder=HttpRequest,
+ credentials=None,
+ client_options=None,
+ adc_cert_path=None,
+ adc_key_path=None,
+):
+ """Create a Resource for interacting with an API.
+
+ Same as `build()`, but constructs the Resource object from a discovery
+    document that it is given, as opposed to retrieving one over HTTP.
+
+ Args:
+ service: string or object, the JSON discovery document describing the API.
+ The value passed in may either be the JSON string or the deserialized
+ JSON.
+ base: string, base URI for all HTTP requests, usually the discovery URI.
+ This parameter is no longer used as rootUrl and servicePath are included
+ within the discovery document. (deprecated)
+ future: string, discovery document with future capabilities (deprecated).
+ http: httplib2.Http, An instance of httplib2.Http or something that acts
+      like it through which HTTP requests will be made.
+ developerKey: string, Key for controlling API usage, generated
+ from the API Console.
+ model: Model class instance that serializes and de-serializes requests and
+ responses.
+ requestBuilder: Takes an http request and packages it up to be executed.
+ credentials: oauth2client.Credentials or
+ google.auth.credentials.Credentials, credentials to be used for
+ authentication.
+ client_options: Mapping object or google.api_core.client_options, client
+ options to set user options on the client.
+ (1) The API endpoint should be set through client_options. If API endpoint
+ is not set, `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable can be used
+ to control which endpoint to use.
+ (2) client_cert_source is not supported, client cert should be provided using
+ client_encrypted_cert_source instead. In order to use the provided client
+ cert, `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable must be
+ set to `true`.
+ More details on the environment variables are here:
+ https://google.aip.dev/auth/4114
+ adc_cert_path: str, client certificate file path to save the application
+ default client certificate for mTLS. This field is required if you want to
+ use the default client certificate. `GOOGLE_API_USE_CLIENT_CERTIFICATE`
+ environment variable must be set to `true` in order to use this field,
+      otherwise this field does nothing.
+ More details on the environment variables are here:
+ https://google.aip.dev/auth/4114
+ adc_key_path: str, client encrypted private key file path to save the
+ application default client encrypted private key for mTLS. This field is
+ required if you want to use the default client certificate.
+ `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable must be set to
+      `true` in order to use this field, otherwise this field does nothing.
+ More details on the environment variables are here:
+ https://google.aip.dev/auth/4114
+
+ Returns:
+ A Resource object with methods for interacting with the service.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: if there are any problems
+ setting up mutual TLS channel.
+ """
+
+ if client_options is None:
+ client_options = google.api_core.client_options.ClientOptions()
+ if isinstance(client_options, six.moves.collections_abc.Mapping):
+ client_options = google.api_core.client_options.from_dict(client_options)
+
+ if http is not None:
+ # if http is passed, the user cannot provide credentials
+ banned_options = [
+ (credentials, "credentials"),
+ (client_options.credentials_file, "client_options.credentials_file"),
+ ]
+ for option, name in banned_options:
+ if option is not None:
+ raise ValueError("Arguments http and {} are mutually exclusive".format(name))
+
+ if isinstance(service, six.string_types):
+ service = json.loads(service)
+ elif isinstance(service, six.binary_type):
+ service = json.loads(service.decode("utf-8"))
+
+ if "rootUrl" not in service and isinstance(http, (HttpMock, HttpMockSequence)):
+ logger.error(
+ "You are using HttpMock or HttpMockSequence without"
+ + "having the service discovery doc in cache. Try calling "
+ + "build() without mocking once first to populate the "
+ + "cache."
+ )
+ raise InvalidJsonError()
+
+ # If an API Endpoint is provided on client options, use that as the base URL
+ base = urljoin(service["rootUrl"], service["servicePath"])
+ if client_options.api_endpoint:
+ base = client_options.api_endpoint
+
+ schema = Schemas(service)
+
+ # If the http client is not specified, then we must construct an http client
+    # to make requests. If the service has scopes, then we also need to set up
+    # authentication.
+ if http is None:
+ # Does the service require scopes?
+ scopes = list(
+ service.get("auth", {}).get("oauth2", {}).get("scopes", {}).keys()
+ )
+
+        # If so, then we need to set up authentication if no developerKey is
+ # specified.
+ if scopes and not developerKey:
+ # Make sure the user didn't pass multiple credentials
+ if client_options.credentials_file and credentials:
+ raise google.api_core.exceptions.DuplicateCredentialArgs(
+ "client_options.credentials_file and credentials are mutually exclusive."
+ )
+ # Check for credentials file via client options
+ if client_options.credentials_file:
+ credentials = _auth.credentials_from_file(
+ client_options.credentials_file,
+ scopes=client_options.scopes,
+ quota_project_id=client_options.quota_project_id,
+ )
+ # If the user didn't pass in credentials, attempt to acquire application
+ # default credentials.
+ if credentials is None:
+ credentials = _auth.default_credentials(
+ scopes=client_options.scopes,
+ quota_project_id=client_options.quota_project_id,
+ )
+
+ # The credentials need to be scoped.
+ # If the user provided scopes via client_options don't override them
+ if not client_options.scopes:
+ credentials = _auth.with_scopes(credentials, scopes)
+
+ # If credentials are provided, create an authorized http instance;
+ # otherwise, skip authentication.
+ if credentials:
+ http = _auth.authorized_http(credentials)
+
+ # If the service doesn't require scopes then there is no need for
+ # authentication.
+ else:
+ http = build_http()
+
+ # Obtain client cert and create mTLS http channel if cert exists.
+ client_cert_to_use = None
+ use_client_cert = os.getenv(GOOGLE_API_USE_CLIENT_CERTIFICATE, "false")
+    if use_client_cert not in ("true", "false"):
+ raise MutualTLSChannelError(
+ "Unsupported GOOGLE_API_USE_CLIENT_CERTIFICATE value. Accepted values: true, false"
+ )
+ if client_options and client_options.client_cert_source:
+ raise MutualTLSChannelError(
+ "ClientOptions.client_cert_source is not supported, please use ClientOptions.client_encrypted_cert_source."
+ )
+ if use_client_cert == "true":
+ if (
+ client_options
+ and hasattr(client_options, "client_encrypted_cert_source")
+ and client_options.client_encrypted_cert_source
+ ):
+ client_cert_to_use = client_options.client_encrypted_cert_source
+ elif (
+ adc_cert_path and adc_key_path and mtls.has_default_client_cert_source()
+ ):
+ client_cert_to_use = mtls.default_client_encrypted_cert_source(
+ adc_cert_path, adc_key_path
+ )
+ if client_cert_to_use:
+ cert_path, key_path, passphrase = client_cert_to_use()
+
+ # The http object we built could be google_auth_httplib2.AuthorizedHttp
+ # or httplib2.Http. In the first case we need to extract the wrapped
+ # httplib2.Http object from google_auth_httplib2.AuthorizedHttp.
+ http_channel = (
+ http.http
+ if google_auth_httplib2
+ and isinstance(http, google_auth_httplib2.AuthorizedHttp)
+ else http
+ )
+ http_channel.add_certificate(key_path, cert_path, "", passphrase)
+
+    # If the user doesn't provide an api endpoint via client options, decide
+    # which api endpoint to use.
+ if "mtlsRootUrl" in service and (
+ not client_options or not client_options.api_endpoint
+ ):
+ mtls_endpoint = urljoin(service["mtlsRootUrl"], service["servicePath"])
+ use_mtls_endpoint = os.getenv(GOOGLE_API_USE_MTLS_ENDPOINT, "auto")
+
+        if use_mtls_endpoint not in ("never", "auto", "always"):
+ raise MutualTLSChannelError(
+ "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
+ )
+
+        # Switch to the mTLS endpoint if the environment variable is "always",
+        # or if it is "auto" and a client cert exists.
+ if use_mtls_endpoint == "always" or (
+ use_mtls_endpoint == "auto" and client_cert_to_use
+ ):
+ base = mtls_endpoint
+
+ if model is None:
+ features = service.get("features", [])
+ model = JsonModel("dataWrapper" in features)
+
+ return Resource(
+ http=http,
+ baseUrl=base,
+ model=model,
+ developerKey=developerKey,
+ requestBuilder=requestBuilder,
+ resourceDesc=service,
+ rootDesc=service,
+ schema=schema,
+ )
+
+
+def _cast(value, schema_type):
+ """Convert value to a string based on JSON Schema type.
+
+ See http://tools.ietf.org/html/draft-zyp-json-schema-03 for more details on
+ JSON Schema.
+
+ Args:
+ value: any, the value to convert
+ schema_type: string, the type that value should be interpreted as
+
+ Returns:
+ A string representation of 'value' based on the schema_type.
+ """
+ if schema_type == "string":
+        if isinstance(value, six.string_types):
+            return value
+        else:
+            return str(value)
+ elif schema_type == "integer":
+ return str(int(value))
+ elif schema_type == "number":
+ return str(float(value))
+ elif schema_type == "boolean":
+ return str(bool(value)).lower()
+ else:
+        if isinstance(value, six.string_types):
+            return value
+        else:
+            return str(value)
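+
+# Editor's sketch (not part of the upstream module): how _cast maps typed
+# values onto the wire-format strings that query and path expansion expect:
+#
+#   _cast(42, "string")    -> "42"
+#   _cast("7", "integer")  -> "7"
+#   _cast(2.5, "number")   -> "2.5"
+#   _cast(True, "boolean") -> "true"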
+
+
+def _media_size_to_long(maxSize):
+ """Convert a string media size, such as 10GB or 3TB into an integer.
+
+ Args:
+ maxSize: string, size as a string, such as 2MB or 7GB.
+
+ Returns:
+ The size as an integer value.
+ """
+ if len(maxSize) < 2:
+ return 0
+ units = maxSize[-2:].upper()
+ bit_shift = _MEDIA_SIZE_BIT_SHIFTS.get(units)
+ if bit_shift is not None:
+ return int(maxSize[:-2]) << bit_shift
+ else:
+ return int(maxSize)
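+
+# Editor's sketch, assuming the usual KB/MB/GB/TB entries in
+# _MEDIA_SIZE_BIT_SHIFTS (defined elsewhere in this module):
+#
+#   _media_size_to_long("10GB") == 10 << 30   # 10737418240
+#   _media_size_to_long("2MB")  == 2 << 20    # 2097152
+#   _media_size_to_long("1024") == 1024       # unrecognized suffix, plain int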
+
+
+def _media_path_url_from_info(root_desc, path_url):
+ """Creates an absolute media path URL.
+
+ Constructed using the API root URI and service path from the discovery
+ document and the relative path for the API method.
+
+ Args:
+ root_desc: Dictionary; the entire original deserialized discovery document.
+ path_url: String; the relative URL for the API method. Relative to the API
+ root, which is specified in the discovery document.
+
+ Returns:
+ String; the absolute URI for media upload for the API method.
+ """
+ return "%(root)supload/%(service_path)s%(path)s" % {
+ "root": root_desc["rootUrl"],
+ "service_path": root_desc["servicePath"],
+ "path": path_url,
+ }
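+
+# Editor's illustration with hypothetical discovery values: given
+# root_desc = {"rootUrl": "https://www.googleapis.com/",
+#              "servicePath": "drive/v3/"} and path_url = "files/{fileId}",
+# the function returns
+# "https://www.googleapis.com/upload/drive/v3/files/{fileId}".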
+
+
+def _fix_up_parameters(method_desc, root_desc, http_method, schema):
+ """Updates parameters of an API method with values specific to this library.
+
+ Specifically, adds whatever global parameters are specified by the API to the
+ parameters for the individual method. Also adds parameters which don't
+ appear in the discovery document, but are available to all discovery based
+ APIs (these are listed in STACK_QUERY_PARAMETERS).
+
+ SIDE EFFECTS: This updates the parameters dictionary object in the method
+ description.
+
+ Args:
+ method_desc: Dictionary with metadata describing an API method. Value comes
+ from the dictionary of methods stored in the 'methods' key in the
+ deserialized discovery document.
+ root_desc: Dictionary; the entire original deserialized discovery document.
+ http_method: String; the HTTP method used to call the API method described
+ in method_desc.
+ schema: Object, mapping of schema names to schema descriptions.
+
+ Returns:
+ The updated Dictionary stored in the 'parameters' key of the method
+ description dictionary.
+ """
+ parameters = method_desc.setdefault("parameters", {})
+
+ # Add in the parameters common to all methods.
+ for name, description in six.iteritems(root_desc.get("parameters", {})):
+ parameters[name] = description
+
+ # Add in undocumented query parameters.
+ for name in STACK_QUERY_PARAMETERS:
+ parameters[name] = STACK_QUERY_PARAMETER_DEFAULT_VALUE.copy()
+
+ # Add 'body' (our own reserved word) to parameters if the method supports
+ # a request payload.
+ if http_method in HTTP_PAYLOAD_METHODS and "request" in method_desc:
+ body = BODY_PARAMETER_DEFAULT_VALUE.copy()
+ body.update(method_desc["request"])
+ parameters["body"] = body
+
+ return parameters
+
+
+def _fix_up_media_upload(method_desc, root_desc, path_url, parameters):
+ """Adds 'media_body' and 'media_mime_type' parameters if supported by method.
+
+ SIDE EFFECTS: If there is a 'mediaUpload' in the method description, adds
+ 'media_upload' key to parameters.
+
+ Args:
+ method_desc: Dictionary with metadata describing an API method. Value comes
+ from the dictionary of methods stored in the 'methods' key in the
+ deserialized discovery document.
+ root_desc: Dictionary; the entire original deserialized discovery document.
+ path_url: String; the relative URL for the API method. Relative to the API
+ root, which is specified in the discovery document.
+ parameters: A dictionary describing method parameters for method described
+ in method_desc.
+
+ Returns:
+ Triple (accept, max_size, media_path_url) where:
+ - accept is a list of strings representing what content types are
+ accepted for media upload. Defaults to empty list if not in the
+ discovery document.
+ - max_size is a long representing the max size in bytes allowed for a
+ media upload. Defaults to 0L if not in the discovery document.
+ - media_path_url is a String; the absolute URI for media upload for the
+ API method. Constructed using the API root URI and service path from
+ the discovery document and the relative path for the API method. If
+ media upload is not supported, this is None.
+ """
+ media_upload = method_desc.get("mediaUpload", {})
+ accept = media_upload.get("accept", [])
+ max_size = _media_size_to_long(media_upload.get("maxSize", ""))
+ media_path_url = None
+
+ if media_upload:
+ media_path_url = _media_path_url_from_info(root_desc, path_url)
+ parameters["media_body"] = MEDIA_BODY_PARAMETER_DEFAULT_VALUE.copy()
+ parameters["media_mime_type"] = MEDIA_MIME_TYPE_PARAMETER_DEFAULT_VALUE.copy()
+
+ return accept, max_size, media_path_url
+
+
+def _fix_up_method_description(method_desc, root_desc, schema):
+ """Updates a method description in a discovery document.
+
+ SIDE EFFECTS: Changes the parameters dictionary in the method description with
+ extra parameters which are used locally.
+
+ Args:
+ method_desc: Dictionary with metadata describing an API method. Value comes
+ from the dictionary of methods stored in the 'methods' key in the
+ deserialized discovery document.
+ root_desc: Dictionary; the entire original deserialized discovery document.
+ schema: Object, mapping of schema names to schema descriptions.
+
+ Returns:
+ Tuple (path_url, http_method, method_id, accept, max_size, media_path_url)
+ where:
+ - path_url is a String; the relative URL for the API method. Relative to
+ the API root, which is specified in the discovery document.
+ - http_method is a String; the HTTP method used to call the API method
+ described in the method description.
+ - method_id is a String; the name of the RPC method associated with the
+ API method, and is in the method description in the 'id' key.
+ - accept is a list of strings representing what content types are
+ accepted for media upload. Defaults to empty list if not in the
+ discovery document.
+ - max_size is a long representing the max size in bytes allowed for a
+ media upload. Defaults to 0L if not in the discovery document.
+ - media_path_url is a String; the absolute URI for media upload for the
+ API method. Constructed using the API root URI and service path from
+ the discovery document and the relative path for the API method. If
+ media upload is not supported, this is None.
+ """
+ path_url = method_desc["path"]
+ http_method = method_desc["httpMethod"]
+ method_id = method_desc["id"]
+
+ parameters = _fix_up_parameters(method_desc, root_desc, http_method, schema)
+ # Order is important. `_fix_up_media_upload` needs `method_desc` to have a
+ # 'parameters' key and needs to know if there is a 'body' parameter because it
+ # also sets a 'media_body' parameter.
+ accept, max_size, media_path_url = _fix_up_media_upload(
+ method_desc, root_desc, path_url, parameters
+ )
+
+ return path_url, http_method, method_id, accept, max_size, media_path_url
+
+
+def _urljoin(base, url):
+ """Custom urljoin replacement supporting : before / in url."""
+ # In general, it's unsafe to simply join base and url. However, for
+ # the case of discovery documents, we know:
+ # * base will never contain params, query, or fragment
+ # * url will never contain a scheme or net_loc.
+ # In general, this means we can safely join on /; we just need to
+ # ensure we end up with precisely one / joining base and url. The
+ # exception here is the case of media uploads, where url will be an
+ # absolute url.
+ if url.startswith("http://") or url.startswith("https://"):
+ return urljoin(base, url)
+ new_base = base if base.endswith("/") else base + "/"
+ new_url = url[1:] if url.startswith("/") else url
+ return new_base + new_url
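+
+# Editor's sketch of the three cases the comment above describes:
+#
+#   _urljoin("https://example.com/api/", "/files") -> "https://example.com/api/files"
+#   _urljoin("https://example.com/api", "files")   -> "https://example.com/api/files"
+#   _urljoin("https://example.com/api", "https://upload.example.com/x")
+#       -> urljoin(...) is used, so the absolute URL is returned unchanged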
+
+
+# TODO(dhermes): Convert this class to ResourceMethod and make it callable
+class ResourceMethodParameters(object):
+ """Represents the parameters associated with a method.
+
+ Attributes:
+ argmap: Map from method parameter name (string) to query parameter name
+ (string).
+ required_params: List of required parameters (represented by parameter
+ name as string).
+ repeated_params: List of repeated parameters (represented by parameter
+ name as string).
+ pattern_params: Map from method parameter name (string) to regular
+ expression (as a string). If the pattern is set for a parameter, the
+ value for that parameter must match the regular expression.
+ query_params: List of parameters (represented by parameter name as string)
+ that will be used in the query string.
+ path_params: Set of parameters (represented by parameter name as string)
+ that will be used in the base URL path.
+ param_types: Map from method parameter name (string) to parameter type. Type
+ can be any valid JSON schema type; valid values are 'any', 'array',
+ 'boolean', 'integer', 'number', 'object', or 'string'. Reference:
+ http://tools.ietf.org/html/draft-zyp-json-schema-03#section-5.1
+ enum_params: Map from method parameter name (string) to list of strings,
+ where each list of strings is the list of acceptable enum values.
+ """
+
+ def __init__(self, method_desc):
+ """Constructor for ResourceMethodParameters.
+
+ Sets default values and defers to set_parameters to populate.
+
+ Args:
+ method_desc: Dictionary with metadata describing an API method. Value
+ comes from the dictionary of methods stored in the 'methods' key in
+ the deserialized discovery document.
+ """
+ self.argmap = {}
+ self.required_params = []
+ self.repeated_params = []
+ self.pattern_params = {}
+ self.query_params = []
+ # TODO(dhermes): Change path_params to a list if the extra URITEMPLATE
+ # parsing is gotten rid of.
+ self.path_params = set()
+ self.param_types = {}
+ self.enum_params = {}
+
+ self.set_parameters(method_desc)
+
+ def set_parameters(self, method_desc):
+ """Populates maps and lists based on method description.
+
+ Iterates through each parameter for the method and parses the values from
+ the parameter dictionary.
+
+ Args:
+ method_desc: Dictionary with metadata describing an API method. Value
+ comes from the dictionary of methods stored in the 'methods' key in
+ the deserialized discovery document.
+ """
+ for arg, desc in six.iteritems(method_desc.get("parameters", {})):
+ param = key2param(arg)
+ self.argmap[param] = arg
+
+ if desc.get("pattern"):
+ self.pattern_params[param] = desc["pattern"]
+ if desc.get("enum"):
+ self.enum_params[param] = desc["enum"]
+ if desc.get("required"):
+ self.required_params.append(param)
+ if desc.get("repeated"):
+ self.repeated_params.append(param)
+ if desc.get("location") == "query":
+ self.query_params.append(param)
+ if desc.get("location") == "path":
+ self.path_params.add(param)
+ self.param_types[param] = desc.get("type", "string")
+
+ # TODO(dhermes): Determine if this is still necessary. Discovery based APIs
+ # should have all path parameters already marked with
+ # 'location: path'.
+ for match in URITEMPLATE.finditer(method_desc["path"]):
+ for namematch in VARNAME.finditer(match.group(0)):
+ name = key2param(namematch.group(0))
+ self.path_params.add(name)
+ if name in self.query_params:
+ self.query_params.remove(name)
+
+
+def createMethod(methodName, methodDesc, rootDesc, schema):
+ """Creates a method for attaching to a Resource.
+
+ Args:
+ methodName: string, name of the method to use.
+ methodDesc: object, fragment of deserialized discovery document that
+ describes the method.
+ rootDesc: object, the entire deserialized discovery document.
+ schema: object, mapping of schema names to schema descriptions.
+ """
+ methodName = fix_method_name(methodName)
+ (
+ pathUrl,
+ httpMethod,
+ methodId,
+ accept,
+ maxSize,
+ mediaPathUrl,
+ ) = _fix_up_method_description(methodDesc, rootDesc, schema)
+
+ parameters = ResourceMethodParameters(methodDesc)
+
+ def method(self, **kwargs):
+ # Don't bother with doc string, it will be over-written by createMethod.
+
+ for name in six.iterkeys(kwargs):
+ if name not in parameters.argmap:
+ raise TypeError('Got an unexpected keyword argument "%s"' % name)
+
+ # Remove args that have a value of None.
+ keys = list(kwargs.keys())
+ for name in keys:
+ if kwargs[name] is None:
+ del kwargs[name]
+
+ for name in parameters.required_params:
+ if name not in kwargs:
+ # temporary workaround for non-paging methods incorrectly requiring
+ # page token parameter (cf. drive.changes.watch vs. drive.changes.list)
+ if name not in _PAGE_TOKEN_NAMES or _findPageTokenName(
+ _methodProperties(methodDesc, schema, "response")
+ ):
+ raise TypeError('Missing required parameter "%s"' % name)
+
+ for name, regex in six.iteritems(parameters.pattern_params):
+ if name in kwargs:
+ if isinstance(kwargs[name], six.string_types):
+ pvalues = [kwargs[name]]
+ else:
+ pvalues = kwargs[name]
+ for pvalue in pvalues:
+ if re.match(regex, pvalue) is None:
+ raise TypeError(
+ 'Parameter "%s" value "%s" does not match the pattern "%s"'
+ % (name, pvalue, regex)
+ )
+
+ for name, enums in six.iteritems(parameters.enum_params):
+ if name in kwargs:
+ # We need to handle the case of a repeated enum
+ # name differently, since we want to handle both
+ # arg='value' and arg=['value1', 'value2']
+ if name in parameters.repeated_params and not isinstance(
+ kwargs[name], six.string_types
+ ):
+ values = kwargs[name]
+ else:
+ values = [kwargs[name]]
+ for value in values:
+ if value not in enums:
+ raise TypeError(
+ 'Parameter "%s" value "%s" is not an allowed value in "%s"'
+ % (name, value, str(enums))
+ )
+
+ actual_query_params = {}
+ actual_path_params = {}
+ for key, value in six.iteritems(kwargs):
+ to_type = parameters.param_types.get(key, "string")
+ # For repeated parameters we cast each member of the list.
+ if key in parameters.repeated_params and type(value) == type([]):
+ cast_value = [_cast(x, to_type) for x in value]
+ else:
+ cast_value = _cast(value, to_type)
+ if key in parameters.query_params:
+ actual_query_params[parameters.argmap[key]] = cast_value
+ if key in parameters.path_params:
+ actual_path_params[parameters.argmap[key]] = cast_value
+ body_value = kwargs.get("body", None)
+ media_filename = kwargs.get("media_body", None)
+ media_mime_type = kwargs.get("media_mime_type", None)
+
+ if self._developerKey:
+ actual_query_params["key"] = self._developerKey
+
+ model = self._model
+ if methodName.endswith("_media"):
+ model = MediaModel()
+ elif "response" not in methodDesc:
+ model = RawModel()
+
+ headers = {}
+ headers, params, query, body = model.request(
+ headers, actual_path_params, actual_query_params, body_value
+ )
+
+ expanded_url = uritemplate.expand(pathUrl, params)
+ url = _urljoin(self._baseUrl, expanded_url + query)
+
+ resumable = None
+ multipart_boundary = ""
+
+ if media_filename:
+ # Ensure we end up with a valid MediaUpload object.
+ if isinstance(media_filename, six.string_types):
+ if media_mime_type is None:
+ logger.warning(
+ "media_mime_type argument not specified: trying to auto-detect for %s",
+ media_filename,
+ )
+ media_mime_type, _ = mimetypes.guess_type(media_filename)
+ if media_mime_type is None:
+ raise UnknownFileType(media_filename)
+ if not mimeparse.best_match([media_mime_type], ",".join(accept)):
+ raise UnacceptableMimeTypeError(media_mime_type)
+ media_upload = MediaFileUpload(media_filename, mimetype=media_mime_type)
+ elif isinstance(media_filename, MediaUpload):
+ media_upload = media_filename
+ else:
+ raise TypeError("media_filename must be str or MediaUpload.")
+
+ # Check the maxSize
+ if media_upload.size() is not None and media_upload.size() > maxSize > 0:
+ raise MediaUploadSizeError("Media larger than: %s" % maxSize)
+
+ # Use the media path uri for media uploads
+ expanded_url = uritemplate.expand(mediaPathUrl, params)
+ url = _urljoin(self._baseUrl, expanded_url + query)
+ if media_upload.resumable():
+ url = _add_query_parameter(url, "uploadType", "resumable")
+
+ if media_upload.resumable():
+ # This is all we need to do for resumable, if the body exists it gets
+ # sent in the first request, otherwise an empty body is sent.
+ resumable = media_upload
+ else:
+ # A non-resumable upload
+ if body is None:
+ # This is a simple media upload
+ headers["content-type"] = media_upload.mimetype()
+ body = media_upload.getbytes(0, media_upload.size())
+ url = _add_query_parameter(url, "uploadType", "media")
+ else:
+ # This is a multipart/related upload.
+ msgRoot = MIMEMultipart("related")
+                # msgRoot should not write out its own headers
+ setattr(msgRoot, "_write_headers", lambda self: None)
+
+ # attach the body as one part
+ msg = MIMENonMultipart(*headers["content-type"].split("/"))
+ msg.set_payload(body)
+ msgRoot.attach(msg)
+
+ # attach the media as the second part
+ msg = MIMENonMultipart(*media_upload.mimetype().split("/"))
+ msg["Content-Transfer-Encoding"] = "binary"
+
+ payload = media_upload.getbytes(0, media_upload.size())
+ msg.set_payload(payload)
+ msgRoot.attach(msg)
+ # encode the body: note that we can't use `as_string`, because
+ # it plays games with `From ` lines.
+ fp = BytesIO()
+ g = _BytesGenerator(fp, mangle_from_=False)
+ g.flatten(msgRoot, unixfrom=False)
+ body = fp.getvalue()
+
+ multipart_boundary = msgRoot.get_boundary()
+ headers["content-type"] = (
+ "multipart/related; " 'boundary="%s"'
+ ) % multipart_boundary
+ url = _add_query_parameter(url, "uploadType", "multipart")
+
+ logger.debug("URL being requested: %s %s" % (httpMethod, url))
+ return self._requestBuilder(
+ self._http,
+ model.response,
+ url,
+ method=httpMethod,
+ body=body,
+ headers=headers,
+ methodId=methodId,
+ resumable=resumable,
+ )
+
+ docs = [methodDesc.get("description", DEFAULT_METHOD_DOC), "\n\n"]
+ if len(parameters.argmap) > 0:
+ docs.append("Args:\n")
+
+ # Skip undocumented params and params common to all methods.
+ skip_parameters = list(rootDesc.get("parameters", {}).keys())
+ skip_parameters.extend(STACK_QUERY_PARAMETERS)
+
+ all_args = list(parameters.argmap.keys())
+ args_ordered = [key2param(s) for s in methodDesc.get("parameterOrder", [])]
+
+ # Move body to the front of the line.
+ if "body" in all_args:
+ args_ordered.append("body")
+
+ for name in all_args:
+ if name not in args_ordered:
+ args_ordered.append(name)
+
+ for arg in args_ordered:
+ if arg in skip_parameters:
+ continue
+
+ repeated = ""
+ if arg in parameters.repeated_params:
+ repeated = " (repeated)"
+ required = ""
+ if arg in parameters.required_params:
+ required = " (required)"
+ paramdesc = methodDesc["parameters"][parameters.argmap[arg]]
+ paramdoc = paramdesc.get("description", "A parameter")
+ if "$ref" in paramdesc:
+ docs.append(
+ (" %s: object, %s%s%s\n The object takes the" " form of:\n\n%s\n\n")
+ % (
+ arg,
+ paramdoc,
+ required,
+ repeated,
+ schema.prettyPrintByName(paramdesc["$ref"]),
+ )
+ )
+ else:
+ paramtype = paramdesc.get("type", "string")
+ docs.append(
+ " %s: %s, %s%s%s\n" % (arg, paramtype, paramdoc, required, repeated)
+ )
+ enum = paramdesc.get("enum", [])
+ enumDesc = paramdesc.get("enumDescriptions", [])
+ if enum and enumDesc:
+ docs.append(" Allowed values\n")
+ for (name, desc) in zip(enum, enumDesc):
+ docs.append(" %s - %s\n" % (name, desc))
+ if "response" in methodDesc:
+ if methodName.endswith("_media"):
+ docs.append("\nReturns:\n The media object as a string.\n\n ")
+ else:
+ docs.append("\nReturns:\n An object of the form:\n\n ")
+ docs.append(schema.prettyPrintSchema(methodDesc["response"]))
+
+ setattr(method, "__doc__", "".join(docs))
+ return (methodName, method)
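+
+# Editor's note: the (name, function) pair returned above is bound onto a
+# Resource via _set_dynamic_attr, which yields the familiar call shape
+# (service and method names below are placeholders, not from this document):
+#
+#   request = service.files().list(pageSize=10)  # the "method" built here
+#   response = request.execute()                 # HttpRequest from _requestBuilder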
+
+
+def createNextMethod(
+ methodName,
+ pageTokenName="pageToken",
+ nextPageTokenName="nextPageToken",
+ isPageTokenParameter=True,
+):
+ """Creates any _next methods for attaching to a Resource.
+
+ The _next methods allow for easy iteration through list() responses.
+
+ Args:
+ methodName: string, name of the method to use.
+ pageTokenName: string, name of request page token field.
+ nextPageTokenName: string, name of response page token field.
+ isPageTokenParameter: Boolean, True if request page token is a query
+ parameter, False if request page token is a field of the request body.
+ """
+ methodName = fix_method_name(methodName)
+
+ def methodNext(self, previous_request, previous_response):
+ """Retrieves the next page of results.
+
+Args:
+ previous_request: The request for the previous page. (required)
+ previous_response: The response from the request for the previous page. (required)
+
+Returns:
+ A request object that you can call 'execute()' on to request the next
+ page. Returns None if there are no more items in the collection.
+ """
+ # Retrieve nextPageToken from previous_response
+ # Use as pageToken in previous_request to create new request.
+
+ nextPageToken = previous_response.get(nextPageTokenName, None)
+ if not nextPageToken:
+ return None
+
+ request = copy.copy(previous_request)
+
+ if isPageTokenParameter:
+ # Replace pageToken value in URI
+ request.uri = _add_query_parameter(
+ request.uri, pageTokenName, nextPageToken
+ )
+ logger.debug("Next page request URL: %s %s" % (methodName, request.uri))
+ else:
+ # Replace pageToken value in request body
+ model = self._model
+ body = model.deserialize(request.body)
+ body[pageTokenName] = nextPageToken
+ request.body = model.serialize(body)
+ logger.debug("Next page request body: %s %s" % (methodName, body))
+
+ return request
+
+ return (methodName, methodNext)
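+
+# Editor's sketch of the pagination loop the *_next methods enable
+# (service/collection names are placeholders):
+#
+#   request = service.files().list()
+#   while request is not None:
+#       response = request.execute()
+#       for item in response.get("items", []):
+#           handle(item)   # hypothetical per-item helper
+#       request = service.files().list_next(request, response)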
+
+
+class Resource(object):
+ """A class for interacting with a resource."""
+
+ def __init__(
+ self,
+ http,
+ baseUrl,
+ model,
+ requestBuilder,
+ developerKey,
+ resourceDesc,
+ rootDesc,
+ schema,
+ ):
+ """Build a Resource from the API description.
+
+ Args:
+ http: httplib2.Http, Object to make http requests with.
+ baseUrl: string, base URL for the API. All requests are relative to this
+ URI.
+ model: googleapiclient.Model, converts to and from the wire format.
+ requestBuilder: class or callable that instantiates an
+ googleapiclient.HttpRequest object.
+ developerKey: string, key obtained from
+ https://code.google.com/apis/console
+ resourceDesc: object, section of deserialized discovery document that
+ describes a resource. Note that the top level discovery document
+ is considered a resource.
+ rootDesc: object, the entire deserialized discovery document.
+ schema: object, mapping of schema names to schema descriptions.
+ """
+ self._dynamic_attrs = []
+
+ self._http = http
+ self._baseUrl = baseUrl
+ self._model = model
+ self._developerKey = developerKey
+ self._requestBuilder = requestBuilder
+ self._resourceDesc = resourceDesc
+ self._rootDesc = rootDesc
+ self._schema = schema
+
+ self._set_service_methods()
+
+ def _set_dynamic_attr(self, attr_name, value):
+ """Sets an instance attribute and tracks it in a list of dynamic attributes.
+
+ Args:
+ attr_name: string; The name of the attribute to be set
+ value: The value being set on the object and tracked in the dynamic cache.
+ """
+ self._dynamic_attrs.append(attr_name)
+ self.__dict__[attr_name] = value
+
+ def __getstate__(self):
+ """Trim the state down to something that can be pickled.
+
+ Uses the fact that the instance variable _dynamic_attrs holds attrs that
+ will be wiped and restored on pickle serialization.
+ """
+ state_dict = copy.copy(self.__dict__)
+ for dynamic_attr in self._dynamic_attrs:
+ del state_dict[dynamic_attr]
+ del state_dict["_dynamic_attrs"]
+ return state_dict
+
+ def __setstate__(self, state):
+ """Reconstitute the state of the object from being pickled.
+
+ Uses the fact that the instance variable _dynamic_attrs holds attrs that
+ will be wiped and restored on pickle serialization.
+ """
+ self.__dict__.update(state)
+ self._dynamic_attrs = []
+ self._set_service_methods()
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc, exc_tb):
+ self.close()
+
+ def close(self):
+ """Close httplib2 connections."""
+ # httplib2 leaves sockets open by default.
+ # Cleanup using the `close` method.
+ # https://github.com/httplib2/httplib2/issues/148
+ self._http.http.close()
+
+ def _set_service_methods(self):
+ self._add_basic_methods(self._resourceDesc, self._rootDesc, self._schema)
+ self._add_nested_resources(self._resourceDesc, self._rootDesc, self._schema)
+ self._add_next_methods(self._resourceDesc, self._schema)
+
+ def _add_basic_methods(self, resourceDesc, rootDesc, schema):
+ # If this is the root Resource, add a new_batch_http_request() method.
+ if resourceDesc == rootDesc:
+ batch_uri = "%s%s" % (
+ rootDesc["rootUrl"],
+ rootDesc.get("batchPath", "batch"),
+ )
+
+ def new_batch_http_request(callback=None):
+ """Create a BatchHttpRequest object based on the discovery document.
+
+ Args:
+ callback: callable, A callback to be called for each response, of the
+ form callback(id, response, exception). The first parameter is the
+ request id, and the second is the deserialized response object. The
+ third is an apiclient.errors.HttpError exception object if an HTTP
+ error occurred while processing the request, or None if no error
+ occurred.
+
+ Returns:
+ A BatchHttpRequest object based on the discovery document.
+ """
+ return BatchHttpRequest(callback=callback, batch_uri=batch_uri)
+
+ self._set_dynamic_attr("new_batch_http_request", new_batch_http_request)
+
+ # Add basic methods to Resource
+ if "methods" in resourceDesc:
+ for methodName, methodDesc in six.iteritems(resourceDesc["methods"]):
+ fixedMethodName, method = createMethod(
+ methodName, methodDesc, rootDesc, schema
+ )
+ self._set_dynamic_attr(
+ fixedMethodName, method.__get__(self, self.__class__)
+ )
+ # Add in _media methods. The functionality of the attached method will
+ # change when it sees that the method name ends in _media.
+ if methodDesc.get("supportsMediaDownload", False):
+ fixedMethodName, method = createMethod(
+ methodName + "_media", methodDesc, rootDesc, schema
+ )
+ self._set_dynamic_attr(
+ fixedMethodName, method.__get__(self, self.__class__)
+ )
+
+ def _add_nested_resources(self, resourceDesc, rootDesc, schema):
+ # Add in nested resources
+ if "resources" in resourceDesc:
+
+ def createResourceMethod(methodName, methodDesc):
+ """Create a method on the Resource to access a nested Resource.
+
+ Args:
+ methodName: string, name of the method to use.
+ methodDesc: object, fragment of deserialized discovery document that
+ describes the method.
+ """
+ methodName = fix_method_name(methodName)
+
+ def methodResource(self):
+ return Resource(
+ http=self._http,
+ baseUrl=self._baseUrl,
+ model=self._model,
+ developerKey=self._developerKey,
+ requestBuilder=self._requestBuilder,
+ resourceDesc=methodDesc,
+ rootDesc=rootDesc,
+ schema=schema,
+ )
+
+ setattr(methodResource, "__doc__", "A collection resource.")
+ setattr(methodResource, "__is_resource__", True)
+
+ return (methodName, methodResource)
+
+ for methodName, methodDesc in six.iteritems(resourceDesc["resources"]):
+ fixedMethodName, method = createResourceMethod(methodName, methodDesc)
+ self._set_dynamic_attr(
+ fixedMethodName, method.__get__(self, self.__class__)
+ )
+
+ def _add_next_methods(self, resourceDesc, schema):
+        # Add _next() methods if and only if one of the names 'pageToken' or
+        # 'nextPageToken' occurs among the fields of both the method's response
+        # type and either the method's request (query parameters) or request body.
+ if "methods" not in resourceDesc:
+ return
+ for methodName, methodDesc in six.iteritems(resourceDesc["methods"]):
+ nextPageTokenName = _findPageTokenName(
+ _methodProperties(methodDesc, schema, "response")
+ )
+ if not nextPageTokenName:
+ continue
+ isPageTokenParameter = True
+ pageTokenName = _findPageTokenName(methodDesc.get("parameters", {}))
+ if not pageTokenName:
+ isPageTokenParameter = False
+ pageTokenName = _findPageTokenName(
+ _methodProperties(methodDesc, schema, "request")
+ )
+ if not pageTokenName:
+ continue
+ fixedMethodName, method = createNextMethod(
+ methodName + "_next",
+ pageTokenName,
+ nextPageTokenName,
+ isPageTokenParameter,
+ )
+ self._set_dynamic_attr(
+ fixedMethodName, method.__get__(self, self.__class__)
+ )
+
+
+def _findPageTokenName(fields):
+ """Search field names for one like a page token.
+
+ Args:
+ fields: container of string, names of fields.
+
+ Returns:
+ First name that is either 'pageToken' or 'nextPageToken' if one exists,
+ otherwise None.
+ """
+ return next(
+ (tokenName for tokenName in _PAGE_TOKEN_NAMES if tokenName in fields), None
+ )
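+
+# e.g. (editor's illustration) _findPageTokenName({"pageToken": {}, "q": {}})
+# returns "pageToken", while _findPageTokenName({}) returns None, assuming
+# _PAGE_TOKEN_NAMES is ("pageToken", "nextPageToken") as used above.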
+
+
+def _methodProperties(methodDesc, schema, name):
+ """Get properties of a field in a method description.
+
+ Args:
+ methodDesc: object, fragment of deserialized discovery document that
+ describes the method.
+ schema: object, mapping of schema names to schema descriptions.
+ name: string, name of top-level field in method description.
+
+ Returns:
+ Object representing fragment of deserialized discovery document
+ corresponding to 'properties' field of object corresponding to named field
+ in method description, if it exists, otherwise empty dict.
+ """
+ desc = methodDesc.get(name, {})
+ if "$ref" in desc:
+ desc = schema.get(desc["$ref"], {})
+ return desc.get("properties", {})
diff --git a/venv/Lib/site-packages/googleapiclient/discovery_cache/__init__.py b/venv/Lib/site-packages/googleapiclient/discovery_cache/__init__.py
new file mode 100644
index 000000000..455ff6224
--- /dev/null
+++ b/venv/Lib/site-packages/googleapiclient/discovery_cache/__init__.py
@@ -0,0 +1,49 @@
+# Copyright 2014 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Caching utility for the discovery document."""
+
+from __future__ import absolute_import
+
+import logging
+import datetime
+import os
+
+LOGGER = logging.getLogger(__name__)
+
+DISCOVERY_DOC_MAX_AGE = 60 * 60 * 24 # 1 day
+
+
+def autodetect():
+ """Detects an appropriate cache module and returns it.
+
+ Returns:
+ googleapiclient.discovery_cache.base.Cache, a cache object which
+ is auto detected, or None if no cache object is available.
+ """
+ if 'APPENGINE_RUNTIME' in os.environ:
+ try:
+ from google.appengine.api import memcache
+ from . import appengine_memcache
+
+ return appengine_memcache.cache
+ except Exception:
+ pass
+ try:
+ from . import file_cache
+
+ return file_cache.cache
+ except Exception as e:
+ LOGGER.warning(e, exc_info=True)
+ return None
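+
+# Editor's note: a sketch of how this is normally reached -- discovery.build
+# calls autodetect() when cache_discovery is True and no cache is supplied:
+#
+#   from googleapiclient.discovery import build
+#   service = build("drive", "v3", cache_discovery=True)  # may pick file_cache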
diff --git a/venv/Lib/site-packages/googleapiclient/discovery_cache/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/googleapiclient/discovery_cache/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..c3b564e9d
Binary files /dev/null and b/venv/Lib/site-packages/googleapiclient/discovery_cache/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/googleapiclient/discovery_cache/__pycache__/appengine_memcache.cpython-36.pyc b/venv/Lib/site-packages/googleapiclient/discovery_cache/__pycache__/appengine_memcache.cpython-36.pyc
new file mode 100644
index 000000000..bf604f106
Binary files /dev/null and b/venv/Lib/site-packages/googleapiclient/discovery_cache/__pycache__/appengine_memcache.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/googleapiclient/discovery_cache/__pycache__/base.cpython-36.pyc b/venv/Lib/site-packages/googleapiclient/discovery_cache/__pycache__/base.cpython-36.pyc
new file mode 100644
index 000000000..76c956b04
Binary files /dev/null and b/venv/Lib/site-packages/googleapiclient/discovery_cache/__pycache__/base.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/googleapiclient/discovery_cache/__pycache__/file_cache.cpython-36.pyc b/venv/Lib/site-packages/googleapiclient/discovery_cache/__pycache__/file_cache.cpython-36.pyc
new file mode 100644
index 000000000..147b64cae
Binary files /dev/null and b/venv/Lib/site-packages/googleapiclient/discovery_cache/__pycache__/file_cache.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/googleapiclient/discovery_cache/appengine_memcache.py b/venv/Lib/site-packages/googleapiclient/discovery_cache/appengine_memcache.py
new file mode 100644
index 000000000..1d18d7abb
--- /dev/null
+++ b/venv/Lib/site-packages/googleapiclient/discovery_cache/appengine_memcache.py
@@ -0,0 +1,56 @@
+# Copyright 2014 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""App Engine memcache based cache for the discovery document."""
+
+import logging
+
+# This is only an optional dependency because we only import this
+# module when google.appengine.api.memcache is available.
+from google.appengine.api import memcache
+
+from . import base
+from ..discovery_cache import DISCOVERY_DOC_MAX_AGE
+
+
+LOGGER = logging.getLogger(__name__)
+
+NAMESPACE = "google-api-client"
+
+
+class Cache(base.Cache):
+ """A cache with app engine memcache API."""
+
+ def __init__(self, max_age):
+ """Constructor.
+
+ Args:
+ max_age: Cache expiration in seconds.
+ """
+ self._max_age = max_age
+
+ def get(self, url):
+ try:
+ return memcache.get(url, namespace=NAMESPACE)
+ except Exception as e:
+ LOGGER.warning(e, exc_info=True)
+
+ def set(self, url, content):
+ try:
+ memcache.set(url, content, time=int(self._max_age), namespace=NAMESPACE)
+ except Exception as e:
+ LOGGER.warning(e, exc_info=True)
+
+
+cache = Cache(max_age=DISCOVERY_DOC_MAX_AGE)
diff --git a/venv/Lib/site-packages/googleapiclient/discovery_cache/base.py b/venv/Lib/site-packages/googleapiclient/discovery_cache/base.py
new file mode 100644
index 000000000..fbe445924
--- /dev/null
+++ b/venv/Lib/site-packages/googleapiclient/discovery_cache/base.py
@@ -0,0 +1,46 @@
+# Copyright 2014 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""An abstract class for caching the discovery document."""
+
+import abc
+
+
+class Cache(object):
+ """A base abstract cache class."""
+
+ __metaclass__ = abc.ABCMeta
+
+ @abc.abstractmethod
+ def get(self, url):
+ """Gets the content from the memcache with a given key.
+
+ Args:
+ url: string, the key for the cache.
+
+ Returns:
+ object, the value in the cache for the given key, or None if the key is
+ not in the cache.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def set(self, url, content):
+ """Sets the given key and content in the cache.
+
+ Args:
+ url: string, the key for the cache.
+ content: string, the discovery document.
+ """
+ raise NotImplementedError()
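+
+# Editor's sketch: a minimal in-memory implementation of this interface,
+# illustrative only (not shipped with the library):
+#
+#   class DictCache(Cache):
+#       def __init__(self):
+#           self._store = {}
+#
+#       def get(self, url):
+#           return self._store.get(url)
+#
+#       def set(self, url, content):
+#           self._store[url] = content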
diff --git a/venv/Lib/site-packages/googleapiclient/discovery_cache/file_cache.py b/venv/Lib/site-packages/googleapiclient/discovery_cache/file_cache.py
new file mode 100644
index 000000000..36eb29a39
--- /dev/null
+++ b/venv/Lib/site-packages/googleapiclient/discovery_cache/file_cache.py
@@ -0,0 +1,146 @@
+# Copyright 2014 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""File based cache for the discovery document.
+
+The cache is stored in a single file so that multiple processes can
+share the same cache. It locks the file whenever accessing the
+file. If the cache content is corrupted, it is reinitialized with
+an empty cache.
+"""
+
+from __future__ import division
+
+import datetime
+import json
+import logging
+import os
+import tempfile
+import threading
+
+try:
+ from oauth2client.contrib.locked_file import LockedFile
+except ImportError:
+ # oauth2client < 2.0.0
+ try:
+ from oauth2client.locked_file import LockedFile
+ except ImportError:
+ # oauth2client > 4.0.0 or google-auth
+ raise ImportError(
+ "file_cache is unavailable when using oauth2client >= 4.0.0 or google-auth"
+ )
+
+from . import base
+from ..discovery_cache import DISCOVERY_DOC_MAX_AGE
+
+LOGGER = logging.getLogger(__name__)
+
+FILENAME = "google-api-python-client-discovery-doc.cache"
+EPOCH = datetime.datetime.utcfromtimestamp(0)
+
+
+def _to_timestamp(date):
+ try:
+ return (date - EPOCH).total_seconds()
+ except AttributeError:
+ # The following is the equivalent of total_seconds() in Python2.6.
+ # See also: https://docs.python.org/2/library/datetime.html
+ delta = date - EPOCH
+ return (
+ delta.microseconds + (delta.seconds + delta.days * 24 * 3600) * 10 ** 6
+ ) / 10 ** 6
+
+
+def _read_or_initialize_cache(f):
+ f.file_handle().seek(0)
+ try:
+ cache = json.load(f.file_handle())
+ except Exception:
+        # Either the file is being opened for the first time or the cache is
+        # corrupted, so initialize the file with an empty dict.
+ cache = {}
+ f.file_handle().truncate(0)
+ f.file_handle().seek(0)
+ json.dump(cache, f.file_handle())
+ return cache
+
+
+class Cache(base.Cache):
+ """A file based cache for the discovery documents."""
+
+ def __init__(self, max_age):
+ """Constructor.
+
+ Args:
+ max_age: Cache expiration in seconds.
+ """
+ self._max_age = max_age
+ self._file = os.path.join(tempfile.gettempdir(), FILENAME)
+ f = LockedFile(self._file, "a+", "r")
+ try:
+ f.open_and_lock()
+ if f.is_locked():
+ _read_or_initialize_cache(f)
+            # If we cannot obtain the lock, another process or thread must
+            # have initialized the file.
+ except Exception as e:
+ LOGGER.warning(e, exc_info=True)
+ finally:
+ f.unlock_and_close()
+
+ def get(self, url):
+ f = LockedFile(self._file, "r+", "r")
+ try:
+ f.open_and_lock()
+ if f.is_locked():
+ cache = _read_or_initialize_cache(f)
+ if url in cache:
+ content, t = cache.get(url, (None, 0))
+ if _to_timestamp(datetime.datetime.now()) < t + self._max_age:
+ return content
+ return None
+ else:
+ LOGGER.debug("Could not obtain a lock for the cache file.")
+ return None
+ except Exception as e:
+ LOGGER.warning(e, exc_info=True)
+ finally:
+ f.unlock_and_close()
+
+ def set(self, url, content):
+ f = LockedFile(self._file, "r+", "r")
+ try:
+ f.open_and_lock()
+ if f.is_locked():
+ cache = _read_or_initialize_cache(f)
+ cache[url] = (content, _to_timestamp(datetime.datetime.now()))
+ # Remove stale cache.
+ for k, (_, timestamp) in list(cache.items()):
+ if (
+ _to_timestamp(datetime.datetime.now())
+ >= timestamp + self._max_age
+ ):
+ del cache[k]
+ f.file_handle().truncate(0)
+ f.file_handle().seek(0)
+ json.dump(cache, f.file_handle())
+ else:
+ LOGGER.debug("Could not obtain a lock for the cache file.")
+ except Exception as e:
+ LOGGER.warning(e, exc_info=True)
+ finally:
+ f.unlock_and_close()
+
+
+cache = Cache(max_age=DISCOVERY_DOC_MAX_AGE)
diff --git a/venv/Lib/site-packages/googleapiclient/errors.py b/venv/Lib/site-packages/googleapiclient/errors.py
new file mode 100644
index 000000000..2f7b112f0
--- /dev/null
+++ b/venv/Lib/site-packages/googleapiclient/errors.py
@@ -0,0 +1,180 @@
+# Copyright 2014 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Errors for the library.
+
+All exceptions defined by the library
+should be defined in this file.
+"""
+from __future__ import absolute_import
+
+__author__ = "jcgregorio@google.com (Joe Gregorio)"
+
+import json
+
+from googleapiclient import _helpers as util
+
+
+class Error(Exception):
+ """Base error for this module."""
+
+ pass
+
+
+class HttpError(Error):
+ """HTTP data was invalid or unexpected."""
+
+ @util.positional(3)
+ def __init__(self, resp, content, uri=None):
+ self.resp = resp
+ if not isinstance(content, bytes):
+ raise TypeError("HTTP content should be bytes")
+ self.content = content
+ self.uri = uri
+ self.error_details = ""
+
+ def _get_reason(self):
+ """Calculate the reason for the error from the response content."""
+ reason = self.resp.reason
+ try:
+ data = json.loads(self.content.decode("utf-8"))
+ if isinstance(data, dict):
+ reason = data["error"]["message"]
+ if "details" in data["error"]:
+ self.error_details = data["error"]["details"]
+ elif "detail" in data["error"]:
+ self.error_details = data["error"]["detail"]
+ elif isinstance(data, list) and len(data) > 0:
+ first_error = data[0]
+ reason = first_error["error"]["message"]
+ if "details" in first_error["error"]:
+ self.error_details = first_error["error"]["details"]
+ except (ValueError, KeyError, TypeError):
+ pass
+ if reason is None:
+ reason = ""
+ return reason
+
+    def __repr__(self):
+        reason = self._get_reason()
+        if self.error_details:
+            return '<HttpError %s when requesting %s returned "%s". Details: "%s">' % (
+                self.resp.status,
+                self.uri,
+                reason.strip(),
+                self.error_details,
+            )
+        elif self.uri:
+            return '<HttpError %s when requesting %s returned "%s">' % (
+                self.resp.status,
+                self.uri,
+                self._get_reason().strip(),
+            )
+        else:
+            return '<HttpError %s "%s">' % (self.resp.status, self._get_reason())
+
+ __str__ = __repr__
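+
+# Editor's note: the usual call-site pattern for this exception (names such
+# as request and back_off are placeholders, not part of this module):
+#
+#   try:
+#       response = request.execute()
+#   except HttpError as err:
+#       if err.resp.status in (429, 500, 503):
+#           back_off()   # hypothetical retry/backoff helper
+#       else:
+#           raise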
+
+
+class InvalidJsonError(Error):
+ """The JSON returned could not be parsed."""
+
+ pass
+
+
+class UnknownFileType(Error):
+ """File type unknown or unexpected."""
+
+ pass
+
+
+class UnknownLinkType(Error):
+ """Link type unknown or unexpected."""
+
+ pass
+
+
+class UnknownApiNameOrVersion(Error):
+ """No API with that name and version exists."""
+
+ pass
+
+
+class UnacceptableMimeTypeError(Error):
+ """That is an unacceptable mimetype for this operation."""
+
+ pass
+
+
+class MediaUploadSizeError(Error):
+ """Media is larger than the method can accept."""
+
+ pass
+
+
+class ResumableUploadError(HttpError):
+ """Error occurred during resumable upload."""
+
+ pass
+
+
+class InvalidChunkSizeError(Error):
+ """The given chunksize is not valid."""
+
+ pass
+
+
+class InvalidNotificationError(Error):
+ """The channel Notification is invalid."""
+
+ pass
+
+
+class BatchError(HttpError):
+ """Error occurred during batch operations."""
+
+ @util.positional(2)
+ def __init__(self, reason, resp=None, content=None):
+ self.resp = resp
+ self.content = content
+ self.reason = reason
+
+    def __repr__(self):
+        if getattr(self.resp, "status", None) is None:
+            return '<BatchError "%s">' % (self.reason)
+        else:
+            return '<BatchError %s "%s">' % (self.resp.status, self.reason)
+
+ __str__ = __repr__
+
+
+class UnexpectedMethodError(Error):
+ """Exception raised by RequestMockBuilder on unexpected calls."""
+
+ @util.positional(1)
+ def __init__(self, methodId=None):
+ """Constructor for an UnexpectedMethodError."""
+ super(UnexpectedMethodError, self).__init__(
+ "Received unexpected call %s" % methodId
+ )
+
+
+class UnexpectedBodyError(Error):
+ """Exception raised by RequestMockBuilder on unexpected bodies."""
+
+ def __init__(self, expected, provided):
+        """Constructor for an UnexpectedBodyError."""
+ super(UnexpectedBodyError, self).__init__(
+ "Expected: [%s] - Provided: [%s]" % (expected, provided)
+ )
diff --git a/venv/Lib/site-packages/googleapiclient/http.py b/venv/Lib/site-packages/googleapiclient/http.py
new file mode 100644
index 000000000..926ca1bcf
--- /dev/null
+++ b/venv/Lib/site-packages/googleapiclient/http.py
@@ -0,0 +1,1922 @@
+# Copyright 2014 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Classes to encapsulate a single HTTP request.
+
+The classes implement a command pattern, with every
+object supporting an execute() method that does the
+actual HTTP request.
+"""
+from __future__ import absolute_import
+import six
+from six.moves import http_client
+from six.moves import range
+
+__author__ = "jcgregorio@google.com (Joe Gregorio)"
+
+from six import BytesIO, StringIO
+from six.moves.urllib.parse import urlparse, urlunparse, quote, unquote
+
+import base64
+import copy
+import gzip
+import httplib2
+import json
+import logging
+import mimetypes
+import os
+import random
+import socket
+import sys
+import time
+import uuid
+
+# TODO(issue 221): Remove this conditional import.
+try:
+ import ssl
+except ImportError:
+ _ssl_SSLError = object()
+else:
+ _ssl_SSLError = ssl.SSLError
+
+from email.generator import Generator
+from email.mime.multipart import MIMEMultipart
+from email.mime.nonmultipart import MIMENonMultipart
+from email.parser import FeedParser
+
+from googleapiclient import _helpers as util
+
+from googleapiclient import _auth
+from googleapiclient.errors import BatchError
+from googleapiclient.errors import HttpError
+from googleapiclient.errors import InvalidChunkSizeError
+from googleapiclient.errors import ResumableUploadError
+from googleapiclient.errors import UnexpectedBodyError
+from googleapiclient.errors import UnexpectedMethodError
+from googleapiclient.model import JsonModel
+
+
+LOGGER = logging.getLogger(__name__)
+
+DEFAULT_CHUNK_SIZE = 100 * 1024 * 1024
+
+MAX_URI_LENGTH = 2048
+
+MAX_BATCH_LIMIT = 1000
+
+_TOO_MANY_REQUESTS = 429
+
+DEFAULT_HTTP_TIMEOUT_SEC = 60
+
+_LEGACY_BATCH_URI = "https://www.googleapis.com/batch"
+
+if six.PY2:
+    # ConnectionError is a Python 3 builtin exception that doesn't exist in
+    # Python 2. Define it as None to avoid a NameError when trying to catch it.
+ ConnectionError = None
+
+
+def _should_retry_response(resp_status, content):
+ """Determines whether a response should be retried.
+
+ Args:
+ resp_status: The response status received.
+ content: The response content body.
+
+ Returns:
+ True if the response should be retried, otherwise False.
+ """
+ # Retry on 5xx errors.
+ if resp_status >= 500:
+ return True
+
+ # Retry on 429 errors.
+ if resp_status == _TOO_MANY_REQUESTS:
+ return True
+
+ # For 403 errors, we have to check for the `reason` in the response to
+ # determine if we should retry.
+ if resp_status == six.moves.http_client.FORBIDDEN:
+ # If there's no details about the 403 type, don't retry.
+ if not content:
+ return False
+
+ # Content is in JSON format.
+ try:
+ data = json.loads(content.decode("utf-8"))
+ if isinstance(data, dict):
+ reason = data["error"]["errors"][0]["reason"]
+ else:
+ reason = data[0]["error"]["errors"]["reason"]
+ except (UnicodeDecodeError, ValueError, KeyError):
+ LOGGER.warning("Invalid JSON content from response: %s", content)
+ return False
+
+ LOGGER.warning('Encountered 403 Forbidden with reason "%s"', reason)
+
+ # Only retry on rate limit related failures.
+ if reason in ("userRateLimitExceeded", "rateLimitExceeded"):
+ return True
+
+    # Everything else is a success or non-retriable, so do not retry.
+ return False
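+
+# Editor's sketch of the resulting decision table (not part of the module):
+#
+#   _should_retry_response(503, b"")   -> True    # any 5xx
+#   _should_retry_response(429, b"")   -> True    # too many requests
+#   _should_retry_response(403, b"")   -> False   # 403 with no reason details
+#   _should_retry_response(200, b"{}") -> False   # success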
+
+
+def _retry_request(
+ http, num_retries, req_type, sleep, rand, uri, method, *args, **kwargs
+):
+ """Retries an HTTP request multiple times while handling errors.
+
+ If after all retries the request still fails, last error is either returned as
+ return value (for HTTP 5xx errors) or thrown (for ssl.SSLError).
+
+ Args:
+ http: Http object to be used to execute request.
+ num_retries: Maximum number of retries.
+ req_type: Type of the request (used for logging retries).
+ sleep, rand: Functions to sleep for random time between retries.
+ uri: URI to be requested.
+ method: HTTP method to be used.
+ args, kwargs: Additional arguments passed to http.request.
+
+ Returns:
+ resp, content - Response from the http request (may be HTTP 5xx).
+ """
+ resp = None
+ content = None
+ exception = None
+ for retry_num in range(num_retries + 1):
+ if retry_num > 0:
+ # Sleep before retrying.
+ sleep_time = rand() * 2 ** retry_num
+ LOGGER.warning(
+ "Sleeping %.2f seconds before retry %d of %d for %s: %s %s, after %s",
+ sleep_time,
+ retry_num,
+ num_retries,
+ req_type,
+ method,
+ uri,
+ resp.status if resp else exception,
+ )
+ sleep(sleep_time)
+
+ try:
+ exception = None
+ resp, content = http.request(uri, method, *args, **kwargs)
+ # Retry on SSL errors and socket timeout errors.
+ except _ssl_SSLError as ssl_error:
+ exception = ssl_error
+ except socket.timeout as socket_timeout:
+            # It's important that this be before socket.error, as it's a
+            # subclass of it; socket.timeout has no errorcode.
+ exception = socket_timeout
+ except ConnectionError as connection_error:
+ # Needs to be before socket.error as it's a subclass of
+ # OSError (socket.error)
+ exception = connection_error
+ except socket.error as socket_error:
+ # errno's contents differ by platform, so we have to match by name.
+ if socket.errno.errorcode.get(socket_error.errno) not in {
+ "WSAETIMEDOUT",
+ "ETIMEDOUT",
+ "EPIPE",
+ "ECONNABORTED",
+ }:
+ raise
+ exception = socket_error
+ except httplib2.ServerNotFoundError as server_not_found_error:
+ exception = server_not_found_error
+
+ if exception:
+ if retry_num == num_retries:
+ raise exception
+ else:
+ continue
+
+ if not _should_retry_response(resp.status, content):
+ break
+
+ return resp, content
+
+
+class MediaUploadProgress(object):
+ """Status of a resumable upload."""
+
+ def __init__(self, resumable_progress, total_size):
+ """Constructor.
+
+ Args:
+ resumable_progress: int, bytes sent so far.
+ total_size: int, total bytes in complete upload, or None if the total
+ upload size isn't known ahead of time.
+ """
+ self.resumable_progress = resumable_progress
+ self.total_size = total_size
+
+ def progress(self):
+ """Percent of upload completed, as a float.
+
+ Returns:
+ the percentage complete as a float, returning 0.0 if the total size of
+ the upload is unknown.
+ """
+ if self.total_size is not None and self.total_size != 0:
+ return float(self.resumable_progress) / float(self.total_size)
+ else:
+ return 0.0
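+
+# e.g. (editor's illustration) MediaUploadProgress(512, 2048).progress() is
+# 0.25, while an unknown total size (total_size=None) reports 0.0.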
+
+
+class MediaDownloadProgress(object):
+ """Status of a resumable download."""
+
+ def __init__(self, resumable_progress, total_size):
+ """Constructor.
+
+ Args:
+ resumable_progress: int, bytes received so far.
+ total_size: int, total bytes in complete download.
+ """
+ self.resumable_progress = resumable_progress
+ self.total_size = total_size
+
+ def progress(self):
+ """Percent of download completed, as a float.
+
+ Returns:
+ the percentage complete as a float, returning 0.0 if the total size of
+ the download is unknown.
+ """
+ if self.total_size is not None and self.total_size != 0:
+ return float(self.resumable_progress) / float(self.total_size)
+ else:
+ return 0.0
+
+
+class MediaUpload(object):
+ """Describes a media object to upload.
+
+ Base class that defines the interface of MediaUpload subclasses.
+
+ Note that subclasses of MediaUpload may allow you to control the chunksize
+ when uploading a media object. It is important to keep the size of the chunk
+ as large as possible to keep the upload efficient. Other factors may influence
+ the size of the chunk you use, particularly if you are working in an
+ environment where individual HTTP requests may have a hardcoded time limit,
+ such as under certain classes of requests under Google App Engine.
+
+ Streams are io.Base compatible objects that support seek(). Some MediaUpload
+ subclasses support using streams directly to upload data. Support for
+ streaming may be indicated by a MediaUpload sub-class and if appropriate for a
+ platform that stream will be used for uploading the media object. The support
+ for streaming is indicated by has_stream() returning True. The stream() method
+ should return an io.Base object that supports seek(). On platforms where the
+ underlying httplib module supports streaming, for example Python 2.6 and
+ later, the stream will be passed into the http library which will result in
+ less memory being used and possibly faster uploads.
+
+ If you need to upload media that can't be uploaded using any of the existing
+ MediaUpload sub-class then you can sub-class MediaUpload for your particular
+ needs.
+ """
+
+ def chunksize(self):
+ """Chunk size for resumable uploads.
+
+ Returns:
+ Chunk size in bytes.
+ """
+ raise NotImplementedError()
+
+ def mimetype(self):
+ """Mime type of the body.
+
+ Returns:
+ Mime type.
+ """
+ return "application/octet-stream"
+
+ def size(self):
+ """Size of upload.
+
+ Returns:
+          Size of the body, or None if the size is unknown.
+ """
+ return None
+
+ def resumable(self):
+ """Whether this upload is resumable.
+
+ Returns:
+ True if resumable upload or False.
+ """
+ return False
+
+    def getbytes(self, begin, length):
+ """Get bytes from the media.
+
+ Args:
+ begin: int, offset from beginning of file.
+ length: int, number of bytes to read, starting at begin.
+
+ Returns:
+ A string of bytes read. May be shorter than length if EOF was reached
+ first.
+ """
+ raise NotImplementedError()
+
+ def has_stream(self):
+ """Does the underlying upload support a streaming interface.
+
+ Streaming means it is an io.IOBase subclass that supports seek, i.e.
+ seekable() returns True.
+
+ Returns:
+ True if the call to stream() will return an instance of a seekable io.Base
+ subclass.
+ """
+ return False
+
+ def stream(self):
+ """A stream interface to the data being uploaded.
+
+ Returns:
+ The returned value is an io.IOBase subclass that supports seek, i.e.
+ seekable() returns True.
+ """
+ raise NotImplementedError()
+
+ @util.positional(1)
+ def _to_json(self, strip=None):
+ """Utility function for creating a JSON representation of a MediaUpload.
+
+ Args:
+ strip: array, An array of names of members to not include in the JSON.
+
+ Returns:
+ string, a JSON representation of this instance, suitable to pass to
+ from_json().
+ """
+ t = type(self)
+ d = copy.copy(self.__dict__)
+ if strip is not None:
+ for member in strip:
+ del d[member]
+ d["_class"] = t.__name__
+ d["_module"] = t.__module__
+ return json.dumps(d)
+
+ def to_json(self):
+ """Create a JSON representation of an instance of MediaUpload.
+
+ Returns:
+ string, a JSON representation of this instance, suitable to pass to
+ from_json().
+ """
+ return self._to_json()
+
+ @classmethod
+ def new_from_json(cls, s):
+ """Utility class method to instantiate a MediaUpload subclass from a JSON
+ representation produced by to_json().
+
+ Args:
+ s: string, JSON from to_json().
+
+ Returns:
+ An instance of the subclass of MediaUpload that was serialized with
+ to_json().
+ """
+ data = json.loads(s)
+ # Find and call the right classmethod from_json() to restore the object.
+ module = data["_module"]
+ m = __import__(module, fromlist=module.split(".")[:-1])
+ kls = getattr(m, data["_class"])
+ from_json = getattr(kls, "from_json")
+ return from_json(s)
+
+
+class MediaIoBaseUpload(MediaUpload):
+ """A MediaUpload for a io.Base objects.
+
+ Note that the Python file object is compatible with io.Base and can be used
+ with this class also.
+
+ fh = BytesIO(b'...Some data to upload...')
+ media = MediaIoBaseUpload(fh, mimetype='image/png',
+ chunksize=1024*1024, resumable=True)
+ farm.animals().insert(
+ id='cow',
+ name='cow.png',
+ media_body=media).execute()
+
+ Depending on the platform you are working on, you may pass -1 as the
+ chunksize, which indicates that the entire file should be uploaded in a single
+ request. If the underlying platform supports streams, such as Python 2.6 or
+ later, then this can be very efficient as it avoids multiple connections, and
+ also avoids loading the entire file into memory before sending it. Note that
+ Google App Engine has a 5MB limit on request size, so you should never set
+ your chunksize larger than 5MB, or to -1.
+ """
+
+ @util.positional(3)
+ def __init__(self, fd, mimetype, chunksize=DEFAULT_CHUNK_SIZE, resumable=False):
+ """Constructor.
+
+ Args:
+ fd: io.Base or file object, The source of the bytes to upload. MUST be
+ opened in blocking mode, do not use streams opened in non-blocking mode.
+ The given stream must be seekable, that is, it must be able to call
+ seek() on fd.
+ mimetype: string, Mime-type of the file.
+ chunksize: int, File will be uploaded in chunks of this many bytes. Only
+ used if resumable=True. Pass in a value of -1 if the file is to be
+ uploaded as a single chunk. Note that Google App Engine has a 5MB limit
+ on request size, so you should never set your chunksize larger than 5MB,
+ or to -1.
+ resumable: bool, True if this is a resumable upload. False means upload
+ in a single request.
+ """
+ super(MediaIoBaseUpload, self).__init__()
+ self._fd = fd
+ self._mimetype = mimetype
+ if not (chunksize == -1 or chunksize > 0):
+ raise InvalidChunkSizeError()
+ self._chunksize = chunksize
+ self._resumable = resumable
+
+ self._fd.seek(0, os.SEEK_END)
+ self._size = self._fd.tell()
+
+ def chunksize(self):
+ """Chunk size for resumable uploads.
+
+ Returns:
+ Chunk size in bytes.
+ """
+ return self._chunksize
+
+ def mimetype(self):
+ """Mime type of the body.
+
+ Returns:
+ Mime type.
+ """
+ return self._mimetype
+
+ def size(self):
+ """Size of upload.
+
+ Returns:
+ Size of the body, or None if the size is unknown.
+ """
+ return self._size
+
+ def resumable(self):
+ """Whether this upload is resumable.
+
+ Returns:
+ True if resumable upload or False.
+ """
+ return self._resumable
+
+ def getbytes(self, begin, length):
+ """Get bytes from the media.
+
+ Args:
+ begin: int, offset from beginning of file.
+ length: int, number of bytes to read, starting at begin.
+
+ Returns:
+ A string of bytes read. May be shorter than length if EOF was reached
+ first.
+ """
+ self._fd.seek(begin)
+ return self._fd.read(length)
+
+ def has_stream(self):
+ """Does the underlying upload support a streaming interface.
+
+ Streaming means it is an io.IOBase subclass that supports seek, i.e.
+ seekable() returns True.
+
+ Returns:
+ True if the call to stream() will return an instance of a seekable io.Base
+ subclass.
+ """
+ return True
+
+ def stream(self):
+ """A stream interface to the data being uploaded.
+
+ Returns:
+ The returned value is an io.IOBase subclass that supports seek, i.e.
+ seekable() returns True.
+ """
+ return self._fd
+
+ def to_json(self):
+ """This upload type is not serializable."""
+ raise NotImplementedError("MediaIoBaseUpload is not serializable.")
+
+
+class MediaFileUpload(MediaIoBaseUpload):
+ """A MediaUpload for a file.
+
+ Construct a MediaFileUpload and pass as the media_body parameter of the
+ method. For example, if we had a service that allowed uploading images:
+
+ media = MediaFileUpload('cow.png', mimetype='image/png',
+ chunksize=1024*1024, resumable=True)
+ farm.animals().insert(
+ id='cow',
+ name='cow.png',
+ media_body=media).execute()
+
+ Depending on the platform you are working on, you may pass -1 as the
+ chunksize, which indicates that the entire file should be uploaded in a single
+ request. If the underlying platform supports streams, such as Python 2.6 or
+ later, then this can be very efficient as it avoids multiple connections, and
+ also avoids loading the entire file into memory before sending it. Note that
+ Google App Engine has a 5MB limit on request size, so you should never set
+ your chunksize larger than 5MB, or to -1.
+ """
+
+ @util.positional(2)
+ def __init__(
+ self, filename, mimetype=None, chunksize=DEFAULT_CHUNK_SIZE, resumable=False
+ ):
+ """Constructor.
+
+ Args:
+ filename: string, Name of the file.
+ mimetype: string, Mime-type of the file. If None then a mime-type will be
+ guessed from the file extension.
+ chunksize: int, File will be uploaded in chunks of this many bytes. Only
+ used if resumable=True. Pass in a value of -1 if the file is to be
+ uploaded in a single chunk. Note that Google App Engine has a 5MB limit
+ on request size, so you should never set your chunksize larger than 5MB,
+ or to -1.
+ resumable: bool, True if this is a resumable upload. False means upload
+ in a single request.
+ """
+ self._filename = filename
+ fd = open(self._filename, "rb")
+ if mimetype is None:
+ # No mimetype provided, make a guess.
+ mimetype, _ = mimetypes.guess_type(filename)
+ if mimetype is None:
+ # Guess failed, use octet-stream.
+ mimetype = "application/octet-stream"
+ super(MediaFileUpload, self).__init__(
+ fd, mimetype, chunksize=chunksize, resumable=resumable
+ )
+
+ def __del__(self):
+ self._fd.close()
+
+ def to_json(self):
+ """Creating a JSON representation of an instance of MediaFileUpload.
+
+ Returns:
+ string, a JSON representation of this instance, suitable to pass to
+ from_json().
+ """
+ return self._to_json(strip=["_fd"])
+
+ @staticmethod
+ def from_json(s):
+ d = json.loads(s)
+ return MediaFileUpload(
+ d["_filename"],
+ mimetype=d["_mimetype"],
+ chunksize=d["_chunksize"],
+ resumable=d["_resumable"],
+ )
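+
+ # A minimal round-trip sketch for to_json()/new_from_json(), assuming a file
+ # named 'cow.png' exists on disk (the filename is hypothetical):
+ #
+ #   media = MediaFileUpload('cow.png', chunksize=1024 * 1024, resumable=True)
+ #   serialized = media.to_json()
+ #   restored = MediaUpload.new_from_json(serialized)
+ #   assert restored.chunksize() == 1024 * 1024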
+
+
+class MediaInMemoryUpload(MediaIoBaseUpload):
+ """MediaUpload for a chunk of bytes.
+
+ DEPRECATED: Use MediaIoBaseUpload with either io.TextIOBase or StringIO for
+ the stream.
+ """
+
+ @util.positional(2)
+ def __init__(
+ self,
+ body,
+ mimetype="application/octet-stream",
+ chunksize=DEFAULT_CHUNK_SIZE,
+ resumable=False,
+ ):
+ """Create a new MediaInMemoryUpload.
+
+ DEPRECATED: Use MediaIoBaseUpload with either io.TextIOBase or StringIO for
+ the stream.
+
+ Args:
+ body: string, Bytes of body content.
+ mimetype: string, Mime-type of the file or default of
+ 'application/octet-stream'.
+ chunksize: int, File will be uploaded in chunks of this many bytes. Only
+ used if resumable=True.
+ resumable: bool, True if this is a resumable upload. False means upload
+ in a single request.
+ """
+ fd = BytesIO(body)
+ super(MediaInMemoryUpload, self).__init__(
+ fd, mimetype, chunksize=chunksize, resumable=resumable
+ )
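+
+ # A minimal sketch of the recommended replacement for the deprecated class
+ # above, assuming the payload is already in memory as bytes:
+ #
+ #   fh = BytesIO(b'...Some data to upload...')
+ #   media = MediaIoBaseUpload(fh, mimetype='application/octet-stream',
+ #                             chunksize=DEFAULT_CHUNK_SIZE, resumable=False)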
+
+
+class MediaIoBaseDownload(object):
+ """"Download media resources.
+
+ Note that the Python file object is compatible with io.Base and can be used
+ with this class also.
+
+
+ Example:
+ request = farms.animals().get_media(id='cow')
+ fh = io.FileIO('cow.png', mode='wb')
+ downloader = MediaIoBaseDownload(fh, request, chunksize=1024*1024)
+
+ done = False
+ while done is False:
+ status, done = downloader.next_chunk()
+ if status:
+ print "Download %d%%." % int(status.progress() * 100)
+ print "Download Complete!"
+ """
+
+ @util.positional(3)
+ def __init__(self, fd, request, chunksize=DEFAULT_CHUNK_SIZE):
+ """Constructor.
+
+ Args:
+ fd: io.Base or file object, The stream in which to write the downloaded
+ bytes.
+ request: googleapiclient.http.HttpRequest, the media request to perform in
+ chunks.
+ chunksize: int, File will be downloaded in chunks of this many bytes.
+ """
+ self._fd = fd
+ self._request = request
+ self._uri = request.uri
+ self._chunksize = chunksize
+ self._progress = 0
+ self._total_size = None
+ self._done = False
+
+ # Stubs for testing.
+ self._sleep = time.sleep
+ self._rand = random.random
+
+ self._headers = {}
+ for k, v in six.iteritems(request.headers):
+ # allow users to supply custom headers by setting them on the request
+ # but strip out the ones that are set by default on requests generated by
+ # API methods like Drive's files().get(fileId=...)
+ if not k.lower() in ("accept", "accept-encoding", "user-agent"):
+ self._headers[k] = v
+
+ @util.positional(1)
+ def next_chunk(self, num_retries=0):
+ """Get the next chunk of the download.
+
+ Args:
+ num_retries: Integer, number of times to retry with randomized
+ exponential backoff. If all retries fail, the raised HttpError
+ represents the last request. If zero (default), we attempt the
+ request only once.
+
+ Returns:
+ (status, done): (MediaDownloadProgress, boolean)
+ The value of 'done' will be True when the media has been fully
+ downloaded or the total size of the media is unknown.
+
+ Raises:
+ googleapiclient.errors.HttpError if the response was not a 2xx.
+ httplib2.HttpLib2Error if a transport error has occurred.
+ """
+ headers = self._headers.copy()
+ headers["range"] = "bytes=%d-%d" % (
+ self._progress,
+ self._progress + self._chunksize,
+ )
+ http = self._request.http
+
+ resp, content = _retry_request(
+ http,
+ num_retries,
+ "media download",
+ self._sleep,
+ self._rand,
+ self._uri,
+ "GET",
+ headers=headers,
+ )
+
+ if resp.status in [200, 206]:
+ if "content-location" in resp and resp["content-location"] != self._uri:
+ self._uri = resp["content-location"]
+ self._progress += len(content)
+ self._fd.write(content)
+
+ if "content-range" in resp:
+ content_range = resp["content-range"]
+ length = content_range.rsplit("/", 1)[1]
+ self._total_size = int(length)
+ elif "content-length" in resp:
+ self._total_size = int(resp["content-length"])
+
+ if self._total_size is None or self._progress == self._total_size:
+ self._done = True
+ return MediaDownloadProgress(self._progress, self._total_size), self._done
+ else:
+ raise HttpError(resp, content, uri=self._uri)
+
+
+class _StreamSlice(object):
+ """Truncated stream.
+
+ Takes a stream and presents a stream that is a slice of the original stream.
+ This is used when uploading media in chunks. In later versions of Python a
+ stream can be passed to httplib in place of the string of data to send. The
+ problem is that httplib just blindly reads to the end of the stream. This
+ wrapper presents a virtual stream that only reads to the end of the chunk.
+ """
+
+ def __init__(self, stream, begin, chunksize):
+ """Constructor.
+
+ Args:
+ stream: (io.Base, file object), the stream to wrap.
+ begin: int, the seek position the chunk begins at.
+ chunksize: int, the size of the chunk.
+ """
+ self._stream = stream
+ self._begin = begin
+ self._chunksize = chunksize
+ self._stream.seek(begin)
+
+ def read(self, n=-1):
+ """Read n bytes.
+
+ Args:
+ n, int, the number of bytes to read.
+
+ Returns:
+ A string of length 'n', or less if EOF is reached.
+ """
+ # The data left available to read sits in [cur, end)
+ cur = self._stream.tell()
+ end = self._begin + self._chunksize
+ if n == -1 or cur + n > end:
+ n = end - cur
+ return self._stream.read(n)
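+
+ # A minimal sketch of how _StreamSlice bounds reads, assuming an in-memory
+ # stream of ten bytes:
+ #
+ #   s = _StreamSlice(BytesIO(b'0123456789'), 2, 4)
+ #   s.read()   # b'2345' -- stops at the chunk end, not at EOF
+ #   s.read()   # b'' -- the slice is exhausted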
+
+
+class HttpRequest(object):
+ """Encapsulates a single HTTP request."""
+
+ @util.positional(4)
+ def __init__(
+ self,
+ http,
+ postproc,
+ uri,
+ method="GET",
+ body=None,
+ headers=None,
+ methodId=None,
+ resumable=None,
+ ):
+ """Constructor for an HttpRequest.
+
+ Args:
+ http: httplib2.Http, the transport object to use to make a request
+ postproc: callable, called on the HTTP response and content to transform
+ it into a data object before returning, or raising an exception
+ on an error.
+ uri: string, the absolute URI to send the request to
+ method: string, the HTTP method to use
+ body: string, the request body of the HTTP request,
+ headers: dict, the HTTP request headers
+ methodId: string, a unique identifier for the API method being called.
+ resumable: MediaUpload, None if this is not a resumable request.
+ """
+ self.uri = uri
+ self.method = method
+ self.body = body
+ self.headers = headers or {}
+ self.methodId = methodId
+ self.http = http
+ self.postproc = postproc
+ self.resumable = resumable
+ self.response_callbacks = []
+ self._in_error_state = False
+
+ # The size of the non-media part of the request.
+ self.body_size = len(self.body or "")
+
+ # The resumable URI to send chunks to.
+ self.resumable_uri = None
+
+ # The bytes that have been uploaded.
+ self.resumable_progress = 0
+
+ # Stubs for testing.
+ self._rand = random.random
+ self._sleep = time.sleep
+
+ @util.positional(1)
+ def execute(self, http=None, num_retries=0):
+ """Execute the request.
+
+ Args:
+ http: httplib2.Http, an http object to be used in place of the
+ one the HttpRequest request object was constructed with.
+ num_retries: Integer, number of times to retry with randomized
+ exponential backoff. If all retries fail, the raised HttpError
+ represents the last request. If zero (default), we attempt the
+ request only once.
+
+ Returns:
+ A deserialized object model of the response body as determined
+ by the postproc.
+
+ Raises:
+ googleapiclient.errors.HttpError if the response was not a 2xx.
+ httplib2.HttpLib2Error if a transport error has occurred.
+ """
+ if http is None:
+ http = self.http
+
+ if self.resumable:
+ body = None
+ while body is None:
+ _, body = self.next_chunk(http=http, num_retries=num_retries)
+ return body
+
+ # Non-resumable case.
+
+ if "content-length" not in self.headers:
+ self.headers["content-length"] = str(self.body_size)
+ # If the request URI is too long then turn it into a POST request.
+ # Assume that a GET request never contains a request body.
+ if len(self.uri) > MAX_URI_LENGTH and self.method == "GET":
+ self.method = "POST"
+ self.headers["x-http-method-override"] = "GET"
+ self.headers["content-type"] = "application/x-www-form-urlencoded"
+ parsed = urlparse(self.uri)
+ self.uri = urlunparse(
+ (parsed.scheme, parsed.netloc, parsed.path, parsed.params, None, None)
+ )
+ self.body = parsed.query
+ self.headers["content-length"] = str(len(self.body))
+
+ # Handle retries for server-side errors.
+ resp, content = _retry_request(
+ http,
+ num_retries,
+ "request",
+ self._sleep,
+ self._rand,
+ str(self.uri),
+ method=str(self.method),
+ body=self.body,
+ headers=self.headers,
+ )
+
+ for callback in self.response_callbacks:
+ callback(resp)
+ if resp.status >= 300:
+ raise HttpError(resp, content, uri=self.uri)
+ return self.postproc(resp, content)
+
+ @util.positional(2)
+ def add_response_callback(self, cb):
+ """add_response_headers_callback
+
+ Args:
+ cb: Callback to be called on receiving the response headers, of signature:
+
+ def cb(resp):
+ # Where resp is an instance of httplib2.Response
+ """
+ self.response_callbacks.append(cb)
+
+ @util.positional(1)
+ def next_chunk(self, http=None, num_retries=0):
+ """Execute the next step of a resumable upload.
+
+ Can only be used if the method being executed supports media uploads and
+ the MediaUpload object passed in was flagged as using resumable upload.
+
+ Example:
+
+ media = MediaFileUpload('cow.png', mimetype='image/png',
+ chunksize=1000, resumable=True)
+ request = farm.animals().insert(
+ id='cow',
+ name='cow.png',
+ media_body=media)
+
+ response = None
+ while response is None:
+ status, response = request.next_chunk()
+ if status:
+ print "Upload %d%% complete." % int(status.progress() * 100)
+
+
+ Args:
+ http: httplib2.Http, an http object to be used in place of the
+ one the HttpRequest request object was constructed with.
+ num_retries: Integer, number of times to retry with randomized
+ exponential backoff. If all retries fail, the raised HttpError
+ represents the last request. If zero (default), we attempt the
+ request only once.
+
+ Returns:
+ (status, body): (ResumableMediaStatus, object)
+ The body will be None until the resumable media is fully uploaded.
+
+ Raises:
+ googleapiclient.errors.HttpError if the response was not a 2xx.
+ httplib2.HttpLib2Error if a transport error has occurred.
+ """
+ if http is None:
+ http = self.http
+
+ if self.resumable.size() is None:
+ size = "*"
+ else:
+ size = str(self.resumable.size())
+
+ if self.resumable_uri is None:
+ start_headers = copy.copy(self.headers)
+ start_headers["X-Upload-Content-Type"] = self.resumable.mimetype()
+ if size != "*":
+ start_headers["X-Upload-Content-Length"] = size
+ start_headers["content-length"] = str(self.body_size)
+
+ resp, content = _retry_request(
+ http,
+ num_retries,
+ "resumable URI request",
+ self._sleep,
+ self._rand,
+ self.uri,
+ method=self.method,
+ body=self.body,
+ headers=start_headers,
+ )
+
+ if resp.status == 200 and "location" in resp:
+ self.resumable_uri = resp["location"]
+ else:
+ raise ResumableUploadError(resp, content)
+ elif self._in_error_state:
+ # If we are in an error state then query the server for current state of
+ # the upload by sending an empty PUT and reading the 'range' header in
+ # the response.
+ headers = {"Content-Range": "bytes */%s" % size, "content-length": "0"}
+ resp, content = http.request(self.resumable_uri, "PUT", headers=headers)
+ status, body = self._process_response(resp, content)
+ if body:
+ # The upload was complete.
+ return (status, body)
+
+ if self.resumable.has_stream():
+ data = self.resumable.stream()
+ if self.resumable.chunksize() == -1:
+ data.seek(self.resumable_progress)
+ chunk_end = self.resumable.size() - self.resumable_progress - 1
+ else:
+ # Doing chunking with a stream, so wrap a slice of the stream.
+ data = _StreamSlice(
+ data, self.resumable_progress, self.resumable.chunksize()
+ )
+ chunk_end = min(
+ self.resumable_progress + self.resumable.chunksize() - 1,
+ self.resumable.size() - 1,
+ )
+ else:
+ data = self.resumable.getbytes(
+ self.resumable_progress, self.resumable.chunksize()
+ )
+
+ # A short read implies that we are at EOF, so finish the upload.
+ if len(data) < self.resumable.chunksize():
+ size = str(self.resumable_progress + len(data))
+
+ chunk_end = self.resumable_progress + len(data) - 1
+
+ headers = {
+ "Content-Range": "bytes %d-%d/%s"
+ % (self.resumable_progress, chunk_end, size),
+ # Must set the content-length header here because httplib can't
+ # calculate the size when working with _StreamSlice.
+ "Content-Length": str(chunk_end - self.resumable_progress + 1),
+ }
+
+ for retry_num in range(num_retries + 1):
+ if retry_num > 0:
+ self._sleep(self._rand() * 2 ** retry_num)
+ LOGGER.warning(
+ "Retry #%d for media upload: %s %s, following status: %d"
+ % (retry_num, self.method, self.uri, resp.status)
+ )
+
+ try:
+ resp, content = http.request(
+ self.resumable_uri, method="PUT", body=data, headers=headers
+ )
+ except:
+ self._in_error_state = True
+ raise
+ if not _should_retry_response(resp.status, content):
+ break
+
+ return self._process_response(resp, content)
+
+ def _process_response(self, resp, content):
+ """Process the response from a single chunk upload.
+
+ Args:
+ resp: httplib2.Response, the response object.
+ content: string, the content of the response.
+
+ Returns:
+ (status, body): (ResumableMediaStatus, object)
+ The body will be None until the resumable media is fully uploaded.
+
+ Raises:
+ googleapiclient.errors.HttpError if the response was not a 2xx or a 308.
+ """
+ if resp.status in [200, 201]:
+ self._in_error_state = False
+ return None, self.postproc(resp, content)
+ elif resp.status == 308:
+ self._in_error_state = False
+ # A "308 Resume Incomplete" indicates we are not done.
+ try:
+ self.resumable_progress = int(resp["range"].split("-")[1]) + 1
+ except KeyError:
+ # If resp doesn't contain range header, resumable progress is 0
+ self.resumable_progress = 0
+ if "location" in resp:
+ self.resumable_uri = resp["location"]
+ else:
+ self._in_error_state = True
+ raise HttpError(resp, content, uri=self.uri)
+
+ return (
+ MediaUploadProgress(self.resumable_progress, self.resumable.size()),
+ None,
+ )
+
+ def to_json(self):
+ """Returns a JSON representation of the HttpRequest."""
+ d = copy.copy(self.__dict__)
+ if d["resumable"] is not None:
+ d["resumable"] = self.resumable.to_json()
+ del d["http"]
+ del d["postproc"]
+ del d["_sleep"]
+ del d["_rand"]
+
+ return json.dumps(d)
+
+ @staticmethod
+ def from_json(s, http, postproc):
+ """Returns an HttpRequest populated with info from a JSON object."""
+ d = json.loads(s)
+ if d["resumable"] is not None:
+ d["resumable"] = MediaUpload.new_from_json(d["resumable"])
+ return HttpRequest(
+ http,
+ postproc,
+ uri=d["uri"],
+ method=d["method"],
+ body=d["body"],
+ headers=d["headers"],
+ methodId=d["methodId"],
+ resumable=d["resumable"],
+ )
+
+ @staticmethod
+ def null_postproc(resp, contents):
+ return resp, contents
+
+
+class BatchHttpRequest(object):
+ """Batches multiple HttpRequest objects into a single HTTP request.
+
+ Example:
+ from googleapiclient.http import BatchHttpRequest
+
+ def list_animals(request_id, response, exception):
+ \"\"\"Do something with the animals list response.\"\"\"
+ if exception is not None:
+ # Do something with the exception.
+ pass
+ else:
+ # Do something with the response.
+ pass
+
+ def list_farmers(request_id, response, exception):
+ \"\"\"Do something with the farmers list response.\"\"\"
+ if exception is not None:
+ # Do something with the exception.
+ pass
+ else:
+ # Do something with the response.
+ pass
+
+ service = build('farm', 'v2')
+
+ batch = BatchHttpRequest()
+
+ batch.add(service.animals().list(), list_animals)
+ batch.add(service.farmers().list(), list_farmers)
+ batch.execute(http=http)
+ """
+
+ @util.positional(1)
+ def __init__(self, callback=None, batch_uri=None):
+ """Constructor for a BatchHttpRequest.
+
+ Args:
+ callback: callable, A callback to be called for each response, of the
+ form callback(id, response, exception). The first parameter is the
+ request id, and the second is the deserialized response object. The
+ third is an googleapiclient.errors.HttpError exception object if an HTTP error
+ occurred while processing the request, or None if no error occurred.
+ batch_uri: string, URI to send batch requests to.
+ """
+ if batch_uri is None:
+ batch_uri = _LEGACY_BATCH_URI
+
+ if batch_uri == _LEGACY_BATCH_URI:
+ LOGGER.warning(
+ "You have constructed a BatchHttpRequest using the legacy batch "
+ "endpoint %s. This endpoint will be turned down on August 12, 2020. "
+ "Please provide the API-specific endpoint or use "
+ "service.new_batch_http_request(). For more details see "
+ "https://developers.googleblog.com/2018/03/discontinuing-support-for-json-rpc-and.html"
+ "and https://developers.google.com/api-client-library/python/guide/batch.",
+ _LEGACY_BATCH_URI,
+ )
+ self._batch_uri = batch_uri
+
+ # Global callback to be called for each individual response in the batch.
+ self._callback = callback
+
+ # A map from id to request.
+ self._requests = {}
+
+ # A map from id to callback.
+ self._callbacks = {}
+
+ # List of request ids, in the order in which they were added.
+ self._order = []
+
+ # The last auto generated id.
+ self._last_auto_id = 0
+
+ # Unique ID on which to base the Content-ID headers.
+ self._base_id = None
+
+ # A map from request id to (httplib2.Response, content) response pairs
+ self._responses = {}
+
+ # A map of id(Credentials) that have been refreshed.
+ self._refreshed_credentials = {}
+
+ def _refresh_and_apply_credentials(self, request, http):
+ """Refresh the credentials and apply to the request.
+
+ Args:
+ request: HttpRequest, the request.
+ http: httplib2.Http, the global http object for the batch.
+ """
+ # For the credentials to refresh, but only once per refresh_token
+ # If there is no http per the request then refresh the http passed in
+ # via execute()
+ creds = None
+ request_credentials = False
+
+ if request.http is not None:
+ creds = _auth.get_credentials_from_http(request.http)
+ request_credentials = True
+
+ if creds is None and http is not None:
+ creds = _auth.get_credentials_from_http(http)
+
+ if creds is not None:
+ if id(creds) not in self._refreshed_credentials:
+ _auth.refresh_credentials(creds)
+ self._refreshed_credentials[id(creds)] = 1
+
+ # Only apply the credentials if we are using the http object passed in,
+ # otherwise apply() will get called during _serialize_request().
+ if request.http is None or not request_credentials:
+ _auth.apply_credentials(creds, request.headers)
+
+ def _id_to_header(self, id_):
+ """Convert an id to a Content-ID header value.
+
+ Args:
+ id_: string, identifier of individual request.
+
+ Returns:
+ A Content-ID header with the id_ encoded into it. A UUID is prepended to
+ the value because Content-ID headers are supposed to be universally
+ unique.
+ """
+ if self._base_id is None:
+ self._base_id = uuid.uuid4()
+
+ # NB: we intentionally leave whitespace between base/id and '+', so RFC2822
+ # line folding works properly on Python 3; see
+ # https://github.com/googleapis/google-api-python-client/issues/164
+ return "<%s + %s>" % (self._base_id, quote(id_))
+
+ def _header_to_id(self, header):
+ """Convert a Content-ID header value to an id.
+
+ Presumes the Content-ID header conforms to the format that _id_to_header()
+ returns.
+
+ Args:
+ header: string, Content-ID header value.
+
+ Returns:
+ The extracted id value.
+
+ Raises:
+ BatchError if the header is not in the expected format.
+ """
+ if header[0] != "<" or header[-1] != ">":
+ raise BatchError("Invalid value for Content-ID: %s" % header)
+ if "+" not in header:
+ raise BatchError("Invalid value for Content-ID: %s" % header)
+ base, id_ = header[1:-1].split(" + ", 1)
+
+ return unquote(id_)
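+
+ # A minimal round-trip sketch for the two helpers above; the batch endpoint
+ # shown is hypothetical:
+ #
+ #   batch = BatchHttpRequest(batch_uri='https://example.com/batch')
+ #   header = batch._id_to_header('42')   # e.g. '<some-uuid + 42>'
+ #   batch._header_to_id(header)          # returns '42'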
+
+ def _serialize_request(self, request):
+ """Convert an HttpRequest object into a string.
+
+ Args:
+ request: HttpRequest, the request to serialize.
+
+ Returns:
+ The request as a string in application/http format.
+ """
+ # Construct status line
+ parsed = urlparse(request.uri)
+ request_line = urlunparse(
+ ("", "", parsed.path, parsed.params, parsed.query, "")
+ )
+ status_line = request.method + " " + request_line + " HTTP/1.1\n"
+ major, minor = request.headers.get("content-type", "application/json").split(
+ "/"
+ )
+ msg = MIMENonMultipart(major, minor)
+ headers = request.headers.copy()
+
+ if request.http is not None:
+ credentials = _auth.get_credentials_from_http(request.http)
+ if credentials is not None:
+ _auth.apply_credentials(credentials, headers)
+
+ # MIMENonMultipart adds its own Content-Type header.
+ if "content-type" in headers:
+ del headers["content-type"]
+
+ for key, value in six.iteritems(headers):
+ msg[key] = value
+ msg["Host"] = parsed.netloc
+ msg.set_unixfrom(None)
+
+ if request.body is not None:
+ msg.set_payload(request.body)
+ msg["content-length"] = str(len(request.body))
+
+ # Serialize the mime message.
+ fp = StringIO()
+ # maxheaderlen=0 means don't line wrap headers.
+ g = Generator(fp, maxheaderlen=0)
+ g.flatten(msg, unixfrom=False)
+ body = fp.getvalue()
+
+ return status_line + body
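+
+ # A minimal sketch of the application/http serialization above; the request
+ # uri and batch endpoint are hypothetical:
+ #
+ #   req = HttpRequest(None, HttpRequest.null_postproc,
+ #                     'https://example.com/farm/v2/animals?alt=json')
+ #   BatchHttpRequest(batch_uri='https://example.com/batch')._serialize_request(req)
+ #   # yields roughly:
+ #   #   GET /farm/v2/animals?alt=json HTTP/1.1
+ #   #   Content-Type: application/json
+ #   #   MIME-Version: 1.0
+ #   #   Host: example.com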
+
+ def _deserialize_response(self, payload):
+ """Convert string into httplib2 response and content.
+
+ Args:
+ payload: string, headers and body as a string.
+
+ Returns:
+ A pair (resp, content), such as would be returned from httplib2.request.
+ """
+ # Strip off the status line
+ status_line, payload = payload.split("\n", 1)
+ protocol, status, reason = status_line.split(" ", 2)
+
+ # Parse the rest of the response
+ parser = FeedParser()
+ parser.feed(payload)
+ msg = parser.close()
+ msg["status"] = status
+
+ # Create httplib2.Response from the parsed headers.
+ resp = httplib2.Response(msg)
+ resp.reason = reason
+ resp.version = int(protocol.split("/", 1)[1].replace(".", ""))
+
+ content = payload.split("\r\n\r\n", 1)[1]
+
+ return resp, content
+
+ def _new_id(self):
+ """Create a new id.
+
+ Auto incrementing number that avoids conflicts with ids already used.
+
+ Returns:
+ string, a new unique id.
+ """
+ self._last_auto_id += 1
+ while str(self._last_auto_id) in self._requests:
+ self._last_auto_id += 1
+ return str(self._last_auto_id)
+
+ @util.positional(2)
+ def add(self, request, callback=None, request_id=None):
+ """Add a new request.
+
+ Every callback added will be paired with a unique id, the request_id. That
+ unique id will be passed back to the callback when the response comes back
+ from the server. The default behavior is to have the library generate its
+ own unique id. If the caller passes in a request_id then they must ensure
+ uniqueness for each request_id; if a duplicate is supplied, an exception is
+ raised. Callers should either supply all request_ids or never supply a
+ request id, to avoid such an error.
+
+ Args:
+ request: HttpRequest, Request to add to the batch.
+ callback: callable, A callback to be called for this response, of the
+ form callback(id, response, exception). The first parameter is the
+ request id, and the second is the deserialized response object. The
+ third is an googleapiclient.errors.HttpError exception object if an HTTP error
+ occurred while processing the request, or None if no errors occurred.
+ request_id: string, A unique id for the request. The id will be passed
+ to the callback with the response.
+
+ Returns:
+ None
+
+ Raises:
+ BatchError if a media request is added to a batch.
+ KeyError if the request_id is not unique.
+ """
+
+ if len(self._order) >= MAX_BATCH_LIMIT:
+ raise BatchError(
+ "Exceeded the maximum calls(%d) in a single batch request."
+ % MAX_BATCH_LIMIT
+ )
+ if request_id is None:
+ request_id = self._new_id()
+ if request.resumable is not None:
+ raise BatchError("Media requests cannot be used in a batch request.")
+ if request_id in self._requests:
+ raise KeyError("A request with this ID already exists: %s" % request_id)
+ self._requests[request_id] = request
+ self._callbacks[request_id] = callback
+ self._order.append(request_id)
+
+ def _execute(self, http, order, requests):
+ """Serialize batch request, send to server, process response.
+
+ Args:
+ http: httplib2.Http, an http object to be used to make the request with.
+ order: list, list of request ids in the order they were added to the
+ batch.
+ requests: list, list of request objects to send.
+
+ Raises:
+ httplib2.HttpLib2Error if a transport error has occurred.
+ googleapiclient.errors.BatchError if the response is the wrong format.
+ """
+ message = MIMEMultipart("mixed")
+ # Message should not write out its own headers.
+ setattr(message, "_write_headers", lambda self: None)
+
+ # Add all the individual requests.
+ for request_id in order:
+ request = requests[request_id]
+
+ msg = MIMENonMultipart("application", "http")
+ msg["Content-Transfer-Encoding"] = "binary"
+ msg["Content-ID"] = self._id_to_header(request_id)
+
+ body = self._serialize_request(request)
+ msg.set_payload(body)
+ message.attach(msg)
+
+ # encode the body: note that we can't use `as_string`, because
+ # it plays games with `From ` lines.
+ fp = StringIO()
+ g = Generator(fp, mangle_from_=False)
+ g.flatten(message, unixfrom=False)
+ body = fp.getvalue()
+
+ headers = {}
+ headers["content-type"] = (
+ "multipart/mixed; " 'boundary="%s"'
+ ) % message.get_boundary()
+
+ resp, content = http.request(
+ self._batch_uri, method="POST", body=body, headers=headers
+ )
+
+ if resp.status >= 300:
+ raise HttpError(resp, content, uri=self._batch_uri)
+
+ # Prepend with a content-type header so FeedParser can handle it.
+ header = "content-type: %s\r\n\r\n" % resp["content-type"]
+ # PY3's FeedParser only accepts unicode. So we should decode content
+ # here, and encode each payload again.
+ if six.PY3:
+ content = content.decode("utf-8")
+ for_parser = header + content
+
+ parser = FeedParser()
+ parser.feed(for_parser)
+ mime_response = parser.close()
+
+ if not mime_response.is_multipart():
+ raise BatchError(
+ "Response not in multipart/mixed format.", resp=resp, content=content
+ )
+
+ for part in mime_response.get_payload():
+ request_id = self._header_to_id(part["Content-ID"])
+ response, content = self._deserialize_response(part.get_payload())
+ # We encode content here to emulate normal http response.
+ if isinstance(content, six.text_type):
+ content = content.encode("utf-8")
+ self._responses[request_id] = (response, content)
+
+ @util.positional(1)
+ def execute(self, http=None):
+ """Execute all the requests as a single batched HTTP request.
+
+ Args:
+ http: httplib2.Http, an http object to be used in place of the one the
+ HttpRequest request object was constructed with. If one isn't supplied
+ then use an http object from the requests in this batch.
+
+ Returns:
+ None
+
+ Raises:
+ httplib2.HttpLib2Error if a transport error has occurred.
+ googleapiclient.errors.BatchError if the response is the wrong format.
+ """
+ # If we have no requests return
+ if len(self._order) == 0:
+ return None
+
+ # If http is not supplied use the first valid one given in the requests.
+ if http is None:
+ for request_id in self._order:
+ request = self._requests[request_id]
+ if request is not None:
+ http = request.http
+ break
+
+ if http is None:
+ raise ValueError("Missing a valid http object.")
+
+ # Special case for OAuth2Credentials-style objects which have not yet been
+ # refreshed with an initial access_token.
+ creds = _auth.get_credentials_from_http(http)
+ if creds is not None:
+ if not _auth.is_valid(creds):
+ LOGGER.info("Attempting refresh to obtain initial access_token")
+ _auth.refresh_credentials(creds)
+
+ self._execute(http, self._order, self._requests)
+
+ # Loop over all the requests and check for 401s. For each 401 request the
+ # credentials should be refreshed and then sent again in a separate batch.
+ redo_requests = {}
+ redo_order = []
+
+ for request_id in self._order:
+ resp, content = self._responses[request_id]
+ if resp["status"] == "401":
+ redo_order.append(request_id)
+ request = self._requests[request_id]
+ self._refresh_and_apply_credentials(request, http)
+ redo_requests[request_id] = request
+
+ if redo_requests:
+ self._execute(http, redo_order, redo_requests)
+
+ # Now process all callbacks that are erroring, and raise an exception for
+ # ones that return a non-2xx response? Or add extra parameter to callback
+ # that contains an HttpError?
+
+ for request_id in self._order:
+ resp, content = self._responses[request_id]
+
+ request = self._requests[request_id]
+ callback = self._callbacks[request_id]
+
+ response = None
+ exception = None
+ try:
+ if resp.status >= 300:
+ raise HttpError(resp, content, uri=request.uri)
+ response = request.postproc(resp, content)
+ except HttpError as e:
+ exception = e
+
+ if callback is not None:
+ callback(request_id, response, exception)
+ if self._callback is not None:
+ self._callback(request_id, response, exception)
+
+
+class HttpRequestMock(object):
+ """Mock of HttpRequest.
+
+ Do not construct directly, instead use RequestMockBuilder.
+ """
+
+ def __init__(self, resp, content, postproc):
+ """Constructor for HttpRequestMock
+
+ Args:
+ resp: httplib2.Response, the response to emulate coming from the request
+ content: string, the response body
+ postproc: callable, the post processing function usually supplied by
+ the model class. See model.JsonModel.response() as an example.
+ """
+ self.resp = resp
+ self.content = content
+ self.postproc = postproc
+ if resp is None:
+ self.resp = httplib2.Response({"status": 200, "reason": "OK"})
+ if "reason" in self.resp:
+ self.resp.reason = self.resp["reason"]
+
+ def execute(self, http=None):
+ """Execute the request.
+
+ Same behavior as HttpRequest.execute(), but the response is
+ mocked and not really from an HTTP request/response.
+ """
+ return self.postproc(self.resp, self.content)
+
+
+class RequestMockBuilder(object):
+ """A simple mock of HttpRequest
+
+ Pass in a dictionary to the constructor that maps request methodIds to
+ tuples of (httplib2.Response, content, opt_expected_body) that should be
+ returned when that method is called. None may also be passed in for the
+ httplib2.Response, in which case a 200 OK response will be generated.
+ If an opt_expected_body (str or dict) is provided, it will be compared to
+ the body and UnexpectedBodyError will be raised on inequality.
+
+ Example:
+ response = '{"data": {"id": "tag:google.c...'
+ requestBuilder = RequestMockBuilder(
+ {
+ 'plus.activities.get': (None, response),
+ }
+ )
+ googleapiclient.discovery.build("plus", "v1", requestBuilder=requestBuilder)
+
+ Methods that you do not supply a response for will return a
+ 200 OK with an empty string as the response content or raise an exception
+ if check_unexpected is set to True. The methodId is taken from the rpcName
+ in the discovery document.
+
+ For more details see the project wiki.
+ """
+
+ def __init__(self, responses, check_unexpected=False):
+ """Constructor for RequestMockBuilder
+
+ The constructed object should be a callable object
+ that can replace the class HttpResponse.
+
+ responses - A dictionary that maps methodIds into tuples
+ of (httplib2.Response, content). The methodId
+ comes from the 'rpcName' field in the discovery
+ document.
+ check_unexpected - A boolean setting whether or not UnexpectedMethodError
+ should be raised on unsupplied method.
+ """
+ self.responses = responses
+ self.check_unexpected = check_unexpected
+
+ def __call__(
+ self,
+ http,
+ postproc,
+ uri,
+ method="GET",
+ body=None,
+ headers=None,
+ methodId=None,
+ resumable=None,
+ ):
+ """Implements the callable interface that discovery.build() expects
+ of requestBuilder, which is to build an object compatible with
+ HttpRequest.execute(). See that method for the description of the
+ parameters and the expected response.
+ """
+ if methodId in self.responses:
+ response = self.responses[methodId]
+ resp, content = response[:2]
+ if len(response) > 2:
+ # Test the body against the supplied expected_body.
+ expected_body = response[2]
+ if bool(expected_body) != bool(body):
+ # Not expecting a body and provided one
+ # or expecting a body and not provided one.
+ raise UnexpectedBodyError(expected_body, body)
+ if isinstance(expected_body, str):
+ expected_body = json.loads(expected_body)
+ body = json.loads(body)
+ if body != expected_body:
+ raise UnexpectedBodyError(expected_body, body)
+ return HttpRequestMock(resp, content, postproc)
+ elif self.check_unexpected:
+ raise UnexpectedMethodError(methodId=methodId)
+ else:
+ model = JsonModel(False)
+ return HttpRequestMock(None, "{}", model.response)
+
+
+class HttpMock(object):
+ """Mock of httplib2.Http"""
+
+ def __init__(self, filename=None, headers=None):
+ """
+ Args:
+ filename: string, absolute filename to read response from
+ headers: dict, header to return with response
+ """
+ if headers is None:
+ headers = {"status": "200"}
+ if filename:
+ with open(filename, "rb") as f:
+ self.data = f.read()
+ else:
+ self.data = None
+ self.response_headers = headers
+ self.uri = None
+ self.method = None
+ self.body = None
+ self.headers = None
+
+ def request(
+ self,
+ uri,
+ method="GET",
+ body=None,
+ headers=None,
+ redirections=1,
+ connection_type=None,
+ ):
+ self.uri = uri
+ self.method = method
+ self.body = body
+ self.headers = headers
+ return httplib2.Response(self.response_headers), self.data
+
+ def close(self):
+ return None
+
+class HttpMockSequence(object):
+ """Mock of httplib2.Http
+
+ Mocks a sequence of calls to request returning different responses for each
+ call. Create an instance initialized with the desired response headers
+ and content and then use as if an httplib2.Http instance.
+
+ http = HttpMockSequence([
+ ({'status': '401'}, ''),
+ ({'status': '200'}, '{"access_token":"1/3w","expires_in":3600}'),
+ ({'status': '200'}, 'echo_request_headers'),
+ ])
+ resp, content = http.request("http://examples.com")
+
+ There are special values you can pass in for content to trigger
+ behaviours that are helpful in testing.
+
+ 'echo_request_headers' means return the request headers in the response body
+ 'echo_request_headers_as_json' means return the request headers, encoded
+ as JSON, in the response body
+ 'echo_request_body' means return the request body in the response body
+ 'echo_request_uri' means return the request uri in the response body
+ """
+
+ def __init__(self, iterable):
+ """
+ Args:
+ iterable: iterable, a sequence of pairs of (headers, body)
+ """
+ self._iterable = iterable
+ self.follow_redirects = True
+ self.request_sequence = list()
+
+ def request(
+ self,
+ uri,
+ method="GET",
+ body=None,
+ headers=None,
+ redirections=1,
+ connection_type=None,
+ ):
+ # Remember the request so after the fact this mock can be examined
+ self.request_sequence.append((uri, method, body, headers))
+ resp, content = self._iterable.pop(0)
+ content = six.ensure_binary(content)
+
+ if content == b"echo_request_headers":
+ content = headers
+ elif content == b"echo_request_headers_as_json":
+ content = json.dumps(headers)
+ elif content == b"echo_request_body":
+ if hasattr(body, "read"):
+ content = body.read()
+ else:
+ content = body
+ elif content == b"echo_request_uri":
+ content = uri
+ if isinstance(content, six.text_type):
+ content = content.encode("utf-8")
+ return httplib2.Response(resp), content
+
+
+def set_user_agent(http, user_agent):
+ """Set the user-agent on every request.
+
+ Args:
+ http - An instance of httplib2.Http
+ or something that acts like it.
+ user_agent: string, the value for the user-agent header.
+
+ Returns:
+ A modified instance of http that was passed in.
+
+ Example:
+
+ h = httplib2.Http()
+ h = set_user_agent(h, "my-app-name/6.0")
+
+ Most of the time the user-agent will be set during auth; this is for the rare
+ cases where you are accessing an unauthenticated endpoint.
+ """
+ request_orig = http.request
+
+ # The closure that will replace 'httplib2.Http.request'.
+ def new_request(
+ uri,
+ method="GET",
+ body=None,
+ headers=None,
+ redirections=httplib2.DEFAULT_MAX_REDIRECTS,
+ connection_type=None,
+ ):
+ """Modify the request headers to add the user-agent."""
+ if headers is None:
+ headers = {}
+ if "user-agent" in headers:
+ headers["user-agent"] = user_agent + " " + headers["user-agent"]
+ else:
+ headers["user-agent"] = user_agent
+ resp, content = request_orig(
+ uri,
+ method=method,
+ body=body,
+ headers=headers,
+ redirections=redirections,
+ connection_type=connection_type,
+ )
+ return resp, content
+
+ http.request = new_request
+ return http
+
+
+def tunnel_patch(http):
+ """Tunnel PATCH requests over POST.
+ Args:
+ http - An instance of httplib2.Http
+ or something that acts like it.
+
+ Returns:
+ A modified instance of http that was passed in.
+
+ Example:
+
+ h = httplib2.Http()
+ h = tunnel_patch(h, "my-app-name/6.0")
+
+ Useful if you are running on a platform that doesn't support PATCH.
+ Apply this last if you are using OAuth 1.0, as changing the method
+ will result in a different signature.
+ """
+ request_orig = http.request
+
+ # The closure that will replace 'httplib2.Http.request'.
+ def new_request(
+ uri,
+ method="GET",
+ body=None,
+ headers=None,
+ redirections=httplib2.DEFAULT_MAX_REDIRECTS,
+ connection_type=None,
+ ):
+ """Modify the request headers to add the user-agent."""
+ if headers is None:
+ headers = {}
+ if method == "PATCH":
+ if "oauth_token" in headers.get("authorization", ""):
+ LOGGER.warning(
+ "OAuth 1.0 request made with Credentials after tunnel_patch."
+ )
+ headers["x-http-method-override"] = "PATCH"
+ method = "POST"
+ resp, content = request_orig(
+ uri,
+ method=method,
+ body=body,
+ headers=headers,
+ redirections=redirections,
+ connection_type=connection_type,
+ )
+ return resp, content
+
+ http.request = new_request
+ return http
+
+
+def build_http():
+ """Builds httplib2.Http object
+
+ Returns:
+ An httplib2.Http object, which is used to make http requests, and which has a timeout set by default.
+ To override the default timeout, call
+
+ socket.setdefaulttimeout(timeout_in_sec)
+
+ before calling this function.
+ """
+ if socket.getdefaulttimeout() is not None:
+ http_timeout = socket.getdefaulttimeout()
+ else:
+ http_timeout = DEFAULT_HTTP_TIMEOUT_SEC
+ http = httplib2.Http(timeout=http_timeout)
+ # 308's are used by several Google APIs (Drive, YouTube)
+ # for Resumable Uploads rather than Permanent Redirects.
+ # This asks httplib2 to exclude 308s from the status codes
+ # it treats as redirects
+ try:
+ http.redirect_codes = http.redirect_codes - {308}
+ except AttributeError:
+ # Apache Beam tests depend on this library and cannot
+ # currently upgrade their httplib2 version
+ # http.redirect_codes does not exist in previous versions
+ # of httplib2, so pass
+ pass
+
+ return http
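+
+# A minimal usage sketch, assuming a 30 second timeout is desired:
+#
+#   import socket
+#   socket.setdefaulttimeout(30)
+#   http = build_http()   # picks up the 30s default and excludes 308 redirects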
diff --git a/venv/Lib/site-packages/googleapiclient/mimeparse.py b/venv/Lib/site-packages/googleapiclient/mimeparse.py
new file mode 100644
index 000000000..6051628f3
--- /dev/null
+++ b/venv/Lib/site-packages/googleapiclient/mimeparse.py
@@ -0,0 +1,183 @@
+# Copyright 2014 Joe Gregorio
+#
+# Licensed under the MIT License
+
+"""MIME-Type Parser
+
+This module provides basic functions for handling mime-types. It can handle
+matching mime-types against a list of media-ranges. See section 14.1 of the
+HTTP specification [RFC 2616] for a complete explanation.
+
+ http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.1
+
+Contents:
+ - parse_mime_type(): Parses a mime-type into its component parts.
+ - parse_media_range(): Media-ranges are mime-types with wild-cards and a 'q'
+ quality parameter.
+ - quality(): Determines the quality ('q') of a mime-type when
+ compared against a list of media-ranges.
+ - quality_parsed(): Just like quality() except the second parameter must be
+ pre-parsed.
+ - best_match(): Choose the mime-type with the highest quality ('q')
+ from a list of candidates.
+"""
+from __future__ import absolute_import
+from functools import reduce
+import six
+
+__version__ = "0.1.3"
+__author__ = "Joe Gregorio"
+__email__ = "joe@bitworking.org"
+__license__ = "MIT License"
+__credits__ = ""
+
+
+def parse_mime_type(mime_type):
+ """Parses a mime-type into its component parts.
+
+ Carves up a mime-type and returns a tuple of the (type, subtype, params)
+ where 'params' is a dictionary of all the parameters for the media range.
+ For example, the media range 'application/xhtml;q=0.5' would get parsed
+ into:
+
+ ('application', 'xhtml', {'q': '0.5'})
+ """
+ parts = mime_type.split(";")
+ params = dict(
+ [tuple([s.strip() for s in param.split("=", 1)]) for param in parts[1:]]
+ )
+ full_type = parts[0].strip()
+ # Java URLConnection class sends an Accept header that includes a
+ # single '*'. Turn it into a legal wildcard.
+ if full_type == "*":
+ full_type = "*/*"
+ (type, subtype) = full_type.split("/")
+
+ return (type.strip(), subtype.strip(), params)
+
+
+def parse_media_range(range):
+ """Parse a media-range into its component parts.
+
+ Carves up a media range and returns a tuple of the (type, subtype,
+ params) where 'params' is a dictionary of all the parameters for the media
+ range. For example, the media range 'application/*;q=0.5' would get parsed
+ into:
+
+ ('application', '*', {'q': '0.5'})
+
+ In addition this function also guarantees that there is a value for 'q'
+ in the params dictionary, filling it in with a proper default if
+ necessary.
+ """
+ (type, subtype, params) = parse_mime_type(range)
+ if (
+ "q" not in params
+ or not params["q"]
+ or not float(params["q"])
+ or float(params["q"]) > 1
+ or float(params["q"]) < 0
+ ):
+ params["q"] = "1"
+
+ return (type, subtype, params)
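+
+# A minimal sketch of the default 'q' fill-in above:
+#
+#   parse_media_range('application/*')
+#   # ('application', '*', {'q': '1'})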
+
+
+def fitness_and_quality_parsed(mime_type, parsed_ranges):
+ """Find the best match for a mime-type amongst parsed media-ranges.
+
+ Find the best match for a given mime-type against a list of media_ranges
+ that have already been parsed by parse_media_range(). Returns a tuple of
+ the fitness value and the value of the 'q' quality parameter of the best
+ match, or (-1, 0) if no match was found. Just as for quality_parsed(),
+ 'parsed_ranges' must be a list of parsed media ranges.
+ """
+ best_fitness = -1
+ best_fit_q = 0
+ (target_type, target_subtype, target_params) = parse_media_range(mime_type)
+ for (type, subtype, params) in parsed_ranges:
+ type_match = type == target_type or type == "*" or target_type == "*"
+ subtype_match = (
+ subtype == target_subtype or subtype == "*" or target_subtype == "*"
+ )
+ if type_match and subtype_match:
+ param_matches = reduce(
+ lambda x, y: x + y,
+ [
+ 1
+ for (key, value) in six.iteritems(target_params)
+ if key != "q" and key in params and value == params[key]
+ ],
+ 0,
+ )
+ fitness = (type == target_type) and 100 or 0
+ fitness += (subtype == target_subtype) and 10 or 0
+ fitness += param_matches
+ if fitness > best_fitness:
+ best_fitness = fitness
+ best_fit_q = params["q"]
+
+ return best_fitness, float(best_fit_q)
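+
+# A minimal sketch of the fitness scoring above: an exact type match scores
+# 100, an exact subtype match 10, and each matching parameter 1.
+#
+#   ranges = [parse_media_range(r)
+#             for r in 'text/*;q=0.3, text/html;q=0.7'.split(',')]
+#   fitness_and_quality_parsed('text/html', ranges)   # returns (110, 0.7)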
+
+
+def quality_parsed(mime_type, parsed_ranges):
+ """Find the best match for a mime-type amongst parsed media-ranges.
+
+ Find the best match for a given mime-type against a list of media_ranges
+ that have already been parsed by parse_media_range(). Returns the 'q'
+ quality parameter of the best match, 0 if no match was found. This function
+ behaves the same as quality() except that 'parsed_ranges' must be a list of
+ parsed media ranges.
+ """
+
+ return fitness_and_quality_parsed(mime_type, parsed_ranges)[1]
+
+
+def quality(mime_type, ranges):
+ """Return the quality ('q') of a mime-type against a list of media-ranges.
+
+ Returns the quality 'q' of a mime-type when compared against the
+ media-ranges in ranges. For example:
+
+ >>> quality('text/html','text/*;q=0.3, text/html;q=0.7,
+ text/html;level=1, text/html;level=2;q=0.4, */*;q=0.5')
+ 0.7
+
+ """
+ parsed_ranges = [parse_media_range(r) for r in ranges.split(",")]
+
+ return quality_parsed(mime_type, parsed_ranges)
+
+
+def best_match(supported, header):
+ """Return mime-type with the highest quality ('q') from list of candidates.
+
+ Takes a list of supported mime-types and finds the best match for all the
+ media-ranges listed in header. The value of header must be a string that
+ conforms to the format of the HTTP Accept: header. The value of 'supported'
+ is a list of mime-types. The list of supported mime-types should be sorted
+ in order of increasing desirability, in case of a situation where there is
+ a tie.
+
+ >>> best_match(['application/xbel+xml', 'text/xml'],
+ 'text/*;q=0.5,*/*; q=0.1')
+ 'text/xml'
+ """
+ split_header = _filter_blank(header.split(","))
+ parsed_header = [parse_media_range(r) for r in split_header]
+ weighted_matches = []
+ pos = 0
+ for mime_type in supported:
+ weighted_matches.append(
+ (fitness_and_quality_parsed(mime_type, parsed_header), pos, mime_type)
+ )
+ pos += 1
+ weighted_matches.sort()
+
+ return weighted_matches[-1][0][1] and weighted_matches[-1][2] or ""
+
+
+def _filter_blank(i):
+ for s in i:
+ if s.strip():
+ yield s
diff --git a/venv/Lib/site-packages/googleapiclient/model.py b/venv/Lib/site-packages/googleapiclient/model.py
new file mode 100644
index 000000000..f58549c49
--- /dev/null
+++ b/venv/Lib/site-packages/googleapiclient/model.py
@@ -0,0 +1,407 @@
+# Copyright 2014 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Model objects for requests and responses.
+
+Each API may support one or more serializations, such
+as JSON, Atom, etc. The model classes are responsible
+for converting between the wire format and the Python
+object representation.
+"""
+from __future__ import absolute_import
+import six
+
+__author__ = "jcgregorio@google.com (Joe Gregorio)"
+
+import json
+import logging
+import platform
+import pkg_resources
+
+from six.moves.urllib.parse import urlencode
+
+from googleapiclient.errors import HttpError
+
+_LIBRARY_VERSION = pkg_resources.get_distribution("google-api-python-client").version
+_PY_VERSION = platform.python_version()
+
+LOGGER = logging.getLogger(__name__)
+
+dump_request_response = False
+
+
+def _abstract():
+ raise NotImplementedError("You need to override this function")
+
+
+class Model(object):
+ """Model base class.
+
+ All Model classes should implement this interface.
+ The Model serializes and de-serializes between a wire
+ format such as JSON and a Python object representation.
+ """
+
+ def request(self, headers, path_params, query_params, body_value):
+ """Updates outgoing requests with a serialized body.
+
+ Args:
+ headers: dict, request headers
+ path_params: dict, parameters that appear in the request path
+ query_params: dict, parameters that appear in the query
+ body_value: object, the request body as a Python object, which must be
+ serializable.
+ Returns:
+ A tuple of (headers, path_params, query, body)
+
+ headers: dict, request headers
+ path_params: dict, parameters that appear in the request path
+ query: string, query part of the request URI
+ body: string, the body serialized in the desired wire format.
+ """
+ _abstract()
+
+ def response(self, resp, content):
+ """Convert the response wire format into a Python object.
+
+ Args:
+ resp: httplib2.Response, the HTTP response headers and status
+ content: string, the body of the HTTP response
+
+ Returns:
+ The body de-serialized as a Python object.
+
+ Raises:
+ googleapiclient.errors.HttpError if a non 2xx response is received.
+ """
+ _abstract()
+
+
+class BaseModel(Model):
+ """Base model class.
+
+ Subclasses should provide implementations for the "serialize" and
+ "deserialize" methods, as well as values for the following class attributes.
+
+ Attributes:
+ accept: The value to use for the HTTP Accept header.
+ content_type: The value to use for the HTTP Content-type header.
+ no_content_response: The value to return when deserializing a 204 "No
+ Content" response.
+ alt_param: The value to supply as the "alt" query parameter for requests.
+ """
+
+ accept = None
+ content_type = None
+ no_content_response = None
+ alt_param = None
+
+ def _log_request(self, headers, path_params, query, body):
+ """Logs debugging information about the request if requested."""
+ if dump_request_response:
+ LOGGER.info("--request-start--")
+ LOGGER.info("-headers-start-")
+ for h, v in six.iteritems(headers):
+ LOGGER.info("%s: %s", h, v)
+ LOGGER.info("-headers-end-")
+ LOGGER.info("-path-parameters-start-")
+ for h, v in six.iteritems(path_params):
+ LOGGER.info("%s: %s", h, v)
+ LOGGER.info("-path-parameters-end-")
+ LOGGER.info("body: %s", body)
+ LOGGER.info("query: %s", query)
+ LOGGER.info("--request-end--")
+
+ def request(self, headers, path_params, query_params, body_value):
+ """Updates outgoing requests with a serialized body.
+
+ Args:
+ headers: dict, request headers
+ path_params: dict, parameters that appear in the request path
+ query_params: dict, parameters that appear in the query
+ body_value: object, the request body as a Python object, which must be
+ serializable by json.
+ Returns:
+ A tuple of (headers, path_params, query, body)
+
+ headers: dict, request headers
+ path_params: dict, parameters that appear in the request path
+ query: string, query part of the request URI
+ body: string, the body serialized as JSON
+ """
+ query = self._build_query(query_params)
+ headers["accept"] = self.accept
+ headers["accept-encoding"] = "gzip, deflate"
+ if "user-agent" in headers:
+ headers["user-agent"] += " "
+ else:
+ headers["user-agent"] = ""
+ headers["user-agent"] += "(gzip)"
+ if "x-goog-api-client" in headers:
+ headers["x-goog-api-client"] += " "
+ else:
+ headers["x-goog-api-client"] = ""
+ headers["x-goog-api-client"] += "gdcl/%s gl-python/%s" % (
+ _LIBRARY_VERSION,
+ _PY_VERSION,
+ )
+
+ if body_value is not None:
+ headers["content-type"] = self.content_type
+ body_value = self.serialize(body_value)
+ self._log_request(headers, path_params, query, body_value)
+ return (headers, path_params, query, body_value)
+
+ def _build_query(self, params):
+ """Builds a query string.
+
+ Args:
+ params: dict, the query parameters
+
+ Returns:
+ The query parameters properly encoded into an HTTP URI query string.
+ """
+ if self.alt_param is not None:
+ params.update({"alt": self.alt_param})
+ astuples = []
+ for key, value in six.iteritems(params):
+ if type(value) == type([]):
+ for x in value:
+ x = x.encode("utf-8")
+ astuples.append((key, x))
+ else:
+ if isinstance(value, six.text_type) and callable(value.encode):
+ value = value.encode("utf-8")
+ astuples.append((key, value))
+ return "?" + urlencode(astuples)
+
+ def _log_response(self, resp, content):
+ """Logs debugging information about the response if requested."""
+ if dump_request_response:
+ LOGGER.info("--response-start--")
+ for h, v in six.iteritems(resp):
+ LOGGER.info("%s: %s", h, v)
+ if content:
+ LOGGER.info(content)
+ LOGGER.info("--response-end--")
+
+ def response(self, resp, content):
+ """Convert the response wire format into a Python object.
+
+ Args:
+ resp: httplib2.Response, the HTTP response headers and status
+ content: string, the body of the HTTP response
+
+ Returns:
+ The body de-serialized as a Python object.
+
+ Raises:
+ googleapiclient.errors.HttpError if a non-2xx response is received.
+ """
+ self._log_response(resp, content)
+ # Error handling is TBD, for example, do we retry
+ # for some operation/error combinations?
+ if resp.status < 300:
+ if resp.status == 204:
+ # A 204: No Content response should be treated differently
+ # to all the other success states
+ return self.no_content_response
+ return self.deserialize(content)
+ else:
+ LOGGER.debug("Content from bad request was: %r" % content)
+ raise HttpError(resp, content)
+
+ def serialize(self, body_value):
+ """Perform the actual Python object serialization.
+
+ Args:
+ body_value: object, the request body as a Python object.
+
+ Returns:
+ string, the body in serialized form.
+ """
+ _abstract()
+
+ def deserialize(self, content):
+ """Perform the actual deserialization from response string to Python
+ object.
+
+ Args:
+ content: string, the body of the HTTP response
+
+ Returns:
+ The body de-serialized as a Python object.
+ """
+ _abstract()
+
+
+class JsonModel(BaseModel):
+ """Model class for JSON.
+
+ Serializes and de-serializes between JSON and the Python
+ object representation of HTTP request and response bodies.
+ """
+
+ accept = "application/json"
+ content_type = "application/json"
+ alt_param = "json"
+
+ def __init__(self, data_wrapper=False):
+ """Construct a JsonModel.
+
+ Args:
+ data_wrapper: boolean, wrap requests and responses in a data wrapper
+ """
+ self._data_wrapper = data_wrapper
+
+ def serialize(self, body_value):
+ if (
+ isinstance(body_value, dict)
+ and "data" not in body_value
+ and self._data_wrapper
+ ):
+ body_value = {"data": body_value}
+ return json.dumps(body_value)
+
+ def deserialize(self, content):
+ try:
+ content = content.decode("utf-8")
+ except AttributeError:
+ pass
+ body = json.loads(content)
+ if self._data_wrapper and isinstance(body, dict) and "data" in body:
+ body = body["data"]
+ return body
+
+ @property
+ def no_content_response(self):
+ return {}
+
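JsonModel is the model most discovery-built service objects use; a minimal sketch of the round trip (the query parameter and body below are made-up values, not part of this diff):

```python
from googleapiclient.model import JsonModel

model = JsonModel(data_wrapper=False)

# request() decorates the headers and serializes the body to JSON.
headers, path_params, query, body = model.request(
    {}, {}, {"maxResults": 10}, {"title": "hello"})
print(query)  # roughly "?maxResults=10&alt=json"
print(body)   # '{"title": "hello"}'

# deserialize() is the inverse, applied to 2xx response bodies.
print(model.deserialize('{"title": "hello"}'))  # {'title': 'hello'}
```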
+
+class RawModel(JsonModel):
+ """Model class for requests that don't return JSON.
+
+ Serializes and de-serializes between JSON and the Python
+ object representation of HTTP request, and returns the raw bytes
+ of the response body.
+ """
+
+ accept = "*/*"
+ content_type = "application/json"
+ alt_param = None
+
+ def deserialize(self, content):
+ return content
+
+ @property
+ def no_content_response(self):
+ return ""
+
+
+class MediaModel(JsonModel):
+ """Model class for requests that return Media.
+
+ Serializes and de-serializes between JSON and the Python
+ object representation of HTTP request, and returns the raw bytes
+ of the response body.
+ """
+
+ accept = "*/*"
+ content_type = "application/json"
+ alt_param = "media"
+
+ def deserialize(self, content):
+ return content
+
+ @property
+ def no_content_response(self):
+ return ""
+
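RawModel and MediaModel differ from JsonModel only in what they request (the `alt` parameter) and in skipping deserialization; a quick sketch:

```python
from googleapiclient.model import RawModel, MediaModel

# Response bodies pass through untouched; only the requested "alt" differs.
print(RawModel().alt_param)    # None
print(MediaModel().alt_param)  # 'media'
print(MediaModel().deserialize(b"\x89PNG..."))  # bytes returned as-is
```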
+
+class ProtocolBufferModel(BaseModel):
+ """Model class for protocol buffers.
+
+ Serializes and de-serializes the binary protocol buffer sent in the HTTP
+ request and response bodies.
+ """
+
+ accept = "application/x-protobuf"
+ content_type = "application/x-protobuf"
+ alt_param = "proto"
+
+ def __init__(self, protocol_buffer):
+ """Constructs a ProtocolBufferModel.
+
+ The serialized protocol buffer returned in an HTTP response will be
+ de-serialized using the given protocol buffer class.
+
+ Args:
+ protocol_buffer: The protocol buffer class used to de-serialize a
+ response from the API.
+ """
+ self._protocol_buffer = protocol_buffer
+
+ def serialize(self, body_value):
+ return body_value.SerializeToString()
+
+ def deserialize(self, content):
+ return self._protocol_buffer.FromString(content)
+
+ @property
+ def no_content_response(self):
+ return self._protocol_buffer()
+
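A sketch of ProtocolBufferModel in use, assuming a hypothetical protoc-generated module `my_pb2` with a `Resource` message (not part of this diff):

```python
from googleapiclient.model import ProtocolBufferModel

import my_pb2  # hypothetical protoc-generated module

model = ProtocolBufferModel(my_pb2.Resource)
wire = model.serialize(my_pb2.Resource(name="example"))  # binary proto bytes
message = model.deserialize(wire)   # back to a my_pb2.Resource instance
empty = model.no_content_response   # a fresh, empty Resource for 204 replies
```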
+
+def makepatch(original, modified):
+ """Create a patch object.
+
+ Some methods support PATCH, an efficient way to send updates to a resource.
+ This method allows the easy construction of patch bodies by looking at the
+ differences between a resource before and after it was modified.
+
+ Args:
+ original: object, the original deserialized resource
+ modified: object, the modified deserialized resource
+ Returns:
+ An object that contains only the changes from original to modified, in a
+ form suitable to pass to a PATCH method.
+
+ Example usage:
+ item = service.activities().get(postid=postid, userid=userid).execute()
+ original = copy.deepcopy(item)
+ item['object']['content'] = 'This is updated.'
+ service.activities.patch(postid=postid, userid=userid,
+ body=makepatch(original, item)).execute()
+ """
+ patch = {}
+ for key, original_value in six.iteritems(original):
+ modified_value = modified.get(key, None)
+ if modified_value is None:
+ # Use None to signal that the element is deleted
+ patch[key] = None
+ elif original_value != modified_value:
+ if type(original_value) == type({}):
+ # Recursively descend objects
+ patch[key] = makepatch(original_value, modified_value)
+ else:
+ # In the case of simple types or arrays we just replace
+ patch[key] = modified_value
+ else:
+ # Don't add anything to patch if there's no change
+ pass
+ for key in modified:
+ if key not in original:
+ patch[key] = modified[key]
+
+ return patch
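The behavior is easiest to see on a concrete pair of resources; this example is runnable as-is:

```python
from googleapiclient.model import makepatch

original = {"title": "old", "labels": {"color": "red", "size": "L"}, "views": 3}
modified = {"title": "new", "labels": {"color": "red", "size": "M"}}

# Unchanged keys are dropped, nested dicts are diffed recursively,
# and keys missing from `modified` are sent as None (deletion).
print(makepatch(original, modified))
# {'title': 'new', 'labels': {'size': 'M'}, 'views': None}
```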
diff --git a/venv/Lib/site-packages/googleapiclient/sample_tools.py b/venv/Lib/site-packages/googleapiclient/sample_tools.py
new file mode 100644
index 000000000..2b6a21b26
--- /dev/null
+++ b/venv/Lib/site-packages/googleapiclient/sample_tools.py
@@ -0,0 +1,110 @@
+# Copyright 2014 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Utilities for making samples.
+
+Consolidates a lot of code commonly repeated in sample applications.
+"""
+from __future__ import absolute_import
+
+__author__ = "jcgregorio@google.com (Joe Gregorio)"
+__all__ = ["init"]
+
+
+import argparse
+import os
+
+from googleapiclient import discovery
+from googleapiclient.http import build_http
+
+
+def init(
+ argv, name, version, doc, filename, scope=None, parents=[], discovery_filename=None
+):
+ """A common initialization routine for samples.
+
+ Many of the sample applications do the same initialization, which has now
+ been consolidated into this function. This function uses common idioms found
+ in almost all the samples, i.e. for an API with name 'apiname', the
+ credentials are stored in a file named apiname.dat, and the
+ client_secrets.json file is stored in the same directory as the application
+ main file.
+
+ Args:
+ argv: list of string, the command-line parameters of the application.
+ name: string, name of the API.
+ version: string, version of the API.
+ doc: string, description of the application. Usually set to __doc__.
+ filename: string, filename of the application. Usually set to __file__.
+ parents: list of argparse.ArgumentParser, additional command-line flags.
+ scope: string, The OAuth scope used.
+ discovery_filename: string, name of local discovery file (JSON). Use when discovery doc not available via URL.
+
+ Returns:
+ A tuple of (service, flags), where service is the service object and flags
+ is the parsed command-line flags.
+ """
+ try:
+ from oauth2client import client
+ from oauth2client import file
+ from oauth2client import tools
+ except ImportError:
+ raise ImportError(
+ "googleapiclient.sample_tools requires oauth2client. Please install oauth2client and try again."
+ )
+
+ if scope is None:
+ scope = "https://www.googleapis.com/auth/" + name
+
+ # Parse command-line arguments.
+ parent_parsers = [tools.argparser]
+ parent_parsers.extend(parents)
+ parser = argparse.ArgumentParser(
+ description=doc,
+ formatter_class=argparse.RawDescriptionHelpFormatter,
+ parents=parent_parsers,
+ )
+ flags = parser.parse_args(argv[1:])
+
+ # Name of a file containing the OAuth 2.0 information for this
+ # application, including client_id and client_secret, which are found
+ # on the API Access tab on the Google APIs
+ # Console.
+ client_secrets = os.path.join(os.path.dirname(filename), "client_secrets.json")
+
+ # Set up a Flow object to be used if we need to authenticate.
+ flow = client.flow_from_clientsecrets(
+ client_secrets, scope=scope, message=tools.message_if_missing(client_secrets)
+ )
+
+ # Prepare credentials, and authorize HTTP object with them.
+ # If the credentials don't exist or are invalid run through the native client
+ # flow. The Storage object will ensure that if successful the good
+ # credentials will get written back to a file.
+ storage = file.Storage(name + ".dat")
+ credentials = storage.get()
+ if credentials is None or credentials.invalid:
+ credentials = tools.run_flow(flow, storage, flags)
+ http = credentials.authorize(http=build_http())
+
+ if discovery_filename is None:
+ # Construct a service object via the discovery service.
+ service = discovery.build(name, version, http=http)
+ else:
+ # Construct a service object using a local discovery document file.
+ with open(discovery_filename) as discovery_file:
+ service = discovery.build_from_document(
+ discovery_file.read(), base="https://www.googleapis.com/", http=http
+ )
+ return (service, flags)
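A hedged sketch of how a sample would call init(); 'calendar'/'v3' and the RPC below are example values, and a client_secrets.json must sit next to the script as the docstring describes:

```python
import sys

from googleapiclient import sample_tools

def main(argv):
    service, flags = sample_tools.init(
        argv, 'calendar', 'v3', __doc__, __file__,
        scope='https://www.googleapis.com/auth/calendar.readonly')
    # Any discovery-built method works from here; this one is an example.
    events = service.events().list(calendarId='primary').execute()
    print(events.get('items', []))

if __name__ == '__main__':
    main(sys.argv)
```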
diff --git a/venv/Lib/site-packages/googleapiclient/schema.py b/venv/Lib/site-packages/googleapiclient/schema.py
new file mode 100644
index 000000000..022cb0acf
--- /dev/null
+++ b/venv/Lib/site-packages/googleapiclient/schema.py
@@ -0,0 +1,315 @@
+# Copyright 2014 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Schema processing for discovery based APIs
+
+Schemas holds an APIs discovery schemas. It can return those schema as
+deserialized JSON objects, or pretty print them as prototype objects that
+conform to the schema.
+
+For example, given the schema:
+
+ schema = \"\"\"{
+ "Foo": {
+ "type": "object",
+ "properties": {
+ "etag": {
+ "type": "string",
+ "description": "ETag of the collection."
+ },
+ "kind": {
+ "type": "string",
+ "description": "Type of the collection ('calendar#acl').",
+ "default": "calendar#acl"
+ },
+ "nextPageToken": {
+ "type": "string",
+ "description": "Token used to access the next
+ page of this result. Omitted if no further results are available."
+ }
+ }
+ }
+ }\"\"\"
+
+ s = Schemas(schema)
+ print(s.prettyPrintByName('Foo'))
+
+ Produces the following output:
+
+ {
+ "nextPageToken": "A String", # Token used to access the
+ # next page of this result. Omitted if no further results are available.
+ "kind": "A String", # Type of the collection ('calendar#acl').
+ "etag": "A String", # ETag of the collection.
+ },
+
+The constructor takes a discovery document in which to look up named schema.
+"""
+from __future__ import absolute_import
+import six
+
+# TODO(jcgregorio) support format, enum, minimum, maximum
+
+__author__ = "jcgregorio@google.com (Joe Gregorio)"
+
+import copy
+
+from googleapiclient import _helpers as util
+
+
+class Schemas(object):
+ """Schemas for an API."""
+
+ def __init__(self, discovery):
+ """Constructor.
+
+ Args:
+ discovery: object, Deserialized discovery document from which we pull
+ out the named schema.
+ """
+ self.schemas = discovery.get("schemas", {})
+
+ # Cache of pretty printed schemas.
+ self.pretty = {}
+
+ @util.positional(2)
+ def _prettyPrintByName(self, name, seen=None, dent=0):
+ """Get pretty printed object prototype from the schema name.
+
+ Args:
+ name: string, Name of schema in the discovery document.
+ seen: list of string, Names of schema already seen. Used to handle
+ recursive definitions.
+
+ Returns:
+ string, A string that contains a prototype object with
+ comments that conforms to the given schema.
+ """
+ if seen is None:
+ seen = []
+
+ if name in seen:
+ # Do not fall into an infinite loop over recursive definitions.
+ return "# Object with schema name: %s" % name
+ seen.append(name)
+
+ if name not in self.pretty:
+ self.pretty[name] = _SchemaToStruct(
+ self.schemas[name], seen, dent=dent
+ ).to_str(self._prettyPrintByName)
+
+ seen.pop()
+
+ return self.pretty[name]
+
+ def prettyPrintByName(self, name):
+ """Get pretty printed object prototype from the schema name.
+
+ Args:
+ name: string, Name of schema in the discovery document.
+
+ Returns:
+ string, A string that contains a prototype object with
+ comments that conforms to the given schema.
+ """
+ # Return with trailing comma and newline removed.
+ return self._prettyPrintByName(name, seen=[], dent=1)[:-2]
+
+ @util.positional(2)
+ def _prettyPrintSchema(self, schema, seen=None, dent=0):
+ """Get pretty printed object prototype of schema.
+
+ Args:
+ schema: object, Parsed JSON schema.
+ seen: list of string, Names of schema already seen. Used to handle
+ recursive definitions.
+
+ Returns:
+ string, A string that contains a prototype object with
+ comments that conforms to the given schema.
+ """
+ if seen is None:
+ seen = []
+
+ return _SchemaToStruct(schema, seen, dent=dent).to_str(self._prettyPrintByName)
+
+ def prettyPrintSchema(self, schema):
+ """Get pretty printed object prototype of schema.
+
+ Args:
+ schema: object, Parsed JSON schema.
+
+ Returns:
+ string, A string that contains a prototype object with
+ comments that conforms to the given schema.
+ """
+ # Return with trailing comma and newline removed.
+ return self._prettyPrintSchema(schema, dent=1)[:-2]
+
+ def get(self, name, default=None):
+ """Get deserialized JSON schema from the schema name.
+
+ Args:
+ name: string, Schema name.
+ default: object, return value if name not found.
+ """
+ return self.schemas.get(name, default)
+
+
+class _SchemaToStruct(object):
+ """Convert schema to a prototype object."""
+
+ @util.positional(3)
+ def __init__(self, schema, seen, dent=0):
+ """Constructor.
+
+ Args:
+ schema: object, Parsed JSON schema.
+ seen: list, List of names of schema already seen while parsing. Used to
+ handle recursive definitions.
+ dent: int, Initial indentation depth.
+ """
+ # The result of this parsing kept as list of strings.
+ self.value = []
+
+ # The final value of the parsing.
+ self.string = None
+
+ # The parsed JSON schema.
+ self.schema = schema
+
+ # Indentation level.
+ self.dent = dent
+
+ # Method that when called returns a prototype object for the schema with
+ # the given name.
+ self.from_cache = None
+
+ # List of names of schema already seen while parsing.
+ self.seen = seen
+
+ def emit(self, text):
+ """Add text as a line to the output.
+
+ Args:
+ text: string, Text to output.
+ """
+ self.value.extend([" " * self.dent, text, "\n"])
+
+ def emitBegin(self, text):
+ """Add text to the output, but with no line terminator.
+
+ Args:
+ text: string, Text to output.
+ """
+ self.value.extend([" " * self.dent, text])
+
+ def emitEnd(self, text, comment):
+ """Add text and comment to the output with line terminator.
+
+ Args:
+ text: string, Text to output.
+ comment: string, Python comment.
+ """
+ if comment:
+ divider = "\n" + " " * (self.dent + 2) + "# "
+ lines = comment.splitlines()
+ lines = [x.rstrip() for x in lines]
+ comment = divider.join(lines)
+ self.value.extend([text, " # ", comment, "\n"])
+ else:
+ self.value.extend([text, "\n"])
+
+ def indent(self):
+ """Increase indentation level."""
+ self.dent += 1
+
+ def undent(self):
+ """Decrease indentation level."""
+ self.dent -= 1
+
+ def _to_str_impl(self, schema):
+ """Prototype object based on the schema, in Python code with comments.
+
+ Args:
+ schema: object, Parsed JSON schema file.
+
+ Returns:
+ Prototype object based on the schema, in Python code with comments.
+ """
+ stype = schema.get("type")
+ if stype == "object":
+ self.emitEnd("{", schema.get("description", ""))
+ self.indent()
+ if "properties" in schema:
+ for pname, pschema in six.iteritems(schema.get("properties", {})):
+ self.emitBegin('"%s": ' % pname)
+ self._to_str_impl(pschema)
+ elif "additionalProperties" in schema:
+ self.emitBegin('"a_key": ')
+ self._to_str_impl(schema["additionalProperties"])
+ self.undent()
+ self.emit("},")
+ elif "$ref" in schema:
+ schemaName = schema["$ref"]
+ description = schema.get("description", "")
+ s = self.from_cache(schemaName, seen=self.seen)
+ parts = s.splitlines()
+ self.emitEnd(parts[0], description)
+ for line in parts[1:]:
+ self.emit(line.rstrip())
+ elif stype == "boolean":
+ value = schema.get("default", "True or False")
+ self.emitEnd("%s," % str(value), schema.get("description", ""))
+ elif stype == "string":
+ value = schema.get("default", "A String")
+ self.emitEnd('"%s",' % str(value), schema.get("description", ""))
+ elif stype == "integer":
+ value = schema.get("default", "42")
+ self.emitEnd("%s," % str(value), schema.get("description", ""))
+ elif stype == "number":
+ value = schema.get("default", "3.14")
+ self.emitEnd("%s," % str(value), schema.get("description", ""))
+ elif stype == "null":
+ self.emitEnd("None,", schema.get("description", ""))
+ elif stype == "any":
+ self.emitEnd('"",', schema.get("description", ""))
+ elif stype == "array":
+ self.emitEnd("[", schema.get("description"))
+ self.indent()
+ self.emitBegin("")
+ self._to_str_impl(schema["items"])
+ self.undent()
+ self.emit("],")
+ else:
+ self.emit("Unknown type! %s" % stype)
+ self.emitEnd("", "")
+
+ self.string = "".join(self.value)
+ return self.string
+
+ def to_str(self, from_cache):
+ """Prototype object based on the schema, in Python code with comments.
+
+ Args:
+ from_cache: callable(name, seen), Callable that retrieves an object
+ prototype for a schema with the given name. Seen is a list of schema
+ names already seen as we recursively descend the schema definition.
+
+ Returns:
+ Prototype object based on the schema, in Python code with comments.
+ The lines of the code will all be properly indented.
+ """
+ self.from_cache = from_cache
+ return self._to_str_impl(self.schema)
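A small, self-contained example of the class in action, using a hand-written discovery fragment rather than a real discovery document:

```python
import json

from googleapiclient.schema import Schemas

discovery = json.loads("""{
  "schemas": {
    "Foo": {
      "type": "object",
      "properties": {
        "etag": {"type": "string", "description": "ETag of the collection."},
        "count": {"type": "integer"}
      }
    }
  }
}""")

s = Schemas(discovery)
print(s.prettyPrintByName('Foo'))  # prototype dict with inline comments
```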
diff --git a/venv/Lib/site-packages/grpc/__init__.py b/venv/Lib/site-packages/grpc/__init__.py
new file mode 100644
index 000000000..3566c3823
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/__init__.py
@@ -0,0 +1,2125 @@
+# Copyright 2015-2016 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""gRPC's Python API."""
+
+import abc
+import contextlib
+import enum
+import logging
+import sys
+import six
+
+from grpc._cython import cygrpc as _cygrpc
+from grpc import _compression
+
+logging.getLogger(__name__).addHandler(logging.NullHandler())
+
+try:
+ from grpc._grpcio_metadata import __version__
+except ImportError:
+ __version__ = "dev0"
+
+############################## Future Interface ###############################
+
+
+class FutureTimeoutError(Exception):
+ """Indicates that a method call on a Future timed out."""
+
+
+class FutureCancelledError(Exception):
+ """Indicates that the computation underlying a Future was cancelled."""
+
+
+class Future(six.with_metaclass(abc.ABCMeta)):
+ """A representation of a computation in another control flow.
+
+ Computations represented by a Future may be yet to be begun,
+ may be ongoing, or may have already completed.
+ """
+
+ @abc.abstractmethod
+ def cancel(self):
+ """Attempts to cancel the computation.
+
+ This method does not block.
+
+ Returns:
+ bool:
+ Returns True if the computation was cancelled.
+
+ Returns False under all other circumstances, for example:
+
+ 1. computation has begun and could not be cancelled.
+ 2. computation has finished.
+ 3. computation is scheduled for execution and it is impossible
+ to determine its state without blocking.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def cancelled(self):
+ """Describes whether the computation was cancelled.
+
+ This method does not block.
+
+ Returns:
+ bool:
+ Returns True if the computation was cancelled before its result became
+ available.
+
+ Returns False under all other circumstances, for example:
+
+ 1. computation was not cancelled.
+ 2. computation's result is available.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def running(self):
+ """Describes whether the computation is taking place.
+
+ This method does not block.
+
+ Returns:
+ Returns True if the computation is scheduled for execution or
+ currently executing.
+
+ Returns False if the computation already executed or was cancelled.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def done(self):
+ """Describes whether the computation has taken place.
+
+ This method does not block.
+
+ Returns:
+ bool:
+ Returns True if the computation already executed or was cancelled.
+ Returns False if the computation is scheduled for execution or
+ currently executing.
+ This is exactly opposite of the running() method's result.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def result(self, timeout=None):
+ """Returns the result of the computation or raises its exception.
+
+ This method may return immediately or may block.
+
+ Args:
+ timeout: The length of time in seconds to wait for the computation to
+ finish or be cancelled. If None, the call will block until the
+ computation's termination.
+
+ Returns:
+ The return value of the computation.
+
+ Raises:
+ FutureTimeoutError: If a timeout value is passed and the computation
+ does not terminate within the allotted time.
+ FutureCancelledError: If the computation was cancelled.
+ Exception: If the computation raised an exception, this call will
+ raise the same exception.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def exception(self, timeout=None):
+ """Return the exception raised by the computation.
+
+ This method may return immediately or may block.
+
+ Args:
+ timeout: The length of time in seconds to wait for the computation to
+ terminate or be cancelled. If None, the call will block until the
+ computation's termination.
+
+ Returns:
+ The exception raised by the computation, or None if the computation
+ did not raise an exception.
+
+ Raises:
+ FutureTimeoutError: If a timeout value is passed and the computation
+ does not terminate within the allotted time.
+ FutureCancelledError: If the computation was cancelled.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def traceback(self, timeout=None):
+ """Access the traceback of the exception raised by the computation.
+
+ This method may return immediately or may block.
+
+ Args:
+ timeout: The length of time in seconds to wait for the computation
+ to terminate or be cancelled. If None, the call will block until
+ the computation's termination.
+
+ Returns:
+ The traceback of the exception raised by the computation, or None
+ if the computation did not raise an exception.
+
+ Raises:
+ FutureTimeoutError: If a timeout value is passed and the computation
+ does not terminate within the allotted time.
+ FutureCancelledError: If the computation was cancelled.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def add_done_callback(self, fn):
+ """Adds a function to be called at completion of the computation.
+
+ The callback will be passed this Future object describing the outcome
+ of the computation. Callbacks will be invoked after the future is
+ terminated, whether successfully or not.
+
+ If the computation has already completed, the callback will be called
+ immediately.
+
+ Exceptions raised in the callback will be logged at ERROR level, but
+ will not terminate any threads of execution.
+
+ Args:
+ fn: A callable taking this Future object as its single parameter.
+ """
+ raise NotImplementedError()
+
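One concrete Future in this module is returned by grpc.channel_ready_future(); a minimal sketch, assuming a server address of localhost:50051:

```python
import grpc

channel = grpc.insecure_channel('localhost:50051')  # assumed address
ready = grpc.channel_ready_future(channel)
try:
    ready.result(timeout=5)  # blocks until connected, or raises
    print('channel is ready')
except grpc.FutureTimeoutError:
    print('no server became reachable within 5 seconds')
finally:
    channel.close()
```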
+
+################################ gRPC Enums ##################################
+
+
+@enum.unique
+class ChannelConnectivity(enum.Enum):
+ """Mirrors grpc_connectivity_state in the gRPC Core.
+
+ Attributes:
+ IDLE: The channel is idle.
+ CONNECTING: The channel is connecting.
+ READY: The channel is ready to conduct RPCs.
+ TRANSIENT_FAILURE: The channel has seen a failure from which it expects
+ to recover.
+ SHUTDOWN: The channel has seen a failure from which it cannot recover.
+ """
+ IDLE = (_cygrpc.ConnectivityState.idle, 'idle')
+ CONNECTING = (_cygrpc.ConnectivityState.connecting, 'connecting')
+ READY = (_cygrpc.ConnectivityState.ready, 'ready')
+ TRANSIENT_FAILURE = (_cygrpc.ConnectivityState.transient_failure,
+ 'transient failure')
+ SHUTDOWN = (_cygrpc.ConnectivityState.shutdown, 'shutdown')
+
+
+@enum.unique
+class StatusCode(enum.Enum):
+ """Mirrors grpc_status_code in the gRPC Core.
+
+ Attributes:
+ OK: Not an error; returned on success
+ CANCELLED: The operation was cancelled (typically by the caller).
+ UNKNOWN: Unknown error.
+ INVALID_ARGUMENT: Client specified an invalid argument.
+ DEADLINE_EXCEEDED: Deadline expired before operation could complete.
+ NOT_FOUND: Some requested entity (e.g., file or directory) was not found.
+ ALREADY_EXISTS: Some entity that we attempted to create (e.g., file or directory)
+ already exists.
+ PERMISSION_DENIED: The caller does not have permission to execute the specified
+ operation.
+ UNAUTHENTICATED: The request does not have valid authentication credentials for the
+ operation.
+ RESOURCE_EXHAUSTED: Some resource has been exhausted, perhaps a per-user quota, or
+ perhaps the entire file system is out of space.
+ FAILED_PRECONDITION: Operation was rejected because the system is not in a state
+ required for the operation's execution.
+ ABORTED: The operation was aborted, typically due to a concurrency issue
+ like sequencer check failures, transaction aborts, etc.
+ UNIMPLEMENTED: Operation is not implemented or not supported/enabled in this service.
+ INTERNAL: Internal errors. Means some invariants expected by underlying
+ system has been broken.
+ UNAVAILABLE: The service is currently unavailable.
+ DATA_LOSS: Unrecoverable data loss or corruption.
+ """
+ OK = (_cygrpc.StatusCode.ok, 'ok')
+ CANCELLED = (_cygrpc.StatusCode.cancelled, 'cancelled')
+ UNKNOWN = (_cygrpc.StatusCode.unknown, 'unknown')
+ INVALID_ARGUMENT = (_cygrpc.StatusCode.invalid_argument, 'invalid argument')
+ DEADLINE_EXCEEDED = (_cygrpc.StatusCode.deadline_exceeded,
+ 'deadline exceeded')
+ NOT_FOUND = (_cygrpc.StatusCode.not_found, 'not found')
+ ALREADY_EXISTS = (_cygrpc.StatusCode.already_exists, 'already exists')
+ PERMISSION_DENIED = (_cygrpc.StatusCode.permission_denied,
+ 'permission denied')
+ RESOURCE_EXHAUSTED = (_cygrpc.StatusCode.resource_exhausted,
+ 'resource exhausted')
+ FAILED_PRECONDITION = (_cygrpc.StatusCode.failed_precondition,
+ 'failed precondition')
+ ABORTED = (_cygrpc.StatusCode.aborted, 'aborted')
+ OUT_OF_RANGE = (_cygrpc.StatusCode.out_of_range, 'out of range')
+ UNIMPLEMENTED = (_cygrpc.StatusCode.unimplemented, 'unimplemented')
+ INTERNAL = (_cygrpc.StatusCode.internal, 'internal')
+ UNAVAILABLE = (_cygrpc.StatusCode.unavailable, 'unavailable')
+ DATA_LOSS = (_cygrpc.StatusCode.data_loss, 'data loss')
+ UNAUTHENTICATED = (_cygrpc.StatusCode.unauthenticated, 'unauthenticated')
+
+
+############################# gRPC Status ################################
+
+
+class Status(six.with_metaclass(abc.ABCMeta)):
+ """Describes the status of an RPC.
+
+ This is an EXPERIMENTAL API.
+
+ Attributes:
+ code: A StatusCode object to be sent to the client.
+ details: A UTF-8-encodable string to be sent to the client upon
+ termination of the RPC.
+ trailing_metadata: The trailing :term:`metadata` in the RPC.
+ """
+
+
+############################# gRPC Exceptions ################################
+
+
+class RpcError(Exception):
+ """Raised by the gRPC library to indicate non-OK-status RPC termination."""
+
+
+############################## Shared Context ################################
+
+
+class RpcContext(six.with_metaclass(abc.ABCMeta)):
+ """Provides RPC-related information and action."""
+
+ @abc.abstractmethod
+ def is_active(self):
+ """Describes whether the RPC is active or has terminated.
+
+ Returns:
+ bool:
+ True if RPC is active, False otherwise.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def time_remaining(self):
+ """Describes the length of allowed time remaining for the RPC.
+
+ Returns:
+ A nonnegative float indicating the length of allowed time in seconds
+ remaining for the RPC to complete before it is considered to have
+ timed out, or None if no deadline was specified for the RPC.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def cancel(self):
+ """Cancels the RPC.
+
+ Idempotent and has no effect if the RPC has already terminated.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def add_callback(self, callback):
+ """Registers a callback to be called on RPC termination.
+
+ Args:
+ callback: A no-parameter callable to be called on RPC termination.
+
+ Returns:
+ True if the callback was added and will be called later; False if
+ the callback was not added and will not be called (because the RPC
+ already terminated or some other reason).
+ """
+ raise NotImplementedError()
+
+
+######################### Invocation-Side Context ############################
+
+
+class Call(six.with_metaclass(abc.ABCMeta, RpcContext)):
+ """Invocation-side utility object for an RPC."""
+
+ @abc.abstractmethod
+ def initial_metadata(self):
+ """Accesses the initial metadata sent by the server.
+
+ This method blocks until the value is available.
+
+ Returns:
+ The initial :term:`metadata`.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def trailing_metadata(self):
+ """Accesses the trailing metadata sent by the server.
+
+ This method blocks until the value is available.
+
+ Returns:
+ The trailing :term:`metadata`.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def code(self):
+ """Accesses the status code sent by the server.
+
+ This method blocks until the value is available.
+
+ Returns:
+ The StatusCode value for the RPC.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def details(self):
+ """Accesses the details sent by the server.
+
+ This method blocks until the value is available.
+
+ Returns:
+ The details string of the RPC.
+ """
+ raise NotImplementedError()
+
+
+############## Invocation-Side Interceptor Interfaces & Classes ##############
+
+
+class ClientCallDetails(six.with_metaclass(abc.ABCMeta)):
+ """Describes an RPC to be invoked.
+
+ This is an EXPERIMENTAL API.
+
+ Attributes:
+ method: The method name of the RPC.
+ timeout: An optional duration of time in seconds to allow for the RPC.
+ metadata: Optional :term:`metadata` to be transmitted to
+ the service-side of the RPC.
+ credentials: An optional CallCredentials for the RPC.
+ wait_for_ready: This is an EXPERIMENTAL argument. An optional
+ flag to enable :term:`wait_for_ready` mechanism.
+ compression: An element of grpc.compression, e.g.
+ grpc.compression.Gzip. This is an EXPERIMENTAL option.
+ """
+
+
+class UnaryUnaryClientInterceptor(six.with_metaclass(abc.ABCMeta)):
+ """Affords intercepting unary-unary invocations.
+
+ This is an EXPERIMENTAL API.
+ """
+
+ @abc.abstractmethod
+ def intercept_unary_unary(self, continuation, client_call_details, request):
+ """Intercepts a unary-unary invocation asynchronously.
+
+ Args:
+ continuation: A function that proceeds with the invocation by
+ executing the next interceptor in chain or invoking the
+ actual RPC on the underlying Channel. It is the interceptor's
+ responsibility to call it if it decides to move the RPC forward.
+ The interceptor can use
+ `response_future = continuation(client_call_details, request)`
+ to continue with the RPC. `continuation` returns an object that is
+ both a Call for the RPC and a Future. In the event of RPC
+ completion, the returned Call-Future's result value will be
+ the response message of the RPC. Should the event terminate
+ with non-OK status, the returned Call-Future's exception value
+ will be an RpcError.
+ client_call_details: A ClientCallDetails object describing the
+ outgoing RPC.
+ request: The request value for the RPC.
+
+ Returns:
+ An object that is both a Call for the RPC and a Future.
+ In the event of RPC completion, the returned Call-Future's
+ result value will be the response message of the RPC.
+ Should the event terminate with non-OK status, the returned
+ Call-Future's exception value will be an RpcError.
+ """
+ raise NotImplementedError()
+
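A minimal interceptor sketch that appends a made-up header; the namedtuple stands in for a mutable ClientCallDetails, and the address and header values are assumptions:

```python
import collections

import grpc

class _CallDetails(
        collections.namedtuple(
            '_CallDetails', ('method', 'timeout', 'metadata', 'credentials')),
        grpc.ClientCallDetails):
    pass

class ApiKeyInterceptor(grpc.UnaryUnaryClientInterceptor):
    def intercept_unary_unary(self, continuation, client_call_details, request):
        # Copy the metadata, add one header, and forward the RPC.
        metadata = list(client_call_details.metadata or [])
        metadata.append(('x-api-key', 'demo-key'))  # assumed header/value
        details = _CallDetails(client_call_details.method,
                               client_call_details.timeout, metadata,
                               client_call_details.credentials)
        return continuation(details, request)

channel = grpc.intercept_channel(
    grpc.insecure_channel('localhost:50051'), ApiKeyInterceptor())
```

The stream variants below follow the same pattern; only the continuation's argument and return types change.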
+
+class UnaryStreamClientInterceptor(six.with_metaclass(abc.ABCMeta)):
+ """Affords intercepting unary-stream invocations.
+
+ This is an EXPERIMENTAL API.
+ """
+
+ @abc.abstractmethod
+ def intercept_unary_stream(self, continuation, client_call_details,
+ request):
+ """Intercepts a unary-stream invocation.
+
+ Args:
+ continuation: A function that proceeds with the invocation by
+ executing the next interceptor in chain or invoking the
+ actual RPC on the underlying Channel. It is the interceptor's
+ responsibility to call it if it decides to move the RPC forward.
+ The interceptor can use
+ `response_iterator = continuation(client_call_details, request)`
+ to continue with the RPC. `continuation` returns an object that is
+ both a Call for the RPC and an iterator for response values.
+ Drawing response values from the returned Call-iterator may
+ raise RpcError indicating termination of the RPC with non-OK
+ status.
+ client_call_details: A ClientCallDetails object describing the
+ outgoing RPC.
+ request: The request value for the RPC.
+
+ Returns:
+ An object that is both a Call for the RPC and an iterator of
+ response values. Drawing response values from the returned
+ Call-iterator may raise RpcError indicating termination of
+ the RPC with non-OK status.
+ """
+ raise NotImplementedError()
+
+
+class StreamUnaryClientInterceptor(six.with_metaclass(abc.ABCMeta)):
+ """Affords intercepting stream-unary invocations.
+
+ This is an EXPERIMENTAL API.
+ """
+
+ @abc.abstractmethod
+ def intercept_stream_unary(self, continuation, client_call_details,
+ request_iterator):
+ """Intercepts a stream-unary invocation asynchronously.
+
+ Args:
+ continuation: A function that proceeds with the invocation by
+ executing the next interceptor in chain or invoking the
+ actual RPC on the underlying Channel. It is the interceptor's
+ responsibility to call it if it decides to move the RPC forward.
+ The interceptor can use
+ `response_future = continuation(client_call_details, request_iterator)`
+ to continue with the RPC. `continuation` returns an object that is
+ both a Call for the RPC and a Future. In the event of RPC completion,
+ the returned Call-Future's result value will be the response message
+ of the RPC. Should the event terminate with non-OK status, the
+ returned Call-Future's exception value will be an RpcError.
+ client_call_details: A ClientCallDetails object describing the
+ outgoing RPC.
+ request_iterator: An iterator that yields request values for the RPC.
+
+ Returns:
+ An object that is both a Call for the RPC and a Future.
+ In the event of RPC completion, the returned Call-Future's
+ result value will be the response message of the RPC.
+ Should the event terminate with non-OK status, the returned
+ Call-Future's exception value will be an RpcError.
+ """
+ raise NotImplementedError()
+
+
+class StreamStreamClientInterceptor(six.with_metaclass(abc.ABCMeta)):
+ """Affords intercepting stream-stream invocations.
+
+ This is an EXPERIMENTAL API.
+ """
+
+ @abc.abstractmethod
+ def intercept_stream_stream(self, continuation, client_call_details,
+ request_iterator):
+ """Intercepts a stream-stream invocation.
+
+ Args:
+ continuation: A function that proceeds with the invocation by
+ executing the next interceptor in chain or invoking the
+ actual RPC on the underlying Channel. It is the interceptor's
+ responsibility to call it if it decides to move the RPC forward.
+ The interceptor can use
+ `response_iterator = continuation(client_call_details, request_iterator)`
+ to continue with the RPC. `continuation` returns an object that is
+ both a Call for the RPC and an iterator for response values.
+ Drawing response values from the returned Call-iterator may
+ raise RpcError indicating termination of the RPC with non-OK
+ status.
+ client_call_details: A ClientCallDetails object describing the
+ outgoing RPC.
+ request_iterator: An iterator that yields request values for the RPC.
+
+ Returns:
+ An object that is both a Call for the RPC and an iterator of
+ response values. Drawing response values from the returned
+ Call-iterator may raise RpcError indicating termination of
+ the RPC with non-OK status.
+ """
+ raise NotImplementedError()
+
+
+############ Authentication & Authorization Interfaces & Classes #############
+
+
+class ChannelCredentials(object):
+ """An encapsulation of the data required to create a secure Channel.
+
+ This class has no supported interface - it exists to define the type of its
+ instances and its instances exist to be passed to other functions. For
+ example, ssl_channel_credentials returns an instance of this class and
+ secure_channel requires an instance of this class.
+ """
+
+ def __init__(self, credentials):
+ self._credentials = credentials
+
+
+class CallCredentials(object):
+ """An encapsulation of the data required to assert an identity over a call.
+
+ A CallCredentials has to be used with secure Channel, otherwise the
+ metadata will not be transmitted to the server.
+
+ A CallCredentials may be composed with ChannelCredentials to always assert
+ identity for every call over that Channel.
+
+ This class has no supported interface - it exists to define the type of its
+ instances and its instances exist to be passed to other functions.
+ """
+
+ def __init__(self, credentials):
+ self._credentials = credentials
+
+
+class AuthMetadataContext(six.with_metaclass(abc.ABCMeta)):
+ """Provides information to call credentials metadata plugins.
+
+ Attributes:
+ service_url: A string URL of the service being called into.
+ method_name: A string of the fully qualified method name being called.
+ """
+
+
+class AuthMetadataPluginCallback(six.with_metaclass(abc.ABCMeta)):
+ """Callback object received by a metadata plugin."""
+
+ def __call__(self, metadata, error):
+ """Passes to the gRPC runtime authentication metadata for an RPC.
+
+ Args:
+ metadata: The :term:`metadata` used to construct the CallCredentials.
+ error: An Exception to indicate error or None to indicate success.
+ """
+ raise NotImplementedError()
+
+
+class AuthMetadataPlugin(six.with_metaclass(abc.ABCMeta)):
+ """A specification for custom authentication."""
+
+ def __call__(self, context, callback):
+ """Implements authentication by passing metadata to a callback.
+
+ This method will be invoked asynchronously in a separate thread.
+
+ Args:
+ context: An AuthMetadataContext providing information on the RPC that
+ the plugin is being called to authenticate.
+ callback: An AuthMetadataPluginCallback to be invoked either
+ synchronously or asynchronously.
+ """
+ raise NotImplementedError()
+
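A sketch of a custom plugin wired into call credentials; the token and endpoint are placeholders:

```python
import grpc

class StaticTokenAuth(grpc.AuthMetadataPlugin):
    def __call__(self, context, callback):
        # context.service_url and context.method_name identify the RPC.
        callback((('authorization', 'Bearer demo-token'),), None)

call_creds = grpc.metadata_call_credentials(StaticTokenAuth())
channel_creds = grpc.composite_channel_credentials(
    grpc.ssl_channel_credentials(), call_creds)  # CallCredentials need TLS
channel = grpc.secure_channel('api.example.com:443', channel_creds)
```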
+
+class ServerCredentials(object):
+ """An encapsulation of the data required to open a secure port on a Server.
+
+ This class has no supported interface - it exists to define the type of its
+ instances and its instances exist to be passed to other functions.
+ """
+
+ def __init__(self, credentials):
+ self._credentials = credentials
+
+
+class ServerCertificateConfiguration(object):
+ """A certificate configuration for use with an SSL-enabled Server.
+
+ Instances of this class can be returned in the certificate configuration
+ fetching callback.
+
+ This class has no supported interface -- it exists to define the
+ type of its instances and its instances exist to be passed to
+ other functions.
+ """
+
+ def __init__(self, certificate_configuration):
+ self._certificate_configuration = certificate_configuration
+
+
+######################## Multi-Callable Interfaces ###########################
+
+
+class UnaryUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
+ """Affords invoking a unary-unary RPC from client-side."""
+
+ @abc.abstractmethod
+ def __call__(self,
+ request,
+ timeout=None,
+ metadata=None,
+ credentials=None,
+ wait_for_ready=None,
+ compression=None):
+ """Synchronously invokes the underlying RPC.
+
+ Args:
+ request: The request value for the RPC.
+ timeout: An optional duration of time in seconds to allow
+ for the RPC.
+ metadata: Optional :term:`metadata` to be transmitted to the
+ service-side of the RPC.
+ credentials: An optional CallCredentials for the RPC. Only valid for
+ secure Channel.
+ wait_for_ready: This is an EXPERIMENTAL argument. An optional
+ flag to enable :term:`wait_for_ready` mechanism.
+ compression: An element of grpc.compression, e.g.
+ grpc.compression.Gzip. This is an EXPERIMENTAL option.
+
+ Returns:
+ The response value for the RPC.
+
+ Raises:
+ RpcError: Indicating that the RPC terminated with non-OK status. The
+ raised RpcError will also be a Call for the RPC affording the RPC's
+ metadata, status code, and details.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def with_call(self,
+ request,
+ timeout=None,
+ metadata=None,
+ credentials=None,
+ wait_for_ready=None,
+ compression=None):
+ """Synchronously invokes the underlying RPC.
+
+ Args:
+ request: The request value for the RPC.
+ timeout: An optional duration of time in seconds to allow for
+ the RPC.
+ metadata: Optional :term:`metadata` to be transmitted to the
+ service-side of the RPC.
+ credentials: An optional CallCredentials for the RPC. Only valid for
+ secure Channel.
+ wait_for_ready: This is an EXPERIMENTAL argument. An optional
+ flag to enable :term:`wait_for_ready` mechanism.
+ compression: An element of grpc.compression, e.g.
+ grpc.compression.Gzip. This is an EXPERIMENTAL option.
+
+ Returns:
+ The response value for the RPC and a Call value for the RPC.
+
+ Raises:
+ RpcError: Indicating that the RPC terminated with non-OK status. The
+ raised RpcError will also be a Call for the RPC affording the RPC's
+ metadata, status code, and details.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def future(self,
+ request,
+ timeout=None,
+ metadata=None,
+ credentials=None,
+ wait_for_ready=None,
+ compression=None):
+ """Asynchronously invokes the underlying RPC.
+
+ Args:
+ request: The request value for the RPC.
+ timeout: An optional duration of time in seconds to allow for
+ the RPC.
+ metadata: Optional :term:`metadata` to be transmitted to the
+ service-side of the RPC.
+ credentials: An optional CallCredentials for the RPC. Only valid for
+ secure Channel.
+ wait_for_ready: This is an EXPERIMENTAL argument. An optional
+ flag to enable :term:`wait_for_ready` mechanism.
+ compression: An element of grpc.compression, e.g.
+ grpc.compression.Gzip. This is an EXPERIMENTAL option.
+
+ Returns:
+ An object that is both a Call for the RPC and a Future.
+ In the event of RPC completion, the returned Call-Future's result
+ value will be the response message of the RPC.
+ Should the event terminate with non-OK status,
+ the returned Call-Future's exception value will be an RpcError.
+ """
+ raise NotImplementedError()
+
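Stub methods generated by grpcio-tools are instances of these multi-callables; a sketch of the three invocation styles, assuming hypothetical generated modules demo_pb2/demo_pb2_grpc:

```python
import grpc

import demo_pb2        # hypothetical protoc-generated modules
import demo_pb2_grpc

channel = grpc.insecure_channel('localhost:50051')
stub = demo_pb2_grpc.DemoStub(channel)

# 1. Blocking call returning only the response.
reply = stub.GetThing(demo_pb2.ThingRequest(id=1), timeout=3)

# 2. Blocking call returning the response and the Call object.
reply, call = stub.GetThing.with_call(demo_pb2.ThingRequest(id=1))
print(call.code(), call.details())

# 3. Asynchronous call returning a Call-Future.
future = stub.GetThing.future(demo_pb2.ThingRequest(id=1))
print(future.result(timeout=3))
```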
+
+class UnaryStreamMultiCallable(six.with_metaclass(abc.ABCMeta)):
+ """Affords invoking a unary-stream RPC from client-side."""
+
+ @abc.abstractmethod
+ def __call__(self,
+ request,
+ timeout=None,
+ metadata=None,
+ credentials=None,
+ wait_for_ready=None,
+ compression=None):
+ """Invokes the underlying RPC.
+
+ Args:
+ request: The request value for the RPC.
+ timeout: An optional duration of time in seconds to allow for
+ the RPC. If None, the timeout is considered infinite.
+ metadata: An optional :term:`metadata` to be transmitted to the
+ service-side of the RPC.
+ credentials: An optional CallCredentials for the RPC. Only valid for
+ secure Channel.
+ wait_for_ready: This is an EXPERIMENTAL argument. An optional
+ flag to enable :term:`wait_for_ready` mechanism.
+ compression: An element of grpc.compression, e.g.
+ grpc.compression.Gzip. This is an EXPERIMENTAL option.
+
+ Returns:
+ An object that is both a Call for the RPC and an iterator of
+ response values. Drawing response values from the returned
+ Call-iterator may raise RpcError indicating termination of the
+ RPC with non-OK status.
+ """
+ raise NotImplementedError()
+
+
+class StreamUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
+ """Affords invoking a stream-unary RPC from client-side."""
+
+ @abc.abstractmethod
+ def __call__(self,
+ request_iterator,
+ timeout=None,
+ metadata=None,
+ credentials=None,
+ wait_for_ready=None,
+ compression=None):
+ """Synchronously invokes the underlying RPC.
+
+ Args:
+ request_iterator: An iterator that yields request values for
+ the RPC.
+ timeout: An optional duration of time in seconds to allow for
+ the RPC. If None, the timeout is considered infinite.
+ metadata: Optional :term:`metadata` to be transmitted to the
+ service-side of the RPC.
+ credentials: An optional CallCredentials for the RPC. Only valid for
+ secure Channel.
+ wait_for_ready: This is an EXPERIMENTAL argument. An optional
+ flag to enable :term:`wait_for_ready` mechanism.
+ compression: An element of grpc.compression, e.g.
+ grpc.compression.Gzip. This is an EXPERIMENTAL option.
+
+ Returns:
+ The response value for the RPC.
+
+ Raises:
+ RpcError: Indicating that the RPC terminated with non-OK status. The
+ raised RpcError will also implement grpc.Call, affording methods
+ such as metadata, code, and details.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def with_call(self,
+ request_iterator,
+ timeout=None,
+ metadata=None,
+ credentials=None,
+ wait_for_ready=None,
+ compression=None):
+ """Synchronously invokes the underlying RPC on the client.
+
+ Args:
+ request_iterator: An iterator that yields request values for
+ the RPC.
+ timeout: An optional duration of time in seconds to allow for
+ the RPC. If None, the timeout is considered infinite.
+ metadata: Optional :term:`metadata` to be transmitted to the
+ service-side of the RPC.
+ credentials: An optional CallCredentials for the RPC. Only valid for
+ secure Channel.
+ wait_for_ready: This is an EXPERIMENTAL argument. An optional
+ flag to enable :term:`wait_for_ready` mechanism.
+ compression: An element of grpc.compression, e.g.
+ grpc.compression.Gzip. This is an EXPERIMENTAL option.
+
+ Returns:
+ The response value for the RPC and a Call object for the RPC.
+
+ Raises:
+ RpcError: Indicating that the RPC terminated with non-OK status. The
+ raised RpcError will also be a Call for the RPC affording the RPC's
+ metadata, status code, and details.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def future(self,
+ request_iterator,
+ timeout=None,
+ metadata=None,
+ credentials=None,
+ wait_for_ready=None,
+ compression=None):
+ """Asynchronously invokes the underlying RPC on the client.
+
+ Args:
+ request_iterator: An iterator that yields request values for the RPC.
+ timeout: An optional duration of time in seconds to allow for
+ the RPC. If None, the timeout is considered infinite.
+ metadata: Optional :term:`metadata` to be transmitted to the
+ service-side of the RPC.
+ credentials: An optional CallCredentials for the RPC. Only valid for
+ secure Channel.
+ wait_for_ready: This is an EXPERIMENTAL argument. An optional
+ flag to enable :term:`wait_for_ready` mechanism.
+ compression: An element of grpc.compression, e.g.
+ grpc.compression.Gzip. This is an EXPERIMENTAL option.
+
+ Returns:
+ An object that is both a Call for the RPC and a Future.
+ In the event of RPC completion, the returned Call-Future's result value
+ will be the response message of the RPC. Should the event terminate
+ with non-OK status, the returned Call-Future's exception value will
+ be an RpcError.
+ """
+ raise NotImplementedError()
+
+
+class StreamStreamMultiCallable(six.with_metaclass(abc.ABCMeta)):
+ """Affords invoking a stream-stream RPC on client-side."""
+
+ @abc.abstractmethod
+ def __call__(self,
+ request_iterator,
+ timeout=None,
+ metadata=None,
+ credentials=None,
+ wait_for_ready=None,
+ compression=None):
+ """Invokes the underlying RPC on the client.
+
+ Args:
+ request_iterator: An iterator that yields request values for the RPC.
+ timeout: An optional duration of time in seconds to allow for
+ the RPC. If not specified, the timeout is considered infinite.
+ metadata: Optional :term:`metadata` to be transmitted to the
+ service-side of the RPC.
+ credentials: An optional CallCredentials for the RPC. Only valid for
+ secure Channel.
+ wait_for_ready: This is an EXPERIMENTAL argument. An optional
+ flag to enable :term:`wait_for_ready` mechanism.
+ compression: An element of grpc.compression, e.g.
+ grpc.compression.Gzip. This is an EXPERIMENTAL option.
+
+ Returns:
+ An object that is both a Call for the RPC and an iterator of
+ response values. Drawing response values from the returned
+ Call-iterator may raise RpcError indicating termination of the
+ RPC with non-OK status.
+ """
+ raise NotImplementedError()
+
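For the stream-stream case the request side is any iterator and the return value doubles as the response iterator; a sketch using the same hypothetical modules as above:

```python
import grpc

import demo_pb2        # hypothetical protoc-generated modules
import demo_pb2_grpc

def requests():
    for i in range(3):
        yield demo_pb2.ChatMessage(text='message %d' % i)

channel = grpc.insecure_channel('localhost:50051')
stub = demo_pb2_grpc.ChatStub(channel)

# The returned object is both a Call and an iterator of responses;
# iterating may raise RpcError on non-OK termination.
for response in stub.Chat(requests(), timeout=10):
    print(response.text)
```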
+
+############################# Channel Interface ##############################
+
+
+class Channel(six.with_metaclass(abc.ABCMeta)):
+ """Affords RPC invocation via generic methods on client-side.
+
+ Channel objects implement the Context Manager type, although they need not
+ support being entered and exited multiple times.
+ """
+
+ @abc.abstractmethod
+ def subscribe(self, callback, try_to_connect=False):
+ """Subscribe to this Channel's connectivity state machine.
+
+ A Channel may be in any of the states described by ChannelConnectivity.
+ This method allows the application to monitor state transitions.
+ The typical use case is to debug or gain better visibility into the
+ gRPC runtime's state.
+
+ Args:
+ callback: A callable to be invoked with ChannelConnectivity argument.
+ ChannelConnectivity describes current state of the channel.
+ The callable will be invoked immediately upon subscription
+ and again for every change to ChannelConnectivity until it
+ is unsubscribed or this Channel object goes out of scope.
+ try_to_connect: A boolean indicating whether or not this Channel
+ should attempt to connect immediately. If set to False, gRPC
+ runtime decides when to connect.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def unsubscribe(self, callback):
+ """Unsubscribes a subscribed callback from this Channel's connectivity.
+
+ Args:
+ callback: A callable previously registered with this Channel from
+ having been passed to its "subscribe" method.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def unary_unary(self,
+ method,
+ request_serializer=None,
+ response_deserializer=None):
+ """Creates a UnaryUnaryMultiCallable for a unary-unary method.
+
+ Args:
+ method: The name of the RPC method.
+ request_serializer: Optional :term:`serializer` for serializing the request
+ message. Request goes unserialized in case None is passed.
+ response_deserializer: Optional :term:`deserializer` for deserializing the
+ response message. Response goes undeserialized in case None
+ is passed.
+
+ Returns:
+ A UnaryUnaryMultiCallable value for the named unary-unary method.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def unary_stream(self,
+ method,
+ request_serializer=None,
+ response_deserializer=None):
+ """Creates a UnaryStreamMultiCallable for a unary-stream method.
+
+ Args:
+ method: The name of the RPC method.
+ request_serializer: Optional :term:`serializer` for serializing the request
+ message. Request goes unserialized in case None is passed.
+ response_deserializer: Optional :term:`deserializer` for deserializing the
+ response message. Response goes undeserialized in case None is
+ passed.
+
+ Returns:
+ A UnaryStreamMultiCallable value for the named unary-stream method.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def stream_unary(self,
+ method,
+ request_serializer=None,
+ response_deserializer=None):
+ """Creates a StreamUnaryMultiCallable for a stream-unary method.
+
+ Args:
+ method: The name of the RPC method.
+ request_serializer: Optional :term:`serializer` for serializing the request
+ message. Request goes unserialized in case None is passed.
+ response_deserializer: Optional :term:`deserializer` for deserializing the
+ response message. Response goes undeserialized in case None is
+ passed.
+
+ Returns:
+ A StreamUnaryMultiCallable value for the named stream-unary method.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def stream_stream(self,
+ method,
+ request_serializer=None,
+ response_deserializer=None):
+ """Creates a StreamStreamMultiCallable for a stream-stream method.
+
+ Args:
+ method: The name of the RPC method.
+ request_serializer: Optional :term:`serializer` for serializing the request
+ message. Request goes unserialized in case None is passed.
+ response_deserializer: Optional :term:`deserializer` for deserializing the
+ response message. Response goes undeserialized in case None
+ is passed.
+
+ Returns:
+ A StreamStreamMultiCallable value for the named stream-stream method.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def close(self):
+ """Closes this Channel and releases all resources held by it.
+
+ Closing the Channel will immediately terminate all RPCs active with the
+ Channel and it is not valid to invoke new RPCs with the Channel.
+
+ This method is idempotent.
+ """
+ raise NotImplementedError()
+
+ def __enter__(self):
+ """Enters the runtime context related to the channel object."""
+ raise NotImplementedError()
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ """Exits the runtime context related to the channel object."""
+ raise NotImplementedError()
+
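A short sketch of the connectivity subscription and the context-manager behavior; the address is an assumption:

```python
import grpc

def on_state_change(connectivity):
    # Invoked with a grpc.ChannelConnectivity value on every transition.
    print('channel state:', connectivity)

# close() is called automatically on exiting the with-block.
with grpc.insecure_channel('localhost:50051') as channel:
    channel.subscribe(on_state_change, try_to_connect=True)
    # ... invoke RPCs here ...
    channel.unsubscribe(on_state_change)
```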
+
+########################## Service-Side Context ##############################
+
+
+class ServicerContext(six.with_metaclass(abc.ABCMeta, RpcContext)):
+ """A context object passed to method implementations."""
+
+ @abc.abstractmethod
+ def invocation_metadata(self):
+ """Accesses the metadata from the sent by the client.
+
+ Returns:
+ The invocation :term:`metadata`.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def peer(self):
+ """Identifies the peer that invoked the RPC being serviced.
+
+ Returns:
+ A string identifying the peer that invoked the RPC being serviced.
+ The string format is determined by gRPC runtime.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def peer_identities(self):
+ """Gets one or more peer identity(s).
+
+ Equivalent to
+ servicer_context.auth_context().get(servicer_context.peer_identity_key())
+
+ Returns:
+ An iterable of the identities, or None if the call is not
+ authenticated. Each identity is returned as a raw bytes type.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def peer_identity_key(self):
+ """The auth property used to identify the peer.
+
+ For example, "x509_common_name" or "x509_subject_alternative_name" are
+ used to identify an SSL peer.
+
+ Returns:
+ The auth property (string) that indicates the
+ peer identity, or None if the call is not authenticated.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def auth_context(self):
+ """Gets the auth context for the call.
+
+ Returns:
+ A map of strings to an iterable of bytes for each auth property.
+ """
+ raise NotImplementedError()
+
+ def set_compression(self, compression):
+ """Set the compression algorithm to be used for the entire call.
+
+ This is an EXPERIMENTAL method.
+
+ Args:
+ compression: An element of grpc.Compression, e.g.
+ grpc.Compression.Gzip.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def send_initial_metadata(self, initial_metadata):
+ """Sends the initial metadata value to the client.
+
+ This method need not be called by implementations if they have no
+ metadata to add to what the gRPC runtime will transmit.
+
+ Args:
+ initial_metadata: The initial :term:`metadata`.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def set_trailing_metadata(self, trailing_metadata):
+ """Sets the trailing metadata for the RPC.
+
+ Sets the trailing metadata to be sent upon completion of the RPC.
+
+ If this method is invoked multiple times throughout the lifetime of an
+ RPC, the value supplied in the final invocation will be the value sent
+ over the wire.
+
+ This method need not be called by implementations if they have no
+ metadata to add to what the gRPC runtime will transmit.
+
+ Args:
+ trailing_metadata: The trailing :term:`metadata`.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def abort(self, code, details):
+ """Raises an exception to terminate the RPC with a non-OK status.
+
+ The code and details passed as arguments will supersede any existing
+ ones.
+
+ Args:
+ code: A StatusCode object to be sent to the client.
+ It must not be StatusCode.OK.
+ details: A UTF-8-encodable string to be sent to the client upon
+ termination of the RPC.
+
+ Raises:
+ Exception: An exception is always raised to signal the abortion of
+ the RPC to the gRPC runtime.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def abort_with_status(self, status):
+ """Raises an exception to terminate the RPC with a non-OK status.
+
+ The status passed as argument will supersede any existing status code,
+ status message and trailing metadata.
+
+ This is an EXPERIMENTAL API.
+
+ Args:
+ status: A grpc.Status object. The status code in it must not be
+ StatusCode.OK.
+
+ Raises:
+ Exception: An exception is always raised to signal the abortion of
+ the RPC to the gRPC runtime.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def set_code(self, code):
+ """Sets the value to be used as status code upon RPC completion.
+
+ This method need not be called by method implementations if they wish
+ the gRPC runtime to determine the status code of the RPC.
+
+ Args:
+ code: A StatusCode object to be sent to the client.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def set_details(self, details):
+ """Sets the value to be used as detail string upon RPC completion.
+
+ This method need not be called by method implementations if they have
+ no details to transmit.
+
+ Args:
+ details: A UTF-8-encodable string to be sent to the client upon
+ termination of the RPC.
+ """
+ raise NotImplementedError()
+
+ def disable_next_message_compression(self):
+ """Disables compression for the next response message.
+
+ This is an EXPERIMENTAL method.
+
+ This method will override any compression configuration set during
+ server creation or set on the call.
+ """
+ raise NotImplementedError()
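+
+# Usage sketch (illustrative only; the servicer method and EchoResponse type
+# are hypothetical): a method implementation receives a ServicerContext as
+# its second argument and may use it as follows:
+#
+#   def Echo(self, request, context):
+#       if not request.message:
+#           context.abort(grpc.StatusCode.INVALID_ARGUMENT, 'Empty message!')
+#       context.set_trailing_metadata((('echo-version', '1'),))
+#       return EchoResponse(message=request.message)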
+
+
+##################### Service-Side Handler Interfaces ########################
+
+
+class RpcMethodHandler(six.with_metaclass(abc.ABCMeta)):
+ """An implementation of a single RPC method.
+
+ Attributes:
+ request_streaming: Whether the RPC supports exactly one request message
+ or any arbitrary number of request messages.
+ response_streaming: Whether the RPC supports exactly one response message
+ or any arbitrary number of response messages.
+ request_deserializer: A callable :term:`deserializer` that accepts a byte string and
+ returns an object suitable to be passed to this object's business
+ logic, or None to indicate that this object's business logic should be
+ passed the raw request bytes.
+ response_serializer: A callable :term:`serializer` that accepts an object produced
+ by this object's business logic and returns a byte string, or None to
+ indicate that the byte strings produced by this object's business logic
+ should be transmitted on the wire as they are.
+ unary_unary: This object's application-specific business logic as a
+ callable value that takes a request value and a ServicerContext object
+ and returns a response value. Only non-None if both request_streaming
+ and response_streaming are False.
+ unary_stream: This object's application-specific business logic as a
+ callable value that takes a request value and a ServicerContext object
+ and returns an iterator of response values. Only non-None if
+ request_streaming is False and response_streaming is True.
+ stream_unary: This object's application-specific business logic as a
+ callable value that takes an iterator of request values and a
+ ServicerContext object and returns a response value. Only non-None if
+ request_streaming is True and response_streaming is False.
+ stream_stream: This object's application-specific business logic as a
+ callable value that takes an iterator of request values and a
+ ServicerContext object and returns an iterator of response values.
+ Only non-None if request_streaming and response_streaming are both
+ True.
+ """
+
+
+class HandlerCallDetails(six.with_metaclass(abc.ABCMeta)):
+ """Describes an RPC that has just arrived for service.
+
+ Attributes:
+ method: The method name of the RPC.
+ invocation_metadata: The :term:`metadata` sent by the client.
+ """
+
+
+class GenericRpcHandler(six.with_metaclass(abc.ABCMeta)):
+ """An implementation of arbitrarily many RPC methods."""
+
+ @abc.abstractmethod
+ def service(self, handler_call_details):
+ """Returns the handler for servicing the RPC.
+
+ Args:
+ handler_call_details: A HandlerCallDetails describing the RPC.
+
+ Returns:
+ An RpcMethodHandler with which the RPC may be serviced if the
+ implementation chooses to service this RPC, or None otherwise.
+ """
+ raise NotImplementedError()
+
+
+class ServiceRpcHandler(six.with_metaclass(abc.ABCMeta, GenericRpcHandler)):
+ """An implementation of RPC methods belonging to a service.
+
+ A service handles RPC methods with structured names of the form
+ '/Service.Name/Service.Method', where 'Service.Name' is the value
+ returned by service_name(), and 'Service.Method' is the method
+ name. A service can have multiple method names, but only a single
+ service name.
+ """
+
+ @abc.abstractmethod
+ def service_name(self):
+ """Returns this service's name.
+
+ Returns:
+ The service name.
+ """
+ raise NotImplementedError()
+
+
+#################### Service-Side Interceptor Interfaces #####################
+
+
+class ServerInterceptor(six.with_metaclass(abc.ABCMeta)):
+ """Affords intercepting incoming RPCs on the service-side.
+
+ This is an EXPERIMENTAL API.
+ """
+
+ @abc.abstractmethod
+ def intercept_service(self, continuation, handler_call_details):
+ """Intercepts incoming RPCs before handing them over to a handler.
+
+ Args:
+ continuation: A function that takes a HandlerCallDetails and
+ proceeds to invoke the next interceptor in the chain, if any,
+ or the RPC handler lookup logic, with the call details passed
+ as an argument, and returns an RpcMethodHandler instance if
+ the RPC is considered serviced, or None otherwise.
+ handler_call_details: A HandlerCallDetails describing the RPC.
+
+ Returns:
+ An RpcMethodHandler with which the RPC may be serviced if the
+ interceptor chooses to service this RPC, or None otherwise.
+ """
+ raise NotImplementedError()
+
+
+############################# Server Interface ###############################
+
+
+class Server(six.with_metaclass(abc.ABCMeta)):
+ """Services RPCs."""
+
+ @abc.abstractmethod
+ def add_generic_rpc_handlers(self, generic_rpc_handlers):
+ """Registers GenericRpcHandlers with this Server.
+
+ This method is only safe to call before the server is started.
+
+ Args:
+ generic_rpc_handlers: An iterable of GenericRpcHandlers that will be
+ used to service RPCs.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def add_insecure_port(self, address):
+ """Opens an insecure port for accepting RPCs.
+
+ This method may only be called before starting the server.
+
+ Args:
+ address: The address for which to open a port. If the port is 0,
+ or not specified in the address, then the gRPC runtime will choose a port.
+
+ Returns:
+ An integer port on which server will accept RPC requests.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def add_secure_port(self, address, server_credentials):
+ """Opens a secure port for accepting RPCs.
+
+ This method may only be called before starting the server.
+
+ Args:
+ address: The address for which to open a port. If the port is 0,
+ or not specified in the address, then the gRPC runtime will choose
+ a port.
+ server_credentials: A ServerCredentials object.
+
+ Returns:
+ An integer port on which server will accept RPC requests.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def start(self):
+ """Starts this Server.
+
+ This method may only be called once (i.e. it is not idempotent).
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def stop(self, grace):
+ """Stops this Server.
+
+ This method immediately stops service of new RPCs in all cases.
+
+ If a grace period is specified, this method returns immediately
+ and all RPCs active at the end of the grace period are aborted.
+ If a grace period is not specified (by passing None for `grace`),
+ all existing RPCs are aborted immediately and this method
+ blocks until the last RPC handler terminates.
+
+ This method is idempotent and may be called at any time.
+ Passing a smaller grace value in a subsequent call will have
+ the effect of stopping the Server sooner (passing None will
+ have the effect of stopping the server immediately). Passing
+ a larger grace value in a subsequent call *will not* have the
+ effect of stopping the server later (i.e. the most restrictive
+ grace value is used).
+
+ Args:
+ grace: A duration of time in seconds or None.
+
+ Returns:
+ A threading.Event that will be set when this Server has completely
+ stopped, i.e. when running RPCs either complete or are aborted and
+ all handlers have terminated.
+ """
+ raise NotImplementedError()
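+
+ # Grace-period sketch (illustrative only; `server` is a started instance):
+ #
+ #   event = server.stop(grace=5.0)  # new RPCs are rejected immediately
+ #   event.wait()                    # in-flight RPCs get up to five seconds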
+
+ def wait_for_termination(self, timeout=None):
+ """Block current thread until the server stops.
+
+ This is an EXPERIMENTAL API.
+
+ The wait will not consume computational resources during blocking, and
+ it will block until one of the two following conditions is met:
+
+ 1) The server is stopped or terminated;
+ 2) A timeout occurs if timeout is not `None`.
+
+ The timeout argument works in the same way as `threading.Event.wait()`.
+ https://docs.python.org/3/library/threading.html#threading.Event.wait
+
+ Args:
+ timeout: A floating point number specifying a timeout for the
+ operation in seconds.
+
+ Returns:
+ A bool indicating whether the operation timed out.
+ """
+ raise NotImplementedError()
+
+
+################################# Functions ################################
+
+
+def unary_unary_rpc_method_handler(behavior,
+ request_deserializer=None,
+ response_serializer=None):
+ """Creates an RpcMethodHandler for a unary-unary RPC method.
+
+ Args:
+ behavior: The implementation of an RPC that accepts one request
+ and returns one response.
+ request_deserializer: An optional :term:`deserializer` for request deserialization.
+ response_serializer: An optional :term:`serializer` for response serialization.
+
+ Returns:
+ An RpcMethodHandler object that is typically used by grpc.Server.
+ """
+ from grpc import _utilities # pylint: disable=cyclic-import
+ return _utilities.RpcMethodHandler(False, False, request_deserializer,
+ response_serializer, behavior, None,
+ None, None)
+
+
+def unary_stream_rpc_method_handler(behavior,
+ request_deserializer=None,
+ response_serializer=None):
+ """Creates an RpcMethodHandler for a unary-stream RPC method.
+
+ Args:
+ behavior: The implementation of an RPC that accepts one request
+ and returns an iterator of response values.
+ request_deserializer: An optional :term:`deserializer` for request deserialization.
+ response_serializer: An optional :term:`serializer` for response serialization.
+
+ Returns:
+ An RpcMethodHandler object that is typically used by grpc.Server.
+ """
+ from grpc import _utilities # pylint: disable=cyclic-import
+ return _utilities.RpcMethodHandler(False, True, request_deserializer,
+ response_serializer, None, behavior,
+ None, None)
+
+
+def stream_unary_rpc_method_handler(behavior,
+ request_deserializer=None,
+ response_serializer=None):
+ """Creates an RpcMethodHandler for a stream-unary RPC method.
+
+ Args:
+ behavior: The implementation of an RPC that accepts an iterator of
+ request values and returns a single response value.
+ request_deserializer: An optional :term:`deserializer` for request deserialization.
+ response_serializer: An optional :term:`serializer` for response serialization.
+
+ Returns:
+ An RpcMethodHandler object that is typically used by grpc.Server.
+ """
+ from grpc import _utilities # pylint: disable=cyclic-import
+ return _utilities.RpcMethodHandler(True, False, request_deserializer,
+ response_serializer, None, None,
+ behavior, None)
+
+
+def stream_stream_rpc_method_handler(behavior,
+ request_deserializer=None,
+ response_serializer=None):
+ """Creates an RpcMethodHandler for a stream-stream RPC method.
+
+ Args:
+ behavior: The implementation of an RPC that accepts an iterator of
+ request values and returns an iterator of response values.
+ request_deserializer: An optional :term:`deserializer` for request deserialization.
+ response_serializer: An optional :term:`serializer` for response serialization.
+
+ Returns:
+ An RpcMethodHandler object that is typically used by grpc.Server.
+ """
+ from grpc import _utilities # pylint: disable=cyclic-import
+ return _utilities.RpcMethodHandler(True, True, request_deserializer,
+ response_serializer, None, None, None,
+ behavior)
+
+
+def method_handlers_generic_handler(service, method_handlers):
+ """Creates a GenericRpcHandler from RpcMethodHandlers.
+
+ Args:
+ service: The name of the service that is implemented by the
+ method_handlers.
+ method_handlers: A dictionary that maps method names to corresponding
+ RpcMethodHandler.
+
+ Returns:
+ A GenericRpcHandler. This is typically added to the grpc.Server object
+ with add_generic_rpc_handlers() before starting the server.
+ """
+ from grpc import _utilities # pylint: disable=cyclic-import
+ return _utilities.DictionaryGenericHandler(service, method_handlers)
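+
+# Registration sketch (illustrative; the service name and 'Echo' method are
+# hypothetical): the handler factories above are typically combined as
+#
+#   handler = method_handlers_generic_handler(
+#       'example.EchoService',
+#       {'Echo': unary_unary_rpc_method_handler(lambda request, context: request)})
+#   server.add_generic_rpc_handlers((handler,))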
+
+
+def ssl_channel_credentials(root_certificates=None,
+ private_key=None,
+ certificate_chain=None):
+ """Creates a ChannelCredentials for use with an SSL-enabled Channel.
+
+ Args:
+ root_certificates: The PEM-encoded root certificates as a byte string,
+ or None to retrieve them from a default location chosen by gRPC
+ runtime.
+ private_key: The PEM-encoded private key as a byte string, or None if no
+ private key should be used.
+ certificate_chain: The PEM-encoded certificate chain as a byte string
+ to use or None if no certificate chain should be used.
+
+ Returns:
+ A ChannelCredentials for use with an SSL-enabled Channel.
+ """
+ return ChannelCredentials(
+ _cygrpc.SSLChannelCredentials(root_certificates, private_key,
+ certificate_chain))
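+
+# Client-side TLS sketch (illustrative; the certificate path and target are
+# hypothetical):
+#
+#   with open('roots.pem', 'rb') as f:
+#       credentials = ssl_channel_credentials(root_certificates=f.read())
+#   channel = secure_channel('myservice.example.com:443', credentials)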
+
+
+def metadata_call_credentials(metadata_plugin, name=None):
+ """Construct CallCredentials from an AuthMetadataPlugin.
+
+ Args:
+ metadata_plugin: An AuthMetadataPlugin to use for authentication.
+ name: An optional name for the plugin.
+
+ Returns:
+ A CallCredentials.
+ """
+ from grpc import _plugin_wrapping # pylint: disable=cyclic-import
+ return _plugin_wrapping.metadata_plugin_call_credentials(
+ metadata_plugin, name)
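+
+# Plugin sketch (illustrative; the plugin class and header are hypothetical):
+# an AuthMetadataPlugin is a callable invoked with a context and a callback,
+# and reports metadata (or an error) through the callback:
+#
+#   class StaticKeyPlugin(grpc.AuthMetadataPlugin):
+#       def __call__(self, context, callback):
+#           callback((('x-api-key', 'my-key'),), None)
+#
+#   call_credentials = metadata_call_credentials(StaticKeyPlugin())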
+
+
+def access_token_call_credentials(access_token):
+ """Construct CallCredentials from an access token.
+
+ Args:
+ access_token: A string to place directly in the http request
+ authorization header, for example
+ "authorization: Bearer ".
+
+ Returns:
+ A CallCredentials.
+ """
+ from grpc import _auth # pylint: disable=cyclic-import
+ from grpc import _plugin_wrapping # pylint: disable=cyclic-import
+ return _plugin_wrapping.metadata_plugin_call_credentials(
+ _auth.AccessTokenAuthMetadataPlugin(access_token), None)
+
+
+def composite_call_credentials(*call_credentials):
+ """Compose multiple CallCredentials to make a new CallCredentials.
+
+ Args:
+ *call_credentials: At least two CallCredentials objects.
+
+ Returns:
+ A CallCredentials object composed of the given CallCredentials objects.
+ """
+ return CallCredentials(
+ _cygrpc.CompositeCallCredentials(
+ tuple(single_call_credentials._credentials
+ for single_call_credentials in call_credentials)))
+
+
+def composite_channel_credentials(channel_credentials, *call_credentials):
+ """Compose a ChannelCredentials and one or more CallCredentials objects.
+
+ Args:
+ channel_credentials: A ChannelCredentials object.
+ *call_credentials: One or more CallCredentials objects.
+
+ Returns:
+ A ChannelCredentials composed of the given ChannelCredentials and
+ CallCredentials objects.
+ """
+ return ChannelCredentials(
+ _cygrpc.CompositeChannelCredentials(
+ tuple(single_call_credentials._credentials
+ for single_call_credentials in call_credentials),
+ channel_credentials._credentials))
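+
+# Composition sketch (illustrative; the token value and target are
+# hypothetical):
+#
+#   channel_credentials = composite_channel_credentials(
+#       ssl_channel_credentials(),
+#       access_token_call_credentials('my-access-token'))
+#   channel = secure_channel('myservice.example.com:443', channel_credentials)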
+
+
+def ssl_server_credentials(private_key_certificate_chain_pairs,
+ root_certificates=None,
+ require_client_auth=False):
+ """Creates a ServerCredentials for use with an SSL-enabled Server.
+
+ Args:
+ private_key_certificate_chain_pairs: A list of pairs of the form
+ [PEM-encoded private key, PEM-encoded certificate chain].
+ root_certificates: An optional byte string of PEM-encoded client root
+ certificates that the server will use to verify client authentication.
+ If omitted, require_client_auth must also be False.
+ require_client_auth: A boolean indicating whether or not to require
+ clients to be authenticated. May only be True if root_certificates
+ is not None.
+
+ Returns:
+ A ServerCredentials for use with an SSL-enabled Server. Typically, this
+ object is an argument to add_secure_port() method during server setup.
+ """
+ if not private_key_certificate_chain_pairs:
+ raise ValueError(
+ 'At least one private key-certificate chain pair is required!')
+ elif require_client_auth and root_certificates is None:
+ raise ValueError(
+ 'Illegal to require client auth without providing root certificates!'
+ )
+ else:
+ return ServerCredentials(
+ _cygrpc.server_credentials_ssl(root_certificates, [
+ _cygrpc.SslPemKeyCertPair(key, pem)
+ for key, pem in private_key_certificate_chain_pairs
+ ], require_client_auth))
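+
+# Server-side TLS sketch (illustrative; the key and certificate paths are
+# hypothetical):
+#
+#   with open('server.key', 'rb') as f:
+#       private_key = f.read()
+#   with open('server.crt', 'rb') as f:
+#       certificate_chain = f.read()
+#   credentials = ssl_server_credentials([(private_key, certificate_chain)])
+#   port = server.add_secure_port('[::]:443', credentials)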
+
+
+def ssl_server_certificate_configuration(private_key_certificate_chain_pairs,
+ root_certificates=None):
+ """Creates a ServerCertificateConfiguration for use with a Server.
+
+ Args:
+ private_key_certificate_chain_pairs: A collection of pairs of
+ the form [PEM-encoded private key, PEM-encoded certificate
+ chain].
+ root_certificates: An optional byte string of PEM-encoded client root
+ certificates that the server will use to verify client authentication.
+
+ Returns:
+ A ServerCertificateConfiguration that can be returned in the certificate
+ configuration fetching callback.
+ """
+ if private_key_certificate_chain_pairs:
+ return ServerCertificateConfiguration(
+ _cygrpc.server_certificate_config_ssl(root_certificates, [
+ _cygrpc.SslPemKeyCertPair(key, pem)
+ for key, pem in private_key_certificate_chain_pairs
+ ]))
+ else:
+ raise ValueError(
+ 'At least one private key-certificate chain pair is required!')
+
+
+def dynamic_ssl_server_credentials(initial_certificate_configuration,
+ certificate_configuration_fetcher,
+ require_client_authentication=False):
+ """Creates a ServerCredentials for use with an SSL-enabled Server.
+
+ Args:
+ initial_certificate_configuration (ServerCertificateConfiguration): The
+ certificate configuration with which the server will be initialized.
+ certificate_configuration_fetcher (callable): A callable that takes no
+ arguments and should return a ServerCertificateConfiguration to
+ replace the server's current certificate, or None for no change
+ (i.e., the server will continue its current certificate
+ config). The library will call this callback on *every* new
+ client connection before starting the TLS handshake with the
+ client, thus allowing the user application to optionally
+ return a new ServerCertificateConfiguration that the server will then
+ use for the handshake.
+ require_client_authentication: A boolean indicating whether or not to
+ require clients to be authenticated.
+
+ Returns:
+ A ServerCredentials.
+ """
+ return ServerCredentials(
+ _cygrpc.server_credentials_ssl_dynamic_cert_config(
+ initial_certificate_configuration,
+ certificate_configuration_fetcher, require_client_authentication))
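+
+# Fetcher sketch (illustrative; `initial_config` and `_load_current_config`
+# are hypothetical): the fetcher takes no arguments and returns either a new
+# ServerCertificateConfiguration or None to keep the current one:
+#
+#   def fetcher():
+#       return _load_current_config()  # or None for no change
+#
+#   credentials = dynamic_ssl_server_credentials(initial_config, fetcher)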
+
+
+@enum.unique
+class LocalConnectionType(enum.Enum):
+ """Types of local connection for local credential creation.
+
+ Attributes:
+ UDS: Unix domain socket connections
+ LOCAL_TCP: Local TCP connections.
+ """
+ UDS = _cygrpc.LocalConnectionType.uds
+ LOCAL_TCP = _cygrpc.LocalConnectionType.local_tcp
+
+
+def local_channel_credentials(local_connect_type=LocalConnectionType.LOCAL_TCP):
+ """Creates a local ChannelCredentials used for local connections.
+
+ This is an EXPERIMENTAL API.
+
+ Local credentials are used by local TCP endpoints (e.g. localhost:10000)
+ as well as UDS connections.
+
+ The connections created by local channel credentials are not
+ encrypted, but they are verified to be local.
+ UDS connections are considered secure, providing peer authentication
+ and data confidentiality, while TCP connections are considered insecure.
+
+ It is allowed to transmit call credentials over connections created by
+ local channel credentials.
+
+ Local channel credentials are useful for 1) eliminating insecure_channel usage;
+ 2) enabling unit testing for call credentials without setting up secrets.
+
+ Args:
+ local_connect_type: Local connection type (either
+ grpc.LocalConnectionType.UDS or grpc.LocalConnectionType.LOCAL_TCP)
+
+ Returns:
+ A ChannelCredentials for use with a local Channel.
+ """
+ return ChannelCredentials(
+ _cygrpc.channel_credentials_local(local_connect_type.value))
+
+
+def local_server_credentials(local_connect_type=LocalConnectionType.LOCAL_TCP):
+ """Creates a local ServerCredentials used for local connections.
+
+ This is an EXPERIMENTAL API.
+
+ Local credentials are used by local TCP endpoints (e.g. localhost:10000)
+ as well as UDS connections.
+
+ The connections created by local server credentials are not
+ encrypted, but they are verified to be local.
+ UDS connections are considered secure, providing peer authentication
+ and data confidentiality, while TCP connections are considered insecure.
+
+ It is allowed to transmit call credentials over connections created by local
+ server credentials.
+
+ Local server credentials are useful for 1) eliminating insecure_channel usage;
+ 2) enabling unit testing for call credentials without setting up secrets.
+
+ Args:
+ local_connect_type: Local connection type (either
+ grpc.LocalConnectionType.UDS or grpc.LocalConnectionType.LOCAL_TCP)
+
+ Returns:
+ A ServerCredentials for use with a local Server.
+ """
+ return ServerCredentials(
+ _cygrpc.server_credentials_local(local_connect_type.value))
+
+
+def alts_channel_credentials(service_accounts=None):
+ """Creates a ChannelCredentials for use with an ALTS-enabled Channel.
+
+ This is an EXPERIMENTAL API.
+ ALTS credentials API can only be used in GCP environment as it relies on
+ handshaker service being available. For more info about ALTS see
+ https://cloud.google.com/security/encryption-in-transit/application-layer-transport-security
+
+ Args:
+ service_accounts: A list of server identities accepted by the client.
+ If target service accounts are provided and none of them matches the
+ peer identity of the server, handshake will fail. The arg can be empty
+ if the client does not have any information about trusted server
+ identity.
+
+ Returns:
+ A ChannelCredentials for use with an ALTS-enabled Channel.
+ """
+ return ChannelCredentials(
+ _cygrpc.channel_credentials_alts(service_accounts or []))
+
+
+def alts_server_credentials():
+ """Creates a ServerCredentials for use with an ALTS-enabled connection.
+
+ This is an EXPERIMENTAL API.
+ ALTS credentials API can only be used in GCP environment as it relies on
+ handshaker service being available. For more info about ALTS see
+ https://cloud.google.com/security/encryption-in-transit/application-layer-transport-security
+
+ Returns:
+ A ServerCredentials for use with an ALTS-enabled Server.
+ """
+ return ServerCredentials(_cygrpc.server_credentials_alts())
+
+
+def compute_engine_channel_credentials(call_credentials):
+ """Creates a compute engine channel credential.
+
+ This credential can only be used in a GCP environment as it relies on
+ a handshaker service. For more info about ALTS, see
+ https://cloud.google.com/security/encryption-in-transit/application-layer-transport-security
+
+ This channel credential is expected to be used as part of a composite
+ credential in conjunction with a call credentials that authenticates the
+ VM's default service account. If used with any other sort of call
+ credential, the connection may suddenly and unexpectedly begin failing RPCs.
+ """
+ return ChannelCredentials(
+ _cygrpc.channel_credentials_compute_engine(
+ call_credentials._credentials))
+
+
+def channel_ready_future(channel):
+ """Creates a Future that tracks when a Channel is ready.
+
+ Cancelling the Future does not affect the channel's state machine.
+ It merely decouples the Future from the channel state machine.
+
+ Args:
+ channel: A Channel object.
+
+ Returns:
+ A Future object that matures when the channel connectivity is
+ ChannelConnectivity.READY.
+ """
+ from grpc import _utilities # pylint: disable=cyclic-import
+ return _utilities.channel_ready_future(channel)
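+
+# Readiness sketch (illustrative only):
+#
+#   future = channel_ready_future(channel)
+#   future.result(timeout=10)  # raises grpc.FutureTimeoutError if not ready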
+
+
+def insecure_channel(target, options=None, compression=None):
+ """Creates an insecure Channel to a server.
+
+ The returned Channel is thread-safe.
+
+ Args:
+ target: The server address.
+ options: An optional list of key-value pairs (:term:`channel_arguments`
+ in gRPC Core runtime) to configure the channel.
+ compression: An optional value indicating the compression method to be
+ used over the lifetime of the channel. This is an EXPERIMENTAL option.
+
+ Returns:
+ A Channel.
+ """
+ from grpc import _channel # pylint: disable=cyclic-import
+ return _channel.Channel(target, () if options is None else options, None,
+ compression)
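+
+# Invocation sketch (illustrative; the method path is hypothetical): with no
+# serializers supplied, requests and responses travel as raw bytes:
+#
+#   channel = insecure_channel('localhost:50051')
+#   echo = channel.unary_unary('/example.EchoService/Echo')
+#   response = echo(b'ping', timeout=5)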
+
+
+def secure_channel(target, credentials, options=None, compression=None):
+ """Creates a secure Channel to a server.
+
+ The returned Channel is thread-safe.
+
+ Args:
+ target: The server address.
+ credentials: A ChannelCredentials instance.
+ options: An optional list of key-value pairs (:term:`channel_arguments`
+ in gRPC Core runtime) to configure the channel.
+ compression: An optional value indicating the compression method to be
+ used over the lifetime of the channel. This is an EXPERIMENTAL option.
+
+ Returns:
+ A Channel.
+ """
+ from grpc import _channel # pylint: disable=cyclic-import
+ from grpc.experimental import _insecure_channel_credentials
+ if credentials._credentials is _insecure_channel_credentials:
+ raise ValueError(
+ "secure_channel cannot be called with insecure credentials." +
+ " Call insecure_channel instead.")
+ return _channel.Channel(target, () if options is None else options,
+ credentials._credentials, compression)
+
+
+def intercept_channel(channel, *interceptors):
+ """Intercepts a channel through a set of interceptors.
+
+ This is an EXPERIMENTAL API.
+
+ Args:
+ channel: A Channel.
+ interceptors: Zero or more objects of type
+ UnaryUnaryClientInterceptor,
+ UnaryStreamClientInterceptor,
+ StreamUnaryClientInterceptor, or
+ StreamStreamClientInterceptor.
+ Interceptors are given control in the order they are listed.
+
+ Returns:
+ A Channel that intercepts each invocation via the provided interceptors.
+
+ Raises:
+ TypeError: If interceptor does not derive from any of
+ UnaryUnaryClientInterceptor,
+ UnaryStreamClientInterceptor,
+ StreamUnaryClientInterceptor, or
+ StreamStreamClientInterceptor.
+ """
+ from grpc import _interceptor # pylint: disable=cyclic-import
+ return _interceptor.intercept_channel(channel, *interceptors)
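+
+# Interceptor sketch (illustrative; a minimal logging interceptor under the
+# assumption that only unary-unary invocations need to be observed):
+#
+#   class LoggingInterceptor(grpc.UnaryUnaryClientInterceptor):
+#       def intercept_unary_unary(self, continuation, client_call_details,
+#                                 request):
+#           print('calling', client_call_details.method)
+#           return continuation(client_call_details, request)
+#
+#   intercepted_channel = intercept_channel(channel, LoggingInterceptor())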
+
+
+def server(thread_pool,
+ handlers=None,
+ interceptors=None,
+ options=None,
+ maximum_concurrent_rpcs=None,
+ compression=None):
+ """Creates a Server with which RPCs can be serviced.
+
+ Args:
+ thread_pool: A futures.ThreadPoolExecutor to be used by the Server
+ to execute RPC handlers.
+ handlers: An optional list of GenericRpcHandlers used for executing RPCs.
+ More handlers may be added by calling add_generic_rpc_handlers any time
+ before the server is started.
+ interceptors: An optional list of ServerInterceptor objects that observe
+ and optionally manipulate the incoming RPCs before handing them over to
+ handlers. The interceptors are given control in the order they are
+ specified. This is an EXPERIMENTAL API.
+ options: An optional list of key-value pairs (:term:`channel_arguments` in gRPC runtime)
+ to configure the server.
+ maximum_concurrent_rpcs: The maximum number of concurrent RPCs this server
+ will service before returning RESOURCE_EXHAUSTED status, or None to
+ indicate no limit.
+ compression: An element of grpc.Compression, e.g.
+ grpc.Compression.Gzip. This compression algorithm will be used for the
+ lifetime of the server unless overridden. This is an EXPERIMENTAL option.
+
+ Returns:
+ A Server object.
+ """
+ from grpc import _server # pylint: disable=cyclic-import
+ return _server.create_server(thread_pool,
+ () if handlers is None else handlers,
+ () if interceptors is None else interceptors,
+ () if options is None else options,
+ maximum_concurrent_rpcs, compression)
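+
+# Bootstrap sketch (illustrative only; handler registration is elided):
+#
+#   from concurrent import futures
+#   my_server = server(futures.ThreadPoolExecutor(max_workers=10))
+#   my_server.add_insecure_port('[::]:50051')
+#   my_server.start()
+#   my_server.wait_for_termination()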
+
+
+@contextlib.contextmanager
+def _create_servicer_context(rpc_event, state, request_deserializer):
+ from grpc import _server # pylint: disable=cyclic-import
+ context = _server._Context(rpc_event, state, request_deserializer)
+ yield context
+ context._finalize_state() # pylint: disable=protected-access
+
+
+@enum.unique
+class Compression(enum.IntEnum):
+ """Indicates the compression method to be used for an RPC.
+
+ This enumeration is part of an EXPERIMENTAL API.
+
+ Attributes:
+ NoCompression: Do not use a compression algorithm.
+ Deflate: Use "Deflate" compression algorithm.
+ Gzip: Use "Gzip" compression algorithm.
+ """
+ NoCompression = _compression.NoCompression
+ Deflate = _compression.Deflate
+ Gzip = _compression.Gzip
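+
+# Usage sketch (illustrative only): the enum values above may be passed
+# wherever a `compression` argument is accepted, e.g.
+#
+#   channel = insecure_channel('localhost:50051',
+#                              compression=Compression.Gzip)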
+
+
+from grpc._runtime_protos import protos, services, protos_and_services # pylint: disable=wrong-import-position
+
+################################### __all__ #################################
+
+__all__ = (
+ 'FutureTimeoutError',
+ 'FutureCancelledError',
+ 'Future',
+ 'ChannelConnectivity',
+ 'StatusCode',
+ 'Status',
+ 'RpcError',
+ 'RpcContext',
+ 'Call',
+ 'ChannelCredentials',
+ 'CallCredentials',
+ 'AuthMetadataContext',
+ 'AuthMetadataPluginCallback',
+ 'AuthMetadataPlugin',
+ 'Compression',
+ 'ClientCallDetails',
+ 'ServerCertificateConfiguration',
+ 'ServerCredentials',
+ 'LocalConnectionType',
+ 'UnaryUnaryMultiCallable',
+ 'UnaryStreamMultiCallable',
+ 'StreamUnaryMultiCallable',
+ 'StreamStreamMultiCallable',
+ 'UnaryUnaryClientInterceptor',
+ 'UnaryStreamClientInterceptor',
+ 'StreamUnaryClientInterceptor',
+ 'StreamStreamClientInterceptor',
+ 'Channel',
+ 'ServicerContext',
+ 'RpcMethodHandler',
+ 'HandlerCallDetails',
+ 'GenericRpcHandler',
+ 'ServiceRpcHandler',
+ 'Server',
+ 'ServerInterceptor',
+ 'unary_unary_rpc_method_handler',
+ 'unary_stream_rpc_method_handler',
+ 'stream_unary_rpc_method_handler',
+ 'stream_stream_rpc_method_handler',
+ 'method_handlers_generic_handler',
+ 'ssl_channel_credentials',
+ 'metadata_call_credentials',
+ 'access_token_call_credentials',
+ 'composite_call_credentials',
+ 'composite_channel_credentials',
+ 'local_channel_credentials',
+ 'local_server_credentials',
+ 'alts_channel_credentials',
+ 'alts_server_credentials',
+ 'ssl_server_credentials',
+ 'ssl_server_certificate_configuration',
+ 'dynamic_ssl_server_credentials',
+ 'channel_ready_future',
+ 'insecure_channel',
+ 'secure_channel',
+ 'intercept_channel',
+ 'server',
+ 'protos',
+ 'services',
+ 'protos_and_services',
+)
+
+############################### Extension Shims ################################
+
+# Here to maintain backwards compatibility; avoid using these in new code!
+try:
+ import grpc_tools
+ sys.modules.update({'grpc.tools': grpc_tools})
+except ImportError:
+ pass
+try:
+ import grpc_health
+ sys.modules.update({'grpc.health': grpc_health})
+except ImportError:
+ pass
+try:
+ import grpc_reflection
+ sys.modules.update({'grpc.reflection': grpc_reflection})
+except ImportError:
+ pass
diff --git a/venv/Lib/site-packages/grpc/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/grpc/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..954e98d48
Binary files /dev/null and b/venv/Lib/site-packages/grpc/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/__pycache__/_auth.cpython-36.pyc b/venv/Lib/site-packages/grpc/__pycache__/_auth.cpython-36.pyc
new file mode 100644
index 000000000..d27f291f5
Binary files /dev/null and b/venv/Lib/site-packages/grpc/__pycache__/_auth.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/__pycache__/_channel.cpython-36.pyc b/venv/Lib/site-packages/grpc/__pycache__/_channel.cpython-36.pyc
new file mode 100644
index 000000000..027eae377
Binary files /dev/null and b/venv/Lib/site-packages/grpc/__pycache__/_channel.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/__pycache__/_common.cpython-36.pyc b/venv/Lib/site-packages/grpc/__pycache__/_common.cpython-36.pyc
new file mode 100644
index 000000000..916ab0f56
Binary files /dev/null and b/venv/Lib/site-packages/grpc/__pycache__/_common.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/__pycache__/_compression.cpython-36.pyc b/venv/Lib/site-packages/grpc/__pycache__/_compression.cpython-36.pyc
new file mode 100644
index 000000000..bbbc029c7
Binary files /dev/null and b/venv/Lib/site-packages/grpc/__pycache__/_compression.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/__pycache__/_grpcio_metadata.cpython-36.pyc b/venv/Lib/site-packages/grpc/__pycache__/_grpcio_metadata.cpython-36.pyc
new file mode 100644
index 000000000..86abb0851
Binary files /dev/null and b/venv/Lib/site-packages/grpc/__pycache__/_grpcio_metadata.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/__pycache__/_interceptor.cpython-36.pyc b/venv/Lib/site-packages/grpc/__pycache__/_interceptor.cpython-36.pyc
new file mode 100644
index 000000000..7568692dc
Binary files /dev/null and b/venv/Lib/site-packages/grpc/__pycache__/_interceptor.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/__pycache__/_plugin_wrapping.cpython-36.pyc b/venv/Lib/site-packages/grpc/__pycache__/_plugin_wrapping.cpython-36.pyc
new file mode 100644
index 000000000..63cba51be
Binary files /dev/null and b/venv/Lib/site-packages/grpc/__pycache__/_plugin_wrapping.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/__pycache__/_runtime_protos.cpython-36.pyc b/venv/Lib/site-packages/grpc/__pycache__/_runtime_protos.cpython-36.pyc
new file mode 100644
index 000000000..71e728ff2
Binary files /dev/null and b/venv/Lib/site-packages/grpc/__pycache__/_runtime_protos.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/__pycache__/_server.cpython-36.pyc b/venv/Lib/site-packages/grpc/__pycache__/_server.cpython-36.pyc
new file mode 100644
index 000000000..b49132c6c
Binary files /dev/null and b/venv/Lib/site-packages/grpc/__pycache__/_server.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/__pycache__/_simple_stubs.cpython-36.pyc b/venv/Lib/site-packages/grpc/__pycache__/_simple_stubs.cpython-36.pyc
new file mode 100644
index 000000000..a43a852f9
Binary files /dev/null and b/venv/Lib/site-packages/grpc/__pycache__/_simple_stubs.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/__pycache__/_utilities.cpython-36.pyc b/venv/Lib/site-packages/grpc/__pycache__/_utilities.cpython-36.pyc
new file mode 100644
index 000000000..8ef887807
Binary files /dev/null and b/venv/Lib/site-packages/grpc/__pycache__/_utilities.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/_auth.py b/venv/Lib/site-packages/grpc/_auth.py
new file mode 100644
index 000000000..2d38320af
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/_auth.py
@@ -0,0 +1,58 @@
+# Copyright 2016 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""GRPCAuthMetadataPlugins for standard authentication."""
+
+import inspect
+
+import grpc
+
+
+def _sign_request(callback, token, error):
+ metadata = (('authorization', 'Bearer {}'.format(token)),)
+ callback(metadata, error)
+
+
+class GoogleCallCredentials(grpc.AuthMetadataPlugin):
+ """Metadata wrapper for GoogleCredentials from the oauth2client library."""
+
+ def __init__(self, credentials):
+ self._credentials = credentials
+ # Hack to determine if these are JWT creds and we need to pass
+ # additional_claims when getting a token
+ self._is_jwt = 'additional_claims' in inspect.getargspec( # pylint: disable=deprecated-method
+ credentials.get_access_token).args
+
+ def __call__(self, context, callback):
+ try:
+ if self._is_jwt:
+ access_token = self._credentials.get_access_token(
+ additional_claims={
+ 'aud': context.service_url
+ }).access_token
+ else:
+ access_token = self._credentials.get_access_token().access_token
+ except Exception as exception: # pylint: disable=broad-except
+ _sign_request(callback, None, exception)
+ else:
+ _sign_request(callback, access_token, None)
+
+
+class AccessTokenAuthMetadataPlugin(grpc.AuthMetadataPlugin):
+ """Metadata wrapper for raw access token credentials."""
+
+ def __init__(self, access_token):
+ self._access_token = access_token
+
+ def __call__(self, context, callback):
+ _sign_request(callback, self._access_token, None)
diff --git a/venv/Lib/site-packages/grpc/_channel.py b/venv/Lib/site-packages/grpc/_channel.py
new file mode 100644
index 000000000..37071909c
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/_channel.py
@@ -0,0 +1,1461 @@
+# Copyright 2016 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Invocation-side implementation of gRPC Python."""
+
+import copy
+import functools
+import logging
+import os
+import sys
+import threading
+import time
+
+import grpc
+import grpc.experimental
+from grpc import _compression
+from grpc import _common
+from grpc import _grpcio_metadata
+from grpc._cython import cygrpc
+
+_LOGGER = logging.getLogger(__name__)
+
+_USER_AGENT = 'grpc-python/{}'.format(_grpcio_metadata.__version__)
+
+_EMPTY_FLAGS = 0
+
+# NOTE(rbellevi): No guarantees are given about the maintenance of this
+# environment variable.
+_DEFAULT_SINGLE_THREADED_UNARY_STREAM = os.getenv(
+ "GRPC_SINGLE_THREADED_UNARY_STREAM") is not None
+
+_UNARY_UNARY_INITIAL_DUE = (
+ cygrpc.OperationType.send_initial_metadata,
+ cygrpc.OperationType.send_message,
+ cygrpc.OperationType.send_close_from_client,
+ cygrpc.OperationType.receive_initial_metadata,
+ cygrpc.OperationType.receive_message,
+ cygrpc.OperationType.receive_status_on_client,
+)
+_UNARY_STREAM_INITIAL_DUE = (
+ cygrpc.OperationType.send_initial_metadata,
+ cygrpc.OperationType.send_message,
+ cygrpc.OperationType.send_close_from_client,
+ cygrpc.OperationType.receive_initial_metadata,
+ cygrpc.OperationType.receive_status_on_client,
+)
+_STREAM_UNARY_INITIAL_DUE = (
+ cygrpc.OperationType.send_initial_metadata,
+ cygrpc.OperationType.receive_initial_metadata,
+ cygrpc.OperationType.receive_message,
+ cygrpc.OperationType.receive_status_on_client,
+)
+_STREAM_STREAM_INITIAL_DUE = (
+ cygrpc.OperationType.send_initial_metadata,
+ cygrpc.OperationType.receive_initial_metadata,
+ cygrpc.OperationType.receive_status_on_client,
+)
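+
+# Note: each *_INITIAL_DUE tuple above enumerates the completion-queue events
+# a freshly started RPC of that arity is expected to produce; _RPCState tracks
+# them in its `due` set, and the RPC is locally complete once the set drains.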
+
+_CHANNEL_SUBSCRIPTION_CALLBACK_ERROR_LOG_MESSAGE = (
+ 'Exception calling channel subscription callback!')
+
+_OK_RENDEZVOUS_REPR_FORMAT = ('<{} of RPC that terminated with:\n'
+ '\tstatus = {}\n'
+ '\tdetails = "{}"\n'
+ '>')
+
+_NON_OK_RENDEZVOUS_REPR_FORMAT = ('<{} of RPC that terminated with:\n'
+ '\tstatus = {}\n'
+ '\tdetails = "{}"\n'
+ '\tdebug_error_string = "{}"\n'
+ '>')
+
+
+def _deadline(timeout):
+ return None if timeout is None else time.time() + timeout
+
+
+def _unknown_code_details(unknown_cygrpc_code, details):
+ return 'Server sent unknown code {} and details "{}"'.format(
+ unknown_cygrpc_code, details)
+
+
+class _RPCState(object):
+
+ def __init__(self, due, initial_metadata, trailing_metadata, code, details):
+ self.condition = threading.Condition()
+ # The cygrpc.OperationType objects representing events due from the RPC's
+ # completion queue.
+ self.due = set(due)
+ self.initial_metadata = initial_metadata
+ self.response = None
+ self.trailing_metadata = trailing_metadata
+ self.code = code
+ self.details = details
+ self.debug_error_string = None
+ # The semantics of grpc.Future.cancel and grpc.Future.cancelled are
+ # slightly wonky, so they have to be tracked separately from the rest of the
+ # result of the RPC. This field tracks whether cancellation was requested
+ # prior to termination of the RPC.
+ self.cancelled = False
+ self.callbacks = []
+ self.fork_epoch = cygrpc.get_fork_epoch()
+
+ def reset_postfork_child(self):
+ self.condition = threading.Condition()
+
+
+def _abort(state, code, details):
+ if state.code is None:
+ state.code = code
+ state.details = details
+ if state.initial_metadata is None:
+ state.initial_metadata = ()
+ state.trailing_metadata = ()
+
+
+def _handle_event(event, state, response_deserializer):
+ callbacks = []
+ for batch_operation in event.batch_operations:
+ operation_type = batch_operation.type()
+ state.due.remove(operation_type)
+ if operation_type == cygrpc.OperationType.receive_initial_metadata:
+ state.initial_metadata = batch_operation.initial_metadata()
+ elif operation_type == cygrpc.OperationType.receive_message:
+ serialized_response = batch_operation.message()
+ if serialized_response is not None:
+ response = _common.deserialize(serialized_response,
+ response_deserializer)
+ if response is None:
+ details = 'Exception deserializing response!'
+ _abort(state, grpc.StatusCode.INTERNAL, details)
+ else:
+ state.response = response
+ elif operation_type == cygrpc.OperationType.receive_status_on_client:
+ state.trailing_metadata = batch_operation.trailing_metadata()
+ if state.code is None:
+ code = _common.CYGRPC_STATUS_CODE_TO_STATUS_CODE.get(
+ batch_operation.code())
+ if code is None:
+ state.code = grpc.StatusCode.UNKNOWN
+ state.details = _unknown_code_details(
+ batch_operation.code(), batch_operation.details())
+ else:
+ state.code = code
+ state.details = batch_operation.details()
+ state.debug_error_string = batch_operation.error_string()
+ callbacks.extend(state.callbacks)
+ state.callbacks = None
+ return callbacks
+
+
+def _event_handler(state, response_deserializer):
+
+ def handle_event(event):
+ with state.condition:
+ callbacks = _handle_event(event, state, response_deserializer)
+ state.condition.notify_all()
+ done = not state.due
+ for callback in callbacks:
+ try:
+ callback()
+ except Exception as e: # pylint: disable=broad-except
+ # NOTE(rbellevi): We suppress but log errors here so as not to
+ # kill the channel spin thread.
+ logging.error('Exception in callback %s: %s',
+ repr(callback.func), repr(e))
+ return done and state.fork_epoch >= cygrpc.get_fork_epoch()
+
+ return handle_event
+
+
+#pylint: disable=too-many-statements
+def _consume_request_iterator(request_iterator, state, call, request_serializer,
+ event_handler):
+ """Consume a request iterator supplied by the user."""
+
+ def consume_request_iterator(): # pylint: disable=too-many-branches
+ # Iterate over the request iterator until it is exhausted or an error
+ # condition is encountered.
+ while True:
+ return_from_user_request_generator_invoked = False
+ try:
+ # The thread may die in user-code. Do not block fork for this.
+ cygrpc.enter_user_request_generator()
+ request = next(request_iterator)
+ except StopIteration:
+ break
+ except Exception: # pylint: disable=broad-except
+ cygrpc.return_from_user_request_generator()
+ return_from_user_request_generator_invoked = True
+ code = grpc.StatusCode.UNKNOWN
+ details = 'Exception iterating requests!'
+ _LOGGER.exception(details)
+ call.cancel(_common.STATUS_CODE_TO_CYGRPC_STATUS_CODE[code],
+ details)
+ _abort(state, code, details)
+ return
+ finally:
+ if not return_from_user_request_generator_invoked:
+ cygrpc.return_from_user_request_generator()
+ serialized_request = _common.serialize(request, request_serializer)
+ with state.condition:
+ if state.code is None and not state.cancelled:
+ if serialized_request is None:
+ code = grpc.StatusCode.INTERNAL
+ details = 'Exception serializing request!'
+ call.cancel(
+ _common.STATUS_CODE_TO_CYGRPC_STATUS_CODE[code],
+ details)
+ _abort(state, code, details)
+ return
+ else:
+ operations = (cygrpc.SendMessageOperation(
+ serialized_request, _EMPTY_FLAGS),)
+ operating = call.operate(operations, event_handler)
+ if operating:
+ state.due.add(cygrpc.OperationType.send_message)
+ else:
+ return
+
+ def _done():
+ return (state.code is not None or
+ cygrpc.OperationType.send_message not in
+ state.due)
+
+ _common.wait(state.condition.wait,
+ _done,
+ spin_cb=functools.partial(
+ cygrpc.block_if_fork_in_progress,
+ state))
+ if state.code is not None:
+ return
+ else:
+ return
+ with state.condition:
+ if state.code is None:
+ operations = (
+ cygrpc.SendCloseFromClientOperation(_EMPTY_FLAGS),)
+ operating = call.operate(operations, event_handler)
+ if operating:
+ state.due.add(cygrpc.OperationType.send_close_from_client)
+
+ consumption_thread = cygrpc.ForkManagedThread(
+ target=consume_request_iterator)
+ consumption_thread.setDaemon(True)
+ consumption_thread.start()
+
+
+def _rpc_state_string(class_name, rpc_state):
+ """Calculates error string for RPC."""
+ with rpc_state.condition:
+ if rpc_state.code is None:
+ return '<{} object>'.format(class_name)
+ elif rpc_state.code is grpc.StatusCode.OK:
+ return _OK_RENDEZVOUS_REPR_FORMAT.format(class_name, rpc_state.code,
+ rpc_state.details)
+ else:
+ return _NON_OK_RENDEZVOUS_REPR_FORMAT.format(
+ class_name, rpc_state.code, rpc_state.details,
+ rpc_state.debug_error_string)
+
+
+class _InactiveRpcError(grpc.RpcError, grpc.Call, grpc.Future):
+ """An RPC error not tied to the execution of a particular RPC.
+
+ The RPC represented by the state object must not be in-progress or
+ cancelled.
+
+ Attributes:
+ _state: An instance of _RPCState.
+ """
+
+ def __init__(self, state):
+ with state.condition:
+ self._state = _RPCState((), copy.deepcopy(state.initial_metadata),
+ copy.deepcopy(state.trailing_metadata),
+ state.code, copy.deepcopy(state.details))
+ self._state.response = copy.copy(state.response)
+ self._state.debug_error_string = copy.copy(state.debug_error_string)
+
+ def initial_metadata(self):
+ return self._state.initial_metadata
+
+ def trailing_metadata(self):
+ return self._state.trailing_metadata
+
+ def code(self):
+ return self._state.code
+
+ def details(self):
+ return _common.decode(self._state.details)
+
+ def debug_error_string(self):
+ return _common.decode(self._state.debug_error_string)
+
+ def _repr(self):
+ return _rpc_state_string(self.__class__.__name__, self._state)
+
+ def __repr__(self):
+ return self._repr()
+
+ def __str__(self):
+ return self._repr()
+
+ def cancel(self):
+ """See grpc.Future.cancel."""
+ return False
+
+ def cancelled(self):
+ """See grpc.Future.cancelled."""
+ return False
+
+ def running(self):
+ """See grpc.Future.running."""
+ return False
+
+ def done(self):
+ """See grpc.Future.done."""
+ return True
+
+ def result(self, timeout=None): # pylint: disable=unused-argument
+ """See grpc.Future.result."""
+ raise self
+
+ def exception(self, timeout=None): # pylint: disable=unused-argument
+ """See grpc.Future.exception."""
+ return self
+
+ def traceback(self, timeout=None): # pylint: disable=unused-argument
+ """See grpc.Future.traceback."""
+ try:
+ raise self
+ except grpc.RpcError:
+ return sys.exc_info()[2]
+
+ def add_done_callback(self, fn, timeout=None): # pylint: disable=unused-argument
+ """See grpc.Future.add_done_callback."""
+ fn(self)
+
+
+class _Rendezvous(grpc.RpcError, grpc.RpcContext):
+ """An RPC iterator.
+
+ Attributes:
+ _state: An instance of _RPCState.
+ _call: An instance of SegregatedCall or IntegratedCall.
+ In either case, the _call object is expected to have operate, cancel,
+ and next_event methods.
+ _response_deserializer: A callable taking bytes and returning a Python
+ object.
+ _deadline: A float representing the deadline of the RPC in seconds. Or
+ possibly None, to represent an RPC with no deadline at all.
+ """
+
+ def __init__(self, state, call, response_deserializer, deadline):
+ super(_Rendezvous, self).__init__()
+ self._state = state
+ self._call = call
+ self._response_deserializer = response_deserializer
+ self._deadline = deadline
+
+ def is_active(self):
+ """See grpc.RpcContext.is_active"""
+ with self._state.condition:
+ return self._state.code is None
+
+ def time_remaining(self):
+ """See grpc.RpcContext.time_remaining"""
+ with self._state.condition:
+ if self._deadline is None:
+ return None
+ else:
+ return max(self._deadline - time.time(), 0)
+
+ def cancel(self):
+ """See grpc.RpcContext.cancel"""
+ with self._state.condition:
+ if self._state.code is None:
+ code = grpc.StatusCode.CANCELLED
+ details = 'Locally cancelled by application!'
+ self._call.cancel(
+ _common.STATUS_CODE_TO_CYGRPC_STATUS_CODE[code], details)
+ self._state.cancelled = True
+ _abort(self._state, code, details)
+ self._state.condition.notify_all()
+ return True
+ else:
+ return False
+
+ def add_callback(self, callback):
+ """See grpc.RpcContext.add_callback"""
+ with self._state.condition:
+ if self._state.callbacks is None:
+ return False
+ else:
+ self._state.callbacks.append(callback)
+ return True
+
+ def __iter__(self):
+ return self
+
+ def next(self):
+ return self._next()
+
+ def __next__(self):
+ return self._next()
+
+ def _next(self):
+ raise NotImplementedError()
+
+ def debug_error_string(self):
+ raise NotImplementedError()
+
+ def _repr(self):
+ return _rpc_state_string(self.__class__.__name__, self._state)
+
+ def __repr__(self):
+ return self._repr()
+
+ def __str__(self):
+ return self._repr()
+
+ def __del__(self):
+ with self._state.condition:
+ if self._state.code is None:
+ self._state.code = grpc.StatusCode.CANCELLED
+ self._state.details = 'Cancelled upon garbage collection!'
+ self._state.cancelled = True
+ self._call.cancel(
+ _common.STATUS_CODE_TO_CYGRPC_STATUS_CODE[self._state.code],
+ self._state.details)
+ self._state.condition.notify_all()
+
+
+class _SingleThreadedRendezvous(_Rendezvous, grpc.Call): # pylint: disable=too-many-ancestors
+ """An RPC iterator operating entirely on a single thread.
+
+ The __next__ method of _SingleThreadedRendezvous does not depend on the
+ existence of any other thread, including the "channel spin thread".
+ However, this means that its interface is entirely synchronous. So this
+ class cannot fulfill the grpc.Future interface.
+ """
+
+ def initial_metadata(self):
+ """See grpc.Call.initial_metadata"""
+ with self._state.condition:
+ # NOTE(gnossen): Based on our initial call batch, we are guaranteed
+ # to receive initial metadata before any messages.
+ while self._state.initial_metadata is None:
+ self._consume_next_event()
+ return self._state.initial_metadata
+
+ def trailing_metadata(self):
+ """See grpc.Call.trailing_metadata"""
+ with self._state.condition:
+ if self._state.trailing_metadata is None:
+ raise grpc.experimental.UsageError(
+ "Cannot get trailing metadata until RPC is completed.")
+ return self._state.trailing_metadata
+
+ def code(self):
+ """See grpc.Call.code"""
+ with self._state.condition:
+ if self._state.code is None:
+ raise grpc.experimental.UsageError(
+ "Cannot get code until RPC is completed.")
+ return self._state.code
+
+ def details(self):
+ """See grpc.Call.details"""
+ with self._state.condition:
+ if self._state.details is None:
+ raise grpc.experimental.UsageError(
+ "Cannot get details until RPC is completed.")
+ return _common.decode(self._state.details)
+
+ def _consume_next_event(self):
+ event = self._call.next_event()
+ with self._state.condition:
+ callbacks = _handle_event(event, self._state,
+ self._response_deserializer)
+ for callback in callbacks:
+ # NOTE(gnossen): We intentionally allow exceptions to bubble up
+ # to the user when running on a single thread.
+ callback()
+ return event
+
+ def _next_response(self):
+ while True:
+ self._consume_next_event()
+ with self._state.condition:
+ if self._state.response is not None:
+ response = self._state.response
+ self._state.response = None
+ return response
+ elif cygrpc.OperationType.receive_message not in self._state.due:
+ if self._state.code is grpc.StatusCode.OK:
+ raise StopIteration()
+ elif self._state.code is not None:
+ raise self
+
+ def _next(self):
+ with self._state.condition:
+ if self._state.code is None:
+ operating = self._call.operate(
+ (cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),), None)
+ if operating:
+ self._state.due.add(cygrpc.OperationType.receive_message)
+ elif self._state.code is grpc.StatusCode.OK:
+ raise StopIteration()
+ else:
+ raise self
+ return self._next_response()
+
+ def debug_error_string(self):
+ with self._state.condition:
+ if self._state.debug_error_string is None:
+ raise grpc.experimental.UsageError(
+ "Cannot get debug error string until RPC is completed.")
+ return _common.decode(self._state.debug_error_string)
+
+
+class _MultiThreadedRendezvous(_Rendezvous, grpc.Call, grpc.Future): # pylint: disable=too-many-ancestors
+ """An RPC iterator that depends on a channel spin thread.
+
+ This iterator relies upon a per-channel thread running in the background,
+ dequeueing events from the completion queue, and notifying threads waiting
+ on the threading.Condition object in the _RPCState object.
+
+ This extra thread allows _MultiThreadedRendezvous to fulfill the
+ grpc.Future interface and to mediate a bidirectional streaming RPC.
+ """
+
+ def initial_metadata(self):
+ """See grpc.Call.initial_metadata"""
+ with self._state.condition:
+
+ def _done():
+ return self._state.initial_metadata is not None
+
+ _common.wait(self._state.condition.wait, _done)
+ return self._state.initial_metadata
+
+ def trailing_metadata(self):
+ """See grpc.Call.trailing_metadata"""
+ with self._state.condition:
+
+ def _done():
+ return self._state.trailing_metadata is not None
+
+ _common.wait(self._state.condition.wait, _done)
+ return self._state.trailing_metadata
+
+ def code(self):
+ """See grpc.Call.code"""
+ with self._state.condition:
+
+ def _done():
+ return self._state.code is not None
+
+ _common.wait(self._state.condition.wait, _done)
+ return self._state.code
+
+ def details(self):
+ """See grpc.Call.details"""
+ with self._state.condition:
+
+ def _done():
+ return self._state.details is not None
+
+ _common.wait(self._state.condition.wait, _done)
+ return _common.decode(self._state.details)
+
+ def debug_error_string(self):
+ with self._state.condition:
+
+ def _done():
+ return self._state.debug_error_string is not None
+
+ _common.wait(self._state.condition.wait, _done)
+ return _common.decode(self._state.debug_error_string)
+
+ def cancelled(self):
+ with self._state.condition:
+ return self._state.cancelled
+
+ def running(self):
+ with self._state.condition:
+ return self._state.code is None
+
+ def done(self):
+ with self._state.condition:
+ return self._state.code is not None
+
+ def _is_complete(self):
+ return self._state.code is not None
+
+ def result(self, timeout=None):
+ """Returns the result of the computation or raises its exception.
+
+ See grpc.Future.result for the full API contract.
+ """
+ with self._state.condition:
+ timed_out = _common.wait(self._state.condition.wait,
+ self._is_complete,
+ timeout=timeout)
+ if timed_out:
+ raise grpc.FutureTimeoutError()
+ else:
+ if self._state.code is grpc.StatusCode.OK:
+ return self._state.response
+ elif self._state.cancelled:
+ raise grpc.FutureCancelledError()
+ else:
+ raise self
+
+ def exception(self, timeout=None):
+ """Return the exception raised by the computation.
+
+ See grpc.Future.exception for the full API contract.
+ """
+ with self._state.condition:
+ timed_out = _common.wait(self._state.condition.wait,
+ self._is_complete,
+ timeout=timeout)
+ if timed_out:
+ raise grpc.FutureTimeoutError()
+ else:
+ if self._state.code is grpc.StatusCode.OK:
+ return None
+ elif self._state.cancelled:
+ raise grpc.FutureCancelledError()
+ else:
+ return self
+
+ def traceback(self, timeout=None):
+ """Access the traceback of the exception raised by the computation.
+
+        See grpc.Future.traceback for the full API contract.
+ """
+ with self._state.condition:
+ timed_out = _common.wait(self._state.condition.wait,
+ self._is_complete,
+ timeout=timeout)
+ if timed_out:
+ raise grpc.FutureTimeoutError()
+ else:
+ if self._state.code is grpc.StatusCode.OK:
+ return None
+ elif self._state.cancelled:
+ raise grpc.FutureCancelledError()
+ else:
+ try:
+ raise self
+ except grpc.RpcError:
+ return sys.exc_info()[2]
+
+ def add_done_callback(self, fn):
+ with self._state.condition:
+ if self._state.code is None:
+ self._state.callbacks.append(functools.partial(fn, self))
+ return
+
+ fn(self)
+
+ def _next(self):
+ with self._state.condition:
+ if self._state.code is None:
+ event_handler = _event_handler(self._state,
+ self._response_deserializer)
+ operating = self._call.operate(
+ (cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),),
+ event_handler)
+ if operating:
+ self._state.due.add(cygrpc.OperationType.receive_message)
+ elif self._state.code is grpc.StatusCode.OK:
+ raise StopIteration()
+ else:
+ raise self
+
+ def _response_ready():
+ return (
+ self._state.response is not None or
+ (cygrpc.OperationType.receive_message not in self._state.due
+ and self._state.code is not None))
+
+ _common.wait(self._state.condition.wait, _response_ready)
+ if self._state.response is not None:
+ response = self._state.response
+ self._state.response = None
+ return response
+ elif cygrpc.OperationType.receive_message not in self._state.due:
+ if self._state.code is grpc.StatusCode.OK:
+ raise StopIteration()
+ elif self._state.code is not None:
+ raise self
+
+
+def _start_unary_request(request, timeout, request_serializer):
+ deadline = _deadline(timeout)
+ serialized_request = _common.serialize(request, request_serializer)
+ if serialized_request is None:
+ state = _RPCState((), (), (), grpc.StatusCode.INTERNAL,
+ 'Exception serializing request!')
+ error = _InactiveRpcError(state)
+ return deadline, None, error
+ else:
+ return deadline, serialized_request, None
+
+
+def _end_unary_response_blocking(state, call, with_call, deadline):
+ if state.code is grpc.StatusCode.OK:
+ if with_call:
+ rendezvous = _MultiThreadedRendezvous(state, call, None, deadline)
+ return state.response, rendezvous
+ else:
+ return state.response
+ else:
+ raise _InactiveRpcError(state)
+
+
+def _stream_unary_invocation_operationses(metadata, initial_metadata_flags):
+ return (
+ (
+ cygrpc.SendInitialMetadataOperation(metadata,
+ initial_metadata_flags),
+ cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),
+ cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS),
+ ),
+ (cygrpc.ReceiveInitialMetadataOperation(_EMPTY_FLAGS),),
+ )
+
+
+def _stream_unary_invocation_operationses_and_tags(metadata,
+ initial_metadata_flags):
+ return tuple((
+ operations,
+ None,
+ ) for operations in _stream_unary_invocation_operationses(
+ metadata, initial_metadata_flags))
+
+
+def _determine_deadline(user_deadline):
+ parent_deadline = cygrpc.get_deadline_from_context()
+ if parent_deadline is None and user_deadline is None:
+ return None
+ elif parent_deadline is not None and user_deadline is None:
+ return parent_deadline
+ elif user_deadline is not None and parent_deadline is None:
+ return user_deadline
+ else:
+ return min(parent_deadline, user_deadline)
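+
+# For example, with a parent deadline of 5.0 inherited from a servicer context
+# and a user-supplied deadline of 10.0, _determine_deadline returns 5.0.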
+
+
+class _UnaryUnaryMultiCallable(grpc.UnaryUnaryMultiCallable):
+
+ # pylint: disable=too-many-arguments
+ def __init__(self, channel, managed_call, method, request_serializer,
+ response_deserializer):
+ self._channel = channel
+ self._managed_call = managed_call
+ self._method = method
+ self._request_serializer = request_serializer
+ self._response_deserializer = response_deserializer
+ self._context = cygrpc.build_census_context()
+
+ def _prepare(self, request, timeout, metadata, wait_for_ready, compression):
+ deadline, serialized_request, rendezvous = _start_unary_request(
+ request, timeout, self._request_serializer)
+ initial_metadata_flags = _InitialMetadataFlags().with_wait_for_ready(
+ wait_for_ready)
+ augmented_metadata = _compression.augment_metadata(
+ metadata, compression)
+ if serialized_request is None:
+ return None, None, None, rendezvous
+ else:
+ state = _RPCState(_UNARY_UNARY_INITIAL_DUE, None, None, None, None)
+ operations = (
+ cygrpc.SendInitialMetadataOperation(augmented_metadata,
+ initial_metadata_flags),
+ cygrpc.SendMessageOperation(serialized_request, _EMPTY_FLAGS),
+ cygrpc.SendCloseFromClientOperation(_EMPTY_FLAGS),
+ cygrpc.ReceiveInitialMetadataOperation(_EMPTY_FLAGS),
+ cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),
+ cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS),
+ )
+ return state, operations, deadline, None
+
+ def _blocking(self, request, timeout, metadata, credentials, wait_for_ready,
+ compression):
+ state, operations, deadline, rendezvous = self._prepare(
+ request, timeout, metadata, wait_for_ready, compression)
+ if state is None:
+ raise rendezvous # pylint: disable-msg=raising-bad-type
+ else:
+ call = self._channel.segregated_call(
+ cygrpc.PropagationConstants.GRPC_PROPAGATE_DEFAULTS,
+ self._method, None, _determine_deadline(deadline), metadata,
+ None if credentials is None else credentials._credentials, ((
+ operations,
+ None,
+ ),), self._context)
+ event = call.next_event()
+ _handle_event(event, state, self._response_deserializer)
+ return state, call
+
+ def __call__(self,
+ request,
+ timeout=None,
+ metadata=None,
+ credentials=None,
+ wait_for_ready=None,
+ compression=None):
+ state, call, = self._blocking(request, timeout, metadata, credentials,
+ wait_for_ready, compression)
+ return _end_unary_response_blocking(state, call, False, None)
+
+ def with_call(self,
+ request,
+ timeout=None,
+ metadata=None,
+ credentials=None,
+ wait_for_ready=None,
+ compression=None):
+ state, call, = self._blocking(request, timeout, metadata, credentials,
+ wait_for_ready, compression)
+ return _end_unary_response_blocking(state, call, True, None)
+
+ def future(self,
+ request,
+ timeout=None,
+ metadata=None,
+ credentials=None,
+ wait_for_ready=None,
+ compression=None):
+ state, operations, deadline, rendezvous = self._prepare(
+ request, timeout, metadata, wait_for_ready, compression)
+ if state is None:
+ raise rendezvous # pylint: disable-msg=raising-bad-type
+ else:
+ event_handler = _event_handler(state, self._response_deserializer)
+ call = self._managed_call(
+ cygrpc.PropagationConstants.GRPC_PROPAGATE_DEFAULTS,
+ self._method, None, deadline, metadata,
+ None if credentials is None else credentials._credentials,
+ (operations,), event_handler, self._context)
+ return _MultiThreadedRendezvous(state, call,
+ self._response_deserializer,
+ deadline)
+
+
+class _SingleThreadedUnaryStreamMultiCallable(grpc.UnaryStreamMultiCallable):
+
+ # pylint: disable=too-many-arguments
+ def __init__(self, channel, method, request_serializer,
+ response_deserializer):
+ self._channel = channel
+ self._method = method
+ self._request_serializer = request_serializer
+ self._response_deserializer = response_deserializer
+ self._context = cygrpc.build_census_context()
+
+ def __call__( # pylint: disable=too-many-locals
+ self,
+ request,
+ timeout=None,
+ metadata=None,
+ credentials=None,
+ wait_for_ready=None,
+ compression=None):
+ deadline = _deadline(timeout)
+ serialized_request = _common.serialize(request,
+ self._request_serializer)
+ if serialized_request is None:
+ state = _RPCState((), (), (), grpc.StatusCode.INTERNAL,
+ 'Exception serializing request!')
+ raise _InactiveRpcError(state)
+
+ state = _RPCState(_UNARY_STREAM_INITIAL_DUE, None, None, None, None)
+ call_credentials = None if credentials is None else credentials._credentials
+ initial_metadata_flags = _InitialMetadataFlags().with_wait_for_ready(
+ wait_for_ready)
+ augmented_metadata = _compression.augment_metadata(
+ metadata, compression)
+ operations = (
+ (cygrpc.SendInitialMetadataOperation(augmented_metadata,
+ initial_metadata_flags),
+ cygrpc.SendMessageOperation(serialized_request, _EMPTY_FLAGS),
+ cygrpc.SendCloseFromClientOperation(_EMPTY_FLAGS)),
+ (cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS),),
+ (cygrpc.ReceiveInitialMetadataOperation(_EMPTY_FLAGS),),
+ )
+ operations_and_tags = tuple((ops, None) for ops in operations)
+ call = self._channel.segregated_call(
+ cygrpc.PropagationConstants.GRPC_PROPAGATE_DEFAULTS, self._method,
+ None, _determine_deadline(deadline), metadata, call_credentials,
+ operations_and_tags, self._context)
+ return _SingleThreadedRendezvous(state, call,
+ self._response_deserializer, deadline)
+
+
+class _UnaryStreamMultiCallable(grpc.UnaryStreamMultiCallable):
+
+ # pylint: disable=too-many-arguments
+ def __init__(self, channel, managed_call, method, request_serializer,
+ response_deserializer):
+ self._channel = channel
+ self._managed_call = managed_call
+ self._method = method
+ self._request_serializer = request_serializer
+ self._response_deserializer = response_deserializer
+ self._context = cygrpc.build_census_context()
+
+ def __call__( # pylint: disable=too-many-locals
+ self,
+ request,
+ timeout=None,
+ metadata=None,
+ credentials=None,
+ wait_for_ready=None,
+ compression=None):
+ deadline, serialized_request, rendezvous = _start_unary_request(
+ request, timeout, self._request_serializer)
+ initial_metadata_flags = _InitialMetadataFlags().with_wait_for_ready(
+ wait_for_ready)
+ if serialized_request is None:
+ raise rendezvous # pylint: disable-msg=raising-bad-type
+ else:
+ augmented_metadata = _compression.augment_metadata(
+ metadata, compression)
+ state = _RPCState(_UNARY_STREAM_INITIAL_DUE, None, None, None, None)
+ operationses = (
+ (
+ cygrpc.SendInitialMetadataOperation(augmented_metadata,
+ initial_metadata_flags),
+ cygrpc.SendMessageOperation(serialized_request,
+ _EMPTY_FLAGS),
+ cygrpc.SendCloseFromClientOperation(_EMPTY_FLAGS),
+ cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS),
+ ),
+ (cygrpc.ReceiveInitialMetadataOperation(_EMPTY_FLAGS),),
+ )
+ call = self._managed_call(
+ cygrpc.PropagationConstants.GRPC_PROPAGATE_DEFAULTS,
+ self._method, None, _determine_deadline(deadline), metadata,
+ None if credentials is None else credentials._credentials,
+ operationses, _event_handler(state,
+ self._response_deserializer),
+ self._context)
+ return _MultiThreadedRendezvous(state, call,
+ self._response_deserializer,
+ deadline)
+
+
+class _StreamUnaryMultiCallable(grpc.StreamUnaryMultiCallable):
+
+ # pylint: disable=too-many-arguments
+ def __init__(self, channel, managed_call, method, request_serializer,
+ response_deserializer):
+ self._channel = channel
+ self._managed_call = managed_call
+ self._method = method
+ self._request_serializer = request_serializer
+ self._response_deserializer = response_deserializer
+ self._context = cygrpc.build_census_context()
+
+ def _blocking(self, request_iterator, timeout, metadata, credentials,
+ wait_for_ready, compression):
+ deadline = _deadline(timeout)
+ state = _RPCState(_STREAM_UNARY_INITIAL_DUE, None, None, None, None)
+ initial_metadata_flags = _InitialMetadataFlags().with_wait_for_ready(
+ wait_for_ready)
+ augmented_metadata = _compression.augment_metadata(
+ metadata, compression)
+ call = self._channel.segregated_call(
+ cygrpc.PropagationConstants.GRPC_PROPAGATE_DEFAULTS, self._method,
+ None, _determine_deadline(deadline), augmented_metadata,
+ None if credentials is None else credentials._credentials,
+ _stream_unary_invocation_operationses_and_tags(
+ augmented_metadata, initial_metadata_flags), self._context)
+ _consume_request_iterator(request_iterator, state, call,
+ self._request_serializer, None)
+ while True:
+ event = call.next_event()
+ with state.condition:
+ _handle_event(event, state, self._response_deserializer)
+ state.condition.notify_all()
+ if not state.due:
+ break
+ return state, call
+
+ def __call__(self,
+ request_iterator,
+ timeout=None,
+ metadata=None,
+ credentials=None,
+ wait_for_ready=None,
+ compression=None):
+ state, call, = self._blocking(request_iterator, timeout, metadata,
+ credentials, wait_for_ready, compression)
+ return _end_unary_response_blocking(state, call, False, None)
+
+ def with_call(self,
+ request_iterator,
+ timeout=None,
+ metadata=None,
+ credentials=None,
+ wait_for_ready=None,
+ compression=None):
+ state, call, = self._blocking(request_iterator, timeout, metadata,
+ credentials, wait_for_ready, compression)
+ return _end_unary_response_blocking(state, call, True, None)
+
+ def future(self,
+ request_iterator,
+ timeout=None,
+ metadata=None,
+ credentials=None,
+ wait_for_ready=None,
+ compression=None):
+ deadline = _deadline(timeout)
+ state = _RPCState(_STREAM_UNARY_INITIAL_DUE, None, None, None, None)
+ event_handler = _event_handler(state, self._response_deserializer)
+ initial_metadata_flags = _InitialMetadataFlags().with_wait_for_ready(
+ wait_for_ready)
+ augmented_metadata = _compression.augment_metadata(
+ metadata, compression)
+ call = self._managed_call(
+ cygrpc.PropagationConstants.GRPC_PROPAGATE_DEFAULTS, self._method,
+ None, deadline, augmented_metadata,
+ None if credentials is None else credentials._credentials,
+ _stream_unary_invocation_operationses(metadata,
+ initial_metadata_flags),
+ event_handler, self._context)
+ _consume_request_iterator(request_iterator, state, call,
+ self._request_serializer, event_handler)
+ return _MultiThreadedRendezvous(state, call,
+ self._response_deserializer, deadline)
+
+
+class _StreamStreamMultiCallable(grpc.StreamStreamMultiCallable):
+
+ # pylint: disable=too-many-arguments
+ def __init__(self, channel, managed_call, method, request_serializer,
+ response_deserializer):
+ self._channel = channel
+ self._managed_call = managed_call
+ self._method = method
+ self._request_serializer = request_serializer
+ self._response_deserializer = response_deserializer
+ self._context = cygrpc.build_census_context()
+
+ def __call__(self,
+ request_iterator,
+ timeout=None,
+ metadata=None,
+ credentials=None,
+ wait_for_ready=None,
+ compression=None):
+ deadline = _deadline(timeout)
+ state = _RPCState(_STREAM_STREAM_INITIAL_DUE, None, None, None, None)
+ initial_metadata_flags = _InitialMetadataFlags().with_wait_for_ready(
+ wait_for_ready)
+ augmented_metadata = _compression.augment_metadata(
+ metadata, compression)
+ operationses = (
+ (
+ cygrpc.SendInitialMetadataOperation(augmented_metadata,
+ initial_metadata_flags),
+ cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS),
+ ),
+ (cygrpc.ReceiveInitialMetadataOperation(_EMPTY_FLAGS),),
+ )
+ event_handler = _event_handler(state, self._response_deserializer)
+ call = self._managed_call(
+ cygrpc.PropagationConstants.GRPC_PROPAGATE_DEFAULTS, self._method,
+ None, _determine_deadline(deadline), augmented_metadata,
+ None if credentials is None else credentials._credentials,
+ operationses, event_handler, self._context)
+ _consume_request_iterator(request_iterator, state, call,
+ self._request_serializer, event_handler)
+ return _MultiThreadedRendezvous(state, call,
+ self._response_deserializer, deadline)
+
+
+class _InitialMetadataFlags(int):
+ """Stores immutable initial metadata flags"""
+
+ def __new__(cls, value=_EMPTY_FLAGS):
+ value &= cygrpc.InitialMetadataFlags.used_mask
+ return super(_InitialMetadataFlags, cls).__new__(cls, value)
+
+ def with_wait_for_ready(self, wait_for_ready):
+ if wait_for_ready is not None:
+ if wait_for_ready:
+ return self.__class__(self | cygrpc.InitialMetadataFlags.wait_for_ready | \
+ cygrpc.InitialMetadataFlags.wait_for_ready_explicitly_set)
+ elif not wait_for_ready:
+ return self.__class__(self & ~cygrpc.InitialMetadataFlags.wait_for_ready | \
+ cygrpc.InitialMetadataFlags.wait_for_ready_explicitly_set)
+ return self
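+
+    # For illustration (a sketch, not an exhaustive contract):
+    #   _InitialMetadataFlags().with_wait_for_ready(True) sets both the
+    #   wait_for_ready and wait_for_ready_explicitly_set bits, while
+    #   with_wait_for_ready(None) returns the flags unchanged.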
+
+
+class _ChannelCallState(object):
+
+ def __init__(self, channel):
+ self.lock = threading.Lock()
+ self.channel = channel
+ self.managed_calls = 0
+ self.threading = False
+
+ def reset_postfork_child(self):
+ self.managed_calls = 0
+
+ def __del__(self):
+ try:
+ self.channel.close(cygrpc.StatusCode.cancelled,
+ 'Channel deallocated!')
+ except (TypeError, AttributeError):
+ pass
+
+
+def _run_channel_spin_thread(state):
+
+ def channel_spin():
+ while True:
+ cygrpc.block_if_fork_in_progress(state)
+ event = state.channel.next_call_event()
+ if event.completion_type == cygrpc.CompletionType.queue_timeout:
+ continue
+ call_completed = event.tag(event)
+ if call_completed:
+ with state.lock:
+ state.managed_calls -= 1
+ if state.managed_calls == 0:
+ return
+
+ channel_spin_thread = cygrpc.ForkManagedThread(target=channel_spin)
+ channel_spin_thread.setDaemon(True)
+ channel_spin_thread.start()
+
+
+def _channel_managed_call_management(state):
+
+ # pylint: disable=too-many-arguments
+ def create(flags, method, host, deadline, metadata, credentials,
+ operationses, event_handler, context):
+ """Creates a cygrpc.IntegratedCall.
+
+ Args:
+ flags: An integer bitfield of call flags.
+ method: The RPC method.
+ host: A host string for the created call.
+ deadline: A float to be the deadline of the created call or None if
+ the call is to have an infinite deadline.
+ metadata: The metadata for the call or None.
+ credentials: A cygrpc.CallCredentials or None.
+ operationses: An iterable of iterables of cygrpc.Operations to be
+ started on the call.
+      event_handler: A behavior to call to handle the events resulting from
+        the operations on the call.
+ context: Context object for distributed tracing.
+ Returns:
+ A cygrpc.IntegratedCall with which to conduct an RPC.
+ """
+ operationses_and_tags = tuple((
+ operations,
+ event_handler,
+ ) for operations in operationses)
+ with state.lock:
+ call = state.channel.integrated_call(flags, method, host, deadline,
+ metadata, credentials,
+ operationses_and_tags, context)
+ if state.managed_calls == 0:
+ state.managed_calls = 1
+ _run_channel_spin_thread(state)
+ else:
+ state.managed_calls += 1
+ return call
+
+ return create
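+
+# Usage within this module (a sketch): the factory returned above is what the
+# multi-callables receive as `managed_call`, e.g.
+#
+#   managed_call = _channel_managed_call_management(call_state)
+#   call = managed_call(flags, method, None, deadline, metadata, credentials,
+#                       operationses, event_handler, context)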
+
+
+class _ChannelConnectivityState(object):
+
+ def __init__(self, channel):
+ self.lock = threading.RLock()
+ self.channel = channel
+ self.polling = False
+ self.connectivity = None
+ self.try_to_connect = False
+ self.callbacks_and_connectivities = []
+ self.delivering = False
+
+ def reset_postfork_child(self):
+ self.polling = False
+ self.connectivity = None
+ self.try_to_connect = False
+ self.callbacks_and_connectivities = []
+ self.delivering = False
+
+
+def _deliveries(state):
+ callbacks_needing_update = []
+ for callback_and_connectivity in state.callbacks_and_connectivities:
+ callback, callback_connectivity, = callback_and_connectivity
+ if callback_connectivity is not state.connectivity:
+ callbacks_needing_update.append(callback)
+ callback_and_connectivity[1] = state.connectivity
+ return callbacks_needing_update
+
+
+def _deliver(state, initial_connectivity, initial_callbacks):
+ connectivity = initial_connectivity
+ callbacks = initial_callbacks
+ while True:
+ for callback in callbacks:
+ cygrpc.block_if_fork_in_progress(state)
+ try:
+ callback(connectivity)
+ except Exception: # pylint: disable=broad-except
+ _LOGGER.exception(
+ _CHANNEL_SUBSCRIPTION_CALLBACK_ERROR_LOG_MESSAGE)
+ with state.lock:
+ callbacks = _deliveries(state)
+ if callbacks:
+ connectivity = state.connectivity
+ else:
+ state.delivering = False
+ return
+
+
+def _spawn_delivery(state, callbacks):
+ delivering_thread = cygrpc.ForkManagedThread(target=_deliver,
+ args=(
+ state,
+ state.connectivity,
+ callbacks,
+ ))
+ delivering_thread.setDaemon(True)
+ delivering_thread.start()
+ state.delivering = True
+
+
+# NOTE(https://github.com/grpc/grpc/issues/3064): We'd rather not poll.
+def _poll_connectivity(state, channel, initial_try_to_connect):
+ try_to_connect = initial_try_to_connect
+ connectivity = channel.check_connectivity_state(try_to_connect)
+ with state.lock:
+ state.connectivity = (
+ _common.
+ CYGRPC_CONNECTIVITY_STATE_TO_CHANNEL_CONNECTIVITY[connectivity])
+ callbacks = tuple(
+ callback for callback, unused_but_known_to_be_none_connectivity in
+ state.callbacks_and_connectivities)
+ for callback_and_connectivity in state.callbacks_and_connectivities:
+ callback_and_connectivity[1] = state.connectivity
+ if callbacks:
+ _spawn_delivery(state, callbacks)
+ while True:
+ event = channel.watch_connectivity_state(connectivity,
+ time.time() + 0.2)
+ cygrpc.block_if_fork_in_progress(state)
+ with state.lock:
+ if not state.callbacks_and_connectivities and not state.try_to_connect:
+ state.polling = False
+ state.connectivity = None
+ break
+ try_to_connect = state.try_to_connect
+ state.try_to_connect = False
+ if event.success or try_to_connect:
+ connectivity = channel.check_connectivity_state(try_to_connect)
+ with state.lock:
+ state.connectivity = (
+ _common.CYGRPC_CONNECTIVITY_STATE_TO_CHANNEL_CONNECTIVITY[
+ connectivity])
+ if not state.delivering:
+ callbacks = _deliveries(state)
+ if callbacks:
+ _spawn_delivery(state, callbacks)
+
+
+def _subscribe(state, callback, try_to_connect):
+ with state.lock:
+ if not state.callbacks_and_connectivities and not state.polling:
+ polling_thread = cygrpc.ForkManagedThread(
+ target=_poll_connectivity,
+ args=(state, state.channel, bool(try_to_connect)))
+ polling_thread.setDaemon(True)
+ polling_thread.start()
+ state.polling = True
+ state.callbacks_and_connectivities.append([callback, None])
+ elif not state.delivering and state.connectivity is not None:
+ _spawn_delivery(state, (callback,))
+ state.try_to_connect |= bool(try_to_connect)
+ state.callbacks_and_connectivities.append(
+ [callback, state.connectivity])
+ else:
+ state.try_to_connect |= bool(try_to_connect)
+ state.callbacks_and_connectivities.append([callback, None])
+
+
+def _unsubscribe(state, callback):
+ with state.lock:
+ for index, (subscribed_callback, unused_connectivity) in enumerate(
+ state.callbacks_and_connectivities):
+ if callback == subscribed_callback:
+ state.callbacks_and_connectivities.pop(index)
+ break
+
+
+def _augment_options(base_options, compression):
+ compression_option = _compression.create_channel_option(compression)
+ return tuple(base_options) + compression_option + ((
+ cygrpc.ChannelArgKey.primary_user_agent_string,
+ _USER_AGENT,
+ ),)
+
+
+def _separate_channel_options(options):
+ """Separates core channel options from Python channel options."""
+ core_options = []
+ python_options = []
+ for pair in options:
+ if pair[0] == grpc.experimental.ChannelOptions.SingleThreadedUnaryStream:
+ python_options.append(pair)
+ else:
+ core_options.append(pair)
+ return python_options, core_options
+
+
+class Channel(grpc.Channel):
+ """A cygrpc.Channel-backed implementation of grpc.Channel."""
+
+ def __init__(self, target, options, credentials, compression):
+ """Constructor.
+
+ Args:
+ target: The target to which to connect.
+ options: Configuration options for the channel.
+ credentials: A cygrpc.ChannelCredentials or None.
+ compression: An optional value indicating the compression method to be
+ used over the lifetime of the channel.
+ """
+ python_options, core_options = _separate_channel_options(options)
+ self._single_threaded_unary_stream = _DEFAULT_SINGLE_THREADED_UNARY_STREAM
+ self._process_python_options(python_options)
+ self._channel = cygrpc.Channel(
+ _common.encode(target), _augment_options(core_options, compression),
+ credentials)
+ self._call_state = _ChannelCallState(self._channel)
+ self._connectivity_state = _ChannelConnectivityState(self._channel)
+ cygrpc.fork_register_channel(self)
+
+ def _process_python_options(self, python_options):
+ """Sets channel attributes according to python-only channel options."""
+ for pair in python_options:
+ if pair[0] == grpc.experimental.ChannelOptions.SingleThreadedUnaryStream:
+ self._single_threaded_unary_stream = True
+
+ def subscribe(self, callback, try_to_connect=None):
+ _subscribe(self._connectivity_state, callback, try_to_connect)
+
+ def unsubscribe(self, callback):
+ _unsubscribe(self._connectivity_state, callback)
+
+ def unary_unary(self,
+ method,
+ request_serializer=None,
+ response_deserializer=None):
+ return _UnaryUnaryMultiCallable(
+ self._channel, _channel_managed_call_management(self._call_state),
+ _common.encode(method), request_serializer, response_deserializer)
+
+ def unary_stream(self,
+ method,
+ request_serializer=None,
+ response_deserializer=None):
+ # NOTE(rbellevi): Benchmarks have shown that running a unary-stream RPC
+ # on a single Python thread results in an appreciable speed-up. However,
+ # due to slight differences in capability, the multi-threaded variant
+ # remains the default.
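+        # To opt in, pass the Python-level channel option at channel creation,
+        # e.g. (a sketch; only the option key is inspected, so the value 1 is
+        # an arbitrary placeholder):
+        #   grpc.insecure_channel(target, options=((
+        #       grpc.experimental.ChannelOptions.SingleThreadedUnaryStream,
+        #       1),))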
+ if self._single_threaded_unary_stream:
+ return _SingleThreadedUnaryStreamMultiCallable(
+ self._channel, _common.encode(method), request_serializer,
+ response_deserializer)
+ else:
+ return _UnaryStreamMultiCallable(
+ self._channel,
+ _channel_managed_call_management(self._call_state),
+ _common.encode(method), request_serializer,
+ response_deserializer)
+
+ def stream_unary(self,
+ method,
+ request_serializer=None,
+ response_deserializer=None):
+ return _StreamUnaryMultiCallable(
+ self._channel, _channel_managed_call_management(self._call_state),
+ _common.encode(method), request_serializer, response_deserializer)
+
+ def stream_stream(self,
+ method,
+ request_serializer=None,
+ response_deserializer=None):
+ return _StreamStreamMultiCallable(
+ self._channel, _channel_managed_call_management(self._call_state),
+ _common.encode(method), request_serializer, response_deserializer)
+
+ def _unsubscribe_all(self):
+ state = self._connectivity_state
+ if state:
+ with state.lock:
+ del state.callbacks_and_connectivities[:]
+
+ def _close(self):
+ self._unsubscribe_all()
+ self._channel.close(cygrpc.StatusCode.cancelled, 'Channel closed!')
+ cygrpc.fork_unregister_channel(self)
+
+ def _close_on_fork(self):
+ self._unsubscribe_all()
+ self._channel.close_on_fork(cygrpc.StatusCode.cancelled,
+ 'Channel closed due to fork')
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self._close()
+ return False
+
+ def close(self):
+ self._close()
+
+ def __del__(self):
+ # TODO(https://github.com/grpc/grpc/issues/12531): Several releases
+ # after 1.12 (1.16 or thereabouts?) add a "self._channel.close" call
+ # here (or more likely, call self._close() here). We don't do this today
+ # because many valid use cases today allow the channel to be deleted
+ # immediately after stubs are created. After a sufficient period of time
+        # has passed for all users to be trusted to hold on to their channels
+ # for as long as they are in use and to close them after using them,
+ # then deletion of this grpc._channel.Channel instance can be made to
+ # effect closure of the underlying cygrpc.Channel instance.
+ try:
+ self._unsubscribe_all()
+ except: # pylint: disable=bare-except
+ # Exceptions in __del__ are ignored by Python anyway, but they can
+ # keep spamming logs. Just silence them.
+ pass
diff --git a/venv/Lib/site-packages/grpc/_common.py b/venv/Lib/site-packages/grpc/_common.py
new file mode 100644
index 000000000..128124c32
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/_common.py
@@ -0,0 +1,168 @@
+# Copyright 2016 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Shared implementation."""
+
+import logging
+import time
+import six
+
+import grpc
+from grpc._cython import cygrpc
+
+_LOGGER = logging.getLogger(__name__)
+
+CYGRPC_CONNECTIVITY_STATE_TO_CHANNEL_CONNECTIVITY = {
+ cygrpc.ConnectivityState.idle:
+ grpc.ChannelConnectivity.IDLE,
+ cygrpc.ConnectivityState.connecting:
+ grpc.ChannelConnectivity.CONNECTING,
+ cygrpc.ConnectivityState.ready:
+ grpc.ChannelConnectivity.READY,
+ cygrpc.ConnectivityState.transient_failure:
+ grpc.ChannelConnectivity.TRANSIENT_FAILURE,
+ cygrpc.ConnectivityState.shutdown:
+ grpc.ChannelConnectivity.SHUTDOWN,
+}
+
+CYGRPC_STATUS_CODE_TO_STATUS_CODE = {
+ cygrpc.StatusCode.ok: grpc.StatusCode.OK,
+ cygrpc.StatusCode.cancelled: grpc.StatusCode.CANCELLED,
+ cygrpc.StatusCode.unknown: grpc.StatusCode.UNKNOWN,
+ cygrpc.StatusCode.invalid_argument: grpc.StatusCode.INVALID_ARGUMENT,
+ cygrpc.StatusCode.deadline_exceeded: grpc.StatusCode.DEADLINE_EXCEEDED,
+ cygrpc.StatusCode.not_found: grpc.StatusCode.NOT_FOUND,
+ cygrpc.StatusCode.already_exists: grpc.StatusCode.ALREADY_EXISTS,
+ cygrpc.StatusCode.permission_denied: grpc.StatusCode.PERMISSION_DENIED,
+ cygrpc.StatusCode.unauthenticated: grpc.StatusCode.UNAUTHENTICATED,
+ cygrpc.StatusCode.resource_exhausted: grpc.StatusCode.RESOURCE_EXHAUSTED,
+ cygrpc.StatusCode.failed_precondition: grpc.StatusCode.FAILED_PRECONDITION,
+ cygrpc.StatusCode.aborted: grpc.StatusCode.ABORTED,
+ cygrpc.StatusCode.out_of_range: grpc.StatusCode.OUT_OF_RANGE,
+ cygrpc.StatusCode.unimplemented: grpc.StatusCode.UNIMPLEMENTED,
+ cygrpc.StatusCode.internal: grpc.StatusCode.INTERNAL,
+ cygrpc.StatusCode.unavailable: grpc.StatusCode.UNAVAILABLE,
+ cygrpc.StatusCode.data_loss: grpc.StatusCode.DATA_LOSS,
+}
+STATUS_CODE_TO_CYGRPC_STATUS_CODE = {
+ grpc_code: cygrpc_code for cygrpc_code, grpc_code in six.iteritems(
+ CYGRPC_STATUS_CODE_TO_STATUS_CODE)
+}
+
+MAXIMUM_WAIT_TIMEOUT = 0.1
+
+_ERROR_MESSAGE_PORT_BINDING_FAILED = 'Failed to bind to address %s; set ' \
+ 'GRPC_VERBOSITY=debug environment variable to see detailed error message.'
+
+
+def encode(s):
+ if isinstance(s, bytes):
+ return s
+ else:
+ return s.encode('utf8')
+
+
+def decode(b):
+ if isinstance(b, bytes):
+ return b.decode('utf-8', 'replace')
+ return b
+
+
+def _transform(message, transformer, exception_message):
+ if transformer is None:
+ return message
+ else:
+ try:
+ return transformer(message)
+ except Exception: # pylint: disable=broad-except
+ _LOGGER.exception(exception_message)
+ return None
+
+
+def serialize(message, serializer):
+ return _transform(message, serializer, 'Exception serializing message!')
+
+
+def deserialize(serialized_message, deserializer):
+ return _transform(serialized_message, deserializer,
+ 'Exception deserializing message!')
+
+
+def fully_qualified_method(group, method):
+ return '/{}/{}'.format(group, method)
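+
+# For example, fully_qualified_method('helloworld.Greeter', 'SayHello')
+# returns '/helloworld.Greeter/SayHello'.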
+
+
+def _wait_once(wait_fn, timeout, spin_cb):
+ wait_fn(timeout=timeout)
+ if spin_cb is not None:
+ spin_cb()
+
+
+def wait(wait_fn, wait_complete_fn, timeout=None, spin_cb=None):
+ """Blocks waiting for an event without blocking the thread indefinitely.
+
+ See https://github.com/grpc/grpc/issues/19464 for full context. CPython's
+ `threading.Event.wait` and `threading.Condition.wait` methods, if invoked
+ without a timeout kwarg, may block the calling thread indefinitely. If the
+ call is made from the main thread, this means that signal handlers may not
+ run for an arbitrarily long period of time.
+
+    This wrapper calls the supplied wait function with an arbitrarily short
+    timeout to ensure that no signal handler has to wait longer than
+    MAXIMUM_WAIT_TIMEOUT seconds before executing.
+
+ Args:
+      wait_fn: A callable accepting a single float-valued kwarg named
+ `timeout`. This function is expected to be one of `threading.Event.wait`
+ or `threading.Condition.wait`.
+ wait_complete_fn: A callable taking no arguments and returning a bool.
+ When this function returns true, it indicates that waiting should cease.
+ timeout: An optional float-valued number of seconds after which the wait
+ should cease.
+    spin_cb: An optional callable taking no arguments and returning nothing.
+      This callback will be called on each iteration of the spin. It may be
+      used, e.g., for work related to forking.
+
+ Returns:
+ True if a timeout was supplied and it was reached. False otherwise.
+ """
+ if timeout is None:
+ while not wait_complete_fn():
+ _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb)
+ else:
+ end = time.time() + timeout
+ while not wait_complete_fn():
+ remaining = min(end - time.time(), MAXIMUM_WAIT_TIMEOUT)
+ if remaining < 0:
+ return True
+ _wait_once(wait_fn, remaining, spin_cb)
+ return False
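+
+# Typical usage inside this package (a sketch, assuming an _RPCState-like
+# object exposing a threading.Condition named `condition` and a `code` field):
+#
+#   with state.condition:
+#       timed_out = wait(state.condition.wait,
+#                        lambda: state.code is not None,
+#                        timeout=timeout)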
+
+
+def validate_port_binding_result(address, port):
+ """Validates if the port binding succeed.
+
+ If the port returned by Core is 0, the binding is failed. However, in that
+ case, the Core API doesn't return a detailed failing reason. The best we
+ can do is raising an exception to prevent further confusion.
+
+ Args:
+ address: The address string to be bound.
+ port: An int returned by core
+ """
+ if port == 0:
+        # The Core API doesn't return a failure message. The best we can do
+        # is raise an exception to prevent further confusion.
+ raise RuntimeError(_ERROR_MESSAGE_PORT_BINDING_FAILED % address)
+ else:
+ return port
diff --git a/venv/Lib/site-packages/grpc/_compression.py b/venv/Lib/site-packages/grpc/_compression.py
new file mode 100644
index 000000000..45339c3af
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/_compression.py
@@ -0,0 +1,55 @@
+# Copyright 2019 The gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from grpc._cython import cygrpc
+
+NoCompression = cygrpc.CompressionAlgorithm.none
+Deflate = cygrpc.CompressionAlgorithm.deflate
+Gzip = cygrpc.CompressionAlgorithm.gzip
+
+_METADATA_STRING_MAPPING = {
+ NoCompression: 'identity',
+ Deflate: 'deflate',
+ Gzip: 'gzip',
+}
+
+
+def _compression_algorithm_to_metadata_value(compression):
+ return _METADATA_STRING_MAPPING[compression]
+
+
+def compression_algorithm_to_metadata(compression):
+ return (cygrpc.GRPC_COMPRESSION_REQUEST_ALGORITHM_MD_KEY,
+ _compression_algorithm_to_metadata_value(compression))
+
+
+def create_channel_option(compression):
+ return ((cygrpc.GRPC_COMPRESSION_CHANNEL_DEFAULT_ALGORITHM,
+ int(compression)),) if compression else ()
+
+
+def augment_metadata(metadata, compression):
+ if not metadata and not compression:
+ return None
+ base_metadata = tuple(metadata) if metadata else ()
+ compression_metadata = (
+ compression_algorithm_to_metadata(compression),) if compression else ()
+ return base_metadata + compression_metadata
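+
+# For example (values taken from the mappings above):
+#   augment_metadata((('k', 'v'),), Gzip) returns
+#   (('k', 'v'), (cygrpc.GRPC_COMPRESSION_REQUEST_ALGORITHM_MD_KEY, 'gzip')).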
+
+
+__all__ = (
+ "NoCompression",
+ "Deflate",
+ "Gzip",
+)
diff --git a/venv/Lib/site-packages/grpc/_cython/__init__.py b/venv/Lib/site-packages/grpc/_cython/__init__.py
new file mode 100644
index 000000000..5fb4f3c3c
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/_cython/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2015 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/venv/Lib/site-packages/grpc/_cython/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/grpc/_cython/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..f1f5478ca
Binary files /dev/null and b/venv/Lib/site-packages/grpc/_cython/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/requests/cacert.pem b/venv/Lib/site-packages/grpc/_cython/_credentials/roots.pem
similarity index 61%
rename from venv/Lib/site-packages/requests/cacert.pem
rename to venv/Lib/site-packages/grpc/_cython/_credentials/roots.pem
index 6a66daa99..0b3847460 100644
--- a/venv/Lib/site-packages/requests/cacert.pem
+++ b/venv/Lib/site-packages/grpc/_cython/_credentials/roots.pem
@@ -1,30 +1,6 @@
-
-# Issuer: O=Equifax OU=Equifax Secure Certificate Authority
-# Subject: O=Equifax OU=Equifax Secure Certificate Authority
-# Label: "Equifax Secure CA"
-# Serial: 903804111
-# MD5 Fingerprint: 67:cb:9d:c0:13:24:8a:82:9b:b2:17:1e:d1:1b:ec:d4
-# SHA1 Fingerprint: d2:32:09:ad:23:d3:14:23:21:74:e4:0d:7f:9d:62:13:97:86:63:3a
-# SHA256 Fingerprint: 08:29:7a:40:47:db:a2:36:80:c7:31:db:6e:31:76:53:ca:78:48:e1:be:bd:3a:0b:01:79:a7:07:f9:2c:f1:78
------BEGIN CERTIFICATE-----
-MIIDIDCCAomgAwIBAgIENd70zzANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV
-UzEQMA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2Vy
-dGlmaWNhdGUgQXV0aG9yaXR5MB4XDTk4MDgyMjE2NDE1MVoXDTE4MDgyMjE2NDE1
-MVowTjELMAkGA1UEBhMCVVMxEDAOBgNVBAoTB0VxdWlmYXgxLTArBgNVBAsTJEVx
-dWlmYXggU2VjdXJlIENlcnRpZmljYXRlIEF1dGhvcml0eTCBnzANBgkqhkiG9w0B
-AQEFAAOBjQAwgYkCgYEAwV2xWGcIYu6gmi0fCG2RFGiYCh7+2gRvE4RiIcPRfM6f
-BeC4AfBONOziipUEZKzxa1NfBbPLZ4C/QgKO/t0BCezhABRP/PvwDN1Dulsr4R+A
-cJkVV5MW8Q+XarfCaCMczE1ZMKxRHjuvK9buY0V7xdlfUNLjUA86iOe/FP3gx7kC
-AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEQ
-MA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2VydGlm
-aWNhdGUgQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTgw
-ODIyMTY0MTUxWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUSOZo+SvSspXXR9gj
-IBBPM5iQn9QwHQYDVR0OBBYEFEjmaPkr0rKV10fYIyAQTzOYkJ/UMAwGA1UdEwQF
-MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA
-A4GBAFjOKer89961zgK5F7WF0bnj4JXMJTENAKaSbn+2kmOeUJXRmm/kEd5jhW6Y
-7qj/WsjTVbJmcVfewCHrPSqnI0kBBIZCe/zuf6IWUrVnZ9NA2zsmWLIodz2uFHdh
-1voqZiegDfqnc1zqcPGUIWVEX/r87yloqaKHee9570+sB3c4
------END CERTIFICATE-----
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
@@ -117,38 +93,6 @@ F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt
TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ==
-----END CERTIFICATE-----
-# Issuer: CN=VeriSign Class 4 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
-# Subject: CN=VeriSign Class 4 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
-# Label: "Verisign Class 4 Public Primary Certification Authority - G3"
-# Serial: 314531972711909413743075096039378935511
-# MD5 Fingerprint: db:c8:f2:27:2e:b1:ea:6a:29:23:5d:fe:56:3e:33:df
-# SHA1 Fingerprint: c8:ec:8c:87:92:69:cb:4b:ab:39:e9:8d:7e:57:67:f3:14:95:73:9d
-# SHA256 Fingerprint: e3:89:36:0d:0f:db:ae:b3:d2:50:58:4b:47:30:31:4e:22:2f:39:c1:56:a0:20:14:4e:8d:96:05:61:79:15:06
------BEGIN CERTIFICATE-----
-MIIEGjCCAwICEQDsoKeLbnVqAc/EfMwvlF7XMA0GCSqGSIb3DQEBBQUAMIHKMQsw
-CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
-cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu
-LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT
-aWduIENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp
-dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD
-VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT
-aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ
-bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu
-IENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
-LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAK3LpRFpxlmr8Y+1
-GQ9Wzsy1HyDkniYlS+BzZYlZ3tCD5PUPtbut8XzoIfzk6AzufEUiGXaStBO3IFsJ
-+mGuqPKljYXCKtbeZjbSmwL0qJJgfJxptI8kHtCGUvYynEFYHiK9zUVilQhu0Gbd
-U6LM8BDcVHOLBKFGMzNcF0C5nk3T875Vg+ixiY5afJqWIpA7iCXy0lOIAgwLePLm
-NxdLMEYH5IBtptiWLugs+BGzOA1mppvqySNb247i8xOOGlktqgLw7KSHZtzBP/XY
-ufTsgsbSPZUd5cBPhMnZo0QoBmrXRazwa2rvTl/4EYIeOGM0ZlDUPpNz+jDDZq3/
-ky2X7wMCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAj/ola09b5KROJ1WrIhVZPMq1
-CtRK26vdoV9TxaBXOcLORyu+OshWv8LZJxA6sQU8wHcxuzrTBXttmhwwjIDLk5Mq
-g6sFUYICABFna/OIYUdfA5PVWw3g8dShMjWFsjrbsIKr0csKvE+MW8VLADsfKoKm
-fjaF3H48ZwC15DtS4KjrXRX5xm3wrR0OhbepmnMUWluPQSjA1egtTaRezarZ7c7c
-2NU8Qh0XwRJdRTjDOPP8hS6DRkiy1yBfkjaP53kPmF6Z6PDQpLv1U70qzlmwr25/
-bLvSHgCwIe34QWKCudiyxLtGUPMxxY8BqHTr9Xgn2uf3ZkPznoM+IKrDNWCRzg==
------END CERTIFICATE-----
-
# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
# Label: "Entrust.net Premium 2048 Secure Server CA"
@@ -211,38 +155,6 @@ ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS
R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp
-----END CERTIFICATE-----
-# Issuer: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network
-# Subject: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network
-# Label: "AddTrust Low-Value Services Root"
-# Serial: 1
-# MD5 Fingerprint: 1e:42:95:02:33:92:6b:b9:5f:c0:7f:da:d6:b2:4b:fc
-# SHA1 Fingerprint: cc:ab:0e:a0:4c:23:01:d6:69:7b:dd:37:9f:cd:12:eb:24:e3:94:9d
-# SHA256 Fingerprint: 8c:72:09:27:9a:c0:4e:27:5e:16:d0:7f:d3:b7:75:e8:01:54:b5:96:80:46:e3:1f:52:dd:25:76:63:24:e9:a7
------BEGIN CERTIFICATE-----
-MIIEGDCCAwCgAwIBAgIBATANBgkqhkiG9w0BAQUFADBlMQswCQYDVQQGEwJTRTEU
-MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3
-b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwHhcNMDAwNTMw
-MTAzODMxWhcNMjAwNTMwMTAzODMxWjBlMQswCQYDVQQGEwJTRTEUMBIGA1UEChML
-QWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYD
-VQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUA
-A4IBDwAwggEKAoIBAQCWltQhSWDia+hBBwzexODcEyPNwTXH+9ZOEQpnXvUGW2ul
-CDtbKRY654eyNAbFvAWlA3yCyykQruGIgb3WntP+LVbBFc7jJp0VLhD7Bo8wBN6n
-tGO0/7Gcrjyvd7ZWxbWroulpOj0OM3kyP3CCkplhbY0wCI9xP6ZIVxn4JdxLZlyl
-dI+Yrsj5wAYi56xz36Uu+1LcsRVlIPo1Zmne3yzxbrww2ywkEtvrNTVokMsAsJch
-PXQhI2U0K7t4WaPW4XY5mqRJjox0r26kmqPZm9I4XJuiGMx1I4S+6+JNM3GOGvDC
-+Mcdoq0Dlyz4zyXG9rgkMbFjXZJ/Y/AlyVMuH79NAgMBAAGjgdIwgc8wHQYDVR0O
-BBYEFJWxtPCUtr3H2tERCSG+wa9J/RB7MAsGA1UdDwQEAwIBBjAPBgNVHRMBAf8E
-BTADAQH/MIGPBgNVHSMEgYcwgYSAFJWxtPCUtr3H2tERCSG+wa9J/RB7oWmkZzBl
-MQswCQYDVQQGEwJTRTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFk
-ZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENB
-IFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBACxtZBsfzQ3duQH6lmM0MkhHma6X
-7f1yFqZzR1r0693p9db7RcwpiURdv0Y5PejuvE1Uhh4dbOMXJ0PhiVYrqW9yTkkz
-43J8KiOavD7/KCrto/8cI7pDVwlnTUtiBi34/2ydYB7YHEt9tTEv2dB8Xfjea4MY
-eDdXL+gzB2ffHsdrKpV2ro9Xo/D0UrSpUwjP4E/TelOL/bscVjby/rK25Xa71SJl
-pz/+0WatC7xrmYbvP33zGDLKe8bjq2RGlfgmadlVg3sslgf/WSxEo8bl6ancoWOA
-WiFeIc9TVPC6b4nbqKqVz4vjccweGyBECMB6tkD9xOQ14R0WHNC8K47Wcdk=
------END CERTIFICATE-----
-
# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
# Label: "AddTrust External Root"
@@ -276,71 +188,6 @@ c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a
mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ=
-----END CERTIFICATE-----
-# Issuer: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network
-# Subject: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network
-# Label: "AddTrust Public Services Root"
-# Serial: 1
-# MD5 Fingerprint: c1:62:3e:23:c5:82:73:9c:03:59:4b:2b:e9:77:49:7f
-# SHA1 Fingerprint: 2a:b6:28:48:5e:78:fb:f3:ad:9e:79:10:dd:6b:df:99:72:2c:96:e5
-# SHA256 Fingerprint: 07:91:ca:07:49:b2:07:82:aa:d3:c7:d7:bd:0c:df:c9:48:58:35:84:3e:b2:d7:99:60:09:ce:43:ab:6c:69:27
------BEGIN CERTIFICATE-----
-MIIEFTCCAv2gAwIBAgIBATANBgkqhkiG9w0BAQUFADBkMQswCQYDVQQGEwJTRTEU
-MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3
-b3JrMSAwHgYDVQQDExdBZGRUcnVzdCBQdWJsaWMgQ0EgUm9vdDAeFw0wMDA1MzAx
-MDQxNTBaFw0yMDA1MzAxMDQxNTBaMGQxCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtB
-ZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIDAeBgNV
-BAMTF0FkZFRydXN0IFB1YmxpYyBDQSBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOC
-AQ8AMIIBCgKCAQEA6Rowj4OIFMEg2Dybjxt+A3S72mnTRqX4jsIMEZBRpS9mVEBV
-6tsfSlbunyNu9DnLoblv8n75XYcmYZ4c+OLspoH4IcUkzBEMP9smcnrHAZcHF/nX
-GCwwfQ56HmIexkvA/X1id9NEHif2P0tEs7c42TkfYNVRknMDtABp4/MUTu7R3AnP
-dzRGULD4EfL+OHn3Bzn+UZKXC1sIXzSGAa2Il+tmzV7R/9x98oTaunet3IAIx6eH
-1lWfl2royBFkuucZKT8Rs3iQhCBSWxHveNCD9tVIkNAwHM+A+WD+eeSI8t0A65RF
-62WUaUC6wNW0uLp9BBGo6zEFlpROWCGOn9Bg/QIDAQABo4HRMIHOMB0GA1UdDgQW
-BBSBPjfYkrAfd59ctKtzquf2NGAv+jALBgNVHQ8EBAMCAQYwDwYDVR0TAQH/BAUw
-AwEB/zCBjgYDVR0jBIGGMIGDgBSBPjfYkrAfd59ctKtzquf2NGAv+qFopGYwZDEL
-MAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQLExRBZGRU
-cnVzdCBUVFAgTmV0d29yazEgMB4GA1UEAxMXQWRkVHJ1c3QgUHVibGljIENBIFJv
-b3SCAQEwDQYJKoZIhvcNAQEFBQADggEBAAP3FUr4JNojVhaTdt02KLmuG7jD8WS6
-IBh4lSknVwW8fCr0uVFV2ocC3g8WFzH4qnkuCRO7r7IgGRLlk/lL+YPoRNWyQSW/
-iHVv/xD8SlTQX/D67zZzfRs2RcYhbbQVuE7PnFylPVoAjgbjPGsye/Kf8Lb93/Ao
-GEjwxrzQvzSAlsJKsW2Ox5BF3i9nrEUEo3rcVZLJR2bYGozH7ZxOmuASu7VqTITh
-4SINhwBk/ox9Yjllpu9CtoAlEmEBqCQTcAARJl/6NVDFSMwGR+gn2HCNX2TmoUQm
-XiLsks3/QppEIW1cxeMiHV9HEufOX1362KqxMy3ZdvJOOjMMK7MtkAY=
------END CERTIFICATE-----
-
-# Issuer: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network
-# Subject: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network
-# Label: "AddTrust Qualified Certificates Root"
-# Serial: 1
-# MD5 Fingerprint: 27:ec:39:47:cd:da:5a:af:e2:9a:01:65:21:a9:4c:bb
-# SHA1 Fingerprint: 4d:23:78:ec:91:95:39:b5:00:7f:75:8f:03:3b:21:1e:c5:4d:8b:cf
-# SHA256 Fingerprint: 80:95:21:08:05:db:4b:bc:35:5e:44:28:d8:fd:6e:c2:cd:e3:ab:5f:b9:7a:99:42:98:8e:b8:f4:dc:d0:60:16
------BEGIN CERTIFICATE-----
-MIIEHjCCAwagAwIBAgIBATANBgkqhkiG9w0BAQUFADBnMQswCQYDVQQGEwJTRTEU
-MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3
-b3JrMSMwIQYDVQQDExpBZGRUcnVzdCBRdWFsaWZpZWQgQ0EgUm9vdDAeFw0wMDA1
-MzAxMDQ0NTBaFw0yMDA1MzAxMDQ0NTBaMGcxCzAJBgNVBAYTAlNFMRQwEgYDVQQK
-EwtBZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIzAh
-BgNVBAMTGkFkZFRydXN0IFF1YWxpZmllZCBDQSBSb290MIIBIjANBgkqhkiG9w0B
-AQEFAAOCAQ8AMIIBCgKCAQEA5B6a/twJWoekn0e+EV+vhDTbYjx5eLfpMLXsDBwq
-xBb/4Oxx64r1EW7tTw2R0hIYLUkVAcKkIhPHEWT/IhKauY5cLwjPcWqzZwFZ8V1G
-87B4pfYOQnrjfxvM0PC3KP0q6p6zsLkEqv32x7SxuCqg+1jxGaBvcCV+PmlKfw8i
-2O+tCBGaKZnhqkRFmhJePp1tUvznoD1oL/BLcHwTOK28FSXx1s6rosAx1i+f4P8U
-WfyEk9mHfExUE+uf0S0R+Bg6Ot4l2ffTQO2kBhLEO+GRwVY18BTcZTYJbqukB8c1
-0cIDMzZbdSZtQvESa0NvS3GU+jQd7RNuyoB/mC9suWXY6QIDAQABo4HUMIHRMB0G
-A1UdDgQWBBQ5lYtii1zJ1IC6WA+XPxUIQ8yYpzALBgNVHQ8EBAMCAQYwDwYDVR0T
-AQH/BAUwAwEB/zCBkQYDVR0jBIGJMIGGgBQ5lYtii1zJ1IC6WA+XPxUIQ8yYp6Fr
-pGkwZzELMAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQL
-ExRBZGRUcnVzdCBUVFAgTmV0d29yazEjMCEGA1UEAxMaQWRkVHJ1c3QgUXVhbGlm
-aWVkIENBIFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBABmrder4i2VhlRO6aQTv
-hsoToMeqT2QbPxj2qC0sVY8FtzDqQmodwCVRLae/DLPt7wh/bDxGGuoYQ992zPlm
-hpwsaPXpF/gxsxjE1kh9I0xowX67ARRvxdlu3rsEQmr49lx95dr6h+sNNVJn0J6X
-dgWTP5XHAeZpVTh/EGGZyeNfpso+gmNIquIISD6q8rKFYqa0p9m9N5xotS1WfbC3
-P6CxB9bpT9zeRXEwMn8bLgn5v1Kh7sKAPgZcLlVAwRv1cEWw3F369nJad9Jjzc9Y
-iQBCYz95OdBEsIJuQRno3eDBiFrRHnGTHyQwdOUeqN48Jzd/g66ed8/wMLH/S5no
-xqE=
------END CERTIFICATE-----
-
# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
# Label: "Entrust Root Certification Authority"
@@ -376,35 +223,6 @@ eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m
0vdXcDazv/wor3ElhVsT/h5/WrQ8
-----END CERTIFICATE-----
-# Issuer: O=RSA Security Inc OU=RSA Security 2048 V3
-# Subject: O=RSA Security Inc OU=RSA Security 2048 V3
-# Label: "RSA Security 2048 v3"
-# Serial: 13297492616345471454730593562152402946
-# MD5 Fingerprint: 77:0d:19:b1:21:fd:00:42:9c:3e:0c:a5:dd:0b:02:8e
-# SHA1 Fingerprint: 25:01:90:19:cf:fb:d9:99:1c:b7:68:25:74:8d:94:5f:30:93:95:42
-# SHA256 Fingerprint: af:8b:67:62:a1:e5:28:22:81:61:a9:5d:5c:55:9e:e2:66:27:8f:75:d7:9e:83:01:89:a5:03:50:6a:bd:6b:4c
------BEGIN CERTIFICATE-----
-MIIDYTCCAkmgAwIBAgIQCgEBAQAAAnwAAAAKAAAAAjANBgkqhkiG9w0BAQUFADA6
-MRkwFwYDVQQKExBSU0EgU2VjdXJpdHkgSW5jMR0wGwYDVQQLExRSU0EgU2VjdXJp
-dHkgMjA0OCBWMzAeFw0wMTAyMjIyMDM5MjNaFw0yNjAyMjIyMDM5MjNaMDoxGTAX
-BgNVBAoTEFJTQSBTZWN1cml0eSBJbmMxHTAbBgNVBAsTFFJTQSBTZWN1cml0eSAy
-MDQ4IFYzMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAt49VcdKA3Xtp
-eafwGFAyPGJn9gqVB93mG/Oe2dJBVGutn3y+Gc37RqtBaB4Y6lXIL5F4iSj7Jylg
-/9+PjDvJSZu1pJTOAeo+tWN7fyb9Gd3AIb2E0S1PRsNO3Ng3OTsor8udGuorryGl
-wSMiuLgbWhOHV4PR8CDn6E8jQrAApX2J6elhc5SYcSa8LWrg903w8bYqODGBDSnh
-AMFRD0xS+ARaqn1y07iHKrtjEAMqs6FPDVpeRrc9DvV07Jmf+T0kgYim3WBU6JU2
-PcYJk5qjEoAAVZkZR73QpXzDuvsf9/UP+Ky5tfQ3mBMY3oVbtwyCO4dvlTlYMNpu
-AWgXIszACwIDAQABo2MwYTAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
-BjAfBgNVHSMEGDAWgBQHw1EwpKrpRa41JPr/JCwz0LGdjDAdBgNVHQ4EFgQUB8NR
-MKSq6UWuNST6/yQsM9CxnYwwDQYJKoZIhvcNAQEFBQADggEBAF8+hnZuuDU8TjYc
-HnmYv/3VEhF5Ug7uMYm83X/50cYVIeiKAVQNOvtUudZj1LGqlk2iQk3UUx+LEN5/
-Zb5gEydxiKRz44Rj0aRV4VCT5hsOedBnvEbIvz8XDZXmxpBp3ue0L96VfdASPz0+
-f00/FGj1EVDVwfSQpQgdMWD/YIwjVAqv/qFuxdF6Kmh4zx6CCiC0H63lhbJqaHVO
-rSU3lIW+vaHU6rcMSzyd6BIA8F+sDeGscGNz9395nzIlQnQFgCi/vcEkllgVsRch
-6YlL2weIZ/QVrXA+L02FO8K32/6YaCOJ4XQP3vTFhGMpG8zLB8kApKnXwiJPZ9d3
-7CAFYd4=
------END CERTIFICATE-----
-
# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc.
# Subject: CN=GeoTrust Global CA O=GeoTrust Inc.
# Label: "GeoTrust Global CA"
@@ -433,35 +251,6 @@ hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV
5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw==
-----END CERTIFICATE-----
-# Issuer: CN=GeoTrust Global CA 2 O=GeoTrust Inc.
-# Subject: CN=GeoTrust Global CA 2 O=GeoTrust Inc.
-# Label: "GeoTrust Global CA 2"
-# Serial: 1
-# MD5 Fingerprint: 0e:40:a7:6c:de:03:5d:8f:d1:0f:e4:d1:8d:f9:6c:a9
-# SHA1 Fingerprint: a9:e9:78:08:14:37:58:88:f2:05:19:b0:6d:2b:0d:2b:60:16:90:7d
-# SHA256 Fingerprint: ca:2d:82:a0:86:77:07:2f:8a:b6:76:4f:f0:35:67:6c:fe:3e:5e:32:5e:01:21:72:df:3f:92:09:6d:b7:9b:85
------BEGIN CERTIFICATE-----
-MIIDZjCCAk6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBEMQswCQYDVQQGEwJVUzEW
-MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3QgR2xvYmFs
-IENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMTkwMzA0MDUwMDAwWjBEMQswCQYDVQQG
-EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3Qg
-R2xvYmFsIENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDvPE1A
-PRDfO1MA4Wf+lGAVPoWI8YkNkMgoI5kF6CsgncbzYEbYwbLVjDHZ3CB5JIG/NTL8
-Y2nbsSpr7iFY8gjpeMtvy/wWUsiRxP89c96xPqfCfWbB9X5SJBri1WeR0IIQ13hL
-TytCOb1kLUCgsBDTOEhGiKEMuzozKmKY+wCdE1l/bztyqu6mD4b5BWHqZ38MN5aL
-5mkWRxHCJ1kDs6ZgwiFAVvqgx306E+PsV8ez1q6diYD3Aecs9pYrEw15LNnA5IZ7
-S4wMcoKK+xfNAGw6EzywhIdLFnopsk/bHdQL82Y3vdj2V7teJHq4PIu5+pIaGoSe
-2HSPqht/XvT+RSIhAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYE
-FHE4NvICMVNHK266ZUapEBVYIAUJMB8GA1UdIwQYMBaAFHE4NvICMVNHK266ZUap
-EBVYIAUJMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQUFAAOCAQEAA/e1K6td
-EPx7srJerJsOflN4WT5CBP51o62sgU7XAotexC3IUnbHLB/8gTKY0UvGkpMzNTEv
-/NgdRN3ggX+d6YvhZJFiCzkIjKx0nVnZellSlxG5FntvRdOW2TF9AjYPnDtuzywN
-A0ZF66D0f0hExghAzN4bcLUprbqLOzRldRtxIR0sFAqwlpW41uryZfspuk/qkZN0
-abby/+Ea0AzRdoXLiiW9l14sbxWZJue2Kf8i7MkCx1YAzUm5s2x7UwQa4qjJqhIF
-I8LO57sEAszAR6LkxCkvW0VXiVHuPOtSCP8HNR6fNWpHSlaY0VqFH4z1Ir+rzoPz
-4iIprn2DQKi6bA==
------END CERTIFICATE-----
-
# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc.
# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc.
# Label: "GeoTrust Universal CA"
@@ -540,63 +329,6 @@ OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH
QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS
-----END CERTIFICATE-----
-# Issuer: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association
-# Subject: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association
-# Label: "Visa eCommerce Root"
-# Serial: 25952180776285836048024890241505565794
-# MD5 Fingerprint: fc:11:b8:d8:08:93:30:00:6d:23:f9:7e:eb:52:1e:02
-# SHA1 Fingerprint: 70:17:9b:86:8c:00:a4:fa:60:91:52:22:3f:9f:3e:32:bd:e0:05:62
-# SHA256 Fingerprint: 69:fa:c9:bd:55:fb:0a:c7:8d:53:bb:ee:5c:f1:d5:97:98:9f:d0:aa:ab:20:a2:51:51:bd:f1:73:3e:e7:d1:22
------BEGIN CERTIFICATE-----
-MIIDojCCAoqgAwIBAgIQE4Y1TR0/BvLB+WUF1ZAcYjANBgkqhkiG9w0BAQUFADBr
-MQswCQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRl
-cm5hdGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNv
-bW1lcmNlIFJvb3QwHhcNMDIwNjI2MDIxODM2WhcNMjIwNjI0MDAxNjEyWjBrMQsw
-CQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRlcm5h
-dGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNvbW1l
-cmNlIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvV95WHm6h
-2mCxlCfLF9sHP4CFT8icttD0b0/Pmdjh28JIXDqsOTPHH2qLJj0rNfVIsZHBAk4E
-lpF7sDPwsRROEW+1QK8bRaVK7362rPKgH1g/EkZgPI2h4H3PVz4zHvtH8aoVlwdV
-ZqW1LS7YgFmypw23RuwhY/81q6UCzyr0TP579ZRdhE2o8mCP2w4lPJ9zcc+U30rq
-299yOIzzlr3xF7zSujtFWsan9sYXiwGd/BmoKoMWuDpI/k4+oKsGGelT84ATB+0t
-vz8KPFUgOSwsAGl0lUq8ILKpeeUYiZGo3BxN77t+Nwtd/jmliFKMAGzsGHxBvfaL
-dXe6YJ2E5/4tAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
-AgEGMB0GA1UdDgQWBBQVOIMPPyw/cDMezUb+B4wg4NfDtzANBgkqhkiG9w0BAQUF
-AAOCAQEAX/FBfXxcCLkr4NWSR/pnXKUTwwMhmytMiUbPWU3J/qVAtmPN3XEolWcR
-zCSs00Rsca4BIGsDoo8Ytyk6feUWYFN4PMCvFYP3j1IzJL1kk5fui/fbGKhtcbP3
-LBfQdCVp9/5rPJS+TUtBjE7ic9DjkCJzQ83z7+pzzkWKsKZJ/0x9nXGIxHYdkFsd
-7v3M9+79YKWxehZx0RbQfBI8bGmX265fOZpwLwU8GUYEmSA20GBuYQa7FkKMcPcw
-++DbZqMAAb3mLNqRX6BGi01qnD093QVG/na/oAo85ADmJ7f/hC3euiInlhBx6yLt
-398znM/jra6O1I7mT1GvFpLgXPYHDw==
------END CERTIFICATE-----
-
-# Issuer: CN=Certum CA O=Unizeto Sp. z o.o.
-# Subject: CN=Certum CA O=Unizeto Sp. z o.o.
-# Label: "Certum Root CA"
-# Serial: 65568
-# MD5 Fingerprint: 2c:8f:9f:66:1d:18:90:b1:47:26:9d:8e:86:82:8c:a9
-# SHA1 Fingerprint: 62:52:dc:40:f7:11:43:a2:2f:de:9e:f7:34:8e:06:42:51:b1:81:18
-# SHA256 Fingerprint: d8:e0:fe:bc:1d:b2:e3:8d:00:94:0f:37:d2:7d:41:34:4d:99:3e:73:4b:99:d5:65:6d:97:78:d4:d8:14:36:24
------BEGIN CERTIFICATE-----
-MIIDDDCCAfSgAwIBAgIDAQAgMA0GCSqGSIb3DQEBBQUAMD4xCzAJBgNVBAYTAlBM
-MRswGQYDVQQKExJVbml6ZXRvIFNwLiB6IG8uby4xEjAQBgNVBAMTCUNlcnR1bSBD
-QTAeFw0wMjA2MTExMDQ2MzlaFw0yNzA2MTExMDQ2MzlaMD4xCzAJBgNVBAYTAlBM
-MRswGQYDVQQKExJVbml6ZXRvIFNwLiB6IG8uby4xEjAQBgNVBAMTCUNlcnR1bSBD
-QTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAM6xwS7TT3zNJc4YPk/E
-jG+AanPIW1H4m9LcuwBcsaD8dQPugfCI7iNS6eYVM42sLQnFdvkrOYCJ5JdLkKWo
-ePhzQ3ukYbDYWMzhbGZ+nPMJXlVjhNWo7/OxLjBos8Q82KxujZlakE403Daaj4GI
-ULdtlkIJ89eVgw1BS7Bqa/j8D35in2fE7SZfECYPCE/wpFcozo+47UX2bu4lXapu
-Ob7kky/ZR6By6/qmW6/KUz/iDsaWVhFu9+lmqSbYf5VT7QqFiLpPKaVCjF62/IUg
-AKpoC6EahQGcxEZjgoi2IrHu/qpGWX7PNSzVttpd90gzFFS269lvzs2I1qsb2pY7
-HVkCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEA
-uI3O7+cUus/usESSbLQ5PqKEbq24IXfS1HeCh+YgQYHu4vgRt2PRFze+GXYkHAQa
-TOs9qmdvLdTN/mUxcMUbpgIKumB7bVjCmkn+YzILa+M6wKyrO7Do0wlRjBCDxjTg
-xSvgGrZgFCdsMneMvLJymM/NzD+5yCRCFNZX/OYmQ6kd5YCQzgNUKD73P9P4Te1q
-CjqTE5s7FCMTY5w/0YcneeVMUeMBrYVdGjux1XMQpNPyvG5k9VpWkKjHDkx0Dy5x
-O/fIR/RpbxXyEV6DHpx8Uq79AtoSqFlnGNu8cN2bsWntgM6JQEhqDjXKKWYVIZQs
-6GAqm4VKQPNriiTsBhYscw==
------END CERTIFICATE-----
-
# Issuer: CN=AAA Certificate Services O=Comodo CA Limited
# Subject: CN=AAA Certificate Services O=Comodo CA Limited
# Label: "Comodo AAA Services root"
@@ -630,72 +362,6 @@ l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3
smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg==
-----END CERTIFICATE-----
-# Issuer: CN=Secure Certificate Services O=Comodo CA Limited
-# Subject: CN=Secure Certificate Services O=Comodo CA Limited
-# Label: "Comodo Secure Services root"
-# Serial: 1
-# MD5 Fingerprint: d3:d9:bd:ae:9f:ac:67:24:b3:c8:1b:52:e1:b9:a9:bd
-# SHA1 Fingerprint: 4a:65:d5:f4:1d:ef:39:b8:b8:90:4a:4a:d3:64:81:33:cf:c7:a1:d1
-# SHA256 Fingerprint: bd:81:ce:3b:4f:65:91:d1:1a:67:b5:fc:7a:47:fd:ef:25:52:1b:f9:aa:4e:18:b9:e3:df:2e:34:a7:80:3b:e8
------BEGIN CERTIFICATE-----
-MIIEPzCCAyegAwIBAgIBATANBgkqhkiG9w0BAQUFADB+MQswCQYDVQQGEwJHQjEb
-MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
-GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEkMCIGA1UEAwwbU2VjdXJlIENlcnRp
-ZmljYXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVow
-fjELMAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
-A1UEBwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxJDAiBgNV
-BAMMG1NlY3VyZSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEB
-BQADggEPADCCAQoCggEBAMBxM4KK0HDrc4eCQNUd5MvJDkKQ+d40uaG6EfQlhfPM
-cm3ye5drswfxdySRXyWP9nQ95IDC+DwN879A6vfIUtFyb+/Iq0G4bi4XKpVpDM3S
-HpR7LZQdqnXXs5jLrLxkU0C8j6ysNstcrbvd4JQX7NFc0L/vpZXJkMWwrPsbQ996
-CF23uPJAGysnnlDOXmWCiIxe004MeuoIkbY2qitC++rCoznl2yY4rYsK7hljxxwk
-3wN42ubqwUcaCwtGCd0C/N7Lh1/XMGNooa7cMqG6vv5Eq2i2pRcV/b3Vp6ea5EQz
-6YiO/O1R65NxTq0B50SOqy3LqP4BSUjwwN3HaNiS/j0CAwEAAaOBxzCBxDAdBgNV
-HQ4EFgQUPNiTiMLAggnMAZkGkyDpnnAJY08wDgYDVR0PAQH/BAQDAgEGMA8GA1Ud
-EwEB/wQFMAMBAf8wgYEGA1UdHwR6MHgwO6A5oDeGNWh0dHA6Ly9jcmwuY29tb2Rv
-Y2EuY29tL1NlY3VyZUNlcnRpZmljYXRlU2VydmljZXMuY3JsMDmgN6A1hjNodHRw
-Oi8vY3JsLmNvbW9kby5uZXQvU2VjdXJlQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmww
-DQYJKoZIhvcNAQEFBQADggEBAIcBbSMdflsXfcFhMs+P5/OKlFlm4J4oqF7Tt/Q0
-5qo5spcWxYJvMqTpjOev/e/C6LlLqqP05tqNZSH7uoDrJiiFGv45jN5bBAS0VPmj
-Z55B+glSzAVIqMk/IQQezkhr/IXownuvf7fM+F86/TXGDe+X3EyrEeFryzHRbPtI
-gKvcnDe4IRRLDXE97IMzbtFuMhbsmMcWi1mmNKsFVy2T96oTy9IT4rcuO81rUBcJ
-aD61JlfutuC23bkpgHl9j6PwpCikFcSF9CfUa7/lXORlAnZUtOM3ZiTTGWHIUhDl
-izeauan5Hb/qmZJhlv8BzaFfDbxxvA6sCx1HRR3B7Hzs/Sk=
------END CERTIFICATE-----
-
-# Issuer: CN=Trusted Certificate Services O=Comodo CA Limited
-# Subject: CN=Trusted Certificate Services O=Comodo CA Limited
-# Label: "Comodo Trusted Services root"
-# Serial: 1
-# MD5 Fingerprint: 91:1b:3f:6e:cd:9e:ab:ee:07:fe:1f:71:d2:b3:61:27
-# SHA1 Fingerprint: e1:9f:e3:0e:8b:84:60:9e:80:9b:17:0d:72:a8:c5:ba:6e:14:09:bd
-# SHA256 Fingerprint: 3f:06:e5:56:81:d4:96:f5:be:16:9e:b5:38:9f:9f:2b:8f:f6:1e:17:08:df:68:81:72:48:49:cd:5d:27:cb:69
------BEGIN CERTIFICATE-----
-MIIEQzCCAyugAwIBAgIBATANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJHQjEb
-MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
-GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDElMCMGA1UEAwwcVHJ1c3RlZCBDZXJ0
-aWZpY2F0ZSBTZXJ2aWNlczAeFw0wNDAxMDEwMDAwMDBaFw0yODEyMzEyMzU5NTla
-MH8xCzAJBgNVBAYTAkdCMRswGQYDVQQIDBJHcmVhdGVyIE1hbmNoZXN0ZXIxEDAO
-BgNVBAcMB1NhbGZvcmQxGjAYBgNVBAoMEUNvbW9kbyBDQSBMaW1pdGVkMSUwIwYD
-VQQDDBxUcnVzdGVkIENlcnRpZmljYXRlIFNlcnZpY2VzMIIBIjANBgkqhkiG9w0B
-AQEFAAOCAQ8AMIIBCgKCAQEA33FvNlhTWvI2VFeAxHQIIO0Yfyod5jWaHiWsnOWW
-fnJSoBVC21ndZHoa0Lh73TkVvFVIxO06AOoxEbrycXQaZ7jPM8yoMa+j49d/vzMt
-TGo87IvDktJTdyR0nAducPy9C1t2ul/y/9c3S0pgePfw+spwtOpZqqPOSC+pw7IL
-fhdyFgymBwwbOM/JYrc/oJOlh0Hyt3BAd9i+FHzjqMB6juljatEPmsbS9Is6FARW
-1O24zG71++IsWL1/T2sr92AkWCTOJu80kTrV44HQsvAEAtdbtz6SrGsSivnkBbA7
-kUlcsutT6vifR4buv5XAwAaf0lteERv0xwQ1KdJVXOTt6wIDAQABo4HJMIHGMB0G
-A1UdDgQWBBTFe1i97doladL3WRaoszLAeydb9DAOBgNVHQ8BAf8EBAMCAQYwDwYD
-VR0TAQH/BAUwAwEB/zCBgwYDVR0fBHwwejA8oDqgOIY2aHR0cDovL2NybC5jb21v
-ZG9jYS5jb20vVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMuY3JsMDqgOKA2hjRo
-dHRwOi8vY3JsLmNvbW9kby5uZXQvVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMu
-Y3JsMA0GCSqGSIb3DQEBBQUAA4IBAQDIk4E7ibSvuIQSTI3S8NtwuleGFTQQuS9/
-HrCoiWChisJ3DFBKmwCL2Iv0QeLQg4pKHBQGsKNoBXAxMKdTmw7pSqBYaWcOrp32
-pSxBvzwGa+RZzG0Q8ZZvH9/0BAKkn0U+yNj6NkZEUD+Cl5EfKNsYEYwq5GWDVxIS
-jBc/lDb+XbDABHcTuPQV1T84zJQ6VdCsmPW6AF/ghhmBeC8owH7TzEIK9a5QoNE+
-xqFx7D+gIIxmOom0jtTYsU0lR+4viMi14QVFwL4Ucd56/Y57fU0IlqUSc/Atyjcn
-dBInTMu2l+nZrghtWjlA3QVHdWpaIbOjGM9O9y5Xt5hwXsjEeLBi
------END CERTIFICATE-----
-
# Issuer: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority
# Subject: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority
# Label: "QuoVadis Root CA"
@@ -880,221 +546,6 @@ Tk6ezAyNlNzZRZxe7EJQY670XcSxEtzKO6gunRRaBXW37Ndj4ro1tgQIkejanZz2
ZrUYrAqmVCY0M9IbwdR/GjqOC6oybtv8TyWf2TLHllpwrN9M
-----END CERTIFICATE-----
-# Issuer: CN=Staat der Nederlanden Root CA O=Staat der Nederlanden
-# Subject: CN=Staat der Nederlanden Root CA O=Staat der Nederlanden
-# Label: "Staat der Nederlanden Root CA"
-# Serial: 10000010
-# MD5 Fingerprint: 60:84:7c:5a:ce:db:0c:d4:cb:a7:e9:fe:02:c6:a9:c0
-# SHA1 Fingerprint: 10:1d:fa:3f:d5:0b:cb:bb:9b:b5:60:0c:19:55:a4:1a:f4:73:3a:04
-# SHA256 Fingerprint: d4:1d:82:9e:8c:16:59:82:2a:f9:3f:ce:62:bf:fc:de:26:4f:c8:4e:8b:95:0c:5f:f2:75:d0:52:35:46:95:a3
------BEGIN CERTIFICATE-----
-MIIDujCCAqKgAwIBAgIEAJiWijANBgkqhkiG9w0BAQUFADBVMQswCQYDVQQGEwJO
-TDEeMBwGA1UEChMVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSYwJAYDVQQDEx1TdGFh
-dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQTAeFw0wMjEyMTcwOTIzNDlaFw0xNTEy
-MTYwOTE1MzhaMFUxCzAJBgNVBAYTAk5MMR4wHAYDVQQKExVTdGFhdCBkZXIgTmVk
-ZXJsYW5kZW4xJjAkBgNVBAMTHVN0YWF0IGRlciBOZWRlcmxhbmRlbiBSb290IENB
-MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAmNK1URF6gaYUmHFtvszn
-ExvWJw56s2oYHLZhWtVhCb/ekBPHZ+7d89rFDBKeNVU+LCeIQGv33N0iYfXCxw71
-9tV2U02PjLwYdjeFnejKScfST5gTCaI+Ioicf9byEGW07l8Y1Rfj+MX94p2i71MO
-hXeiD+EwR+4A5zN9RGcaC1Hoi6CeUJhoNFIfLm0B8mBF8jHrqTFoKbt6QZ7GGX+U
-tFE5A3+y3qcym7RHjm+0Sq7lr7HcsBthvJly3uSJt3omXdozSVtSnA71iq3DuD3o
-BmrC1SoLbHuEvVYFy4ZlkuxEK7COudxwC0barbxjiDn622r+I/q85Ej0ZytqERAh
-SQIDAQABo4GRMIGOMAwGA1UdEwQFMAMBAf8wTwYDVR0gBEgwRjBEBgRVHSAAMDww
-OgYIKwYBBQUHAgEWLmh0dHA6Ly93d3cucGtpb3ZlcmhlaWQubmwvcG9saWNpZXMv
-cm9vdC1wb2xpY3kwDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSofeu8Y6R0E3QA
-7Jbg0zTBLL9s+DANBgkqhkiG9w0BAQUFAAOCAQEABYSHVXQ2YcG70dTGFagTtJ+k
-/rvuFbQvBgwp8qiSpGEN/KtcCFtREytNwiphyPgJWPwtArI5fZlmgb9uXJVFIGzm
-eafR2Bwp/MIgJ1HI8XxdNGdphREwxgDS1/PTfLbwMVcoEoJz6TMvplW0C5GUR5z6
-u3pCMuiufi3IvKwUv9kP2Vv8wfl6leF9fpb8cbDCTMjfRTTJzg3ynGQI0DvDKcWy
-7ZAEwbEpkcUwb8GpcjPM/l0WFywRaed+/sWDCN+83CI6LiBpIzlWYGeQiy52OfsR
-iJf2fL1LuCAWZwWN4jvBcj+UlTfHXbme2JOhF4//DGYVwSR8MnwDHTuhWEUykw==
------END CERTIFICATE-----
-
-# Issuer: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com
-# Subject: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com
-# Label: "UTN DATACorp SGC Root CA"
-# Serial: 91374294542884689855167577680241077609
-# MD5 Fingerprint: b3:a5:3e:77:21:6d:ac:4a:c0:c9:fb:d5:41:3d:ca:06
-# SHA1 Fingerprint: 58:11:9f:0e:12:82:87:ea:50:fd:d9:87:45:6f:4f:78:dc:fa:d6:d4
-# SHA256 Fingerprint: 85:fb:2f:91:dd:12:27:5a:01:45:b6:36:53:4f:84:02:4a:d6:8b:69:b8:ee:88:68:4f:f7:11:37:58:05:b3:48
------BEGIN CERTIFICATE-----
-MIIEXjCCA0agAwIBAgIQRL4Mi1AAIbQR0ypoBqmtaTANBgkqhkiG9w0BAQUFADCB
-kzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug
-Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho
-dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xGzAZBgNVBAMTElVUTiAtIERBVEFDb3Jw
-IFNHQzAeFw05OTA2MjQxODU3MjFaFw0xOTA2MjQxOTA2MzBaMIGTMQswCQYDVQQG
-EwJVUzELMAkGA1UECBMCVVQxFzAVBgNVBAcTDlNhbHQgTGFrZSBDaXR5MR4wHAYD
-VQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxITAfBgNVBAsTGGh0dHA6Ly93d3cu
-dXNlcnRydXN0LmNvbTEbMBkGA1UEAxMSVVROIC0gREFUQUNvcnAgU0dDMIIBIjAN
-BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3+5YEKIrblXEjr8uRgnn4AgPLit6
-E5Qbvfa2gI5lBZMAHryv4g+OGQ0SR+ysraP6LnD43m77VkIVni5c7yPeIbkFdicZ
-D0/Ww5y0vpQZY/KmEQrrU0icvvIpOxboGqBMpsn0GFlowHDyUwDAXlCCpVZvNvlK
-4ESGoE1O1kduSUrLZ9emxAW5jh70/P/N5zbgnAVssjMiFdC04MwXwLLA9P4yPykq
-lXvY8qdOD1R8oQ2AswkDwf9c3V6aPryuvEeKaq5xyh+xKrhfQgUL7EYw0XILyulW
-bfXv33i+Ybqypa4ETLyorGkVl73v67SMvzX41MPRKA5cOp9wGDMgd8SirwIDAQAB
-o4GrMIGoMAsGA1UdDwQEAwIBxjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRT
-MtGzz3/64PGgXYVOktKeRR20TzA9BgNVHR8ENjA0MDKgMKAuhixodHRwOi8vY3Js
-LnVzZXJ0cnVzdC5jb20vVVROLURBVEFDb3JwU0dDLmNybDAqBgNVHSUEIzAhBggr
-BgEFBQcDAQYKKwYBBAGCNwoDAwYJYIZIAYb4QgQBMA0GCSqGSIb3DQEBBQUAA4IB
-AQAnNZcAiosovcYzMB4p/OL31ZjUQLtgyr+rFywJNn9Q+kHcrpY6CiM+iVnJowft
-Gzet/Hy+UUla3joKVAgWRcKZsYfNjGjgaQPpxE6YsjuMFrMOoAyYUJuTqXAJyCyj
-j98C5OBxOvG0I3KgqgHf35g+FFCgMSa9KOlaMCZ1+XtgHI3zzVAmbQQnmt/VDUVH
-KWss5nbZqSl9Mt3JNjy9rjXxEZ4du5A/EkdOjtd+D2JzHVImOBwYSf0wdJrE5SIv
-2MCN7ZF6TACPcn9d2t0bi0Vr591pl6jFVkwPDPafepE39peC4N1xaf92P2BNPM/3
-mfnGV/TJVTl4uix5yaaIK/QI
------END CERTIFICATE-----
-
-# Issuer: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com
-# Subject: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com
-# Label: "UTN USERFirst Hardware Root CA"
-# Serial: 91374294542884704022267039221184531197
-# MD5 Fingerprint: 4c:56:41:e5:0d:bb:2b:e8:ca:a3:ed:18:08:ad:43:39
-# SHA1 Fingerprint: 04:83:ed:33:99:ac:36:08:05:87:22:ed:bc:5e:46:00:e3:be:f9:d7
-# SHA256 Fingerprint: 6e:a5:47:41:d0:04:66:7e:ed:1b:48:16:63:4a:a3:a7:9e:6e:4b:96:95:0f:82:79:da:fc:8d:9b:d8:81:21:37
------BEGIN CERTIFICATE-----
-MIIEdDCCA1ygAwIBAgIQRL4Mi1AAJLQR0zYq/mUK/TANBgkqhkiG9w0BAQUFADCB
-lzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug
-Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho
-dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3Qt
-SGFyZHdhcmUwHhcNOTkwNzA5MTgxMDQyWhcNMTkwNzA5MTgxOTIyWjCBlzELMAkG
-A1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2UgQ2l0eTEe
-MBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExhodHRwOi8v
-d3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3QtSGFyZHdh
-cmUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCx98M4P7Sof885glFn
-0G2f0v9Y8+efK+wNiVSZuTiZFvfgIXlIwrthdBKWHTxqctU8EGc6Oe0rE81m65UJ
-M6Rsl7HoxuzBdXmcRl6Nq9Bq/bkqVRcQVLMZ8Jr28bFdtqdt++BxF2uiiPsA3/4a
-MXcMmgF6sTLjKwEHOG7DpV4jvEWbe1DByTCP2+UretNb+zNAHqDVmBe8i4fDidNd
-oI6yqqr2jmmIBsX6iSHzCJ1pLgkzmykNRg+MzEk0sGlRvfkGzWitZky8PqxhvQqI
-DsjfPe58BEydCl5rkdbux+0ojatNh4lz0G6k0B4WixThdkQDf2Os5M1JnMWS9Ksy
-oUhbAgMBAAGjgbkwgbYwCwYDVR0PBAQDAgHGMA8GA1UdEwEB/wQFMAMBAf8wHQYD
-VR0OBBYEFKFyXyYbKJhDlV0HN9WFlp1L0sNFMEQGA1UdHwQ9MDswOaA3oDWGM2h0
-dHA6Ly9jcmwudXNlcnRydXN0LmNvbS9VVE4tVVNFUkZpcnN0LUhhcmR3YXJlLmNy
-bDAxBgNVHSUEKjAoBggrBgEFBQcDAQYIKwYBBQUHAwUGCCsGAQUFBwMGBggrBgEF
-BQcDBzANBgkqhkiG9w0BAQUFAAOCAQEARxkP3nTGmZev/K0oXnWO6y1n7k57K9cM
-//bey1WiCuFMVGWTYGufEpytXoMs61quwOQt9ABjHbjAbPLPSbtNk28Gpgoiskli
-CE7/yMgUsogWXecB5BKV5UU0s4tpvc+0hY91UZ59Ojg6FEgSxvunOxqNDYJAB+gE
-CJChicsZUN/KHAG8HQQZexB2lzvukJDKxA4fFm517zP4029bHpbj4HR3dHuKom4t
-3XbWOTCC8KucUvIqx69JXn7HaOWCgchqJ/kniCrVWFCVH/A7HFe7fRQ5YiuayZSS
-KqMiDP+JJn1fIytH1xUdqWqeUQ0qUZ6B+dQ7XnASfxAynB67nfhmqA==
------END CERTIFICATE-----
-
-# Issuer: CN=Chambers of Commerce Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org
-# Subject: CN=Chambers of Commerce Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org
-# Label: "Camerfirma Chambers of Commerce Root"
-# Serial: 0
-# MD5 Fingerprint: b0:01:ee:14:d9:af:29:18:94:76:8e:f1:69:33:2a:84
-# SHA1 Fingerprint: 6e:3a:55:a4:19:0c:19:5c:93:84:3c:c0:db:72:2e:31:30:61:f0:b1
-# SHA256 Fingerprint: 0c:25:8a:12:a5:67:4a:ef:25:f2:8b:a7:dc:fa:ec:ee:a3:48:e5:41:e6:f5:cc:4e:e6:3b:71:b3:61:60:6a:c3
------BEGIN CERTIFICATE-----
-MIIEvTCCA6WgAwIBAgIBADANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJFVTEn
-MCUGA1UEChMeQUMgQ2FtZXJmaXJtYSBTQSBDSUYgQTgyNzQzMjg3MSMwIQYDVQQL
-ExpodHRwOi8vd3d3LmNoYW1iZXJzaWduLm9yZzEiMCAGA1UEAxMZQ2hhbWJlcnMg
-b2YgQ29tbWVyY2UgUm9vdDAeFw0wMzA5MzAxNjEzNDNaFw0zNzA5MzAxNjEzNDRa
-MH8xCzAJBgNVBAYTAkVVMScwJQYDVQQKEx5BQyBDYW1lcmZpcm1hIFNBIENJRiBB
-ODI3NDMyODcxIzAhBgNVBAsTGmh0dHA6Ly93d3cuY2hhbWJlcnNpZ24ub3JnMSIw
-IAYDVQQDExlDaGFtYmVycyBvZiBDb21tZXJjZSBSb290MIIBIDANBgkqhkiG9w0B
-AQEFAAOCAQ0AMIIBCAKCAQEAtzZV5aVdGDDg2olUkfzIx1L4L1DZ77F1c2VHfRtb
-unXF/KGIJPov7coISjlUxFF6tdpg6jg8gbLL8bvZkSM/SAFwdakFKq0fcfPJVD0d
-BmpAPrMMhe5cG3nCYsS4No41XQEMIwRHNaqbYE6gZj3LJgqcQKH0XZi/caulAGgq
-7YN6D6IUtdQis4CwPAxaUWktWBiP7Zme8a7ileb2R6jWDA+wWFjbw2Y3npuRVDM3
-0pQcakjJyfKl2qUMI/cjDpwyVV5xnIQFUZot/eZOKjRa3spAN2cMVCFVd9oKDMyX
-roDclDZK9D7ONhMeU+SsTjoF7Nuucpw4i9A5O4kKPnf+dQIBA6OCAUQwggFAMBIG
-A1UdEwEB/wQIMAYBAf8CAQwwPAYDVR0fBDUwMzAxoC+gLYYraHR0cDovL2NybC5j
-aGFtYmVyc2lnbi5vcmcvY2hhbWJlcnNyb290LmNybDAdBgNVHQ4EFgQU45T1sU3p
-26EpW1eLTXYGduHRooowDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIA
-BzAnBgNVHREEIDAegRxjaGFtYmVyc3Jvb3RAY2hhbWJlcnNpZ24ub3JnMCcGA1Ud
-EgQgMB6BHGNoYW1iZXJzcm9vdEBjaGFtYmVyc2lnbi5vcmcwWAYDVR0gBFEwTzBN
-BgsrBgEEAYGHLgoDATA+MDwGCCsGAQUFBwIBFjBodHRwOi8vY3BzLmNoYW1iZXJz
-aWduLm9yZy9jcHMvY2hhbWJlcnNyb290Lmh0bWwwDQYJKoZIhvcNAQEFBQADggEB
-AAxBl8IahsAifJ/7kPMa0QOx7xP5IV8EnNrJpY0nbJaHkb5BkAFyk+cefV/2icZd
-p0AJPaxJRUXcLo0waLIJuvvDL8y6C98/d3tGfToSJI6WjzwFCm/SlCgdbQzALogi
-1djPHRPH8EjX1wWnz8dHnjs8NMiAT9QUu/wNUPf6s+xCX6ndbcj0dc97wXImsQEc
-XCz9ek60AcUFV7nnPKoF2YjpB0ZBzu9Bga5Y34OirsrXdx/nADydb47kMgkdTXg0
-eDQ8lJsm7U9xxhl6vSAiSFr+S30Dt+dYvsYyTnQeaN2oaFuzPu5ifdmA6Ap1erfu
-tGWaIZDgqtCYvDi1czyL+Nw=
------END CERTIFICATE-----
-
-# Issuer: CN=Global Chambersign Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org
-# Subject: CN=Global Chambersign Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org
-# Label: "Camerfirma Global Chambersign Root"
-# Serial: 0
-# MD5 Fingerprint: c5:e6:7b:bf:06:d0:4f:43:ed:c4:7a:65:8a:fb:6b:19
-# SHA1 Fingerprint: 33:9b:6b:14:50:24:9b:55:7a:01:87:72:84:d9:e0:2f:c3:d2:d8:e9
-# SHA256 Fingerprint: ef:3c:b4:17:fc:8e:bf:6f:97:87:6c:9e:4e:ce:39:de:1e:a5:fe:64:91:41:d1:02:8b:7d:11:c0:b2:29:8c:ed
------BEGIN CERTIFICATE-----
-MIIExTCCA62gAwIBAgIBADANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJFVTEn
-MCUGA1UEChMeQUMgQ2FtZXJmaXJtYSBTQSBDSUYgQTgyNzQzMjg3MSMwIQYDVQQL
-ExpodHRwOi8vd3d3LmNoYW1iZXJzaWduLm9yZzEgMB4GA1UEAxMXR2xvYmFsIENo
-YW1iZXJzaWduIFJvb3QwHhcNMDMwOTMwMTYxNDE4WhcNMzcwOTMwMTYxNDE4WjB9
-MQswCQYDVQQGEwJFVTEnMCUGA1UEChMeQUMgQ2FtZXJmaXJtYSBTQSBDSUYgQTgy
-NzQzMjg3MSMwIQYDVQQLExpodHRwOi8vd3d3LmNoYW1iZXJzaWduLm9yZzEgMB4G
-A1UEAxMXR2xvYmFsIENoYW1iZXJzaWduIFJvb3QwggEgMA0GCSqGSIb3DQEBAQUA
-A4IBDQAwggEIAoIBAQCicKLQn0KuWxfH2H3PFIP8T8mhtxOviteePgQKkotgVvq0
-Mi+ITaFgCPS3CU6gSS9J1tPfnZdan5QEcOw/Wdm3zGaLmFIoCQLfxS+EjXqXd7/s
-QJ0lcqu1PzKY+7e3/HKE5TWH+VX6ox8Oby4o3Wmg2UIQxvi1RMLQQ3/bvOSiPGpV
-eAp3qdjqGTK3L/5cPxvusZjsyq16aUXjlg9V9ubtdepl6DJWk0aJqCWKZQbua795
-B9Dxt6/tLE2Su8CoX6dnfQTyFQhwrJLWfQTSM/tMtgsL+xrJxI0DqX5c8lCrEqWh
-z0hQpe/SyBoT+rB/sYIcd2oPX9wLlY/vQ37mRQklAgEDo4IBUDCCAUwwEgYDVR0T
-AQH/BAgwBgEB/wIBDDA/BgNVHR8EODA2MDSgMqAwhi5odHRwOi8vY3JsLmNoYW1i
-ZXJzaWduLm9yZy9jaGFtYmVyc2lnbnJvb3QuY3JsMB0GA1UdDgQWBBRDnDafsJ4w
-TcbOX60Qq+UDpfqpFDAOBgNVHQ8BAf8EBAMCAQYwEQYJYIZIAYb4QgEBBAQDAgAH
-MCoGA1UdEQQjMCGBH2NoYW1iZXJzaWducm9vdEBjaGFtYmVyc2lnbi5vcmcwKgYD
-VR0SBCMwIYEfY2hhbWJlcnNpZ25yb290QGNoYW1iZXJzaWduLm9yZzBbBgNVHSAE
-VDBSMFAGCysGAQQBgYcuCgEBMEEwPwYIKwYBBQUHAgEWM2h0dHA6Ly9jcHMuY2hh
-bWJlcnNpZ24ub3JnL2Nwcy9jaGFtYmVyc2lnbnJvb3QuaHRtbDANBgkqhkiG9w0B
-AQUFAAOCAQEAPDtwkfkEVCeR4e3t/mh/YV3lQWVPMvEYBZRqHN4fcNs+ezICNLUM
-bKGKfKX0j//U2K0X1S0E0T9YgOKBWYi+wONGkyT+kL0mojAt6JcmVzWJdJYY9hXi
-ryQZVgICsroPFOrGimbBhkVVi76SvpykBMdJPJ7oKXqJ1/6v/2j1pReQvayZzKWG
-VwlnRtvWFsJG8eSpUPWP0ZIV018+xgBJOm5YstHRJw0lyDL4IBHNfTIzSJRUTN3c
-ecQwn+uOuFW114hcxWokPbLTBQNRxgfvzBRydD1ucs4YKIxKoHflCStFREest2d/
-AYoFWpO+ocH/+OcOZ6RHSXZddZAa9SaP8A==
------END CERTIFICATE-----
-
-# Issuer: CN=NetLock Kozjegyzoi (Class A) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok
-# Subject: CN=NetLock Kozjegyzoi (Class A) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok
-# Label: "NetLock Notary (Class A) Root"
-# Serial: 259
-# MD5 Fingerprint: 86:38:6d:5e:49:63:6c:85:5c:db:6d:dc:94:b7:d0:f7
-# SHA1 Fingerprint: ac:ed:5f:65:53:fd:25:ce:01:5f:1f:7a:48:3b:6a:74:9f:61:78:c6
-# SHA256 Fingerprint: 7f:12:cd:5f:7e:5e:29:0e:c7:d8:51:79:d5:b7:2c:20:a5:be:75:08:ff:db:5b:f8:1a:b9:68:4a:7f:c9:f6:67
------BEGIN CERTIFICATE-----
-MIIGfTCCBWWgAwIBAgICAQMwDQYJKoZIhvcNAQEEBQAwga8xCzAJBgNVBAYTAkhV
-MRAwDgYDVQQIEwdIdW5nYXJ5MREwDwYDVQQHEwhCdWRhcGVzdDEnMCUGA1UEChMe
-TmV0TG9jayBIYWxvemF0Yml6dG9uc2FnaSBLZnQuMRowGAYDVQQLExFUYW51c2l0
-dmFueWtpYWRvazE2MDQGA1UEAxMtTmV0TG9jayBLb3pqZWd5em9pIChDbGFzcyBB
-KSBUYW51c2l0dmFueWtpYWRvMB4XDTk5MDIyNDIzMTQ0N1oXDTE5MDIxOTIzMTQ0
-N1owga8xCzAJBgNVBAYTAkhVMRAwDgYDVQQIEwdIdW5nYXJ5MREwDwYDVQQHEwhC
-dWRhcGVzdDEnMCUGA1UEChMeTmV0TG9jayBIYWxvemF0Yml6dG9uc2FnaSBLZnQu
-MRowGAYDVQQLExFUYW51c2l0dmFueWtpYWRvazE2MDQGA1UEAxMtTmV0TG9jayBL
-b3pqZWd5em9pIChDbGFzcyBBKSBUYW51c2l0dmFueWtpYWRvMIIBIjANBgkqhkiG
-9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvHSMD7tM9DceqQWC2ObhbHDqeLVu0ThEDaiD
-zl3S1tWBxdRL51uUcCbbO51qTGL3cfNk1mE7PetzozfZz+qMkjvN9wfcZnSX9EUi
-3fRc4L9t875lM+QVOr/bmJBVOMTtplVjC7B4BPTjbsE/jvxReB+SnoPC/tmwqcm8
-WgD/qaiYdPv2LD4VOQ22BFWoDpggQrOxJa1+mm9dU7GrDPzr4PN6s6iz/0b2Y6LY
-Oph7tqyF/7AlT3Rj5xMHpQqPBffAZG9+pyeAlt7ULoZgx2srXnN7F+eRP2QM2Esi
-NCubMvJIH5+hCoR64sKtlz2O1cH5VqNQ6ca0+pii7pXmKgOM3wIDAQABo4ICnzCC
-ApswDgYDVR0PAQH/BAQDAgAGMBIGA1UdEwEB/wQIMAYBAf8CAQQwEQYJYIZIAYb4
-QgEBBAQDAgAHMIICYAYJYIZIAYb4QgENBIICURaCAk1GSUdZRUxFTSEgRXplbiB0
-YW51c2l0dmFueSBhIE5ldExvY2sgS2Z0LiBBbHRhbGFub3MgU3pvbGdhbHRhdGFz
-aSBGZWx0ZXRlbGVpYmVuIGxlaXJ0IGVsamFyYXNvayBhbGFwamFuIGtlc3p1bHQu
-IEEgaGl0ZWxlc2l0ZXMgZm9seWFtYXRhdCBhIE5ldExvY2sgS2Z0LiB0ZXJtZWtm
-ZWxlbG9zc2VnLWJpenRvc2l0YXNhIHZlZGkuIEEgZGlnaXRhbGlzIGFsYWlyYXMg
-ZWxmb2dhZGFzYW5hayBmZWx0ZXRlbGUgYXogZWxvaXJ0IGVsbGVub3J6ZXNpIGVs
-amFyYXMgbWVndGV0ZWxlLiBBeiBlbGphcmFzIGxlaXJhc2EgbWVndGFsYWxoYXRv
-IGEgTmV0TG9jayBLZnQuIEludGVybmV0IGhvbmxhcGphbiBhIGh0dHBzOi8vd3d3
-Lm5ldGxvY2submV0L2RvY3MgY2ltZW4gdmFneSBrZXJoZXRvIGF6IGVsbGVub3J6
-ZXNAbmV0bG9jay5uZXQgZS1tYWlsIGNpbWVuLiBJTVBPUlRBTlQhIFRoZSBpc3N1
-YW5jZSBhbmQgdGhlIHVzZSBvZiB0aGlzIGNlcnRpZmljYXRlIGlzIHN1YmplY3Qg
-dG8gdGhlIE5ldExvY2sgQ1BTIGF2YWlsYWJsZSBhdCBodHRwczovL3d3dy5uZXRs
-b2NrLm5ldC9kb2NzIG9yIGJ5IGUtbWFpbCBhdCBjcHNAbmV0bG9jay5uZXQuMA0G
-CSqGSIb3DQEBBAUAA4IBAQBIJEb3ulZv+sgoA0BO5TE5ayZrU3/b39/zcT0mwBQO
-xmd7I6gMc90Bu8bKbjc5VdXHjFYgDigKDtIqpLBJUsY4B/6+CgmM0ZjPytoUMaFP
-0jn8DxEsQ8Pdq5PHVT5HfBgaANzze9jyf1JsIPQLX2lS9O74silg6+NJMSEN1rUQ
-QeJBCWziGppWS3cC9qCbmieH6FUpccKQn0V4GuEVZD3QDtigdp+uxdAu6tYPVuxk
-f1qbFFgBJ34TUMdrKuZoPL9coAob4Q566eKAw+np9v1sEZ7Q5SgnK1QyQhSCdeZK
-8CtmdWOMovsEPoMOmzbwGOQmIMOM8CgHrTwXZoi1/baI
------END CERTIFICATE-----
-
# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
# Label: "XRamp Global CA Root"
@@ -1192,58 +643,6 @@ VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY
WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q=
-----END CERTIFICATE-----
-# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
-# Subject: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
-# Label: "StartCom Certification Authority"
-# Serial: 1
-# MD5 Fingerprint: 22:4d:8f:8a:fc:f7:35:c2:bb:57:34:90:7b:8b:22:16
-# SHA1 Fingerprint: 3e:2b:f7:f2:03:1b:96:f3:8c:e6:c4:d8:a8:5d:3e:2d:58:47:6a:0f
-# SHA256 Fingerprint: c7:66:a9:be:f2:d4:07:1c:86:3a:31:aa:49:20:e8:13:b2:d1:98:60:8c:b7:b7:cf:e2:11:43:b8:36:df:09:ea
------BEGIN CERTIFICATE-----
-MIIHyTCCBbGgAwIBAgIBATANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJJTDEW
-MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg
-Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh
-dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM2WhcNMzYwOTE3MTk0NjM2WjB9
-MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi
-U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh
-cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA
-A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk
-pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf
-OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C
-Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT
-Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi
-HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM
-Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w
-+2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+
-Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3
-Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B
-26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID
-AQABo4ICUjCCAk4wDAYDVR0TBAUwAwEB/zALBgNVHQ8EBAMCAa4wHQYDVR0OBBYE
-FE4L7xqkQFulF2mHMMo0aEPQQa7yMGQGA1UdHwRdMFswLKAqoCiGJmh0dHA6Ly9j
-ZXJ0LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMCugKaAnhiVodHRwOi8vY3Js
-LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMIIBXQYDVR0gBIIBVDCCAVAwggFM
-BgsrBgEEAYG1NwEBATCCATswLwYIKwYBBQUHAgEWI2h0dHA6Ly9jZXJ0LnN0YXJ0
-Y29tLm9yZy9wb2xpY3kucGRmMDUGCCsGAQUFBwIBFilodHRwOi8vY2VydC5zdGFy
-dGNvbS5vcmcvaW50ZXJtZWRpYXRlLnBkZjCB0AYIKwYBBQUHAgIwgcMwJxYgU3Rh
-cnQgQ29tbWVyY2lhbCAoU3RhcnRDb20pIEx0ZC4wAwIBARqBl0xpbWl0ZWQgTGlh
-YmlsaXR5LCByZWFkIHRoZSBzZWN0aW9uICpMZWdhbCBMaW1pdGF0aW9ucyogb2Yg
-dGhlIFN0YXJ0Q29tIENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFBvbGljeSBhdmFp
-bGFibGUgYXQgaHR0cDovL2NlcnQuc3RhcnRjb20ub3JnL3BvbGljeS5wZGYwEQYJ
-YIZIAYb4QgEBBAQDAgAHMDgGCWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNT
-TCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTANBgkqhkiG9w0BAQUFAAOCAgEAFmyZ
-9GYMNPXQhV59CuzaEE44HF7fpiUFS5Eyweg78T3dRAlbB0mKKctmArexmvclmAk8
-jhvh3TaHK0u7aNM5Zj2gJsfyOZEdUauCe37Vzlrk4gNXcGmXCPleWKYK34wGmkUW
-FjgKXlf2Ysd6AgXmvB618p70qSmD+LIU424oh0TDkBreOKk8rENNZEXO3SipXPJz
-ewT4F+irsfMuXGRuczE6Eri8sxHkfY+BUZo7jYn0TZNmezwD7dOaHZrzZVD1oNB1
-ny+v8OqCQ5j4aZyJecRDjkZy42Q2Eq/3JR44iZB3fsNrarnDy0RLrHiQi+fHLB5L
-EUTINFInzQpdn4XBidUaePKVEFMy3YCEZnXZtWgo+2EuvoSoOMCZEoalHmdkrQYu
-L6lwhceWD3yJZfWOQ1QOq92lgDmUYMA0yZZwLKMS9R9Ie70cfmu3nZD0Ijuu+Pwq
-yvqCUqDvr0tVk+vBtfAii6w0TiYiBKGHLHVKt+V9E9e4DGTANtLJL4YSjCMJwRuC
-O3NJo2pXh5Tl1njFmUNj403gdy3hZZlyaQQaRwnmDwFWJPsfvw55qVguucQJAX6V
-um0ABj6y6koQOdjQK/W/7HW/lwLFCRsI3FU34oH7N4RDYiDK51ZLZer+bMEkkySh
-NOsF/5oirpt9P/FlUQqmMGqz9IgcgA38corog14=
------END CERTIFICATE-----
-
# Issuer: O=Government Root Certification Authority
# Subject: O=Government Root Certification Authority
# Label: "Taiwan GRCA"
@@ -1284,48 +683,6 @@ LMDDav7v3Aun+kbfYNucpllQdSNpc5Oy+fwC00fmcc4QAu4njIT/rEUNE1yDMuAl
pYYsfPQS
-----END CERTIFICATE-----
-# Issuer: CN=Swisscom Root CA 1 O=Swisscom OU=Digital Certificate Services
-# Subject: CN=Swisscom Root CA 1 O=Swisscom OU=Digital Certificate Services
-# Label: "Swisscom Root CA 1"
-# Serial: 122348795730808398873664200247279986742
-# MD5 Fingerprint: f8:38:7c:77:88:df:2c:16:68:2e:c2:e2:52:4b:b8:f9
-# SHA1 Fingerprint: 5f:3a:fc:0a:8b:64:f6:86:67:34:74:df:7e:a9:a2:fe:f9:fa:7a:51
-# SHA256 Fingerprint: 21:db:20:12:36:60:bb:2e:d4:18:20:5d:a1:1e:e7:a8:5a:65:e2:bc:6e:55:b5:af:7e:78:99:c8:a2:66:d9:2e
------BEGIN CERTIFICATE-----
-MIIF2TCCA8GgAwIBAgIQXAuFXAvnWUHfV8w/f52oNjANBgkqhkiG9w0BAQUFADBk
-MQswCQYDVQQGEwJjaDERMA8GA1UEChMIU3dpc3Njb20xJTAjBgNVBAsTHERpZ2l0
-YWwgQ2VydGlmaWNhdGUgU2VydmljZXMxGzAZBgNVBAMTElN3aXNzY29tIFJvb3Qg
-Q0EgMTAeFw0wNTA4MTgxMjA2MjBaFw0yNTA4MTgyMjA2MjBaMGQxCzAJBgNVBAYT
-AmNoMREwDwYDVQQKEwhTd2lzc2NvbTElMCMGA1UECxMcRGlnaXRhbCBDZXJ0aWZp
-Y2F0ZSBTZXJ2aWNlczEbMBkGA1UEAxMSU3dpc3Njb20gUm9vdCBDQSAxMIICIjAN
-BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA0LmwqAzZuz8h+BvVM5OAFmUgdbI9
-m2BtRsiMMW8Xw/qabFbtPMWRV8PNq5ZJkCoZSx6jbVfd8StiKHVFXqrWW/oLJdih
-FvkcxC7mlSpnzNApbjyFNDhhSbEAn9Y6cV9Nbc5fuankiX9qUvrKm/LcqfmdmUc/
-TilftKaNXXsLmREDA/7n29uj/x2lzZAeAR81sH8A25Bvxn570e56eqeqDFdvpG3F
-EzuwpdntMhy0XmeLVNxzh+XTF3xmUHJd1BpYwdnP2IkCb6dJtDZd0KTeByy2dbco
-kdaXvij1mB7qWybJvbCXc9qukSbraMH5ORXWZ0sKbU/Lz7DkQnGMU3nn7uHbHaBu
-HYwadzVcFh4rUx80i9Fs/PJnB3r1re3WmquhsUvhzDdf/X/NTa64H5xD+SpYVUNF
-vJbNcA78yeNmuk6NO4HLFWR7uZToXTNShXEuT46iBhFRyePLoW4xCGQMwtI89Tbo
-19AOeCMgkckkKmUpWyL3Ic6DXqTz3kvTaI9GdVyDCW4pa8RwjPWd1yAv/0bSKzjC
-L3UcPX7ape8eYIVpQtPM+GP+HkM5haa2Y0EQs3MevNP6yn0WR+Kn1dCjigoIlmJW
-bjTb2QK5MHXjBNLnj8KwEUAKrNVxAmKLMb7dxiNYMUJDLXT5xp6mig/p/r+D5kNX
-JLrvRjSq1xIBOO0CAwEAAaOBhjCBgzAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0hBBYw
-FDASBgdghXQBUwABBgdghXQBUwABMBIGA1UdEwEB/wQIMAYBAf8CAQcwHwYDVR0j
-BBgwFoAUAyUv3m+CATpcLNwroWm1Z9SM0/0wHQYDVR0OBBYEFAMlL95vggE6XCzc
-K6FptWfUjNP9MA0GCSqGSIb3DQEBBQUAA4ICAQA1EMvspgQNDQ/NwNurqPKIlwzf
-ky9NfEBWMXrrpA9gzXrzvsMnjgM+pN0S734edAY8PzHyHHuRMSG08NBsl9Tpl7Ik
-Vh5WwzW9iAUPWxAaZOHHgjD5Mq2eUCzneAXQMbFamIp1TpBcahQq4FJHgmDmHtqB
-sfsUC1rxn9KVuj7QG9YVHaO+htXbD8BJZLsuUBlL0iT43R4HVtA4oJVwIHaM190e
-3p9xxCPvgxNcoyQVTSlAPGrEqdi3pkSlDfTgnXceQHAm/NrZNuR55LU/vJtlvrsR
-ls/bxig5OgjOR1tTWsWZ/l2p3e9M1MalrQLmjAcSHm8D0W+go/MpvRLHUKKwf4ip
-mXeascClOS5cfGniLLDqN2qk4Vrh9VDlg++luyqI54zb/W1elxmofmZ1a3Hqv7HH
-b6D0jqTsNFFbjCYDcKF31QESVwA12yPeDooomf2xEG9L/zgtYE4snOtnta1J7ksf
-rK/7DZBaZmBwXarNeNQk7shBoJMBkpxqnvy5JMWzFYJ+vq6VK+uxwNrjAWALXmms
-hFZhvnEX/h0TD/7Gh0Xp/jKgGg0TpJRVcaUWi7rKibCyx/yP2FS1k2Kdzs9Z+z0Y
-zirLNRWCXf9UIltxUvu3yf5gmwBBZPCqKuy2QkPOiWaByIufOVQDJdMWNY6E0F/6
-MBr1mmz0DlP5OlvRHA==
------END CERTIFICATE-----
-
# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
# Label: "DigiCert Assured ID Root CA"
@@ -1417,36 +774,6 @@ vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep
+OkuE6N36B9K
-----END CERTIFICATE-----
-# Issuer: CN=Class 2 Primary CA O=Certplus
-# Subject: CN=Class 2 Primary CA O=Certplus
-# Label: "Certplus Class 2 Primary CA"
-# Serial: 177770208045934040241468760488327595043
-# MD5 Fingerprint: 88:2c:8c:52:b8:a2:3c:f3:f7:bb:03:ea:ae:ac:42:0b
-# SHA1 Fingerprint: 74:20:74:41:72:9c:dd:92:ec:79:31:d8:23:10:8d:c2:81:92:e2:bb
-# SHA256 Fingerprint: 0f:99:3c:8a:ef:97:ba:af:56:87:14:0e:d5:9a:d1:82:1b:b4:af:ac:f0:aa:9a:58:b5:d5:7a:33:8a:3a:fb:cb
------BEGIN CERTIFICATE-----
-MIIDkjCCAnqgAwIBAgIRAIW9S/PY2uNp9pTXX8OlRCMwDQYJKoZIhvcNAQEFBQAw
-PTELMAkGA1UEBhMCRlIxETAPBgNVBAoTCENlcnRwbHVzMRswGQYDVQQDExJDbGFz
-cyAyIFByaW1hcnkgQ0EwHhcNOTkwNzA3MTcwNTAwWhcNMTkwNzA2MjM1OTU5WjA9
-MQswCQYDVQQGEwJGUjERMA8GA1UEChMIQ2VydHBsdXMxGzAZBgNVBAMTEkNsYXNz
-IDIgUHJpbWFyeSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANxQ
-ltAS+DXSCHh6tlJw/W/uz7kRy1134ezpfgSN1sxvc0NXYKwzCkTsA18cgCSR5aiR
-VhKC9+Ar9NuuYS6JEI1rbLqzAr3VNsVINyPi8Fo3UjMXEuLRYE2+L0ER4/YXJQyL
-kcAbmXuZVg2v7tK8R1fjeUl7NIknJITesezpWE7+Tt9avkGtrAjFGA7v0lPubNCd
-EgETjdyAYveVqUSISnFOYFWe2yMZeVYHDD9jC1yw4r5+FfyUM1hBOHTE4Y+L3yas
-H7WLO7dDWWuwJKZtkIvEcupdM5i3y95ee++U8Rs+yskhwcWYAqqi9lt3m/V+llU0
-HGdpwPFC40es/CgcZlUCAwEAAaOBjDCBiTAPBgNVHRMECDAGAQH/AgEKMAsGA1Ud
-DwQEAwIBBjAdBgNVHQ4EFgQU43Mt38sOKAze3bOkynm4jrvoMIkwEQYJYIZIAYb4
-QgEBBAQDAgEGMDcGA1UdHwQwMC4wLKAqoCiGJmh0dHA6Ly93d3cuY2VydHBsdXMu
-Y29tL0NSTC9jbGFzczIuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQCnVM+IRBnL39R/
-AN9WM2K191EBkOvDP9GIROkkXe/nFL0gt5o8AP5tn9uQ3Nf0YtaLcF3n5QRIqWh8
-yfFC82x/xXp8HVGIutIKPidd3i1RTtMTZGnkLuPT55sJmabglZvOGtd/vjzOUrMR
-FcEPF80Du5wlFbqidon8BvEY0JNLDnyCt6X09l/+7UCmnYR0ObncHoUW2ikbhiMA
-ybuJfm6AiB4vFLQDJKgybwOaRywwvlbGp0ICcBvqQNi6BQNwB6SW//1IMwrh3KWB
-kJtN3X3n57LNXMhqlfil9o3EXXgIvnsG1knPGTZQIy4I5p4FTUcY1Rbpsda2ENW7
-l7+ijrRU
------END CERTIFICATE-----
-
# Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co.
# Subject: CN=DST Root CA X3 O=Digital Signature Trust Co.
# Label: "DST Root CA X3"
@@ -1475,71 +802,6 @@ JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo
Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ
-----END CERTIFICATE-----
-# Issuer: CN=DST ACES CA X6 O=Digital Signature Trust OU=DST ACES
-# Subject: CN=DST ACES CA X6 O=Digital Signature Trust OU=DST ACES
-# Label: "DST ACES CA X6"
-# Serial: 17771143917277623872238992636097467865
-# MD5 Fingerprint: 21:d8:4c:82:2b:99:09:33:a2:eb:14:24:8d:8e:5f:e8
-# SHA1 Fingerprint: 40:54:da:6f:1c:3f:40:74:ac:ed:0f:ec:cd:db:79:d1:53:fb:90:1d
-# SHA256 Fingerprint: 76:7c:95:5a:76:41:2c:89:af:68:8e:90:a1:c7:0f:55:6c:fd:6b:60:25:db:ea:10:41:6d:7e:b6:83:1f:8c:40
------BEGIN CERTIFICATE-----
-MIIECTCCAvGgAwIBAgIQDV6ZCtadt3js2AdWO4YV2TANBgkqhkiG9w0BAQUFADBb
-MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3Qx
-ETAPBgNVBAsTCERTVCBBQ0VTMRcwFQYDVQQDEw5EU1QgQUNFUyBDQSBYNjAeFw0w
-MzExMjAyMTE5NThaFw0xNzExMjAyMTE5NThaMFsxCzAJBgNVBAYTAlVTMSAwHgYD
-VQQKExdEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdDERMA8GA1UECxMIRFNUIEFDRVMx
-FzAVBgNVBAMTDkRTVCBBQ0VTIENBIFg2MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
-MIIBCgKCAQEAuT31LMmU3HWKlV1j6IR3dma5WZFcRt2SPp/5DgO0PWGSvSMmtWPu
-ktKe1jzIDZBfZIGxqAgNTNj50wUoUrQBJcWVHAx+PhCEdc/BGZFjz+iokYi5Q1K7
-gLFViYsx+tC3dr5BPTCapCIlF3PoHuLTrCq9Wzgh1SpL11V94zpVvddtawJXa+ZH
-fAjIgrrep4c9oW24MFbCswKBXy314powGCi4ZtPLAZZv6opFVdbgnf9nKxcCpk4a
-ahELfrd755jWjHZvwTvbUJN+5dCOHze4vbrGn2zpfDPyMjwmR/onJALJfh1biEIT
-ajV8fTXpLmaRcpPVMibEdPVTo7NdmvYJywIDAQABo4HIMIHFMA8GA1UdEwEB/wQF
-MAMBAf8wDgYDVR0PAQH/BAQDAgHGMB8GA1UdEQQYMBaBFHBraS1vcHNAdHJ1c3Rk
-c3QuY29tMGIGA1UdIARbMFkwVwYKYIZIAWUDAgEBATBJMEcGCCsGAQUFBwIBFjto
-dHRwOi8vd3d3LnRydXN0ZHN0LmNvbS9jZXJ0aWZpY2F0ZXMvcG9saWN5L0FDRVMt
-aW5kZXguaHRtbDAdBgNVHQ4EFgQUCXIGThhDD+XWzMNqizF7eI+og7gwDQYJKoZI
-hvcNAQEFBQADggEBAKPYjtay284F5zLNAdMEA+V25FYrnJmQ6AgwbN99Pe7lv7Uk
-QIRJ4dEorsTCOlMwiPH1d25Ryvr/ma8kXxug/fKshMrfqfBfBC6tFr8hlxCBPeP/
-h40y3JTlR4peahPJlJU90u7INJXQgNStMgiAVDzgvVJT11J8smk/f3rPanTK+gQq
-nExaBqXpIK1FZg9p8d2/6eMyi/rgwYZNcjwu2JN4Cir42NInPRmJX1p7ijvMDNpR
-rscL9yuwNwXsvFcj4jjSm2jzVhKIT0J8uDHEtdvkyCE06UgRNe76x5JXxZ805Mf2
-9w4LTJxoeHtxMcfrHuBnQfO3oKfN5XozNmr6mis=
------END CERTIFICATE-----
-
-# Issuer: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. (c) Kasım 2005
-# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. (c) Kasım 2005
-# Label: "TURKTRUST Certificate Services Provider Root 2"
-# Serial: 1
-# MD5 Fingerprint: 37:a5:6e:d4:b1:25:84:97:b7:fd:56:15:7a:f9:a2:00
-# SHA1 Fingerprint: b4:35:d4:e1:11:9d:1c:66:90:a7:49:eb:b3:94:bd:63:7b:a7:82:b7
-# SHA256 Fingerprint: c4:70:cf:54:7e:23:02:b9:77:fb:29:dd:71:a8:9a:7b:6c:1f:60:77:7b:03:29:f5:60:17:f3:28:bf:4f:6b:e6
------BEGIN CERTIFICATE-----
-MIIEPDCCAySgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBvjE/MD0GA1UEAww2VMOc
-UktUUlVTVCBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sx
-c8SxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMV0wWwYDVQQKDFRUw5xS
-S1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kg
-SGl6bWV0bGVyaSBBLsWeLiAoYykgS2FzxLFtIDIwMDUwHhcNMDUxMTA3MTAwNzU3
-WhcNMTUwOTE2MTAwNzU3WjCBvjE/MD0GA1UEAww2VMOcUktUUlVTVCBFbGVrdHJv
-bmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxMQswCQYDVQQGEwJU
-UjEPMA0GA1UEBwwGQW5rYXJhMV0wWwYDVQQKDFRUw5xSS1RSVVNUIEJpbGdpIMSw
-bGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWe
-LiAoYykgS2FzxLFtIDIwMDUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
-AQCpNn7DkUNMwxmYCMjHWHtPFoylzkkBH3MOrHUTpvqeLCDe2JAOCtFp0if7qnef
-J1Il4std2NiDUBd9irWCPwSOtNXwSadktx4uXyCcUHVPr+G1QRT0mJKIx+XlZEdh
-R3n9wFHxwZnn3M5q+6+1ATDcRhzviuyV79z/rxAc653YsKpqhRgNF8k+v/Gb0AmJ
-Qv2gQrSdiVFVKc8bcLyEVK3BEx+Y9C52YItdP5qtygy/p1Zbj3e41Z55SZI/4PGX
-JHpsmxcPbe9TmJEr5A++WXkHeLuXlfSfadRYhwqp48y2WBmfJiGxxFmNskF1wK1p
-zpwACPI2/z7woQ8arBT9pmAPAgMBAAGjQzBBMB0GA1UdDgQWBBTZN7NOBf3Zz58S
-Fq62iS/rJTqIHDAPBgNVHQ8BAf8EBQMDBwYAMA8GA1UdEwEB/wQFMAMBAf8wDQYJ
-KoZIhvcNAQEFBQADggEBAHJglrfJ3NgpXiOFX7KzLXb7iNcX/nttRbj2hWyfIvwq
-ECLsqrkw9qtY1jkQMZkpAL2JZkH7dN6RwRgLn7Vhy506vvWolKMiVW4XSf/SKfE4
-Jl3vpao6+XF75tpYHdN0wgH6PmlYX63LaL4ULptswLbcoCb6dxriJNoaN+BnrdFz
-gw2lGh1uEpJ+hGIAF728JRhX8tepb1mIvDS3LoV4nZbcFMMsilKbloxSZj2GFotH
-uFEJjOp9zYhys2AzsfAKRO8P9Qk3iCQOLGsgOqL6EfJANZxEaGM7rDNvY7wsu/LS
-y3Z9fYjYHcgFHW68lKlmjHdxx/qR+i9Rnuk5UrbnBEI=
------END CERTIFICATE-----
-
# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG
# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG
# Label: "SwissSign Gold CA - G2"
@@ -1844,42 +1106,6 @@ wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN
pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey
-----END CERTIFICATE-----
-# Issuer: CN=WellsSecure Public Root Certificate Authority O=Wells Fargo WellsSecure OU=Wells Fargo Bank NA
-# Subject: CN=WellsSecure Public Root Certificate Authority O=Wells Fargo WellsSecure OU=Wells Fargo Bank NA
-# Label: "WellsSecure Public Root Certificate Authority"
-# Serial: 1
-# MD5 Fingerprint: 15:ac:a5:c2:92:2d:79:bc:e8:7f:cb:67:ed:02:cf:36
-# SHA1 Fingerprint: e7:b4:f6:9d:61:ec:90:69:db:7e:90:a7:40:1a:3c:f4:7d:4f:e8:ee
-# SHA256 Fingerprint: a7:12:72:ae:aa:a3:cf:e8:72:7f:7f:b3:9f:0f:b3:d1:e5:42:6e:90:60:b0:6e:e6:f1:3e:9a:3c:58:33:cd:43
------BEGIN CERTIFICATE-----
-MIIEvTCCA6WgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBhTELMAkGA1UEBhMCVVMx
-IDAeBgNVBAoMF1dlbGxzIEZhcmdvIFdlbGxzU2VjdXJlMRwwGgYDVQQLDBNXZWxs
-cyBGYXJnbyBCYW5rIE5BMTYwNAYDVQQDDC1XZWxsc1NlY3VyZSBQdWJsaWMgUm9v
-dCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwHhcNMDcxMjEzMTcwNzU0WhcNMjIxMjE0
-MDAwNzU0WjCBhTELMAkGA1UEBhMCVVMxIDAeBgNVBAoMF1dlbGxzIEZhcmdvIFdl
-bGxzU2VjdXJlMRwwGgYDVQQLDBNXZWxscyBGYXJnbyBCYW5rIE5BMTYwNAYDVQQD
-DC1XZWxsc1NlY3VyZSBQdWJsaWMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkw
-ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDub7S9eeKPCCGeOARBJe+r
-WxxTkqxtnt3CxC5FlAM1iGd0V+PfjLindo8796jE2yljDpFoNoqXjopxaAkH5OjU
-Dk/41itMpBb570OYj7OeUt9tkTmPOL13i0Nj67eT/DBMHAGTthP796EfvyXhdDcs
-HqRePGj4S78NuR4uNuip5Kf4D8uCdXw1LSLWwr8L87T8bJVhHlfXBIEyg1J55oNj
-z7fLY4sR4r1e6/aN7ZVyKLSsEmLpSjPmgzKuBXWVvYSV2ypcm44uDLiBK0HmOFaf
-SZtsdvqKXfcBeYF8wYNABf5x/Qw/zE5gCQ5lRxAvAcAFP4/4s0HvWkJ+We/Slwxl
-AgMBAAGjggE0MIIBMDAPBgNVHRMBAf8EBTADAQH/MDkGA1UdHwQyMDAwLqAsoCqG
-KGh0dHA6Ly9jcmwucGtpLndlbGxzZmFyZ28uY29tL3dzcHJjYS5jcmwwDgYDVR0P
-AQH/BAQDAgHGMB0GA1UdDgQWBBQmlRkQ2eihl5H/3BnZtQQ+0nMKajCBsgYDVR0j
-BIGqMIGngBQmlRkQ2eihl5H/3BnZtQQ+0nMKaqGBi6SBiDCBhTELMAkGA1UEBhMC
-VVMxIDAeBgNVBAoMF1dlbGxzIEZhcmdvIFdlbGxzU2VjdXJlMRwwGgYDVQQLDBNX
-ZWxscyBGYXJnbyBCYW5rIE5BMTYwNAYDVQQDDC1XZWxsc1NlY3VyZSBQdWJsaWMg
-Um9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHmCAQEwDQYJKoZIhvcNAQEFBQADggEB
-ALkVsUSRzCPIK0134/iaeycNzXK7mQDKfGYZUMbVmO2rvwNa5U3lHshPcZeG1eMd
-/ZDJPHV3V3p9+N701NX3leZ0bh08rnyd2wIDBSxxSyU+B+NemvVmFymIGjifz6pB
-A4SXa5M4esowRBskRDPQ5NHcKDj0E0M1NSljqHyita04pO2t/caaH/+Xc/77szWn
-k4bGdpEA5qxRFsQnMlzbc9qlk1eOPm01JghZ1edE13YgY+esE2fDbbFwRnzVlhE9
-iW9dqKHrjQrawx0zbKPqZxmamX9LPYNRKh3KL4YMon4QLSvUFpULB6ouFJJJtylv
-2G0xffX8oRAHh84vWdw+WNs=
------END CERTIFICATE-----
-
# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited
# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited
# Label: "COMODO ECC Certification Authority"
@@ -1904,67 +1130,6 @@ fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv
GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY=
-----END CERTIFICATE-----
-# Issuer: CN=IGC/A O=PM/SGDN OU=DCSSI
-# Subject: CN=IGC/A O=PM/SGDN OU=DCSSI
-# Label: "IGC/A"
-# Serial: 245102874772
-# MD5 Fingerprint: 0c:7f:dd:6a:f4:2a:b9:c8:9b:bd:20:7e:a9:db:5c:37
-# SHA1 Fingerprint: 60:d6:89:74:b5:c2:65:9e:8a:0f:c1:88:7c:88:d2:46:69:1b:18:2c
-# SHA256 Fingerprint: b9:be:a7:86:0a:96:2e:a3:61:1d:ab:97:ab:6d:a3:e2:1c:10:68:b9:7d:55:57:5e:d0:e1:12:79:c1:1c:89:32
------BEGIN CERTIFICATE-----
-MIIEAjCCAuqgAwIBAgIFORFFEJQwDQYJKoZIhvcNAQEFBQAwgYUxCzAJBgNVBAYT
-AkZSMQ8wDQYDVQQIEwZGcmFuY2UxDjAMBgNVBAcTBVBhcmlzMRAwDgYDVQQKEwdQ
-TS9TR0ROMQ4wDAYDVQQLEwVEQ1NTSTEOMAwGA1UEAxMFSUdDL0ExIzAhBgkqhkiG
-9w0BCQEWFGlnY2FAc2dkbi5wbS5nb3V2LmZyMB4XDTAyMTIxMzE0MjkyM1oXDTIw
-MTAxNzE0MjkyMlowgYUxCzAJBgNVBAYTAkZSMQ8wDQYDVQQIEwZGcmFuY2UxDjAM
-BgNVBAcTBVBhcmlzMRAwDgYDVQQKEwdQTS9TR0ROMQ4wDAYDVQQLEwVEQ1NTSTEO
-MAwGA1UEAxMFSUdDL0ExIzAhBgkqhkiG9w0BCQEWFGlnY2FAc2dkbi5wbS5nb3V2
-LmZyMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsh/R0GLFMzvABIaI
-s9z4iPf930Pfeo2aSVz2TqrMHLmh6yeJ8kbpO0px1R2OLc/mratjUMdUC24SyZA2
-xtgv2pGqaMVy/hcKshd+ebUyiHDKcMCWSo7kVc0dJ5S/znIq7Fz5cyD+vfcuiWe4
-u0dzEvfRNWk68gq5rv9GQkaiv6GFGvm/5P9JhfejcIYyHF2fYPepraX/z9E0+X1b
-F8bc1g4oa8Ld8fUzaJ1O/Id8NhLWo4DoQw1VYZTqZDdH6nfK0LJYBcNdfrGoRpAx
-Vs5wKpayMLh35nnAvSk7/ZR3TL0gzUEl4C7HG7vupARB0l2tEmqKm0f7yd1GQOGd
-PDPQtQIDAQABo3cwdTAPBgNVHRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBRjAVBgNV
-HSAEDjAMMAoGCCqBegF5AQEBMB0GA1UdDgQWBBSjBS8YYFDCiQrdKyFP/45OqDAx
-NjAfBgNVHSMEGDAWgBSjBS8YYFDCiQrdKyFP/45OqDAxNjANBgkqhkiG9w0BAQUF
-AAOCAQEABdwm2Pp3FURo/C9mOnTgXeQp/wYHE4RKq89toB9RlPhJy3Q2FLwV3duJ
-L92PoF189RLrn544pEfMs5bZvpwlqwN+Mw+VgQ39FuCIvjfwbF3QMZsyK10XZZOY
-YLxuj7GoPB7ZHPOpJkL5ZB3C55L29B5aqhlSXa/oovdgoPaN8In1buAKBQGVyYsg
-Crpa/JosPL3Dt8ldeCUFP1YUmwza+zpI/pdpXsoQhvdOlgQITeywvl3cO45Pwf2a
-NjSaTFR+FwNIlQgRHAdvhQh+XU3Endv7rs6y0bO4g2wdsrN58dhwmX7wEwLOXt1R
-0982gaEbeC9xs/FZTEYYKKuF0mBWWg==
------END CERTIFICATE-----
-
-# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication EV RootCA1
-# Subject: O=SECOM Trust Systems CO.,LTD. OU=Security Communication EV RootCA1
-# Label: "Security Communication EV RootCA1"
-# Serial: 0
-# MD5 Fingerprint: 22:2d:a6:01:ea:7c:0a:f7:f0:6c:56:43:3f:77:76:d3
-# SHA1 Fingerprint: fe:b8:c4:32:dc:f9:76:9a:ce:ae:3d:d8:90:8f:fd:28:86:65:64:7d
-# SHA256 Fingerprint: a2:2d:ba:68:1e:97:37:6e:2d:39:7d:72:8a:ae:3a:9b:62:96:b9:fd:ba:60:bc:2e:11:f6:47:f2:c6:75:fb:37
------BEGIN CERTIFICATE-----
-MIIDfTCCAmWgAwIBAgIBADANBgkqhkiG9w0BAQUFADBgMQswCQYDVQQGEwJKUDEl
-MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEqMCgGA1UECxMh
-U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBFViBSb290Q0ExMB4XDTA3MDYwNjAyMTIz
-MloXDTM3MDYwNjAyMTIzMlowYDELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09N
-IFRydXN0IFN5c3RlbXMgQ08uLExURC4xKjAoBgNVBAsTIVNlY3VyaXR5IENvbW11
-bmljYXRpb24gRVYgUm9vdENBMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
-ggEBALx/7FebJOD+nLpCeamIivqA4PUHKUPqjgo0No0c+qe1OXj/l3X3L+SqawSE
-RMqm4miO/VVQYg+kcQ7OBzgtQoVQrTyWb4vVog7P3kmJPdZkLjjlHmy1V4qe70gO
-zXppFodEtZDkBp2uoQSXWHnvIEqCa4wiv+wfD+mEce3xDuS4GBPMVjZd0ZoeUWs5
-bmB2iDQL87PRsJ3KYeJkHcFGB7hj3R4zZbOOCVVSPbW9/wfrrWFVGCypaZhKqkDF
-MxRldAD5kd6vA0jFQFTcD4SQaCDFkpbcLuUCRarAX1T4bepJz11sS6/vmsJWXMY1
-VkJqMF/Cq/biPT+zyRGPMUzXn0kCAwEAAaNCMEAwHQYDVR0OBBYEFDVK9U2vP9eC
-OKyrcWUXdYydVZPmMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0G
-CSqGSIb3DQEBBQUAA4IBAQCoh+ns+EBnXcPBZsdAS5f8hxOQWsTvoMpfi7ent/HW
-tWS3irO4G8za+6xmiEHO6Pzk2x6Ipu0nUBsCMCRGef4Eh3CXQHPRwMFXGZpppSeZ
-q51ihPZRwSzJIxXYKLerJRO1RuGGAv8mjMSIkh1W/hln8lXkgKNrnKt34VFxDSDb
-EJrbvXZ5B3eZKK2aXtqxT0QsNY6llsf9g/BYxnnWmHyojf6GPgcWkuF75x3sM3Z+
-Qi5KhfmRiWiEA4Glm5q+4zfFVKtWOxgtQaQM+ELbmaDgcm+7XeEWT1MKZPlO9L9O
-VL14bIjqv5wTJMJwaaJ/D8g8rQjJsJhAoyrniIPtd490
------END CERTIFICATE-----
-
# Issuer: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed
# Subject: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed
# Label: "OISTE WISeKey Global Root GA CA"
@@ -1997,57 +1162,6 @@ Fj4A4xylNoEYokxSdsARo27mHbrjWr42U8U+dY+GaSlYU7Wcu2+fXMUY7N0v4ZjJ
/L7fCg0=
-----END CERTIFICATE-----
-# Issuer: CN=Microsec e-Szigno Root CA O=Microsec Ltd. OU=e-Szigno CA
-# Subject: CN=Microsec e-Szigno Root CA O=Microsec Ltd. OU=e-Szigno CA
-# Label: "Microsec e-Szigno Root CA"
-# Serial: 272122594155480254301341951808045322001
-# MD5 Fingerprint: f0:96:b6:2f:c5:10:d5:67:8e:83:25:32:e8:5e:2e:e5
-# SHA1 Fingerprint: 23:88:c9:d3:71:cc:9e:96:3d:ff:7d:3c:a7:ce:fc:d6:25:ec:19:0d
-# SHA256 Fingerprint: 32:7a:3d:76:1a:ba:de:a0:34:eb:99:84:06:27:5c:b1:a4:77:6e:fd:ae:2f:df:6d:01:68:ea:1c:4f:55:67:d0
------BEGIN CERTIFICATE-----
-MIIHqDCCBpCgAwIBAgIRAMy4579OKRr9otxmpRwsDxEwDQYJKoZIhvcNAQEFBQAw
-cjELMAkGA1UEBhMCSFUxETAPBgNVBAcTCEJ1ZGFwZXN0MRYwFAYDVQQKEw1NaWNy
-b3NlYyBMdGQuMRQwEgYDVQQLEwtlLVN6aWdubyBDQTEiMCAGA1UEAxMZTWljcm9z
-ZWMgZS1Temlnbm8gUm9vdCBDQTAeFw0wNTA0MDYxMjI4NDRaFw0xNzA0MDYxMjI4
-NDRaMHIxCzAJBgNVBAYTAkhVMREwDwYDVQQHEwhCdWRhcGVzdDEWMBQGA1UEChMN
-TWljcm9zZWMgTHRkLjEUMBIGA1UECxMLZS1Temlnbm8gQ0ExIjAgBgNVBAMTGU1p
-Y3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw
-ggEKAoIBAQDtyADVgXvNOABHzNuEwSFpLHSQDCHZU4ftPkNEU6+r+ICbPHiN1I2u
-uO/TEdyB5s87lozWbxXGd36hL+BfkrYn13aaHUM86tnsL+4582pnS4uCzyL4ZVX+
-LMsvfUh6PXX5qqAnu3jCBspRwn5mS6/NoqdNAoI/gqyFxuEPkEeZlApxcpMqyabA
-vjxWTHOSJ/FrtfX9/DAFYJLG65Z+AZHCabEeHXtTRbjcQR/Ji3HWVBTji1R4P770
-Yjtb9aPs1ZJ04nQw7wHb4dSrmZsqa/i9phyGI0Jf7Enemotb9HI6QMVJPqW+jqpx
-62z69Rrkav17fVVA71hu5tnVvCSrwe+3AgMBAAGjggQ3MIIEMzBnBggrBgEFBQcB
-AQRbMFkwKAYIKwYBBQUHMAGGHGh0dHBzOi8vcmNhLmUtc3ppZ25vLmh1L29jc3Aw
-LQYIKwYBBQUHMAKGIWh0dHA6Ly93d3cuZS1zemlnbm8uaHUvUm9vdENBLmNydDAP
-BgNVHRMBAf8EBTADAQH/MIIBcwYDVR0gBIIBajCCAWYwggFiBgwrBgEEAYGoGAIB
-AQEwggFQMCgGCCsGAQUFBwIBFhxodHRwOi8vd3d3LmUtc3ppZ25vLmh1L1NaU1ov
-MIIBIgYIKwYBBQUHAgIwggEUHoIBEABBACAAdABhAG4A+gBzAO0AdAB2AOEAbgB5
-ACAA6QByAHQAZQBsAG0AZQB6AOkAcwDpAGgAZQB6ACAA6QBzACAAZQBsAGYAbwBn
-AGEAZADhAHMA4QBoAG8AegAgAGEAIABTAHoAbwBsAGcA4QBsAHQAYQB0APMAIABT
-AHoAbwBsAGcA4QBsAHQAYQB0AOEAcwBpACAAUwB6AGEAYgDhAGwAeQB6AGEAdABh
-ACAAcwB6AGUAcgBpAG4AdAAgAGsAZQBsAGwAIABlAGwAagDhAHIAbgBpADoAIABo
-AHQAdABwADoALwAvAHcAdwB3AC4AZQAtAHMAegBpAGcAbgBvAC4AaAB1AC8AUwBa
-AFMAWgAvMIHIBgNVHR8EgcAwgb0wgbqggbeggbSGIWh0dHA6Ly93d3cuZS1zemln
-bm8uaHUvUm9vdENBLmNybIaBjmxkYXA6Ly9sZGFwLmUtc3ppZ25vLmh1L0NOPU1p
-Y3Jvc2VjJTIwZS1Temlnbm8lMjBSb290JTIwQ0EsT1U9ZS1Temlnbm8lMjBDQSxP
-PU1pY3Jvc2VjJTIwTHRkLixMPUJ1ZGFwZXN0LEM9SFU/Y2VydGlmaWNhdGVSZXZv
-Y2F0aW9uTGlzdDtiaW5hcnkwDgYDVR0PAQH/BAQDAgEGMIGWBgNVHREEgY4wgYuB
-EGluZm9AZS1zemlnbm8uaHWkdzB1MSMwIQYDVQQDDBpNaWNyb3NlYyBlLVN6aWdu
-w7MgUm9vdCBDQTEWMBQGA1UECwwNZS1TemlnbsOzIEhTWjEWMBQGA1UEChMNTWlj
-cm9zZWMgS2Z0LjERMA8GA1UEBxMIQnVkYXBlc3QxCzAJBgNVBAYTAkhVMIGsBgNV
-HSMEgaQwgaGAFMegSXUWYYTbMUuE0vE3QJDvTtz3oXakdDByMQswCQYDVQQGEwJI
-VTERMA8GA1UEBxMIQnVkYXBlc3QxFjAUBgNVBAoTDU1pY3Jvc2VjIEx0ZC4xFDAS
-BgNVBAsTC2UtU3ppZ25vIENBMSIwIAYDVQQDExlNaWNyb3NlYyBlLVN6aWdubyBS
-b290IENBghEAzLjnv04pGv2i3GalHCwPETAdBgNVHQ4EFgQUx6BJdRZhhNsxS4TS
-8TdAkO9O3PcwDQYJKoZIhvcNAQEFBQADggEBANMTnGZjWS7KXHAM/IO8VbH0jgds
-ZifOwTsgqRy7RlRw7lrMoHfqaEQn6/Ip3Xep1fvj1KcExJW4C+FEaGAHQzAxQmHl
-7tnlJNUb3+FKG6qfx1/4ehHqE5MAyopYse7tDk2016g2JnzgOsHVV4Lxdbb9iV/a
-86g4nzUGCM4ilb7N1fy+W955a9x6qWVmvrElWl/tftOsRm1M9DKHtCAE4Gx4sHfR
-hUZLphK3dehKyVZs15KrnfVJONJPU+NVkBHbmJbGSfI+9J8b4PeI3CVimUTYc78/
-MPMMNz7UwiiAc7EBt51alhQBS6kRnSlqLtBdgcDPsiBDxwPgN05dCtxZICU=
------END CERTIFICATE-----
-
# Issuer: CN=Certigna O=Dhimyotis
# Subject: CN=Certigna O=Dhimyotis
# Label: "Certigna"
@@ -2078,36 +1192,6 @@ t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw
WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg==
-----END CERTIFICATE-----
-# Issuer: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center
-# Subject: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center
-# Label: "Deutsche Telekom Root CA 2"
-# Serial: 38
-# MD5 Fingerprint: 74:01:4a:91:b1:08:c4:58:ce:47:cd:f0:dd:11:53:08
-# SHA1 Fingerprint: 85:a4:08:c0:9c:19:3e:5d:51:58:7d:cd:d6:13:30:fd:8c:de:37:bf
-# SHA256 Fingerprint: b6:19:1a:50:d0:c3:97:7f:7d:a9:9b:cd:aa:c8:6a:22:7d:ae:b9:67:9e:c7:0b:a3:b0:c9:d9:22:71:c1:70:d3
------BEGIN CERTIFICATE-----
-MIIDnzCCAoegAwIBAgIBJjANBgkqhkiG9w0BAQUFADBxMQswCQYDVQQGEwJERTEc
-MBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxlU2Vj
-IFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290IENB
-IDIwHhcNOTkwNzA5MTIxMTAwWhcNMTkwNzA5MjM1OTAwWjBxMQswCQYDVQQGEwJE
-RTEcMBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxl
-U2VjIFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290
-IENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCrC6M14IspFLEU
-ha88EOQ5bzVdSq7d6mGNlUn0b2SjGmBmpKlAIoTZ1KXleJMOaAGtuU1cOs7TuKhC
-QN/Po7qCWWqSG6wcmtoIKyUn+WkjR/Hg6yx6m/UTAtB+NHzCnjwAWav12gz1Mjwr
-rFDa1sPeg5TKqAyZMg4ISFZbavva4VhYAUlfckE8FQYBjl2tqriTtM2e66foai1S
-NNs671x1Udrb8zH57nGYMsRUFUQM+ZtV7a3fGAigo4aKSe5TBY8ZTNXeWHmb0moc
-QqvF1afPaA+W5OFhmHZhyJF81j4A4pFQh+GdCuatl9Idxjp9y7zaAzTVjlsB9WoH
-txa2bkp/AgMBAAGjQjBAMB0GA1UdDgQWBBQxw3kbuvVT1xfgiXotF2wKsyudMzAP
-BgNVHRMECDAGAQH/AgEFMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOC
-AQEAlGRZrTlk5ynrE/5aw4sTV8gEJPB0d8Bg42f76Ymmg7+Wgnxu1MM9756Abrsp
-tJh6sTtU6zkXR34ajgv8HzFZMQSyzhfzLMdiNlXiItiJVbSYSKpk+tYcNthEeFpa
-IzpXl/V6ME+un2pMSyuOoAPjPuCp1NJ70rOo4nI8rZ7/gFnkm0W09juwzTkZmDLl
-6iFhkOQxIY40sfcvNUqFENrnijchvllj4PKFiDFT1FQUhXB59C4Gdyd1Lx+4ivn+
-xbrYNuSD7Odlt79jWvNGr4GUN9RBjNYj1h7P9WgbRGOiWrqnNVmh5XAFmw4jV5mU
-Cm26OWMohpLzGITY+9HPBVZkVw==
------END CERTIFICATE-----
-
# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc
# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc
# Label: "Cybertrust Global Root"
@@ -2179,114 +1263,6 @@ W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D
hNQ+IIX3Sj0rnP0qCglN6oH4EZw=
-----END CERTIFICATE-----
-# Issuer: CN=TÜBİTAK UEKAE Kök Sertifika Hizmet Sağlayıcısı - Sürüm 3 O=Türkiye Bilimsel ve Teknolojik Araştırma Kurumu - TÜBİTAK OU=Ulusal Elektronik ve Kriptoloji Araştırma Enstitüsü - UEKAE/Kamu Sertifikasyon Merkezi
-# Subject: CN=TÜBİTAK UEKAE Kök Sertifika Hizmet Sağlayıcısı - Sürüm 3 O=Türkiye Bilimsel ve Teknolojik Araştırma Kurumu - TÜBİTAK OU=Ulusal Elektronik ve Kriptoloji Araştırma Enstitüsü - UEKAE/Kamu Sertifikasyon Merkezi
-# Label: "T\xc3\x9c\x42\xC4\xB0TAK UEKAE K\xC3\xB6k Sertifika Hizmet Sa\xC4\x9Flay\xc4\xb1\x63\xc4\xb1s\xc4\xb1 - S\xC3\xBCr\xC3\xBCm 3"
-# Serial: 17
-# MD5 Fingerprint: ed:41:f5:8c:50:c5:2b:9c:73:e6:ee:6c:eb:c2:a8:26
-# SHA1 Fingerprint: 1b:4b:39:61:26:27:6b:64:91:a2:68:6d:d7:02:43:21:2d:1f:1d:96
-# SHA256 Fingerprint: e4:c7:34:30:d7:a5:b5:09:25:df:43:37:0a:0d:21:6e:9a:79:b9:d6:db:83:73:a0:c6:9e:b1:cc:31:c7:c5:2a
------BEGIN CERTIFICATE-----
-MIIFFzCCA/+gAwIBAgIBETANBgkqhkiG9w0BAQUFADCCASsxCzAJBgNVBAYTAlRS
-MRgwFgYDVQQHDA9HZWJ6ZSAtIEtvY2FlbGkxRzBFBgNVBAoMPlTDvHJraXllIEJp
-bGltc2VsIHZlIFRla25vbG9qaWsgQXJhxZ90xLFybWEgS3VydW11IC0gVMOcQsSw
-VEFLMUgwRgYDVQQLDD9VbHVzYWwgRWxla3Ryb25payB2ZSBLcmlwdG9sb2ppIEFy
-YcWfdMSxcm1hIEVuc3RpdMO8c8O8IC0gVUVLQUUxIzAhBgNVBAsMGkthbXUgU2Vy
-dGlmaWthc3lvbiBNZXJrZXppMUowSAYDVQQDDEFUw5xCxLBUQUsgVUVLQUUgS8O2
-ayBTZXJ0aWZpa2EgSGl6bWV0IFNhxJ9sYXnEsWPEsXPEsSAtIFPDvHLDvG0gMzAe
-Fw0wNzA4MjQxMTM3MDdaFw0xNzA4MjExMTM3MDdaMIIBKzELMAkGA1UEBhMCVFIx
-GDAWBgNVBAcMD0dlYnplIC0gS29jYWVsaTFHMEUGA1UECgw+VMO8cmtpeWUgQmls
-aW1zZWwgdmUgVGVrbm9sb2ppayBBcmHFn3TEsXJtYSBLdXJ1bXUgLSBUw5xCxLBU
-QUsxSDBGBgNVBAsMP1VsdXNhbCBFbGVrdHJvbmlrIHZlIEtyaXB0b2xvamkgQXJh
-xZ90xLFybWEgRW5zdGl0w7xzw7wgLSBVRUtBRTEjMCEGA1UECwwaS2FtdSBTZXJ0
-aWZpa2FzeW9uIE1lcmtlemkxSjBIBgNVBAMMQVTDnELEsFRBSyBVRUtBRSBLw7Zr
-IFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxIC0gU8O8csO8bSAzMIIB
-IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAim1L/xCIOsP2fpTo6iBkcK4h
-gb46ezzb8R1Sf1n68yJMlaCQvEhOEav7t7WNeoMojCZG2E6VQIdhn8WebYGHV2yK
-O7Rm6sxA/OOqbLLLAdsyv9Lrhc+hDVXDWzhXcLh1xnnRFDDtG1hba+818qEhTsXO
-fJlfbLm4IpNQp81McGq+agV/E5wrHur+R84EpW+sky58K5+eeROR6Oqeyjh1jmKw
-lZMq5d/pXpduIF9fhHpEORlAHLpVK/swsoHvhOPc7Jg4OQOFCKlUAwUp8MmPi+oL
-hmUZEdPpCSPeaJMDyTYcIW7OjGbxmTDY17PDHfiBLqi9ggtm/oLL4eAagsNAgQID
-AQABo0IwQDAdBgNVHQ4EFgQUvYiHyY/2pAoLquvF/pEjnatKijIwDgYDVR0PAQH/
-BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBAB18+kmP
-NOm3JpIWmgV050vQbTlswyb2zrgxvMTfvCr4N5EY3ATIZJkrGG2AA1nJrvhY0D7t
-wyOfaTyGOBye79oneNGEN3GKPEs5z35FBtYt2IpNeBLWrcLTy9LQQfMmNkqblWwM
-7uXRQydmwYj3erMgbOqwaSvHIOgMA8RBBZniP+Rr+KCGgceExh/VS4ESshYhLBOh
-gLJeDEoTniDYYkCrkOpkSi+sDQESeUWoL4cZaMjihccwsnX5OD+ywJO0a+IDRM5n
-oN+J1q2MdqMTw5RhK2vZbMEHCiIHhWyFJEapvj+LeISCfiQMnf2BN+MlqO02TpUs
-yZyQ2uypQjyttgI=
------END CERTIFICATE-----
-
-# Issuer: CN=Buypass Class 2 CA 1 O=Buypass AS-983163327
-# Subject: CN=Buypass Class 2 CA 1 O=Buypass AS-983163327
-# Label: "Buypass Class 2 CA 1"
-# Serial: 1
-# MD5 Fingerprint: b8:08:9a:f0:03:cc:1b:0d:c8:6c:0b:76:a1:75:64:23
-# SHA1 Fingerprint: a0:a1:ab:90:c9:fc:84:7b:3b:12:61:e8:97:7d:5f:d3:22:61:d3:cc
-# SHA256 Fingerprint: 0f:4e:9c:dd:26:4b:02:55:50:d1:70:80:63:40:21:4f:e9:44:34:c9:b0:2f:69:7e:c7:10:fc:5f:ea:fb:5e:38
------BEGIN CERTIFICATE-----
-MIIDUzCCAjugAwIBAgIBATANBgkqhkiG9w0BAQUFADBLMQswCQYDVQQGEwJOTzEd
-MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxHTAbBgNVBAMMFEJ1eXBhc3Mg
-Q2xhc3MgMiBDQSAxMB4XDTA2MTAxMzEwMjUwOVoXDTE2MTAxMzEwMjUwOVowSzEL
-MAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MR0wGwYD
-VQQDDBRCdXlwYXNzIENsYXNzIDIgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEP
-ADCCAQoCggEBAIs8B0XY9t/mx8q6jUPFR42wWsE425KEHK8T1A9vNkYgxC7McXA0
-ojTTNy7Y3Tp3L8DrKehc0rWpkTSHIln+zNvnma+WwajHQN2lFYxuyHyXA8vmIPLX
-l18xoS830r7uvqmtqEyeIWZDO6i88wmjONVZJMHCR3axiFyCO7srpgTXjAePzdVB
-HfCuuCkslFJgNJQ72uA40Z0zPhX0kzLFANq1KWYOOngPIVJfAuWSeyXTkh4vFZ2B
-5J2O6O+JzhRMVB0cgRJNcKi+EAUXfh/RuFdV7c27UsKwHnjCTTZoy1YmwVLBvXb3
-WNVyfh9EdrsAiR0WnVE1703CVu9r4Iw7DekCAwEAAaNCMEAwDwYDVR0TAQH/BAUw
-AwEB/zAdBgNVHQ4EFgQUP42aWYv8e3uco684sDntkHGA1sgwDgYDVR0PAQH/BAQD
-AgEGMA0GCSqGSIb3DQEBBQUAA4IBAQAVGn4TirnoB6NLJzKyQJHyIdFkhb5jatLP
-gcIV1Xp+DCmsNx4cfHZSldq1fyOhKXdlyTKdqC5Wq2B2zha0jX94wNWZUYN/Xtm+
-DKhQ7SLHrQVMdvvt7h5HZPb3J31cKA9FxVxiXqaakZG3Uxcu3K1gnZZkOb1naLKu
-BctN518fV4bVIJwo+28TOPX2EZL2fZleHwzoq0QkKXJAPTZSr4xYkHPB7GEseaHs
-h7U/2k3ZIQAw3pDaDtMaSKk+hQsUi4y8QZ5q9w5wwDX3OaJdZtB7WZ+oRxKaJyOk
-LY4ng5IgodcVf/EuGO70SH8vf/GhGLWhC5SgYiAynB321O+/TIho
------END CERTIFICATE-----
-
-# Issuer: CN=EBG Elektronik Sertifika Hizmet Sağlayıcısı O=EBG Bilişim Teknolojileri ve Hizmetleri A.Ş.
-# Subject: CN=EBG Elektronik Sertifika Hizmet Sağlayıcısı O=EBG Bilişim Teknolojileri ve Hizmetleri A.Ş.
-# Label: "EBG Elektronik Sertifika Hizmet Sa\xC4\x9Flay\xc4\xb1\x63\xc4\xb1s\xc4\xb1"
-# Serial: 5525761995591021570
-# MD5 Fingerprint: 2c:20:26:9d:cb:1a:4a:00:85:b5:b7:5a:ae:c2:01:37
-# SHA1 Fingerprint: 8c:96:ba:eb:dd:2b:07:07:48:ee:30:32:66:a0:f3:98:6e:7c:ae:58
-# SHA256 Fingerprint: 35:ae:5b:dd:d8:f7:ae:63:5c:ff:ba:56:82:a8:f0:0b:95:f4:84:62:c7:10:8e:e9:a0:e5:29:2b:07:4a:af:b2
------BEGIN CERTIFICATE-----
-MIIF5zCCA8+gAwIBAgIITK9zQhyOdAIwDQYJKoZIhvcNAQEFBQAwgYAxODA2BgNV
-BAMML0VCRyBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sx
-c8SxMTcwNQYDVQQKDC5FQkcgQmlsacWfaW0gVGVrbm9sb2ppbGVyaSB2ZSBIaXpt
-ZXRsZXJpIEEuxZ4uMQswCQYDVQQGEwJUUjAeFw0wNjA4MTcwMDIxMDlaFw0xNjA4
-MTQwMDMxMDlaMIGAMTgwNgYDVQQDDC9FQkcgRWxla3Ryb25payBTZXJ0aWZpa2Eg
-SGl6bWV0IFNhxJ9sYXnEsWPEsXPEsTE3MDUGA1UECgwuRUJHIEJpbGnFn2ltIFRl
-a25vbG9qaWxlcmkgdmUgSGl6bWV0bGVyaSBBLsWeLjELMAkGA1UEBhMCVFIwggIi
-MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDuoIRh0DpqZhAy2DE4f6en5f2h
-4fuXd7hxlugTlkaDT7byX3JWbhNgpQGR4lvFzVcfd2NR/y8927k/qqk153nQ9dAk
-tiHq6yOU/im/+4mRDGSaBUorzAzu8T2bgmmkTPiab+ci2hC6X5L8GCcKqKpE+i4s
-tPtGmggDg3KriORqcsnlZR9uKg+ds+g75AxuetpX/dfreYteIAbTdgtsApWjluTL
-dlHRKJ2hGvxEok3MenaoDT2/F08iiFD9rrbskFBKW5+VQarKD7JK/oCZTqNGFav4
-c0JqwmZ2sQomFd2TkuzbqV9UIlKRcF0T6kjsbgNs2d1s/OsNA/+mgxKb8amTD8Um
-TDGyY5lhcucqZJnSuOl14nypqZoaqsNW2xCaPINStnuWt6yHd6i58mcLlEOzrz5z
-+kI2sSXFCjEmN1ZnuqMLfdb3ic1nobc6HmZP9qBVFCVMLDMNpkGMvQQxahByCp0O
-Lna9XvNRiYuoP1Vzv9s6xiQFlpJIqkuNKgPlV5EQ9GooFW5Hd4RcUXSfGenmHmMW
-OeMRFeNYGkS9y8RsZteEBt8w9DeiQyJ50hBs37vmExH8nYQKE3vwO9D8owrXieqW
-fo1IhR5kX9tUoqzVegJ5a9KK8GfaZXINFHDk6Y54jzJ0fFfy1tb0Nokb+Clsi7n2
-l9GkLqq+CxnCRelwXQIDAJ3Zo2MwYTAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB
-/wQEAwIBBjAdBgNVHQ4EFgQU587GT/wWZ5b6SqMHwQSny2re2kcwHwYDVR0jBBgw
-FoAU587GT/wWZ5b6SqMHwQSny2re2kcwDQYJKoZIhvcNAQEFBQADggIBAJuYml2+
-8ygjdsZs93/mQJ7ANtyVDR2tFcU22NU57/IeIl6zgrRdu0waypIN30ckHrMk2pGI
-6YNw3ZPX6bqz3xZaPt7gyPvT/Wwp+BVGoGgmzJNSroIBk5DKd8pNSe/iWtkqvTDO
-TLKBtjDOWU/aWR1qeqRFsIImgYZ29fUQALjuswnoT4cCB64kXPBfrAowzIpAoHME
-wfuJJPaaHFy3PApnNgUIMbOv2AFoKuB4j3TeuFGkjGwgPaL7s9QJ/XvCgKqTbCmY
-Iai7FvOpEl90tYeY8pUm3zTvilORiF0alKM/fCL414i6poyWqD1SNGKfAB5UVUJn
-xk1Gj7sURT0KlhaOEKGXmdXTMIXM3rRyt7yKPBgpaP3ccQfuJDlq+u2lrDgv+R4Q
-DgZxGhBM/nV+/x5XOULK1+EVoVZVWRvRo68R2E7DpSvvkL/A7IITW43WciyTTo9q
-Kd+FPNMN4KIYEsxVL0e3p5sC/kH2iExt2qkBR4NkJ2IQgtYSe14DHzSpyZH+r11t
-hie3I6p1GMog57AP14kOpmciY/SDQSsGS7tY1dHXt7kQY9iJSrSq3RZj9W6+YKH4
-7ejWkE8axsWgKdOnIaj1Wjz3x0miIZpKlVIglnKaZsv30oZDfCK+lvm9AahH3eU7
-QPl1K5srRmSGjR70j/sHd9DqSaIcjVIUpgqT
------END CERTIFICATE-----
-
# Issuer: O=certSIGN OU=certSIGN ROOT CA
# Subject: O=certSIGN OU=certSIGN ROOT CA
# Label: "certSIGN ROOT CA"
@@ -2315,64 +1291,6 @@ i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN
9u6wWk5JRFRYX0KD
-----END CERTIFICATE-----
-# Issuer: CN=CNNIC ROOT O=CNNIC
-# Subject: CN=CNNIC ROOT O=CNNIC
-# Label: "CNNIC ROOT"
-# Serial: 1228079105
-# MD5 Fingerprint: 21:bc:82:ab:49:c4:13:3b:4b:b2:2b:5c:6b:90:9c:19
-# SHA1 Fingerprint: 8b:af:4c:9b:1d:f0:2a:92:f7:da:12:8e:b9:1b:ac:f4:98:60:4b:6f
-# SHA256 Fingerprint: e2:83:93:77:3d:a8:45:a6:79:f2:08:0c:c7:fb:44:a3:b7:a1:c3:79:2c:b7:eb:77:29:fd:cb:6a:8d:99:ae:a7
------BEGIN CERTIFICATE-----
-MIIDVTCCAj2gAwIBAgIESTMAATANBgkqhkiG9w0BAQUFADAyMQswCQYDVQQGEwJD
-TjEOMAwGA1UEChMFQ05OSUMxEzARBgNVBAMTCkNOTklDIFJPT1QwHhcNMDcwNDE2
-MDcwOTE0WhcNMjcwNDE2MDcwOTE0WjAyMQswCQYDVQQGEwJDTjEOMAwGA1UEChMF
-Q05OSUMxEzARBgNVBAMTCkNOTklDIFJPT1QwggEiMA0GCSqGSIb3DQEBAQUAA4IB
-DwAwggEKAoIBAQDTNfc/c3et6FtzF8LRb+1VvG7q6KR5smzDo+/hn7E7SIX1mlwh
-IhAsxYLO2uOabjfhhyzcuQxauohV3/2q2x8x6gHx3zkBwRP9SFIhxFXf2tizVHa6
-dLG3fdfA6PZZxU3Iva0fFNrfWEQlMhkqx35+jq44sDB7R3IJMfAw28Mbdim7aXZO
-V/kbZKKTVrdvmW7bCgScEeOAH8tjlBAKqeFkgjH5jCftppkA9nCTGPihNIaj3XrC
-GHn2emU1z5DrvTOTn1OrczvmmzQgLx3vqR1jGqCA2wMv+SYahtKNu6m+UjqHZ0gN
-v7Sg2Ca+I19zN38m5pIEo3/PIKe38zrKy5nLAgMBAAGjczBxMBEGCWCGSAGG+EIB
-AQQEAwIABzAfBgNVHSMEGDAWgBRl8jGtKvf33VKWCscCwQ7vptU7ETAPBgNVHRMB
-Af8EBTADAQH/MAsGA1UdDwQEAwIB/jAdBgNVHQ4EFgQUZfIxrSr3991SlgrHAsEO
-76bVOxEwDQYJKoZIhvcNAQEFBQADggEBAEs17szkrr/Dbq2flTtLP1se31cpolnK
-OOK5Gv+e5m4y3R6u6jW39ZORTtpC4cMXYFDy0VwmuYK36m3knITnA3kXr5g9lNvH
-ugDnuL8BV8F3RTIMO/G0HAiw/VGgod2aHRM2mm23xzy54cXZF/qD1T0VoDy7Hgvi
-yJA/qIYM/PmLXoXLT1tLYhFHxUV8BS9BsZ4QaRuZluBVeftOhpm4lNqGOGqTo+fL
-buXf6iFViZx9fX+Y9QCJ7uOEwFyWtcVG6kbghVW2G8kS1sHNzYDzAgE8yGnLRUhj
-2JTQ7IUOO04RZfSCjKY9ri4ilAnIXOo8gV0WKgOXFlUJ24pBgp5mmxE=
------END CERTIFICATE-----
-
-# Issuer: O=Japanese Government OU=ApplicationCA
-# Subject: O=Japanese Government OU=ApplicationCA
-# Label: "ApplicationCA - Japanese Government"
-# Serial: 49
-# MD5 Fingerprint: 7e:23:4e:5b:a7:a5:b4:25:e9:00:07:74:11:62:ae:d6
-# SHA1 Fingerprint: 7f:8a:b0:cf:d0:51:87:6a:66:f3:36:0f:47:c8:8d:8c:d3:35:fc:74
-# SHA256 Fingerprint: 2d:47:43:7d:e1:79:51:21:5a:12:f3:c5:8e:51:c7:29:a5:80:26:ef:1f:cc:0a:5f:b3:d9:dc:01:2f:60:0d:19
------BEGIN CERTIFICATE-----
-MIIDoDCCAoigAwIBAgIBMTANBgkqhkiG9w0BAQUFADBDMQswCQYDVQQGEwJKUDEc
-MBoGA1UEChMTSmFwYW5lc2UgR292ZXJubWVudDEWMBQGA1UECxMNQXBwbGljYXRp
-b25DQTAeFw0wNzEyMTIxNTAwMDBaFw0xNzEyMTIxNTAwMDBaMEMxCzAJBgNVBAYT
-AkpQMRwwGgYDVQQKExNKYXBhbmVzZSBHb3Zlcm5tZW50MRYwFAYDVQQLEw1BcHBs
-aWNhdGlvbkNBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAp23gdE6H
-j6UG3mii24aZS2QNcfAKBZuOquHMLtJqO8F6tJdhjYq+xpqcBrSGUeQ3DnR4fl+K
-f5Sk10cI/VBaVuRorChzoHvpfxiSQE8tnfWuREhzNgaeZCw7NCPbXCbkcXmP1G55
-IrmTwcrNwVbtiGrXoDkhBFcsovW8R0FPXjQilbUfKW1eSvNNcr5BViCH/OlQR9cw
-FO5cjFW6WY2H/CPek9AEjP3vbb3QesmlOmpyM8ZKDQUXKi17safY1vC+9D/qDiht
-QWEjdnjDuGWk81quzMKq2edY3rZ+nYVunyoKb58DKTCXKB28t89UKU5RMfkntigm
-/qJj5kEW8DOYRwIDAQABo4GeMIGbMB0GA1UdDgQWBBRUWssmP3HMlEYNllPqa0jQ
-k/5CdTAOBgNVHQ8BAf8EBAMCAQYwWQYDVR0RBFIwUKROMEwxCzAJBgNVBAYTAkpQ
-MRgwFgYDVQQKDA/ml6XmnKzlm73mlL/lupwxIzAhBgNVBAsMGuOCouODl+ODquOC
-seODvOOCt+ODp+ODs0NBMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD
-ggEBADlqRHZ3ODrso2dGD/mLBqj7apAxzn7s2tGJfHrrLgy9mTLnsCTWw//1sogJ
-hyzjVOGjprIIC8CFqMjSnHH2HZ9g/DgzE+Ge3Atf2hZQKXsvcJEPmbo0NI2VdMV+
-eKlmXb3KIXdCEKxmJj3ekav9FfBv7WxfEPjzFvYDio+nEhEMy/0/ecGc/WLuo89U
-DNErXxc+4z6/wCs+CZv+iKZ+tJIX/COUgb1up8WMwusRRdv4QcmWdupwX3kSa+Sj
-B1oF7ydJzyGfikwJcGapJsErEU4z0g781mzSDjJkaP+tBXhfAx2o45CsJOAPQKdL
-rosot4LKGAfmt1t06SAZf7IbiVQ=
------END CERTIFICATE-----
-
# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
# Label: "GeoTrust Primary Certification Authority - G3"
@@ -2625,75 +1543,6 @@ Y7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoVIPVVYpbtbZNQvOSqeK3Z
ywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm66+KAQ==
-----END CERTIFICATE-----
-# Issuer: CN=CA Disig O=Disig a.s.
-# Subject: CN=CA Disig O=Disig a.s.
-# Label: "CA Disig"
-# Serial: 1
-# MD5 Fingerprint: 3f:45:96:39:e2:50:87:f7:bb:fe:98:0c:3c:20:98:e6
-# SHA1 Fingerprint: 2a:c8:d5:8b:57:ce:bf:2f:49:af:f2:fc:76:8f:51:14:62:90:7a:41
-# SHA256 Fingerprint: 92:bf:51:19:ab:ec:ca:d0:b1:33:2d:c4:e1:d0:5f:ba:75:b5:67:90:44:ee:0c:a2:6e:93:1f:74:4f:2f:33:cf
------BEGIN CERTIFICATE-----
-MIIEDzCCAvegAwIBAgIBATANBgkqhkiG9w0BAQUFADBKMQswCQYDVQQGEwJTSzET
-MBEGA1UEBxMKQnJhdGlzbGF2YTETMBEGA1UEChMKRGlzaWcgYS5zLjERMA8GA1UE
-AxMIQ0EgRGlzaWcwHhcNMDYwMzIyMDEzOTM0WhcNMTYwMzIyMDEzOTM0WjBKMQsw
-CQYDVQQGEwJTSzETMBEGA1UEBxMKQnJhdGlzbGF2YTETMBEGA1UEChMKRGlzaWcg
-YS5zLjERMA8GA1UEAxMIQ0EgRGlzaWcwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw
-ggEKAoIBAQCS9jHBfYj9mQGp2HvycXXxMcbzdWb6UShGhJd4NLxs/LxFWYgmGErE
-Nx+hSkS943EE9UQX4j/8SFhvXJ56CbpRNyIjZkMhsDxkovhqFQ4/61HhVKndBpnX
-mjxUizkDPw/Fzsbrg3ICqB9x8y34dQjbYkzo+s7552oftms1grrijxaSfQUMbEYD
-XcDtab86wYqg6I7ZuUUohwjstMoVvoLdtUSLLa2GDGhibYVW8qwUYzrG0ZmsNHhW
-S8+2rT+MitcE5eN4TPWGqvWP+j1scaMtymfraHtuM6kMgiioTGohQBUgDCZbg8Kp
-FhXAJIJdKxatymP2dACw30PEEGBWZ2NFAgMBAAGjgf8wgfwwDwYDVR0TAQH/BAUw
-AwEB/zAdBgNVHQ4EFgQUjbJJaJ1yCCW5wCf1UJNWSEZx+Y8wDgYDVR0PAQH/BAQD
-AgEGMDYGA1UdEQQvMC2BE2Nhb3BlcmF0b3JAZGlzaWcuc2uGFmh0dHA6Ly93d3cu
-ZGlzaWcuc2svY2EwZgYDVR0fBF8wXTAtoCugKYYnaHR0cDovL3d3dy5kaXNpZy5z
-ay9jYS9jcmwvY2FfZGlzaWcuY3JsMCygKqAohiZodHRwOi8vY2EuZGlzaWcuc2sv
-Y2EvY3JsL2NhX2Rpc2lnLmNybDAaBgNVHSAEEzARMA8GDSuBHpGT5goAAAABAQEw
-DQYJKoZIhvcNAQEFBQADggEBAF00dGFMrzvY/59tWDYcPQuBDRIrRhCA/ec8J9B6
-yKm2fnQwM6M6int0wHl5QpNt/7EpFIKrIYwvF/k/Ji/1WcbvgAa3mkkp7M5+cTxq
-EEHA9tOasnxakZzArFvITV734VP/Q3f8nktnbNfzg9Gg4H8l37iYC5oyOGwwoPP/
-CBUz91BKez6jPiCp3C9WgArtQVCwyfTssuMmRAAOb54GvCKWU3BlxFAKRmukLyeB
-EicTXxChds6KezfqwzlhA5WYOudsiCUI/HloDYd9Yvi0X/vF2Ey9WLw/Q1vUHgFN
-PGO+I++MzVpQuGhU+QqZMxEA4Z7CRneC9VkGjCFMhwnN5ag=
------END CERTIFICATE-----
-
-# Issuer: CN=Juur-SK O=AS Sertifitseerimiskeskus
-# Subject: CN=Juur-SK O=AS Sertifitseerimiskeskus
-# Label: "Juur-SK"
-# Serial: 999181308
-# MD5 Fingerprint: aa:8e:5d:d9:f8:db:0a:58:b7:8d:26:87:6c:82:35:55
-# SHA1 Fingerprint: 40:9d:4b:d9:17:b5:5c:27:b6:9b:64:cb:98:22:44:0d:cd:09:b8:89
-# SHA256 Fingerprint: ec:c3:e9:c3:40:75:03:be:e0:91:aa:95:2f:41:34:8f:f8:8b:aa:86:3b:22:64:be:fa:c8:07:90:15:74:e9:39
------BEGIN CERTIFICATE-----
-MIIE5jCCA86gAwIBAgIEO45L/DANBgkqhkiG9w0BAQUFADBdMRgwFgYJKoZIhvcN
-AQkBFglwa2lAc2suZWUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKExlBUyBTZXJ0aWZp
-dHNlZXJpbWlza2Vza3VzMRAwDgYDVQQDEwdKdXVyLVNLMB4XDTAxMDgzMDE0MjMw
-MVoXDTE2MDgyNjE0MjMwMVowXTEYMBYGCSqGSIb3DQEJARYJcGtpQHNrLmVlMQsw
-CQYDVQQGEwJFRTEiMCAGA1UEChMZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1czEQ
-MA4GA1UEAxMHSnV1ci1TSzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
-AIFxNj4zB9bjMI0TfncyRsvPGbJgMUaXhvSYRqTCZUXP00B841oiqBB4M8yIsdOB
-SvZiF3tfTQou0M+LI+5PAk676w7KvRhj6IAcjeEcjT3g/1tf6mTll+g/mX8MCgkz
-ABpTpyHhOEvWgxutr2TC+Rx6jGZITWYfGAriPrsfB2WThbkasLnE+w0R9vXW+RvH
-LCu3GFH+4Hv2qEivbDtPL+/40UceJlfwUR0zlv/vWT3aTdEVNMfqPxZIe5EcgEMP
-PbgFPtGzlc3Yyg/CQ2fbt5PgIoIuvvVoKIO5wTtpeyDaTpxt4brNj3pssAki14sL
-2xzVWiZbDcDq5WDQn/413z8CAwEAAaOCAawwggGoMA8GA1UdEwEB/wQFMAMBAf8w
-ggEWBgNVHSAEggENMIIBCTCCAQUGCisGAQQBzh8BAQEwgfYwgdAGCCsGAQUFBwIC
-MIHDHoHAAFMAZQBlACAAcwBlAHIAdABpAGYAaQBrAGEAYQB0ACAAbwBuACAAdgDk
-AGwAagBhAHMAdABhAHQAdQBkACAAQQBTAC0AaQBzACAAUwBlAHIAdABpAGYAaQB0
-AHMAZQBlAHIAaQBtAGkAcwBrAGUAcwBrAHUAcwAgAGEAbABhAG0ALQBTAEsAIABz
-AGUAcgB0AGkAZgBpAGsAYQBhAHQAaQBkAGUAIABrAGkAbgBuAGkAdABhAG0AaQBz
-AGUAawBzMCEGCCsGAQUFBwIBFhVodHRwOi8vd3d3LnNrLmVlL2Nwcy8wKwYDVR0f
-BCQwIjAgoB6gHIYaaHR0cDovL3d3dy5zay5lZS9qdXVyL2NybC8wHQYDVR0OBBYE
-FASqekej5ImvGs8KQKcYP2/v6X2+MB8GA1UdIwQYMBaAFASqekej5ImvGs8KQKcY
-P2/v6X2+MA4GA1UdDwEB/wQEAwIB5jANBgkqhkiG9w0BAQUFAAOCAQEAe8EYlFOi
-CfP+JmeaUOTDBS8rNXiRTHyoERF5TElZrMj3hWVcRrs7EKACr81Ptcw2Kuxd/u+g
-kcm2k298gFTsxwhwDY77guwqYHhpNjbRxZyLabVAyJRld/JXIWY7zoVAtjNjGr95
-HvxcHdMdkxuLDF2FvZkwMhgJkVLpfKG6/2SSmuz+Ne6ML678IIbsSt4beDI3poHS
-na9aEhbKmVv8b20OxaAehsmR0FyYgl9jDIpaq9iVpszLita/ZEuOyoqysOkhMp6q
-qIWYNIE5ITuoOlIyPfZrN4YGWhWY3PARZv40ILcD9EEQfTmEeZZyY7aWAuVrua0Z
-TbvGRNs2yyqcjg==
------END CERTIFICATE-----
-
# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post
# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post
# Label: "Hongkong Post Root CA 1"
@@ -2751,47 +1600,6 @@ pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN
QSdJQO7e5iNEOdyhIta6A/I=
-----END CERTIFICATE-----
-# Issuer: CN=ACEDICOM Root O=EDICOM OU=PKI
-# Subject: CN=ACEDICOM Root O=EDICOM OU=PKI
-# Label: "ACEDICOM Root"
-# Serial: 7029493972724711941
-# MD5 Fingerprint: 42:81:a0:e2:1c:e3:55:10:de:55:89:42:65:96:22:e6
-# SHA1 Fingerprint: e0:b4:32:2e:b2:f6:a5:68:b6:54:53:84:48:18:4a:50:36:87:43:84
-# SHA256 Fingerprint: 03:95:0f:b4:9a:53:1f:3e:19:91:94:23:98:df:a9:e0:ea:32:d7:ba:1c:dd:9b:c8:5d:b5:7e:d9:40:0b:43:4a
------BEGIN CERTIFICATE-----
-MIIFtTCCA52gAwIBAgIIYY3HhjsBggUwDQYJKoZIhvcNAQEFBQAwRDEWMBQGA1UE
-AwwNQUNFRElDT00gUm9vdDEMMAoGA1UECwwDUEtJMQ8wDQYDVQQKDAZFRElDT00x
-CzAJBgNVBAYTAkVTMB4XDTA4MDQxODE2MjQyMloXDTI4MDQxMzE2MjQyMlowRDEW
-MBQGA1UEAwwNQUNFRElDT00gUm9vdDEMMAoGA1UECwwDUEtJMQ8wDQYDVQQKDAZF
-RElDT00xCzAJBgNVBAYTAkVTMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKC
-AgEA/5KV4WgGdrQsyFhIyv2AVClVYyT/kGWbEHV7w2rbYgIB8hiGtXxaOLHkWLn7
-09gtn70yN78sFW2+tfQh0hOR2QetAQXW8713zl9CgQr5auODAKgrLlUTY4HKRxx7
-XBZXehuDYAQ6PmXDzQHe3qTWDLqO3tkE7hdWIpuPY/1NFgu3e3eM+SW10W2ZEi5P
-Grjm6gSSrj0RuVFCPYewMYWveVqc/udOXpJPQ/yrOq2lEiZmueIM15jO1FillUAK
-t0SdE3QrwqXrIhWYENiLxQSfHY9g5QYbm8+5eaA9oiM/Qj9r+hwDezCNzmzAv+Yb
-X79nuIQZ1RXve8uQNjFiybwCq0Zfm/4aaJQ0PZCOrfbkHQl/Sog4P75n/TSW9R28
-MHTLOO7VbKvU/PQAtwBbhTIWdjPp2KOZnQUAqhbm84F9b32qhm2tFXTTxKJxqvQU
-fecyuB+81fFOvW8XAjnXDpVCOscAPukmYxHqC9FK/xidstd7LzrZlvvoHpKuE1XI
-2Sf23EgbsCTBheN3nZqk8wwRHQ3ItBTutYJXCb8gWH8vIiPYcMt5bMlL8qkqyPyH
-K9caUPgn6C9D4zq92Fdx/c6mUlv53U3t5fZvie27k5x2IXXwkkwp9y+cAS7+UEae
-ZAwUswdbxcJzbPEHXEUkFDWug/FqTYl6+rPYLWbwNof1K1MCAwEAAaOBqjCBpzAP
-BgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKaz4SsrSbbXc6GqlPUB53NlTKxQ
-MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUprPhKytJttdzoaqU9QHnc2VMrFAw
-RAYDVR0gBD0wOzA5BgRVHSAAMDEwLwYIKwYBBQUHAgEWI2h0dHA6Ly9hY2VkaWNv
-bS5lZGljb21ncm91cC5jb20vZG9jMA0GCSqGSIb3DQEBBQUAA4ICAQDOLAtSUWIm
-fQwng4/F9tqgaHtPkl7qpHMyEVNEskTLnewPeUKzEKbHDZ3Ltvo/Onzqv4hTGzz3
-gvoFNTPhNahXwOf9jU8/kzJPeGYDdwdY6ZXIfj7QeQCM8htRM5u8lOk6e25SLTKe
-I6RF+7YuE7CLGLHdztUdp0J/Vb77W7tH1PwkzQSulgUV1qzOMPPKC8W64iLgpq0i
-5ALudBF/TP94HTXa5gI06xgSYXcGCRZj6hitoocf8seACQl1ThCojz2GuHURwCRi
-ipZ7SkXp7FnFvmuD5uHorLUwHv4FB4D54SMNUI8FmP8sX+g7tq3PgbUhh8oIKiMn
-MCArz+2UW6yyetLHKKGKC5tNSixthT8Jcjxn4tncB7rrZXtaAWPWkFtPF2Y9fwsZ
-o5NjEFIqnxQWWOLcpfShFosOkYuByptZ+thrkQdlVV9SH686+5DdaaVbnG0OLLb6
-zqylfDJKZ0DcMDQj3dcEI2bw/FWAp/tmGYI1Z2JwOV5vx+qQQEQIHriy1tvuWacN
-GHk0vFQYXlPKNFHtRQrmjseCNj6nOGOpMCwXEGCSn1WHElkQwg9naRHMTh5+Spqt
-r0CodaxWkHS4oJyleW/c6RrIaQXpuvoDs3zk4E7Czp3otkYNbn5XOmeUwssfnHdK
-Z05phkOTOPu220+DkdRgfks+KzgHVZhepA==
------END CERTIFICATE-----
-
# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
# Label: "Microsec e-Szigno Root CA 2009"
@@ -3277,122 +2085,6 @@ VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI
03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw=
-----END CERTIFICATE-----
-# Issuer: CN=Certinomis - Autorité Racine O=Certinomis OU=0002 433998903
-# Subject: CN=Certinomis - Autorité Racine O=Certinomis OU=0002 433998903
-# Label: "Certinomis - Autorité Racine"
-# Serial: 1
-# MD5 Fingerprint: 7f:30:78:8c:03:e3:ca:c9:0a:e2:c9:ea:1e:aa:55:1a
-# SHA1 Fingerprint: 2e:14:da:ec:28:f0:fa:1e:8e:38:9a:4e:ab:eb:26:c0:0a:d3:83:c3
-# SHA256 Fingerprint: fc:bf:e2:88:62:06:f7:2b:27:59:3c:8b:07:02:97:e1:2d:76:9e:d1:0e:d7:93:07:05:a8:09:8e:ff:c1:4d:17
------BEGIN CERTIFICATE-----
-MIIFnDCCA4SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJGUjET
-MBEGA1UEChMKQ2VydGlub21pczEXMBUGA1UECxMOMDAwMiA0MzM5OTg5MDMxJjAk
-BgNVBAMMHUNlcnRpbm9taXMgLSBBdXRvcml0w6kgUmFjaW5lMB4XDTA4MDkxNzA4
-Mjg1OVoXDTI4MDkxNzA4Mjg1OVowYzELMAkGA1UEBhMCRlIxEzARBgNVBAoTCkNl
-cnRpbm9taXMxFzAVBgNVBAsTDjAwMDIgNDMzOTk4OTAzMSYwJAYDVQQDDB1DZXJ0
-aW5vbWlzIC0gQXV0b3JpdMOpIFJhY2luZTCCAiIwDQYJKoZIhvcNAQEBBQADggIP
-ADCCAgoCggIBAJ2Fn4bT46/HsmtuM+Cet0I0VZ35gb5j2CN2DpdUzZlMGvE5x4jY
-F1AMnmHawE5V3udauHpOd4cN5bjr+p5eex7Ezyh0x5P1FMYiKAT5kcOrJ3NqDi5N
-8y4oH3DfVS9O7cdxbwlyLu3VMpfQ8Vh30WC8Tl7bmoT2R2FFK/ZQpn9qcSdIhDWe
-rP5pqZ56XjUl+rSnSTV3lqc2W+HN3yNw2F1MpQiD8aYkOBOo7C+ooWfHpi2GR+6K
-/OybDnT0K0kCe5B1jPyZOQE51kqJ5Z52qz6WKDgmi92NjMD2AR5vpTESOH2VwnHu
-7XSu5DaiQ3XV8QCb4uTXzEIDS3h65X27uK4uIJPT5GHfceF2Z5c/tt9qc1pkIuVC
-28+BA5PY9OMQ4HL2AHCs8MF6DwV/zzRpRbWT5BnbUhYjBYkOjUjkJW+zeL9i9Qf6
-lSTClrLooyPCXQP8w9PlfMl1I9f09bze5N/NgL+RiH2nE7Q5uiy6vdFrzPOlKO1E
-nn1So2+WLhl+HPNbxxaOu2B9d2ZHVIIAEWBsMsGoOBvrbpgT1u449fCfDu/+MYHB
-0iSVL1N6aaLwD4ZFjliCK0wi1F6g530mJ0jfJUaNSih8hp75mxpZuWW/Bd22Ql09
-5gBIgl4g9xGC3srYn+Y3RyYe63j3YcNBZFgCQfna4NH4+ej9Uji29YnfAgMBAAGj
-WzBZMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBQN
-jLZh2kS40RR9w759XkjwzspqsDAXBgNVHSAEEDAOMAwGCiqBegFWAgIAAQEwDQYJ
-KoZIhvcNAQEFBQADggIBACQ+YAZ+He86PtvqrxyaLAEL9MW12Ukx9F1BjYkMTv9s
-ov3/4gbIOZ/xWqndIlgVqIrTseYyCYIDbNc/CMf4uboAbbnW/FIyXaR/pDGUu7ZM
-OH8oMDX/nyNTt7buFHAAQCvaR6s0fl6nVjBhK4tDrP22iCj1a7Y+YEq6QpA0Z43q
-619FVDsXrIvkxmUP7tCMXWY5zjKn2BCXwH40nJ+U8/aGH88bc62UeYdocMMzpXDn
-2NU4lG9jeeu/Cg4I58UvD0KgKxRA/yHgBcUn4YQRE7rWhh1BCxMjidPJC+iKunqj
-o3M3NYB9Ergzd0A4wPpeMNLytqOx1qKVl4GbUu1pTP+A5FPbVFsDbVRfsbjvJL1v
-nxHDx2TCDyhihWZeGnuyt++uNckZM6i4J9szVb9o4XVIRFb7zdNIu0eJOqxp9YDG
-5ERQL1TEqkPFMTFYvZbF6nVsmnWxTfj3l/+WFvKXTej28xH5On2KOG4Ey+HTRRWq
-pdEdnV1j6CTmNhTih60bWfVEm/vXd3wfAXBioSAaosUaKPQhA+4u2cGA6rnZgtZb
-dsLLO7XSAPCjDuGtbkD326C00EauFddEwk01+dIL8hf2rGbVJLJP0RyZwG71fet0
-BLj5TXcJ17TPBzAJ8bgAVtkXFhYKK4bfjwEZGuW7gmP/vgt2Fl43N+bYdJeimUV5
------END CERTIFICATE-----
-
-# Issuer: CN=Root CA Generalitat Valenciana O=Generalitat Valenciana OU=PKIGVA
-# Subject: CN=Root CA Generalitat Valenciana O=Generalitat Valenciana OU=PKIGVA
-# Label: "Root CA Generalitat Valenciana"
-# Serial: 994436456
-# MD5 Fingerprint: 2c:8c:17:5e:b1:54:ab:93:17:b5:36:5a:db:d1:c6:f2
-# SHA1 Fingerprint: a0:73:e5:c5:bd:43:61:0d:86:4c:21:13:0a:85:58:57:cc:9c:ea:46
-# SHA256 Fingerprint: 8c:4e:df:d0:43:48:f3:22:96:9e:7e:29:a4:cd:4d:ca:00:46:55:06:1c:16:e1:b0:76:42:2e:f3:42:ad:63:0e
------BEGIN CERTIFICATE-----
-MIIGizCCBXOgAwIBAgIEO0XlaDANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJF
-UzEfMB0GA1UEChMWR2VuZXJhbGl0YXQgVmFsZW5jaWFuYTEPMA0GA1UECxMGUEtJ
-R1ZBMScwJQYDVQQDEx5Sb290IENBIEdlbmVyYWxpdGF0IFZhbGVuY2lhbmEwHhcN
-MDEwNzA2MTYyMjQ3WhcNMjEwNzAxMTUyMjQ3WjBoMQswCQYDVQQGEwJFUzEfMB0G
-A1UEChMWR2VuZXJhbGl0YXQgVmFsZW5jaWFuYTEPMA0GA1UECxMGUEtJR1ZBMScw
-JQYDVQQDEx5Sb290IENBIEdlbmVyYWxpdGF0IFZhbGVuY2lhbmEwggEiMA0GCSqG
-SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDGKqtXETcvIorKA3Qdyu0togu8M1JAJke+
-WmmmO3I2F0zo37i7L3bhQEZ0ZQKQUgi0/6iMweDHiVYQOTPvaLRfX9ptI6GJXiKj
-SgbwJ/BXufjpTjJ3Cj9BZPPrZe52/lSqfR0grvPXdMIKX/UIKFIIzFVd0g/bmoGl
-u6GzwZTNVOAydTGRGmKy3nXiz0+J2ZGQD0EbtFpKd71ng+CT516nDOeB0/RSrFOy
-A8dEJvt55cs0YFAQexvba9dHq198aMpunUEDEO5rmXteJajCq+TA81yc477OMUxk
-Hl6AovWDfgzWyoxVjr7gvkkHD6MkQXpYHYTqWBLI4bft75PelAgxAgMBAAGjggM7
-MIIDNzAyBggrBgEFBQcBAQQmMCQwIgYIKwYBBQUHMAGGFmh0dHA6Ly9vY3NwLnBr
-aS5ndmEuZXMwEgYDVR0TAQH/BAgwBgEB/wIBAjCCAjQGA1UdIASCAiswggInMIIC
-IwYKKwYBBAG/VQIBADCCAhMwggHoBggrBgEFBQcCAjCCAdoeggHWAEEAdQB0AG8A
-cgBpAGQAYQBkACAAZABlACAAQwBlAHIAdABpAGYAaQBjAGEAYwBpAPMAbgAgAFIA
-YQDtAHoAIABkAGUAIABsAGEAIABHAGUAbgBlAHIAYQBsAGkAdABhAHQAIABWAGEA
-bABlAG4AYwBpAGEAbgBhAC4ADQAKAEwAYQAgAEQAZQBjAGwAYQByAGEAYwBpAPMA
-bgAgAGQAZQAgAFAAcgDhAGMAdABpAGMAYQBzACAAZABlACAAQwBlAHIAdABpAGYA
-aQBjAGEAYwBpAPMAbgAgAHEAdQBlACAAcgBpAGcAZQAgAGUAbAAgAGYAdQBuAGMA
-aQBvAG4AYQBtAGkAZQBuAHQAbwAgAGQAZQAgAGwAYQAgAHAAcgBlAHMAZQBuAHQA
-ZQAgAEEAdQB0AG8AcgBpAGQAYQBkACAAZABlACAAQwBlAHIAdABpAGYAaQBjAGEA
-YwBpAPMAbgAgAHMAZQAgAGUAbgBjAHUAZQBuAHQAcgBhACAAZQBuACAAbABhACAA
-ZABpAHIAZQBjAGMAaQDzAG4AIAB3AGUAYgAgAGgAdAB0AHAAOgAvAC8AdwB3AHcA
-LgBwAGsAaQAuAGcAdgBhAC4AZQBzAC8AYwBwAHMwJQYIKwYBBQUHAgEWGWh0dHA6
-Ly93d3cucGtpLmd2YS5lcy9jcHMwHQYDVR0OBBYEFHs100DSHHgZZu90ECjcPk+y
-eAT8MIGVBgNVHSMEgY0wgYqAFHs100DSHHgZZu90ECjcPk+yeAT8oWykajBoMQsw
-CQYDVQQGEwJFUzEfMB0GA1UEChMWR2VuZXJhbGl0YXQgVmFsZW5jaWFuYTEPMA0G
-A1UECxMGUEtJR1ZBMScwJQYDVQQDEx5Sb290IENBIEdlbmVyYWxpdGF0IFZhbGVu
-Y2lhbmGCBDtF5WgwDQYJKoZIhvcNAQEFBQADggEBACRhTvW1yEICKrNcda3Fbcrn
-lD+laJWIwVTAEGmiEi8YPyVQqHxK6sYJ2fR1xkDar1CdPaUWu20xxsdzCkj+IHLt
-b8zog2EWRpABlUt9jppSCS/2bxzkoXHPjCpaF3ODR00PNvsETUlR4hTJZGH71BTg
-9J63NI8KJr2XXPR5OkowGcytT6CYirQxlyric21+eLj4iIlPsSKRZEv1UN4D2+XF
-ducTZnV+ZfsBn5OHiJ35Rld8TWCvmHMTI6QgkYH60GFmuH3Rr9ZvHmw96RH9qfmC
-IoaZM3Fa6hlXPZHNqcCjbgcTpsnt+GijnsNacgmHKNHEc8RzGF9QdRYxn7fofMM=
------END CERTIFICATE-----
-
-# Issuer: CN=A-Trust-nQual-03 O=A-Trust Ges. f. Sicherheitssysteme im elektr. Datenverkehr GmbH OU=A-Trust-nQual-03
-# Subject: CN=A-Trust-nQual-03 O=A-Trust Ges. f. Sicherheitssysteme im elektr. Datenverkehr GmbH OU=A-Trust-nQual-03
-# Label: "A-Trust-nQual-03"
-# Serial: 93214
-# MD5 Fingerprint: 49:63:ae:27:f4:d5:95:3d:d8:db:24:86:b8:9c:07:53
-# SHA1 Fingerprint: d3:c0:63:f2:19:ed:07:3e:34:ad:5d:75:0b:32:76:29:ff:d5:9a:f2
-# SHA256 Fingerprint: 79:3c:bf:45:59:b9:fd:e3:8a:b2:2d:f1:68:69:f6:98:81:ae:14:c4:b0:13:9a:c7:88:a7:8a:1a:fc:ca:02:fb
------BEGIN CERTIFICATE-----
-MIIDzzCCAregAwIBAgIDAWweMA0GCSqGSIb3DQEBBQUAMIGNMQswCQYDVQQGEwJB
-VDFIMEYGA1UECgw/QS1UcnVzdCBHZXMuIGYuIFNpY2hlcmhlaXRzc3lzdGVtZSBp
-bSBlbGVrdHIuIERhdGVudmVya2VociBHbWJIMRkwFwYDVQQLDBBBLVRydXN0LW5R
-dWFsLTAzMRkwFwYDVQQDDBBBLVRydXN0LW5RdWFsLTAzMB4XDTA1MDgxNzIyMDAw
-MFoXDTE1MDgxNzIyMDAwMFowgY0xCzAJBgNVBAYTAkFUMUgwRgYDVQQKDD9BLVRy
-dXN0IEdlcy4gZi4gU2ljaGVyaGVpdHNzeXN0ZW1lIGltIGVsZWt0ci4gRGF0ZW52
-ZXJrZWhyIEdtYkgxGTAXBgNVBAsMEEEtVHJ1c3QtblF1YWwtMDMxGTAXBgNVBAMM
-EEEtVHJ1c3QtblF1YWwtMDMwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
-AQCtPWFuA/OQO8BBC4SAzewqo51ru27CQoT3URThoKgtUaNR8t4j8DRE/5TrzAUj
-lUC5B3ilJfYKvUWG6Nm9wASOhURh73+nyfrBJcyFLGM/BWBzSQXgYHiVEEvc+RFZ
-znF/QJuKqiTfC0Li21a8StKlDJu3Qz7dg9MmEALP6iPESU7l0+m0iKsMrmKS1GWH
-2WrX9IWf5DMiJaXlyDO6w8dB3F/GaswADm0yqLaHNgBid5seHzTLkDx4iHQF63n1
-k3Flyp3HaxgtPVxO59X4PzF9j4fsCiIvI+n+u33J4PTs63zEsMMtYrWacdaxaujs
-2e3Vcuy+VwHOBVWf3tFgiBCzAgMBAAGjNjA0MA8GA1UdEwEB/wQFMAMBAf8wEQYD
-VR0OBAoECERqlWdVeRFPMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOC
-AQEAVdRU0VlIXLOThaq/Yy/kgM40ozRiPvbY7meIMQQDbwvUB/tOdQ/TLtPAF8fG
-KOwGDREkDg6lXb+MshOWcdzUzg4NCmgybLlBMRmrsQd7TZjTXLDR8KdCoLXEjq/+
-8T/0709GAHbrAvv5ndJAlseIOrifEXnzgGWovR/TeIGgUUw3tKZdJXDRZslo+S4R
-FGjxVJgIrCaSD96JntT6s3kr0qN51OyLrIdTaEJMUVF0HhsnLuP1Hyl0Te2v9+GS
-mYHovjrHF1D2t8b8m7CKa9aIA5GPBnc6hQLdmNVDeD/GMBWsm2vLV7eJUYs66MmE
-DNuxUCAKGkq6ahq97BvIxYSazQ==
------END CERTIFICATE-----
-
# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
# Label: "TWCA Root Certification Authority"
@@ -3451,6 +2143,45 @@ t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy
SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03
-----END CERTIFICATE-----
+# Issuer: CN=EC-ACC O=Agencia Catalana de Certificacio (NIF Q-0801176-I) OU=Serveis Publics de Certificacio/Vegeu https://www.catcert.net/verarrel (c)03/Jerarquia Entitats de Certificacio Catalanes
+# Subject: CN=EC-ACC O=Agencia Catalana de Certificacio (NIF Q-0801176-I) OU=Serveis Publics de Certificacio/Vegeu https://www.catcert.net/verarrel (c)03/Jerarquia Entitats de Certificacio Catalanes
+# Label: "EC-ACC"
+# Serial: -23701579247955709139626555126524820479
+# MD5 Fingerprint: eb:f5:9d:29:0d:61:f9:42:1f:7c:c2:ba:6d:e3:15:09
+# SHA1 Fingerprint: 28:90:3a:63:5b:52:80:fa:e6:77:4c:0b:6d:a7:d6:ba:a6:4a:f2:e8
+# SHA256 Fingerprint: 88:49:7f:01:60:2f:31:54:24:6a:e2:8c:4d:5a:ef:10:f1:d8:7e:bb:76:62:6f:4a:e0:b7:f9:5b:a7:96:87:99
+-----BEGIN CERTIFICATE-----
+MIIFVjCCBD6gAwIBAgIQ7is969Qh3hSoYqwE893EATANBgkqhkiG9w0BAQUFADCB
+8zELMAkGA1UEBhMCRVMxOzA5BgNVBAoTMkFnZW5jaWEgQ2F0YWxhbmEgZGUgQ2Vy
+dGlmaWNhY2lvIChOSUYgUS0wODAxMTc2LUkpMSgwJgYDVQQLEx9TZXJ2ZWlzIFB1
+YmxpY3MgZGUgQ2VydGlmaWNhY2lvMTUwMwYDVQQLEyxWZWdldSBodHRwczovL3d3
+dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbCAoYykwMzE1MDMGA1UECxMsSmVyYXJxdWlh
+IEVudGl0YXRzIGRlIENlcnRpZmljYWNpbyBDYXRhbGFuZXMxDzANBgNVBAMTBkVD
+LUFDQzAeFw0wMzAxMDcyMzAwMDBaFw0zMTAxMDcyMjU5NTlaMIHzMQswCQYDVQQG
+EwJFUzE7MDkGA1UEChMyQWdlbmNpYSBDYXRhbGFuYSBkZSBDZXJ0aWZpY2FjaW8g
+KE5JRiBRLTA4MDExNzYtSSkxKDAmBgNVBAsTH1NlcnZlaXMgUHVibGljcyBkZSBD
+ZXJ0aWZpY2FjaW8xNTAzBgNVBAsTLFZlZ2V1IGh0dHBzOi8vd3d3LmNhdGNlcnQu
+bmV0L3ZlcmFycmVsIChjKTAzMTUwMwYDVQQLEyxKZXJhcnF1aWEgRW50aXRhdHMg
+ZGUgQ2VydGlmaWNhY2lvIENhdGFsYW5lczEPMA0GA1UEAxMGRUMtQUNDMIIBIjAN
+BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsyLHT+KXQpWIR4NA9h0X84NzJB5R
+85iKw5K4/0CQBXCHYMkAqbWUZRkiFRfCQ2xmRJoNBD45b6VLeqpjt4pEndljkYRm
+4CgPukLjbo73FCeTae6RDqNfDrHrZqJyTxIThmV6PttPB/SnCWDaOkKZx7J/sxaV
+HMf5NLWUhdWZXqBIoH7nF2W4onW4HvPlQn2v7fOKSGRdghST2MDk/7NQcvJ29rNd
+QlB50JQ+awwAvthrDk4q7D7SzIKiGGUzE3eeml0aE9jD2z3Il3rucO2n5nzbcc8t
+lGLfbdb1OL4/pYUKGbio2Al1QnDE6u/LDsg0qBIimAy4E5S2S+zw0JDnJwIDAQAB
+o4HjMIHgMB0GA1UdEQQWMBSBEmVjX2FjY0BjYXRjZXJ0Lm5ldDAPBgNVHRMBAf8E
+BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUoMOLRKo3pUW/l4Ba0fF4
+opvpXY0wfwYDVR0gBHgwdjB0BgsrBgEEAfV4AQMBCjBlMCwGCCsGAQUFBwIBFiBo
+dHRwczovL3d3dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbDA1BggrBgEFBQcCAjApGidW
+ZWdldSBodHRwczovL3d3dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbCAwDQYJKoZIhvcN
+AQEFBQADggEBAKBIW4IB9k1IuDlVNZyAelOZ1Vr/sXE7zDkJlF7W2u++AVtd0x7Y
+/X1PzaBB4DSTv8vihpw3kpBWHNzrKQXlxJ7HNd+KDM3FIUPpqojlNcAZQmNaAl6k
+SBg6hW/cnbw/nZzBh7h6YQjpdwt/cKt63dmXLGQehb+8dJahw3oS7AwaboMMPOhy
+Rp/7SNVel+axofjk70YllJyJ22k4vuxcDlbHZVHlUIiIv0LVKz3l+bqeLrPK9HOS
+Agu+TGbrIP65y7WZf+a2E/rKS03Z7lNGBjvGTq2TWoF+bCpLagVFjPIhpDGQh2xl
+nJ2lYJU6Un/10asIbvPuW/mIPX64b24D5EI=
+-----END CERTIFICATE-----
+
# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority
# Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority
# Label: "Hellenic Academic and Research Institutions RootCA 2011"
@@ -3554,96 +2285,6 @@ jZBf3+6f9L/uHfuY5H+QK4R4EA5sSVPvFVtlRkpdr7r7OnIdzfYliB6XzCGcKQEN
ZetX2fNXlrtIzYE=
-----END CERTIFICATE-----
-# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
-# Subject: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
-# Label: "StartCom Certification Authority"
-# Serial: 45
-# MD5 Fingerprint: c9:3b:0d:84:41:fc:a4:76:79:23:08:57:de:10:19:16
-# SHA1 Fingerprint: a3:f1:33:3f:e2:42:bf:cf:c5:d1:4e:8f:39:42:98:40:68:10:d1:a0
-# SHA256 Fingerprint: e1:78:90:ee:09:a3:fb:f4:f4:8b:9c:41:4a:17:d6:37:b7:a5:06:47:e9:bc:75:23:22:72:7f:cc:17:42:a9:11
------BEGIN CERTIFICATE-----
-MIIHhzCCBW+gAwIBAgIBLTANBgkqhkiG9w0BAQsFADB9MQswCQYDVQQGEwJJTDEW
-MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg
-Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh
-dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM3WhcNMzYwOTE3MTk0NjM2WjB9
-MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi
-U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh
-cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA
-A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk
-pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf
-OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C
-Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT
-Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi
-HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM
-Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w
-+2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+
-Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3
-Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B
-26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID
-AQABo4ICEDCCAgwwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD
-VR0OBBYEFE4L7xqkQFulF2mHMMo0aEPQQa7yMB8GA1UdIwQYMBaAFE4L7xqkQFul
-F2mHMMo0aEPQQa7yMIIBWgYDVR0gBIIBUTCCAU0wggFJBgsrBgEEAYG1NwEBATCC
-ATgwLgYIKwYBBQUHAgEWImh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL3BvbGljeS5w
-ZGYwNAYIKwYBBQUHAgEWKGh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL2ludGVybWVk
-aWF0ZS5wZGYwgc8GCCsGAQUFBwICMIHCMCcWIFN0YXJ0IENvbW1lcmNpYWwgKFN0
-YXJ0Q29tKSBMdGQuMAMCAQEagZZMaW1pdGVkIExpYWJpbGl0eSwgcmVhZCB0aGUg
-c2VjdGlvbiAqTGVnYWwgTGltaXRhdGlvbnMqIG9mIHRoZSBTdGFydENvbSBDZXJ0
-aWZpY2F0aW9uIEF1dGhvcml0eSBQb2xpY3kgYXZhaWxhYmxlIGF0IGh0dHA6Ly93
-d3cuc3RhcnRzc2wuY29tL3BvbGljeS5wZGYwEQYJYIZIAYb4QgEBBAQDAgAHMDgG
-CWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNTTCBDZXJ0aWZpY2F0aW9uIEF1
-dGhvcml0eTANBgkqhkiG9w0BAQsFAAOCAgEAjo/n3JR5fPGFf59Jb2vKXfuM/gTF
-wWLRfUKKvFO3lANmMD+x5wqnUCBVJX92ehQN6wQOQOY+2IirByeDqXWmN3PH/UvS
-Ta0XQMhGvjt/UfzDtgUx3M2FIk5xt/JxXrAaxrqTi3iSSoX4eA+D/i+tLPfkpLst
-0OcNOrg+zvZ49q5HJMqjNTbOx8aHmNrs++myziebiMMEofYLWWivydsQD032ZGNc
-pRJvkrKTlMeIFw6Ttn5ii5B/q06f/ON1FE8qMt9bDeD1e5MNq6HPh+GlBEXoPBKl
-CcWw0bdT82AUuoVpaiF8H3VhFyAXe2w7QSlc4axa0c2Mm+tgHRns9+Ww2vl5GKVF
-P0lDV9LdJNUso/2RjSe15esUBppMeyG7Oq0wBhjA2MFrLH9ZXF2RsXAiV+uKa0hK
-1Q8p7MZAwC+ITGgBF3f0JBlPvfrhsiAhS90a2Cl9qrjeVOwhVYBsHvUwyKMQ5bLm
-KhQxw4UtjJixhlpPiVktucf3HMiKf8CdBUrmQk9io20ppB+Fq9vlgcitKj1MXVuE
-JnHEhV5xJMqlG2zYYdMa4FTbzrqpMrUi9nNBCV24F10OD5mQ1kfabwo6YigUZ4LZ
-8dCAWZvLMdibD4x3TrVoivJs9iQOLWxwxXPR3hTQcY+203sC9uO41Alua551hDnm
-fyWl8kgAwKQB2j8=
------END CERTIFICATE-----
-
-# Issuer: CN=StartCom Certification Authority G2 O=StartCom Ltd.
-# Subject: CN=StartCom Certification Authority G2 O=StartCom Ltd.
-# Label: "StartCom Certification Authority G2"
-# Serial: 59
-# MD5 Fingerprint: 78:4b:fb:9e:64:82:0a:d3:b8:4c:62:f3:64:f2:90:64
-# SHA1 Fingerprint: 31:f1:fd:68:22:63:20:ee:c6:3b:3f:9d:ea:4a:3e:53:7c:7c:39:17
-# SHA256 Fingerprint: c7:ba:65:67:de:93:a7:98:ae:1f:aa:79:1e:71:2d:37:8f:ae:1f:93:c4:39:7f:ea:44:1b:b7:cb:e6:fd:59:95
------BEGIN CERTIFICATE-----
-MIIFYzCCA0ugAwIBAgIBOzANBgkqhkiG9w0BAQsFADBTMQswCQYDVQQGEwJJTDEW
-MBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoGA1UEAxMjU3RhcnRDb20gQ2VydGlm
-aWNhdGlvbiBBdXRob3JpdHkgRzIwHhcNMTAwMTAxMDEwMDAxWhcNMzkxMjMxMjM1
-OTAxWjBTMQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoG
-A1UEAxMjU3RhcnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgRzIwggIiMA0G
-CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2iTZbB7cgNr2Cu+EWIAOVeq8Oo1XJ
-JZlKxdBWQYeQTSFgpBSHO839sj60ZwNq7eEPS8CRhXBF4EKe3ikj1AENoBB5uNsD
-vfOpL9HG4A/LnooUCri99lZi8cVytjIl2bLzvWXFDSxu1ZJvGIsAQRSCb0AgJnoo
-D/Uefyf3lLE3PbfHkffiAez9lInhzG7TNtYKGXmu1zSCZf98Qru23QumNK9LYP5/
-Q0kGi4xDuFby2X8hQxfqp0iVAXV16iulQ5XqFYSdCI0mblWbq9zSOdIxHWDirMxW
-RST1HFSr7obdljKF+ExP6JV2tgXdNiNnvP8V4so75qbsO+wmETRIjfaAKxojAuuK
-HDp2KntWFhxyKrOq42ClAJ8Em+JvHhRYW6Vsi1g8w7pOOlz34ZYrPu8HvKTlXcxN
-nw3h3Kq74W4a7I/htkxNeXJdFzULHdfBR9qWJODQcqhaX2YtENwvKhOuJv4KHBnM
-0D4LnMgJLvlblnpHnOl68wVQdJVznjAJ85eCXuaPOQgeWeU1FEIT/wCc976qUM/i
-UUjXuG+v+E5+M5iSFGI6dWPPe/regjupuznixL0sAA7IF6wT700ljtizkC+p2il9
-Ha90OrInwMEePnWjFqmveiJdnxMaz6eg6+OGCtP95paV1yPIN93EfKo2rJgaErHg
-TuixO/XWb/Ew1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE
-AwIBBjAdBgNVHQ4EFgQUS8W0QGutHLOlHGVuRjaJhwUMDrYwDQYJKoZIhvcNAQEL
-BQADggIBAHNXPyzVlTJ+N9uWkusZXn5T50HsEbZH77Xe7XRcxfGOSeD8bpkTzZ+K
-2s06Ctg6Wgk/XzTQLwPSZh0avZyQN8gMjgdalEVGKua+etqhqaRpEpKwfTbURIfX
-UfEpY9Z1zRbkJ4kd+MIySP3bmdCPX1R0zKxnNBFi2QwKN4fRoxdIjtIXHfbX/dtl
-6/2o1PXWT6RbdejF0mCy2wl+JYt7ulKSnj7oxXehPOBKc2thz4bcQ///If4jXSRK
-9dNtD2IEBVeC2m6kMyV5Sy5UGYvMLD0w6dEG/+gyRr61M3Z3qAFdlsHB1b6uJcDJ
-HgoJIIihDsnzb02CVAAgp9KP5DlUFy6NHrgbuxu9mk47EDTcnIhT76IxW1hPkWLI
-wpqazRVdOKnWvvgTtZ8SafJQYqz7Fzf07rh1Z2AQ+4NQ+US1dZxAF7L+/XldblhY
-XzD8AK6vM8EOTmy6p6ahfzLbOOCxchcKK5HsamMm7YnUeMx0HgX4a/6ManY5Ka5l
-IxKVCCIcl85bBu4M4ru8H0ST9tg4RQUh7eStqxK2A6RCLi3ECToDZ2mEmuFZkIoo
-hdVddLHRDiBYmxOlsGOm7XtH/UVVMKTumtTm4ofvmMkyghEpIrwACjFeLQ/Ajulr
-so8uBtjRkcfGEvRM/TAXw8HaOFvjqermobp573PYtlNXLfbQ4ddI
------END CERTIFICATE-----
-
# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
# Label: "Buypass Class 2 Root CA"
@@ -3785,39 +2426,6 @@ iAYLtqZLICjU3j2LrTcFU3T+bsy8QxdxXvnFzBqpYe73dgzzcvRyrc9yAjYHR8/v
GVCJYMzpJJUPwssd8m92kMfMdcGWxZ0=
-----END CERTIFICATE-----
-# Issuer: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. (c) Aralık 2007
-# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. (c) Aralık 2007
-# Label: "TURKTRUST Certificate Services Provider Root 2007"
-# Serial: 1
-# MD5 Fingerprint: 2b:70:20:56:86:82:a0:18:c8:07:53:12:28:70:21:72
-# SHA1 Fingerprint: f1:7f:6f:b6:31:dc:99:e3:a3:c8:7f:fe:1c:f1:81:10:88:d9:60:33
-# SHA256 Fingerprint: 97:8c:d9:66:f2:fa:a0:7b:a7:aa:95:00:d9:c0:2e:9d:77:f2:cd:ad:a6:ad:6b:a7:4a:f4:b9:1c:66:59:3c:50
------BEGIN CERTIFICATE-----
-MIIEPTCCAyWgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBvzE/MD0GA1UEAww2VMOc
-UktUUlVTVCBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sx
-c8SxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMV4wXAYDVQQKDFVUw5xS
-S1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kg
-SGl6bWV0bGVyaSBBLsWeLiAoYykgQXJhbMSxayAyMDA3MB4XDTA3MTIyNTE4Mzcx
-OVoXDTE3MTIyMjE4MzcxOVowgb8xPzA9BgNVBAMMNlTDnFJLVFJVU1QgRWxla3Ry
-b25payBTZXJ0aWZpa2EgSGl6bWV0IFNhxJ9sYXnEsWPEsXPEsTELMAkGA1UEBhMC
-VFIxDzANBgNVBAcMBkFua2FyYTFeMFwGA1UECgxVVMOcUktUUlVTVCBCaWxnaSDE
-sGxldGnFn2ltIHZlIEJpbGnFn2ltIEfDvHZlbmxpxJ9pIEhpem1ldGxlcmkgQS7F
-ni4gKGMpIEFyYWzEsWsgMjAwNzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
-ggEBAKu3PgqMyKVYFeaK7yc9SrToJdPNM8Ig3BnuiD9NYvDdE3ePYakqtdTyuTFY
-KTsvP2qcb3N2Je40IIDu6rfwxArNK4aUyeNgsURSsloptJGXg9i3phQvKUmi8wUG
-+7RP2qFsmmaf8EMJyupyj+sA1zU511YXRxcw9L6/P8JorzZAwan0qafoEGsIiveG
-HtyaKhUG9qPw9ODHFNRRf8+0222vR5YXm3dx2KdxnSQM9pQ/hTEST7ruToK4uT6P
-IzdezKKqdfcYbwnTrqdUKDT74eA7YH2gvnmJhsifLfkKS8RQouf9eRbHegsYz85M
-733WB2+Y8a+xwXrXgTW4qhe04MsCAwEAAaNCMEAwHQYDVR0OBBYEFCnFkKslrxHk
-Yb+j/4hhkeYO/pyBMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0G
-CSqGSIb3DQEBBQUAA4IBAQAQDdr4Ouwo0RSVgrESLFF6QSU2TJ/sPx+EnWVUXKgW
-AkD6bho3hO9ynYYKVZ1WKKxmLNA6VpM0ByWtCLCPyA8JWcqdmBzlVPi5RX9ql2+I
-aE1KBiY3iAIOtsbWcpnOa3faYjGkVh+uX4132l32iPwa2Z61gfAyuOOI0JzzaqC5
-mxRZNTZPz/OOXl0XrRWV2N2y1RVuAE6zS89mlOTgzbUF2mNXi+WzqtvALhyQRNsa
-XRik7r4EW5nVcV9VZWRi1aKbBFmGyGJ353yCRWo9F7/snXUMrqNvWtMvmDb08PUZ
-qxFdyKbjKlhqQgnDvZImZjINXQhVdP+MmNAKpoRq0Tl9
------END CERTIFICATE-----
-
# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
# Label: "D-TRUST Root Class 3 CA 2 2009"
@@ -3884,222 +2492,6 @@ xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX
KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1
-----END CERTIFICATE-----
-# Issuer: CN=Autoridad de Certificacion Raiz del Estado Venezolano O=Sistema Nacional de Certificacion Electronica OU=Superintendencia de Servicios de Certificacion Electronica
-# Subject: CN=PSCProcert O=Sistema Nacional de Certificacion Electronica OU=Proveedor de Certificados PROCERT
-# Label: "PSCProcert"
-# Serial: 11
-# MD5 Fingerprint: e6:24:e9:12:01:ae:0c:de:8e:85:c4:ce:a3:12:dd:ec
-# SHA1 Fingerprint: 70:c1:8d:74:b4:28:81:0a:e4:fd:a5:75:d7:01:9f:99:b0:3d:50:74
-# SHA256 Fingerprint: 3c:fc:3c:14:d1:f6:84:ff:17:e3:8c:43:ca:44:0c:00:b9:67:ec:93:3e:8b:fe:06:4c:a1:d7:2c:90:f2:ad:b0
------BEGIN CERTIFICATE-----
-MIIJhjCCB26gAwIBAgIBCzANBgkqhkiG9w0BAQsFADCCAR4xPjA8BgNVBAMTNUF1
-dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIFJhaXogZGVsIEVzdGFkbyBWZW5lem9s
-YW5vMQswCQYDVQQGEwJWRTEQMA4GA1UEBxMHQ2FyYWNhczEZMBcGA1UECBMQRGlz
-dHJpdG8gQ2FwaXRhbDE2MDQGA1UEChMtU2lzdGVtYSBOYWNpb25hbCBkZSBDZXJ0
-aWZpY2FjaW9uIEVsZWN0cm9uaWNhMUMwQQYDVQQLEzpTdXBlcmludGVuZGVuY2lh
-IGRlIFNlcnZpY2lvcyBkZSBDZXJ0aWZpY2FjaW9uIEVsZWN0cm9uaWNhMSUwIwYJ
-KoZIhvcNAQkBFhZhY3JhaXpAc3VzY2VydGUuZ29iLnZlMB4XDTEwMTIyODE2NTEw
-MFoXDTIwMTIyNTIzNTk1OVowgdExJjAkBgkqhkiG9w0BCQEWF2NvbnRhY3RvQHBy
-b2NlcnQubmV0LnZlMQ8wDQYDVQQHEwZDaGFjYW8xEDAOBgNVBAgTB01pcmFuZGEx
-KjAoBgNVBAsTIVByb3ZlZWRvciBkZSBDZXJ0aWZpY2Fkb3MgUFJPQ0VSVDE2MDQG
-A1UEChMtU2lzdGVtYSBOYWNpb25hbCBkZSBDZXJ0aWZpY2FjaW9uIEVsZWN0cm9u
-aWNhMQswCQYDVQQGEwJWRTETMBEGA1UEAxMKUFNDUHJvY2VydDCCAiIwDQYJKoZI
-hvcNAQEBBQADggIPADCCAgoCggIBANW39KOUM6FGqVVhSQ2oh3NekS1wwQYalNo9
-7BVCwfWMrmoX8Yqt/ICV6oNEolt6Vc5Pp6XVurgfoCfAUFM+jbnADrgV3NZs+J74
-BCXfgI8Qhd19L3uA3VcAZCP4bsm+lU/hdezgfl6VzbHvvnpC2Mks0+saGiKLt38G
-ieU89RLAu9MLmV+QfI4tL3czkkohRqipCKzx9hEC2ZUWno0vluYC3XXCFCpa1sl9
-JcLB/KpnheLsvtF8PPqv1W7/U0HU9TI4seJfxPmOEO8GqQKJ/+MMbpfg353bIdD0
-PghpbNjU5Db4g7ayNo+c7zo3Fn2/omnXO1ty0K+qP1xmk6wKImG20qCZyFSTXai2
-0b1dCl53lKItwIKOvMoDKjSuc/HUtQy9vmebVOvh+qBa7Dh+PsHMosdEMXXqP+UH
-0quhJZb25uSgXTcYOWEAM11G1ADEtMo88aKjPvM6/2kwLkDd9p+cJsmWN63nOaK/
-6mnbVSKVUyqUtd+tFjiBdWbjxywbk5yqjKPK2Ww8F22c3HxT4CAnQzb5EuE8XL1m
-v6JpIzi4mWCZDlZTOpx+FIywBm/xhnaQr/2v/pDGj59/i5IjnOcVdo/Vi5QTcmn7
-K2FjiO/mpF7moxdqWEfLcU8UC17IAggmosvpr2uKGcfLFFb14dq12fy/czja+eev
-bqQ34gcnAgMBAAGjggMXMIIDEzASBgNVHRMBAf8ECDAGAQH/AgEBMDcGA1UdEgQw
-MC6CD3N1c2NlcnRlLmdvYi52ZaAbBgVghl4CAqASDBBSSUYtRy0yMDAwNDAzNi0w
-MB0GA1UdDgQWBBRBDxk4qpl/Qguk1yeYVKIXTC1RVDCCAVAGA1UdIwSCAUcwggFD
-gBStuyIdxuDSAaj9dlBSk+2YwU2u06GCASakggEiMIIBHjE+MDwGA1UEAxM1QXV0
-b3JpZGFkIGRlIENlcnRpZmljYWNpb24gUmFpeiBkZWwgRXN0YWRvIFZlbmV6b2xh
-bm8xCzAJBgNVBAYTAlZFMRAwDgYDVQQHEwdDYXJhY2FzMRkwFwYDVQQIExBEaXN0
-cml0byBDYXBpdGFsMTYwNAYDVQQKEy1TaXN0ZW1hIE5hY2lvbmFsIGRlIENlcnRp
-ZmljYWNpb24gRWxlY3Ryb25pY2ExQzBBBgNVBAsTOlN1cGVyaW50ZW5kZW5jaWEg
-ZGUgU2VydmljaW9zIGRlIENlcnRpZmljYWNpb24gRWxlY3Ryb25pY2ExJTAjBgkq
-hkiG9w0BCQEWFmFjcmFpekBzdXNjZXJ0ZS5nb2IudmWCAQowDgYDVR0PAQH/BAQD
-AgEGME0GA1UdEQRGMESCDnByb2NlcnQubmV0LnZloBUGBWCGXgIBoAwMClBTQy0w
-MDAwMDKgGwYFYIZeAgKgEgwQUklGLUotMzE2MzUzNzMtNzB2BgNVHR8EbzBtMEag
-RKBChkBodHRwOi8vd3d3LnN1c2NlcnRlLmdvYi52ZS9sY3IvQ0VSVElGSUNBRE8t
-UkFJWi1TSEEzODRDUkxERVIuY3JsMCOgIaAfhh1sZGFwOi8vYWNyYWl6LnN1c2Nl
-cnRlLmdvYi52ZTA3BggrBgEFBQcBAQQrMCkwJwYIKwYBBQUHMAGGG2h0dHA6Ly9v
-Y3NwLnN1c2NlcnRlLmdvYi52ZTBBBgNVHSAEOjA4MDYGBmCGXgMBAjAsMCoGCCsG
-AQUFBwIBFh5odHRwOi8vd3d3LnN1c2NlcnRlLmdvYi52ZS9kcGMwDQYJKoZIhvcN
-AQELBQADggIBACtZ6yKZu4SqT96QxtGGcSOeSwORR3C7wJJg7ODU523G0+1ng3dS
-1fLld6c2suNUvtm7CpsR72H0xpkzmfWvADmNg7+mvTV+LFwxNG9s2/NkAZiqlCxB
-3RWGymspThbASfzXg0gTB1GEMVKIu4YXx2sviiCtxQuPcD4quxtxj7mkoP3Yldmv
-Wb8lK5jpY5MvYB7Eqvh39YtsL+1+LrVPQA3uvFd359m21D+VJzog1eWuq2w1n8Gh
-HVnchIHuTQfiSLaeS5UtQbHh6N5+LwUeaO6/u5BlOsju6rEYNxxik6SgMexxbJHm
-pHmJWhSnFFAFTKQAVzAswbVhltw+HoSvOULP5dAssSS830DD7X9jSr3hTxJkhpXz
-sOfIt+FTvZLm8wyWuevo5pLtp4EJFAv8lXrPj9Y0TzYS3F7RNHXGRoAvlQSMx4bE
-qCaJqD8Zm4G7UaRKhqsLEQ+xrmNTbSjq3TNWOByyrYDT13K9mmyZY+gAu0F2Bbdb
-mRiKw7gSXFbPVgx96OLP7bx0R/vu0xdOIk9W/1DzLuY5poLWccret9W6aAjtmcz9
-opLLabid+Qqkpj5PkygqYWwHJgD/ll9ohri4zspV4KuxPX+Y1zMOWj3YeMLEYC/H
-YvBhkdI4sPaeVdtAgAUSM84dkpvRabP/v/GSCmE1P93+hvS84Bpxs2Km
------END CERTIFICATE-----
-
-# Issuer: CN=China Internet Network Information Center EV Certificates Root O=China Internet Network Information Center
-# Subject: CN=China Internet Network Information Center EV Certificates Root O=China Internet Network Information Center
-# Label: "China Internet Network Information Center EV Certificates Root"
-# Serial: 1218379777
-# MD5 Fingerprint: 55:5d:63:00:97:bd:6a:97:f5:67:ab:4b:fb:6e:63:15
-# SHA1 Fingerprint: 4f:99:aa:93:fb:2b:d1:37:26:a1:99:4a:ce:7f:f0:05:f2:93:5d:1e
-# SHA256 Fingerprint: 1c:01:c6:f4:db:b2:fe:fc:22:55:8b:2b:ca:32:56:3f:49:84:4a:cf:c3:2b:7b:e4:b0:ff:59:9f:9e:8c:7a:f7
------BEGIN CERTIFICATE-----
-MIID9zCCAt+gAwIBAgIESJ8AATANBgkqhkiG9w0BAQUFADCBijELMAkGA1UEBhMC
-Q04xMjAwBgNVBAoMKUNoaW5hIEludGVybmV0IE5ldHdvcmsgSW5mb3JtYXRpb24g
-Q2VudGVyMUcwRQYDVQQDDD5DaGluYSBJbnRlcm5ldCBOZXR3b3JrIEluZm9ybWF0
-aW9uIENlbnRlciBFViBDZXJ0aWZpY2F0ZXMgUm9vdDAeFw0xMDA4MzEwNzExMjVa
-Fw0zMDA4MzEwNzExMjVaMIGKMQswCQYDVQQGEwJDTjEyMDAGA1UECgwpQ2hpbmEg
-SW50ZXJuZXQgTmV0d29yayBJbmZvcm1hdGlvbiBDZW50ZXIxRzBFBgNVBAMMPkNo
-aW5hIEludGVybmV0IE5ldHdvcmsgSW5mb3JtYXRpb24gQ2VudGVyIEVWIENlcnRp
-ZmljYXRlcyBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAm35z
-7r07eKpkQ0H1UN+U8i6yjUqORlTSIRLIOTJCBumD1Z9S7eVnAztUwYyZmczpwA//
-DdmEEbK40ctb3B75aDFk4Zv6dOtouSCV98YPjUesWgbdYavi7NifFy2cyjw1l1Vx
-zUOFsUcW9SxTgHbP0wBkvUCZ3czY28Sf1hNfQYOL+Q2HklY0bBoQCxfVWhyXWIQ8
-hBouXJE0bhlffxdpxWXvayHG1VA6v2G5BY3vbzQ6sm8UY78WO5upKv23KzhmBsUs
-4qpnHkWnjQRmQvaPK++IIGmPMowUc9orhpFjIpryp9vOiYurXccUwVswah+xt54u
-gQEC7c+WXmPbqOY4twIDAQABo2MwYTAfBgNVHSMEGDAWgBR8cks5x8DbYqVPm6oY
-NJKiyoOCWTAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4E
-FgQUfHJLOcfA22KlT5uqGDSSosqDglkwDQYJKoZIhvcNAQEFBQADggEBACrDx0M3
-j92tpLIM7twUbY8opJhJywyA6vPtI2Z1fcXTIWd50XPFtQO3WKwMVC/GVhMPMdoG
-52U7HW8228gd+f2ABsqjPWYWqJ1MFn3AlUa1UeTiH9fqBk1jjZaM7+czV0I664zB
-echNdn3e9rG3geCg+aF4RhcaVpjwTj2rHO3sOdwHSPdj/gauwqRcalsyiMXHM4Ws
-ZkJHwlgkmeHlPuV1LI5D1l08eB6olYIpUNHRFrrvwb562bTYzB5MRuF3sTGrvSrI
-zo9uoV1/A3U05K2JRVRevq4opbs/eHnrc7MKDf2+yfdWrPa37S+bISnHOLaVxATy
-wy39FCqQmbkHzJ8=
------END CERTIFICATE-----
-
-# Issuer: CN=Swisscom Root CA 2 O=Swisscom OU=Digital Certificate Services
-# Subject: CN=Swisscom Root CA 2 O=Swisscom OU=Digital Certificate Services
-# Label: "Swisscom Root CA 2"
-# Serial: 40698052477090394928831521023204026294
-# MD5 Fingerprint: 5b:04:69:ec:a5:83:94:63:18:a7:86:d0:e4:f2:6e:19
-# SHA1 Fingerprint: 77:47:4f:c6:30:e4:0f:4c:47:64:3f:84:ba:b8:c6:95:4a:8a:41:ec
-# SHA256 Fingerprint: f0:9b:12:2c:71:14:f4:a0:9b:d4:ea:4f:4a:99:d5:58:b4:6e:4c:25:cd:81:14:0d:29:c0:56:13:91:4c:38:41
------BEGIN CERTIFICATE-----
-MIIF2TCCA8GgAwIBAgIQHp4o6Ejy5e/DfEoeWhhntjANBgkqhkiG9w0BAQsFADBk
-MQswCQYDVQQGEwJjaDERMA8GA1UEChMIU3dpc3Njb20xJTAjBgNVBAsTHERpZ2l0
-YWwgQ2VydGlmaWNhdGUgU2VydmljZXMxGzAZBgNVBAMTElN3aXNzY29tIFJvb3Qg
-Q0EgMjAeFw0xMTA2MjQwODM4MTRaFw0zMTA2MjUwNzM4MTRaMGQxCzAJBgNVBAYT
-AmNoMREwDwYDVQQKEwhTd2lzc2NvbTElMCMGA1UECxMcRGlnaXRhbCBDZXJ0aWZp
-Y2F0ZSBTZXJ2aWNlczEbMBkGA1UEAxMSU3dpc3Njb20gUm9vdCBDQSAyMIICIjAN
-BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAlUJOhJ1R5tMJ6HJaI2nbeHCOFvEr
-jw0DzpPMLgAIe6szjPTpQOYXTKueuEcUMncy3SgM3hhLX3af+Dk7/E6J2HzFZ++r
-0rk0X2s682Q2zsKwzxNoysjL67XiPS4h3+os1OD5cJZM/2pYmLcX5BtS5X4HAB1f
-2uY+lQS3aYg5oUFgJWFLlTloYhyxCwWJwDaCFCE/rtuh/bxvHGCGtlOUSbkrRsVP
-ACu/obvLP+DHVxxX6NZp+MEkUp2IVd3Chy50I9AU/SpHWrumnf2U5NGKpV+GY3aF
-y6//SSj8gO1MedK75MDvAe5QQQg1I3ArqRa0jG6F6bYRzzHdUyYb3y1aSgJA/MTA
-tukxGggo5WDDH8SQjhBiYEQN7Aq+VRhxLKX0srwVYv8c474d2h5Xszx+zYIdkeNL
-6yxSNLCK/RJOlrDrcH+eOfdmQrGrrFLadkBXeyq96G4DsguAhYidDMfCd7Camlf0
-uPoTXGiTOmekl9AbmbeGMktg2M7v0Ax/lZ9vh0+Hio5fCHyqW/xavqGRn1V9TrAL
-acywlKinh/LTSlDcX3KwFnUey7QYYpqwpzmqm59m2I2mbJYV4+by+PGDYmy7Velh
-k6M99bFXi08jsJvllGov34zflVEpYKELKeRcVVi3qPyZ7iVNTA6z00yPhOgpD/0Q
-VAKFyPnlw4vP5w8CAwEAAaOBhjCBgzAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0hBBYw
-FDASBgdghXQBUwIBBgdghXQBUwIBMBIGA1UdEwEB/wQIMAYBAf8CAQcwHQYDVR0O
-BBYEFE0mICKJS9PVpAqhb97iEoHF8TwuMB8GA1UdIwQYMBaAFE0mICKJS9PVpAqh
-b97iEoHF8TwuMA0GCSqGSIb3DQEBCwUAA4ICAQAyCrKkG8t9voJXiblqf/P0wS4R
-fbgZPnm3qKhyN2abGu2sEzsOv2LwnN+ee6FTSA5BesogpxcbtnjsQJHzQq0Qw1zv
-/2BZf82Fo4s9SBwlAjxnffUy6S8w5X2lejjQ82YqZh6NM4OKb3xuqFp1mrjX2lhI
-REeoTPpMSQpKwhI3qEAMw8jh0FcNlzKVxzqfl9NX+Ave5XLzo9v/tdhZsnPdTSpx
-srpJ9csc1fV5yJmz/MFMdOO0vSk3FQQoHt5FRnDsr7p4DooqzgB53MBfGWcsa0vv
-aGgLQ+OswWIJ76bdZWGgr4RVSJFSHMYlkSrQwSIjYVmvRRGFHQEkNI/Ps/8XciAT
-woCqISxxOQ7Qj1zB09GOInJGTB2Wrk9xseEFKZZZ9LuedT3PDTcNYtsmjGOpI99n
-Bjx8Oto0QuFmtEYE3saWmA9LSHokMnWRn6z3aOkquVVlzl1h0ydw2Df+n7mvoC5W
-t6NlUe07qxS/TFED6F+KBZvuim6c779o+sjaC+NCydAXFJy3SuCvkychVSa1ZC+N
-8f+mQAWFBVzKBxlcCxMoTFh/wqXvRdpg065lYZ1Tg3TCrvJcwhbtkj6EPnNgiLx2
-9CzP0H1907he0ZESEOnN3col49XtmS++dYFLJPlFRpTJKSFTnCZFqhMX5OfNeOI5
-wSsSnqaeG8XmDtkx2Q==
------END CERTIFICATE-----
-
-# Issuer: CN=Swisscom Root EV CA 2 O=Swisscom OU=Digital Certificate Services
-# Subject: CN=Swisscom Root EV CA 2 O=Swisscom OU=Digital Certificate Services
-# Label: "Swisscom Root EV CA 2"
-# Serial: 322973295377129385374608406479535262296
-# MD5 Fingerprint: 7b:30:34:9f:dd:0a:4b:6b:35:ca:31:51:28:5d:ae:ec
-# SHA1 Fingerprint: e7:a1:90:29:d3:d5:52:dc:0d:0f:c6:92:d3:ea:88:0d:15:2e:1a:6b
-# SHA256 Fingerprint: d9:5f:ea:3c:a4:ee:dc:e7:4c:d7:6e:75:fc:6d:1f:f6:2c:44:1f:0f:a8:bc:77:f0:34:b1:9e:5d:b2:58:01:5d
------BEGIN CERTIFICATE-----
-MIIF4DCCA8igAwIBAgIRAPL6ZOJ0Y9ON/RAdBB92ylgwDQYJKoZIhvcNAQELBQAw
-ZzELMAkGA1UEBhMCY2gxETAPBgNVBAoTCFN3aXNzY29tMSUwIwYDVQQLExxEaWdp
-dGFsIENlcnRpZmljYXRlIFNlcnZpY2VzMR4wHAYDVQQDExVTd2lzc2NvbSBSb290
-IEVWIENBIDIwHhcNMTEwNjI0MDk0NTA4WhcNMzEwNjI1MDg0NTA4WjBnMQswCQYD
-VQQGEwJjaDERMA8GA1UEChMIU3dpc3Njb20xJTAjBgNVBAsTHERpZ2l0YWwgQ2Vy
-dGlmaWNhdGUgU2VydmljZXMxHjAcBgNVBAMTFVN3aXNzY29tIFJvb3QgRVYgQ0Eg
-MjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMT3HS9X6lds93BdY7Bx
-UglgRCgzo3pOCvrY6myLURYaVa5UJsTMRQdBTxB5f3HSek4/OE6zAMaVylvNwSqD
-1ycfMQ4jFrclyxy0uYAyXhqdk/HoPGAsp15XGVhRXrwsVgu42O+LgrQ8uMIkqBPH
-oCE2G3pXKSinLr9xJZDzRINpUKTk4RtiGZQJo/PDvO/0vezbE53PnUgJUmfANykR
-HvvSEaeFGHR55E+FFOtSN+KxRdjMDUN/rhPSays/p8LiqG12W0OfvrSdsyaGOx9/
-5fLoZigWJdBLlzin5M8J0TbDC77aO0RYjb7xnglrPvMyxyuHxuxenPaHZa0zKcQv
-idm5y8kDnftslFGXEBuGCxobP/YCfnvUxVFkKJ3106yDgYjTdLRZncHrYTNaRdHL
-OdAGalNgHa/2+2m8atwBz735j9m9W8E6X47aD0upm50qKGsaCnw8qyIL5XctcfaC
-NYGu+HuB5ur+rPQam3Rc6I8k9l2dRsQs0h4rIWqDJ2dVSqTjyDKXZpBy2uPUZC5f
-46Fq9mDU5zXNysRojddxyNMkM3OxbPlq4SjbX8Y96L5V5jcb7STZDxmPX2MYWFCB
-UWVv8p9+agTnNCRxunZLWB4ZvRVgRaoMEkABnRDixzgHcgplwLa7JSnaFp6LNYth
-7eVxV4O1PHGf40+/fh6Bn0GXAgMBAAGjgYYwgYMwDgYDVR0PAQH/BAQDAgGGMB0G
-A1UdIQQWMBQwEgYHYIV0AVMCAgYHYIV0AVMCAjASBgNVHRMBAf8ECDAGAQH/AgED
-MB0GA1UdDgQWBBRF2aWBbj2ITY1x0kbBbkUe88SAnTAfBgNVHSMEGDAWgBRF2aWB
-bj2ITY1x0kbBbkUe88SAnTANBgkqhkiG9w0BAQsFAAOCAgEAlDpzBp9SSzBc1P6x
-XCX5145v9Ydkn+0UjrgEjihLj6p7jjm02Vj2e6E1CqGdivdj5eu9OYLU43otb98T
-PLr+flaYC/NUn81ETm484T4VvwYmneTwkLbUwp4wLh/vx3rEUMfqe9pQy3omywC0
-Wqu1kx+AiYQElY2NfwmTv9SoqORjbdlk5LgpWgi/UOGED1V7XwgiG/W9mR4U9s70
-WBCCswo9GcG/W6uqmdjyMb3lOGbcWAXH7WMaLgqXfIeTK7KK4/HsGOV1timH59yL
-Gn602MnTihdsfSlEvoqq9X46Lmgxk7lq2prg2+kupYTNHAq4Sgj5nPFhJpiTt3tm
-7JFe3VE/23MPrQRYCd0EApUKPtN236YQHoA96M2kZNEzx5LH4k5E4wnJTsJdhw4S
-nr8PyQUQ3nqjsTzyP6WqJ3mtMX0f/fwZacXduT98zca0wjAefm6S139hdlqP65VN
-vBFuIXxZN5nQBrz5Bm0yFqXZaajh3DyAHmBR3NdUIR7KYndP+tiPsys6DXhyyWhB
-WkdKwqPrGtcKqzwyVcgKEZzfdNbwQBUdyLmPtTbFr/giuMod89a2GQ+fYWVq6nTI
-fI/DT11lgh/ZDYnadXL77/FHZxOzyNEZiCcmmpl5fx7kLD977vHeTYuWl8PVP3wb
-I+2ksx0WckNLIOFZfsLorSa/ovc=
------END CERTIFICATE-----
-
-# Issuer: CN=CA Disig Root R1 O=Disig a.s.
-# Subject: CN=CA Disig Root R1 O=Disig a.s.
-# Label: "CA Disig Root R1"
-# Serial: 14052245610670616104
-# MD5 Fingerprint: be:ec:11:93:9a:f5:69:21:bc:d7:c1:c0:67:89:cc:2a
-# SHA1 Fingerprint: 8e:1c:74:f8:a6:20:b9:e5:8a:f4:61:fa:ec:2b:47:56:51:1a:52:c6
-# SHA256 Fingerprint: f9:6f:23:f4:c3:e7:9c:07:7a:46:98:8d:5a:f5:90:06:76:a0:f0:39:cb:64:5d:d1:75:49:b2:16:c8:24:40:ce
------BEGIN CERTIFICATE-----
-MIIFaTCCA1GgAwIBAgIJAMMDmu5QkG4oMA0GCSqGSIb3DQEBBQUAMFIxCzAJBgNV
-BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu
-MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIxMB4XDTEyMDcxOTA5MDY1NloXDTQy
-MDcxOTA5MDY1NlowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx
-EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjEw
-ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCqw3j33Jijp1pedxiy3QRk
-D2P9m5YJgNXoqqXinCaUOuiZc4yd39ffg/N4T0Dhf9Kn0uXKE5Pn7cZ3Xza1lK/o
-OI7bm+V8u8yN63Vz4STN5qctGS7Y1oprFOsIYgrY3LMATcMjfF9DCCMyEtztDK3A
-fQ+lekLZWnDZv6fXARz2m6uOt0qGeKAeVjGu74IKgEH3G8muqzIm1Cxr7X1r5OJe
-IgpFy4QxTaz+29FHuvlglzmxZcfe+5nkCiKxLU3lSCZpq+Kq8/v8kiky6bM+TR8n
-oc2OuRf7JT7JbvN32g0S9l3HuzYQ1VTW8+DiR0jm3hTaYVKvJrT1cU/J19IG32PK
-/yHoWQbgCNWEFVP3Q+V8xaCJmGtzxmjOZd69fwX3se72V6FglcXM6pM6vpmumwKj
-rckWtc7dXpl4fho5frLABaTAgqWjR56M6ly2vGfb5ipN0gTco65F97yLnByn1tUD
-3AjLLhbKXEAz6GfDLuemROoRRRw1ZS0eRWEkG4IupZ0zXWX4Qfkuy5Q/H6MMMSRE
-7cderVC6xkGbrPAXZcD4XW9boAo0PO7X6oifmPmvTiT6l7Jkdtqr9O3jw2Dv1fkC
-yC2fg69naQanMVXVz0tv/wQFx1isXxYb5dKj6zHbHzMVTdDypVP1y+E9Tmgt2BLd
-qvLmTZtJ5cUoobqwWsagtQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud
-DwEB/wQEAwIBBjAdBgNVHQ4EFgQUiQq0OJMa5qvum5EY+fU8PjXQ04IwDQYJKoZI
-hvcNAQEFBQADggIBADKL9p1Kyb4U5YysOMo6CdQbzoaz3evUuii+Eq5FLAR0rBNR
-xVgYZk2C2tXck8An4b58n1KeElb21Zyp9HWc+jcSjxyT7Ff+Bw+r1RL3D65hXlaA
-SfX8MPWbTx9BLxyE04nH4toCdu0Jz2zBuByDHBb6lM19oMgY0sidbvW9adRtPTXo
-HqJPYNcHKfyyo6SdbhWSVhlMCrDpfNIZTUJG7L399ldb3Zh+pE3McgODWF3vkzpB
-emOqfDqo9ayk0d2iLbYq/J8BjuIQscTK5GfbVSUZP/3oNn6z4eGBrxEWi1CXYBmC
-AMBrTXO40RMHPuq2MU/wQppt4hF05ZSsjYSVPCGvxdpHyN85YmLLW1AL14FABZyb
-7bq2ix4Eb5YgOe2kfSnbSM6C3NQCjR0EMVrHS/BsYVLXtFHCgWzN4funodKSds+x
-DzdYpPJScWc/DIh4gInByLUfkmO+p3qKViwaqKactV2zY9ATIKHrkWzQjX2v3wvk
-F7mGnjixlAxYjOBVqjtjbZqJYLhkKpLGN/R+Q0O3c+gB53+XD9fyexn9GtePyfqF
-a3qdnom2piiZk4hA9z7NUaPK6u95RyG1/jLix8NRb76AdPCkwzryT+lf3xkK8jsT
-Q6wxpLPn6/wY1gGp8yqPNg7rtLG8t0zJa7+h89n07eLw4+1knj0vllJPgFOL
------END CERTIFICATE-----
-
# Issuer: CN=CA Disig Root R2 O=Disig a.s.
# Subject: CN=CA Disig Root R2 O=Disig a.s.
# Label: "CA Disig Root R2"
@@ -4635,85 +3027,6 @@ r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1
gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+
-----END CERTIFICATE-----
-# Issuer: CN=Certification Authority of WoSign O=WoSign CA Limited
-# Subject: CN=Certification Authority of WoSign O=WoSign CA Limited
-# Label: "WoSign"
-# Serial: 125491772294754854453622855443212256657
-# MD5 Fingerprint: a1:f2:f9:b5:d2:c8:7a:74:b8:f3:05:f1:d7:e1:84:8d
-# SHA1 Fingerprint: b9:42:94:bf:91:ea:8f:b6:4b:e6:10:97:c7:fb:00:13:59:b6:76:cb
-# SHA256 Fingerprint: 4b:22:d5:a6:ae:c9:9f:3c:db:79:aa:5e:c0:68:38:47:9c:d5:ec:ba:71:64:f7:f2:2d:c1:d6:5f:63:d8:57:08
------BEGIN CERTIFICATE-----
-MIIFdjCCA16gAwIBAgIQXmjWEXGUY1BWAGjzPsnFkTANBgkqhkiG9w0BAQUFADBV
-MQswCQYDVQQGEwJDTjEaMBgGA1UEChMRV29TaWduIENBIExpbWl0ZWQxKjAoBgNV
-BAMTIUNlcnRpZmljYXRpb24gQXV0aG9yaXR5IG9mIFdvU2lnbjAeFw0wOTA4MDgw
-MTAwMDFaFw0zOTA4MDgwMTAwMDFaMFUxCzAJBgNVBAYTAkNOMRowGAYDVQQKExFX
-b1NpZ24gQ0EgTGltaXRlZDEqMCgGA1UEAxMhQ2VydGlmaWNhdGlvbiBBdXRob3Jp
-dHkgb2YgV29TaWduMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAvcqN
-rLiRFVaXe2tcesLea9mhsMMQI/qnobLMMfo+2aYpbxY94Gv4uEBf2zmoAHqLoE1U
-fcIiePyOCbiohdfMlZdLdNiefvAA5A6JrkkoRBoQmTIPJYhTpA2zDxIIFgsDcScc
-f+Hb0v1naMQFXQoOXXDX2JegvFNBmpGN9J42Znp+VsGQX+axaCA2pIwkLCxHC1l2
-ZjC1vt7tj/id07sBMOby8w7gLJKA84X5KIq0VC6a7fd2/BVoFutKbOsuEo/Uz/4M
-x1wdC34FMr5esAkqQtXJTpCzWQ27en7N1QhatH/YHGkR+ScPewavVIMYe+HdVHpR
-aG53/Ma/UkpmRqGyZxq7o093oL5d//xWC0Nyd5DKnvnyOfUNqfTq1+ezEC8wQjch
-zDBwyYaYD8xYTYO7feUapTeNtqwylwA6Y3EkHp43xP901DfA4v6IRmAR3Qg/UDar
-uHqklWJqbrDKaiFaafPz+x1wOZXzp26mgYmhiMU7ccqjUu6Du/2gd/Tkb+dC221K
-mYo0SLwX3OSACCK28jHAPwQ+658geda4BmRkAjHXqc1S+4RFaQkAKtxVi8QGRkvA
-Sh0JWzko/amrzgD5LkhLJuYwTKVYyrREgk/nkR4zw7CT/xH8gdLKH3Ep3XZPkiWv
-HYG3Dy+MwwbMLyejSuQOmbp8HkUff6oZRZb9/D0CAwEAAaNCMEAwDgYDVR0PAQH/
-BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFOFmzw7R8bNLtwYgFP6H
-EtX2/vs+MA0GCSqGSIb3DQEBBQUAA4ICAQCoy3JAsnbBfnv8rWTjMnvMPLZdRtP1
-LOJwXcgu2AZ9mNELIaCJWSQBnfmvCX0KI4I01fx8cpm5o9dU9OpScA7F9dY74ToJ
-MuYhOZO9sxXqT2r09Ys/L3yNWC7F4TmgPsc9SnOeQHrAK2GpZ8nzJLmzbVUsWh2e
-JXLOC62qx1ViC777Y7NhRCOjy+EaDveaBk3e1CNOIZZbOVtXHS9dCF4Jef98l7VN
-g64N1uajeeAz0JmWAjCnPv/So0M/BVoG6kQC2nz4SNAzqfkHx5Xh9T71XXG68pWp
-dIhhWeO/yloTunK0jF02h+mmxTwTv97QRCbut+wucPrXnbes5cVAWubXbHssw1ab
-R80LzvobtCHXt2a49CUwi1wNuepnsvRtrtWhnk/Yn+knArAdBtaP4/tIEp9/EaEQ
-PkxROpaw0RPxx9gmrjrKkcRpnd8BKWRRb2jaFOwIQZeQjdCygPLPwj2/kWjFgGce
-xGATVdVhmVd8upUPYUk6ynW8yQqTP2cOEvIo4jEbwFcW3wh8GcF+Dx+FHgo2fFt+
-J7x6v+Db9NpSvd4MVHAxkUOVyLzwPt0JfjBkUO1/AaQzZ01oT74V77D2AhGiGxMl
-OtzCWfHjXEa7ZywCRuoeSKbmW9m1vFGikpbbqsY3Iqb+zCB0oy2pLmvLwIIRIbWT
-ee5Ehr7XHuQe+w==
------END CERTIFICATE-----
-
-# Issuer: CN=CA 沃通根证书 O=WoSign CA Limited
-# Subject: CN=CA 沃通根证书 O=WoSign CA Limited
-# Label: "WoSign China"
-# Serial: 106921963437422998931660691310149453965
-# MD5 Fingerprint: 78:83:5b:52:16:76:c4:24:3b:83:78:e8:ac:da:9a:93
-# SHA1 Fingerprint: 16:32:47:8d:89:f9:21:3a:92:00:85:63:f5:a4:a7:d3:12:40:8a:d6
-# SHA256 Fingerprint: d6:f0:34:bd:94:aa:23:3f:02:97:ec:a4:24:5b:28:39:73:e4:47:aa:59:0f:31:0c:77:f4:8f:df:83:11:22:54
------BEGIN CERTIFICATE-----
-MIIFWDCCA0CgAwIBAgIQUHBrzdgT/BtOOzNy0hFIjTANBgkqhkiG9w0BAQsFADBG
-MQswCQYDVQQGEwJDTjEaMBgGA1UEChMRV29TaWduIENBIExpbWl0ZWQxGzAZBgNV
-BAMMEkNBIOayg+mAmuagueivgeS5pjAeFw0wOTA4MDgwMTAwMDFaFw0zOTA4MDgw
-MTAwMDFaMEYxCzAJBgNVBAYTAkNOMRowGAYDVQQKExFXb1NpZ24gQ0EgTGltaXRl
-ZDEbMBkGA1UEAwwSQ0Eg5rKD6YCa5qC56K+B5LmmMIICIjANBgkqhkiG9w0BAQEF
-AAOCAg8AMIICCgKCAgEA0EkhHiX8h8EqwqzbdoYGTufQdDTc7WU1/FDWiD+k8H/r
-D195L4mx/bxjWDeTmzj4t1up+thxx7S8gJeNbEvxUNUqKaqoGXqW5pWOdO2XCld1
-9AXbbQs5uQF/qvbW2mzmBeCkTVL829B0txGMe41P/4eDrv8FAxNXUDf+jJZSEExf
-v5RxadmWPgxDT74wwJ85dE8GRV2j1lY5aAfMh09Qd5Nx2UQIsYo06Yms25tO4dnk
-UkWMLhQfkWsZHWgpLFbE4h4TV2TwYeO5Ed+w4VegG63XX9Gv2ystP9Bojg/qnw+L
-NVgbExz03jWhCl3W6t8Sb8D7aQdGctyB9gQjF+BNdeFyb7Ao65vh4YOhn0pdr8yb
-+gIgthhid5E7o9Vlrdx8kHccREGkSovrlXLp9glk3Kgtn3R46MGiCWOc76DbT52V
-qyBPt7D3h1ymoOQ3OMdc4zUPLK2jgKLsLl3Az+2LBcLmc272idX10kaO6m1jGx6K
-yX2m+Jzr5dVjhU1zZmkR/sgO9MHHZklTfuQZa/HpelmjbX7FF+Ynxu8b22/8DU0G
-AbQOXDBGVWCvOGU6yke6rCzMRh+yRpY/8+0mBe53oWprfi1tWFxK1I5nuPHa1UaK
-J/kR8slC/k7e3x9cxKSGhxYzoacXGKUN5AXlK8IrC6KVkLn9YDxOiT7nnO4fuwEC
-AwEAAaNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O
-BBYEFOBNv9ybQV0T6GTwp+kVpOGBwboxMA0GCSqGSIb3DQEBCwUAA4ICAQBqinA4
-WbbaixjIvirTthnVZil6Xc1bL3McJk6jfW+rtylNpumlEYOnOXOvEESS5iVdT2H6
-yAa+Tkvv/vMx/sZ8cApBWNromUuWyXi8mHwCKe0JgOYKOoICKuLJL8hWGSbueBwj
-/feTZU7n85iYr83d2Z5AiDEoOqsuC7CsDCT6eiaY8xJhEPRdF/d+4niXVOKM6Cm6
-jBAyvd0zaziGfjk9DgNyp115j0WKWa5bIW4xRtVZjc8VX90xJc/bYNaBRHIpAlf2
-ltTW/+op2znFuCyKGo3Oy+dCMYYFaA6eFN0AkLppRQjbbpCBhqcqBT/mhDn4t/lX
-X0ykeVoQDF7Va/81XwVRHmyjdanPUIPTfPRm94KNPQx96N97qA4bLJyuQHCH2u2n
-FoJavjVsIE4iYdm8UXrNemHcSxH5/mc0zy4EZmFcV5cjjPOGG0jfKq+nwf/Yjj4D
-u9gqsPoUJbJRa4ZDhS4HIxaAjUz7tGM7zMN07RujHv41D198HRaG9Q7DlfEvr10l
-O1Hm13ZBONFLAzkopR6RctR9q5czxNM+4Gm2KHmgCY0c0f9BckgG/Jou5yD5m6Le
-ie2uPAmvylezkolwQOQvT8Jwg0DXJCxr5wkf09XHwQj02w47HAcLQxGEIYbpgNR1
-2KvxAmLBsX5VYc8T1yaw15zLKYs4SgsOkI26oQ==
------END CERTIFICATE-----
-
# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited
# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited
# Label: "COMODO RSA Certification Authority"
@@ -5122,495 +3435,1210 @@ AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ
5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su
-----END CERTIFICATE-----
-# Issuer: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H5 O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş.
-# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H5 O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş.
-# Label: "TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H5"
-# Serial: 156233699172481
-# MD5 Fingerprint: da:70:8e:f0:22:df:93:26:f6:5f:9f:d3:15:06:52:4e
-# SHA1 Fingerprint: c4:18:f6:4d:46:d1:df:00:3d:27:30:13:72:43:a9:12:11:c6:75:fb
-# SHA256 Fingerprint: 49:35:1b:90:34:44:c1:85:cc:dc:5c:69:3d:24:d8:55:5c:b2:08:d6:a8:14:13:07:69:9f:4a:f0:63:19:9d:78
+# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed
+# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed
+# Label: "OISTE WISeKey Global Root GB CA"
+# Serial: 157768595616588414422159278966750757568
+# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d
+# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed
+# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6
-----BEGIN CERTIFICATE-----
-MIIEJzCCAw+gAwIBAgIHAI4X/iQggTANBgkqhkiG9w0BAQsFADCBsTELMAkGA1UE
-BhMCVFIxDzANBgNVBAcMBkFua2FyYTFNMEsGA1UECgxEVMOcUktUUlVTVCBCaWxn
-aSDEsGxldGnFn2ltIHZlIEJpbGnFn2ltIEfDvHZlbmxpxJ9pIEhpem1ldGxlcmkg
-QS7Fni4xQjBABgNVBAMMOVTDnFJLVFJVU1QgRWxla3Ryb25payBTZXJ0aWZpa2Eg
-SGl6bWV0IFNhxJ9sYXnEsWPEsXPEsSBINTAeFw0xMzA0MzAwODA3MDFaFw0yMzA0
-MjgwODA3MDFaMIGxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMU0wSwYD
-VQQKDERUw5xSS1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8
-dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWeLjFCMEAGA1UEAww5VMOcUktUUlVTVCBF
-bGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxIEg1MIIB
-IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEApCUZ4WWe60ghUEoI5RHwWrom
-/4NZzkQqL/7hzmAD/I0Dpe3/a6i6zDQGn1k19uwsu537jVJp45wnEFPzpALFp/kR
-Gml1bsMdi9GYjZOHp3GXDSHHmflS0yxjXVW86B8BSLlg/kJK9siArs1mep5Fimh3
-4khon6La8eHBEJ/rPCmBp+EyCNSgBbGM+42WAA4+Jd9ThiI7/PS98wl+d+yG6w8z
-5UNP9FR1bSmZLmZaQ9/LXMrI5Tjxfjs1nQ/0xVqhzPMggCTTV+wVunUlm+hkS7M0
-hO8EuPbJbKoCPrZV4jI3X/xml1/N1p7HIL9Nxqw/dV8c7TKcfGkAaZHjIxhT6QID
-AQABo0IwQDAdBgNVHQ4EFgQUVpkHHtOsDGlktAxQR95DLL4gwPswDgYDVR0PAQH/
-BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAJ5FdnsX
-SDLyOIspve6WSk6BGLFRRyDN0GSxDsnZAdkJzsiZ3GglE9Rc8qPoBP5yCccLqh0l
-VX6Wmle3usURehnmp349hQ71+S4pL+f5bFgWV1Al9j4uPqrtd3GqqpmWRgqujuwq
-URawXs3qZwQcWDD1YIq9pr1N5Za0/EKJAWv2cMhQOQwt1WbZyNKzMrcbGW3LM/nf
-peYVhDfwwvJllpKQd/Ct9JDpEXjXk4nAPQu6KfTomZ1yju2dL+6SfaHx/126M2CF
-Yv4HAqGEVka+lgqaE9chTLd8B59OTj+RdPsnnRHM3eaxynFNExc5JsUpISuTKWqW
-+qtB4Uu2NQvAmxU=
+MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt
+MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg
+Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i
+YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x
+CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG
+b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh
+bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3
+HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx
+WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX
+1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk
+u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P
+99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r
+M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw
+AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB
+BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh
+cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5
+gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO
+ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf
+aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic
+Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM=
-----END CERTIFICATE-----
-# Issuer: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H6 O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş.
-# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H6 O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş.
-# Label: "TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H6"
-# Serial: 138134509972618
-# MD5 Fingerprint: f8:c5:ee:2a:6b:be:95:8d:08:f7:25:4a:ea:71:3e:46
-# SHA1 Fingerprint: 8a:5c:8c:ee:a5:03:e6:05:56:ba:d8:1b:d4:f6:c9:b0:ed:e5:2f:e0
-# SHA256 Fingerprint: 8d:e7:86:55:e1:be:7f:78:47:80:0b:93:f6:94:d2:1d:36:8c:c0:6e:03:3e:7f:ab:04:bb:5e:b9:9d:a6:b7:00
+# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A.
+# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A.
+# Label: "SZAFIR ROOT CA2"
+# Serial: 357043034767186914217277344587386743377558296292
+# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99
+# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de
+# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe
-----BEGIN CERTIFICATE-----
-MIIEJjCCAw6gAwIBAgIGfaHyZeyKMA0GCSqGSIb3DQEBCwUAMIGxMQswCQYDVQQG
-EwJUUjEPMA0GA1UEBwwGQW5rYXJhMU0wSwYDVQQKDERUw5xSS1RSVVNUIEJpbGdp
-IMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kgSGl6bWV0bGVyaSBB
-LsWeLjFCMEAGA1UEAww5VMOcUktUUlVTVCBFbGVrdHJvbmlrIFNlcnRpZmlrYSBI
-aXptZXQgU2HEn2xhecSxY8Sxc8SxIEg2MB4XDTEzMTIxODA5MDQxMFoXDTIzMTIx
-NjA5MDQxMFowgbExCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExTTBLBgNV
-BAoMRFTDnFJLVFJVU1QgQmlsZ2kgxLBsZXRpxZ9pbSB2ZSBCaWxpxZ9pbSBHw7x2
-ZW5sacSfaSBIaXptZXRsZXJpIEEuxZ4uMUIwQAYDVQQDDDlUw5xSS1RSVVNUIEVs
-ZWt0cm9uaWsgU2VydGlmaWthIEhpem1ldCBTYcSfbGF5xLFjxLFzxLEgSDYwggEi
-MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCdsGjW6L0UlqMACprx9MfMkU1x
-eHe59yEmFXNRFpQJRwXiM/VomjX/3EsvMsew7eKC5W/a2uqsxgbPJQ1BgfbBOCK9
-+bGlprMBvD9QFyv26WZV1DOzXPhDIHiTVRZwGTLmiddk671IUP320EEDwnS3/faA
-z1vFq6TWlRKb55cTMgPp1KtDWxbtMyJkKbbSk60vbNg9tvYdDjTu0n2pVQ8g9P0p
-u5FbHH3GQjhtQiht1AH7zYiXSX6484P4tZgvsycLSF5W506jM7NE1qXyGJTtHB6p
-lVxiSvgNZ1GpryHV+DKdeboaX+UEVU0TRv/yz3THGmNtwx8XEsMeED5gCLMxAgMB
-AAGjQjBAMB0GA1UdDgQWBBTdVRcT9qzoSCHK77Wv0QAy7Z6MtTAOBgNVHQ8BAf8E
-BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAb1gNl0Oq
-FlQ+v6nfkkU/hQu7VtMMUszIv3ZnXuaqs6fvuay0EBQNdH49ba3RfdCaqaXKGDsC
-QC4qnFAUi/5XfldcEQlLNkVS9z2sFP1E34uXI9TDwe7UU5X+LEr+DXCqu4svLcsy
-o4LyVN/Y8t3XSHLuSqMplsNEzm61kod2pLv0kmzOLBQJZo6NrRa1xxsJYTvjIKID
-gI6tflEATseWhvtDmHd9KMeP2Cpu54Rvl0EpABZeTeIT6lnAY2c6RPuY/ATTMHKm
-9ocJV612ph1jmv3XZch4gyt1O6VbuA1df74jrlZVlFjvH4GMKrLN5ptjnhi85WsG
-tAuYSyher4hYyw==
+MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL
+BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6
+ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw
+NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L
+cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg
+Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN
+QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT
+3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw
+3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6
+3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5
+BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN
+XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
+AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF
+AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw
+8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG
+nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP
+oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy
+d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg
+LvWpCz/UXeHPhJ/iGcJfitYgHuNztw==
-----END CERTIFICATE-----
-# Issuer: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903
-# Subject: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903
-# Label: "Certinomis - Root CA"
+# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Label: "Certum Trusted Network CA 2"
+# Serial: 44979900017204383099463764357512596969
+# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2
+# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92
+# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04
+-----BEGIN CERTIFICATE-----
+MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB
+gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu
+QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG
+A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz
+OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ
+VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3
+b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA
+DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn
+0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB
+OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE
+fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E
+Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m
+o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i
+sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW
+OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez
+Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS
+adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n
+3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD
+AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC
+AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ
+F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf
+CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29
+XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm
+djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/
+WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb
+AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq
+P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko
+b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj
+XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P
+5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi
+DrW5viSP
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Label: "Hellenic Academic and Research Institutions RootCA 2015"
+# Serial: 0
+# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce
+# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6
+# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36
+-----BEGIN CERTIFICATE-----
+MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix
+DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k
+IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT
+N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v
+dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG
+A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh
+ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx
+QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1
+dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC
+AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA
+4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0
+AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10
+4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C
+ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV
+9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD
+gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6
+Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq
+NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko
+LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc
+Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV
+HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd
+ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I
+XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI
+M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot
+9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V
+Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea
+j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh
+X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ
+l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf
+bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4
+pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK
+e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0
+vm9qp/UsQu0yrbYhnr68
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015"
+# Serial: 0
+# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef
+# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66
+# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33
+-----BEGIN CERTIFICATE-----
+MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN
+BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl
+c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl
+bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv
+b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ
+BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj
+YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5
+MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0
+dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg
+QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa
+jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC
+MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi
+C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep
+lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof
+TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR
+-----END CERTIFICATE-----
+
+# Issuer: CN=ISRG Root X1 O=Internet Security Research Group
+# Subject: CN=ISRG Root X1 O=Internet Security Research Group
+# Label: "ISRG Root X1"
+# Serial: 172886928669790476064670243504169061120
+# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e
+# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8
+# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6
+-----BEGIN CERTIFICATE-----
+MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw
+TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh
+cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4
+WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu
+ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY
+MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc
+h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+
+0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U
+A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW
+T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH
+B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC
+B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv
+KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn
+OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn
+jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw
+qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI
+rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq
+hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL
+ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ
+3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK
+NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5
+ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur
+TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC
+jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc
+oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq
+4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA
+mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d
+emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc=
+-----END CERTIFICATE-----
+
+# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM
+# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM
+# Label: "AC RAIZ FNMT-RCM"
+# Serial: 485876308206448804701554682760554759
+# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d
+# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20
+# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa
+-----BEGIN CERTIFICATE-----
+MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx
+CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ
+WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ
+BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG
+Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/
+yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf
+BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz
+WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF
+tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z
+374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC
+IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL
+mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7
+wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS
+MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2
+ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet
+UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw
+AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H
+YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3
+LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD
+nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1
+RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM
+LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf
+77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N
+JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm
+fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp
+6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp
+1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B
+9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok
+RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv
+uu8wd+RU4riEmViAqhOLUTpPSPaLtrM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 1 O=Amazon
+# Subject: CN=Amazon Root CA 1 O=Amazon
+# Label: "Amazon Root CA 1"
+# Serial: 143266978916655856878034712317230054538369994
+# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6
+# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16
+# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e
+-----BEGIN CERTIFICATE-----
+MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF
+ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6
+b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL
+MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv
+b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj
+ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM
+9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw
+IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6
+VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L
+93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm
+jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
+AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA
+A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI
+U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs
+N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv
+o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU
+5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy
+rqXRfboQnoZsG4q5WTP468SQvvG5
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 2 O=Amazon
+# Subject: CN=Amazon Root CA 2 O=Amazon
+# Label: "Amazon Root CA 2"
+# Serial: 143266982885963551818349160658925006970653239
+# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66
+# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a
+# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4
+-----BEGIN CERTIFICATE-----
+MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF
+ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6
+b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL
+MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv
+b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK
+gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ
+W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg
+1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K
+8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r
+2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me
+z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR
+8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj
+mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz
+7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6
++XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI
+0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB
+Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm
+UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2
+LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY
++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS
+k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl
+7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm
+btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl
+urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+
+fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63
+n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE
+76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H
+9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT
+4PsJYGw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 3 O=Amazon
+# Subject: CN=Amazon Root CA 3 O=Amazon
+# Label: "Amazon Root CA 3"
+# Serial: 143266986699090766294700635381230934788665930
+# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87
+# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e
+# SHA256 Fingerprint: 18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4
+-----BEGIN CERTIFICATE-----
+MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5
+MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g
+Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG
+A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg
+Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl
+ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr
+ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr
+BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM
+YyRIHN8wfdVoOw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 4 O=Amazon
+# Subject: CN=Amazon Root CA 4 O=Amazon
+# Label: "Amazon Root CA 4"
+# Serial: 143266989758080763974105200630763877849284878
+# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd
+# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be
+# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92
+-----BEGIN CERTIFICATE-----
+MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5
+MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g
+Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG
+A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg
+Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi
+9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk
+M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB
+/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB
+MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw
+CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW
+1KyLa2tJElMzrdfkviT8tQp21KW8EA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=LuxTrust Global Root 2 O=LuxTrust S.A.
+# Subject: CN=LuxTrust Global Root 2 O=LuxTrust S.A.
+# Label: "LuxTrust Global Root 2"
+# Serial: 59914338225734147123941058376788110305822489521
+# MD5 Fingerprint: b2:e1:09:00:61:af:f7:f1:91:6f:c4:ad:8d:5e:3b:7c
+# SHA1 Fingerprint: 1e:0e:56:19:0a:d1:8b:25:98:b2:04:44:ff:66:8a:04:17:99:5f:3f
+# SHA256 Fingerprint: 54:45:5f:71:29:c2:0b:14:47:c4:18:f9:97:16:8f:24:c5:8f:c5:02:3b:f5:da:5b:e2:eb:6e:1d:d8:90:2e:d5
+-----BEGIN CERTIFICATE-----
+MIIFwzCCA6ugAwIBAgIUCn6m30tEntpqJIWe5rgV0xZ/u7EwDQYJKoZIhvcNAQEL
+BQAwRjELMAkGA1UEBhMCTFUxFjAUBgNVBAoMDUx1eFRydXN0IFMuQS4xHzAdBgNV
+BAMMFkx1eFRydXN0IEdsb2JhbCBSb290IDIwHhcNMTUwMzA1MTMyMTU3WhcNMzUw
+MzA1MTMyMTU3WjBGMQswCQYDVQQGEwJMVTEWMBQGA1UECgwNTHV4VHJ1c3QgUy5B
+LjEfMB0GA1UEAwwWTHV4VHJ1c3QgR2xvYmFsIFJvb3QgMjCCAiIwDQYJKoZIhvcN
+AQEBBQADggIPADCCAgoCggIBANeFl78RmOnwYoNMPIf5U2o3C/IPPIfOb9wmKb3F
+ibrJgz337spbxm1Jc7TJRqMbNBM/wYlFV/TZsfs2ZUv7COJIcRHIbjuend+JZTem
+hfY7RBi2xjcwYkSSl2l9QjAk5A0MiWtj3sXh306pFGxT4GHO9hcvHTy95iJMHZP1
+EMShduxq3sVs35a0VkBCwGKSMKEtFZSg0iAGCW5qbeXrt77U8PEVfIvmTroTzEsn
+Xpk8F12PgX8zPU/TPxvsXD/wPEx1bvKm1Z3aLQdjAsZy6ZS8TEmVT4hSyNvoaYL4
+zDRbIvCGp4m9SAptZoFtyMhk+wHh9OHe2Z7d21vUKpkmFRseTJIpgp7VkoGSQXAZ
+96Tlk0u8d2cx3Rz9MXANF5kM+Qw5GSoXtTBxVdUPrljhPS80m8+f9niFwpN6cj5m
+j5wWEWCPnolvZ77gR1o7DJpni89Gxq44o/KnvObWhWszJHAiS8sIm7vI+AIpHb4g
+DEa/a4ebsypmQjVGbKq6rfmYe+lQVRQxv7HaLe2ArWgk+2mr2HETMOZns4dA/Yl+
+8kPREd8vZS9kzl8UubG/Mb2HeFpZZYiq/FkySIbWTLkpS5XTdvN3JW1CHDiDTf2j
+X5t/Lax5Gw5CMZdjpPuKadUiDTSQMC6otOBttpSsvItO13D8xTiOZCXhTTmQzsmH
+hFhxAgMBAAGjgagwgaUwDwYDVR0TAQH/BAUwAwEB/zBCBgNVHSAEOzA5MDcGByuB
+KwEBAQowLDAqBggrBgEFBQcCARYeaHR0cHM6Ly9yZXBvc2l0b3J5Lmx1eHRydXN0
+Lmx1MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBT/GCh2+UgFLKGu8SsbK7JT
++Et8szAdBgNVHQ4EFgQU/xgodvlIBSyhrvErGyuyU/hLfLMwDQYJKoZIhvcNAQEL
+BQADggIBAGoZFO1uecEsh9QNcH7X9njJCwROxLHOk3D+sFTAMs2ZMGQXvw/l4jP9
+BzZAcg4atmpZ1gDlaCDdLnINH2pkMSCEfUmmWjfrRcmF9dTHF5kH5ptV5AzoqbTO
+jFu1EVzPig4N1qx3gf4ynCSecs5U89BvolbW7MM3LGVYvlcAGvI1+ut7MV3CwRI9
+loGIlonBWVx65n9wNOeD4rHh4bhY79SV5GCc8JaXcozrhAIuZY+kt9J/Z93I055c
+qqmkoCUUBpvsT34tC38ddfEz2O3OuHVtPlu5mB0xDVbYQw8wkbIEa91WvpWAVWe+
+2M2D2RjuLg+GLZKecBPs3lHJQ3gCpU3I+V/EkVhGFndadKpAvAefMLmx9xIX3eP/
+JEAdemrRTxgKqpAd60Ae36EeRJIQmvKN4dFLRp7oRUKX6kWZ8+xm1QL68qZKJKre
+zrnK+T+Tb/mjuuqlPpmt/f97mfVl7vBZKGfXkJWkE4SphMHozs51k2MavDzq1WQf
+LSoSOcbDWjLtR5EWDrw4wVDej8oqkDQc7kGUnF4ZLvhFSZl0kbAEb+MEWrGrKqv+
+x9CWttrhSmQGbmBNvUJO/3jaJMobtNeWOWyu8Q6qp31IiyBMz2TWuJdGsE7RKlY6
+oJO9r4Ak4Ap+58rVyuiFVdw2KuGUaJPHZnJED4AhMmwlxyOAgwrr
+-----END CERTIFICATE-----
+
+# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM
+# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM
+# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1"
# Serial: 1
-# MD5 Fingerprint: 14:0a:fd:8d:a8:28:b5:38:69:db:56:7e:61:22:03:3f
-# SHA1 Fingerprint: 9d:70:bb:01:a5:a4:a0:18:11:2e:f7:1c:01:b9:32:c5:34:e7:88:a8
-# SHA256 Fingerprint: 2a:99:f5:bc:11:74:b7:3c:bb:1d:62:08:84:e0:1c:34:e5:1c:cb:39:78:da:12:5f:0e:33:26:88:83:bf:41:58
+# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49
+# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca
+# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16
-----BEGIN CERTIFICATE-----
-MIIFkjCCA3qgAwIBAgIBATANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJGUjET
-MBEGA1UEChMKQ2VydGlub21pczEXMBUGA1UECxMOMDAwMiA0MzM5OTg5MDMxHTAb
-BgNVBAMTFENlcnRpbm9taXMgLSBSb290IENBMB4XDTEzMTAyMTA5MTcxOFoXDTMz
-MTAyMTA5MTcxOFowWjELMAkGA1UEBhMCRlIxEzARBgNVBAoTCkNlcnRpbm9taXMx
-FzAVBgNVBAsTDjAwMDIgNDMzOTk4OTAzMR0wGwYDVQQDExRDZXJ0aW5vbWlzIC0g
-Um9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANTMCQosP5L2
-fxSeC5yaah1AMGT9qt8OHgZbn1CF6s2Nq0Nn3rD6foCWnoR4kkjW4znuzuRZWJfl
-LieY6pOod5tK8O90gC3rMB+12ceAnGInkYjwSond3IjmFPnVAy//ldu9n+ws+hQV
-WZUKxkd8aRi5pwP5ynapz8dvtF4F/u7BUrJ1Mofs7SlmO/NKFoL21prbcpjp3vDF
-TKWrteoB4owuZH9kb/2jJZOLyKIOSY008B/sWEUuNKqEUL3nskoTuLAPrjhdsKkb
-5nPJWqHZZkCqqU2mNAKthH6yI8H7KsZn9DS2sJVqM09xRLWtwHkziOC/7aOgFLSc
-CbAK42C++PhmiM1b8XcF4LVzbsF9Ri6OSyemzTUK/eVNfaoqoynHWmgE6OXWk6Ri
-wsXm9E/G+Z8ajYJJGYrKWUM66A0ywfRMEwNvbqY/kXPLynNvEiCL7sCCeN5LLsJJ
-wx3tFvYk9CcbXFcx3FXuqB5vbKziRcxXV4p1VxngtViZSTYxPDMBbRZKzbgqg4SG
-m/lg0h9tkQPTYKbVPZrdd5A9NaSfD171UkRpucC63M9933zZxKyGIjK8e2uR73r4
-F2iw4lNVYC2vPsKD2NkJK/DAZNuHi5HMkesE/Xa0lZrmFAYb1TQdvtj/dBxThZng
-WVJKYe2InmtJiUZ+IFrZ50rlau7SZRFDAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIB
-BjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTvkUz1pcMw6C8I6tNxIqSSaHh0
-2TAfBgNVHSMEGDAWgBTvkUz1pcMw6C8I6tNxIqSSaHh02TANBgkqhkiG9w0BAQsF
-AAOCAgEAfj1U2iJdGlg+O1QnurrMyOMaauo++RLrVl89UM7g6kgmJs95Vn6RHJk/
-0KGRHCwPT5iVWVO90CLYiF2cN/z7ZMF4jIuaYAnq1fohX9B0ZedQxb8uuQsLrbWw
-F6YSjNRieOpWauwK0kDDPAUwPk2Ut59KA9N9J0u2/kTO+hkzGm2kQtHdzMjI1xZS
-g081lLMSVX3l4kLr5JyTCcBMWwerx20RoFAXlCOotQqSD7J6wWAsOMwaplv/8gzj
-qh8c3LigkyfeY+N/IZ865Z764BNqdeuWXGKRlI5nU7aJ+BIJy29SWwNyhlCVCNSN
-h4YVH5Uk2KRvms6knZtt0rJ2BobGVgjF6wnaNsIbW0G+YSrjcOa4pvi2WsS9Iff/
-ql+hbHY5ZtbqTFXhADObE5hjyW/QASAJN1LnDE8+zbz1X5YnpyACleAu6AdBBR8V
-btaw5BngDwKTACdyxYvRVB9dSsNAl35VpnzBMwQUAR1JIGkLGZOdblgi90AMRgwj
-Y/M50n92Uaf0yKHxDHYiI0ZSKS3io0EHVmmY0gUJvGnHWmHNj4FgFU2A3ZDifcRQ
-8ow7bkrHxuaAKzyBvBGAFhAn1/DNP3nMcyrDflOR1m749fPH0FFNjkulW+YZFzvW
-gQncItzujrnEj1PhZ7szuIgVRs/taTX/dQ1G885x4cVrhkIGuUE=
------END CERTIFICATE-----
-# Issuer: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
-# Subject: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
-# Label: "Entrust.net Secure Server CA"
-# Serial: 927650371
-# MD5 Fingerprint: df:f2:80:73:cc:f1:e6:61:73:fc:f5:42:e9:c5:7c:ee
-# SHA1 Fingerprint: 99:a6:9b:e6:1a:fe:88:6b:4d:2b:82:00:7c:b8:54:fc:31:7e:15:39
-# SHA256 Fingerprint: 62:f2:40:27:8c:56:4c:4d:d8:bf:7d:9d:4f:6f:36:6e:a8:94:d2:2f:5f:34:d9:89:a9:83:ac:ec:2f:ff:ed:50
------BEGIN CERTIFICATE-----
-MIIE2DCCBEGgAwIBAgIEN0rSQzANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMC
-VVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5u
-ZXQvQ1BTIGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMc
-KGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDE6MDgGA1UEAxMxRW50cnVzdC5u
-ZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05OTA1
-MjUxNjA5NDBaFw0xOTA1MjUxNjM5NDBaMIHDMQswCQYDVQQGEwJVUzEUMBIGA1UE
-ChMLRW50cnVzdC5uZXQxOzA5BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5j
-b3JwLiBieSByZWYuIChsaW1pdHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBF
-bnRydXN0Lm5ldCBMaW1pdGVkMTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUg
-U2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGdMA0GCSqGSIb3DQEBAQUA
-A4GLADCBhwKBgQDNKIM0VBuJ8w+vN5Ex/68xYMmo6LIQaO2f55M28Qpku0f1BBc/
-I0dNxScZgSYMVHINiC3ZH5oSn7yzcdOAGT9HZnuMNSjSuQrfJNqc1lB5gXpa0zf3
-wkrYKZImZNHkmGw6AIr1NJtl+O3jEP/9uElY3KDegjlrgbEWGWG5VLbmQwIBA6OC
-AdcwggHTMBEGCWCGSAGG+EIBAQQEAwIABzCCARkGA1UdHwSCARAwggEMMIHeoIHb
-oIHYpIHVMIHSMQswCQYDVQQGEwJVUzEUMBIGA1UEChMLRW50cnVzdC5uZXQxOzA5
-BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5jb3JwLiBieSByZWYuIChsaW1p
-dHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBFbnRydXN0Lm5ldCBMaW1pdGVk
-MTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUgU2VydmVyIENlcnRpZmljYXRp
-b24gQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMCmgJ6AlhiNodHRwOi8vd3d3LmVu
-dHJ1c3QubmV0L0NSTC9uZXQxLmNybDArBgNVHRAEJDAigA8xOTk5MDUyNTE2MDk0
-MFqBDzIwMTkwNTI1MTYwOTQwWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8Bdi
-E1U9s/8KAGv7UISX8+1i0BowHQYDVR0OBBYEFPAXYhNVPbP/CgBr+1CEl/PtYtAa
-MAwGA1UdEwQFMAMBAf8wGQYJKoZIhvZ9B0EABAwwChsEVjQuMAMCBJAwDQYJKoZI
-hvcNAQEFBQADgYEAkNwwAvpkdMKnCqV8IY00F6j7Rw7/JXyNEwr75Ji174z4xRAN
-95K+8cPV1ZVqBLssziY2ZcgxxufuP+NXdYR6Ee9GTxj005i7qIcyunL2POI9n9cd
-2cNgQ4xYDiKWL2KjLB+6rQXvqzJ4h6BUcxm1XAX5Uj5tLUUL9wqT6u0G+bI=
+MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx
+GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp
+bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w
+KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0
+BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy
+dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG
+EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll
+IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU
+QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT
+TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg
+LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7
+a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr
+LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr
+N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X
+YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/
+iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f
+AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH
+V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL
+BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh
+AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf
+IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4
+lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c
+8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf
+lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM=
-----END CERTIFICATE-----
-# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 2 Policy Validation Authority
-# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 2 Policy Validation Authority
-# Label: "ValiCert Class 2 VA"
-# Serial: 1
-# MD5 Fingerprint: a9:23:75:9b:ba:49:36:6e:31:c2:db:f2:e7:66:ba:87
-# SHA1 Fingerprint: 31:7a:2a:d0:7f:2b:33:5e:f5:a1:c3:4e:4b:57:e8:b7:d8:f1:fc:a6
-# SHA256 Fingerprint: 58:d0:17:27:9c:d4:dc:63:ab:dd:b1:96:a6:c9:90:6c:30:c4:e0:87:83:ea:e8:c1:60:99:54:d6:93:55:59:6b
+# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD.
+# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD.
+# Label: "GDCA TrustAUTH R5 ROOT"
+# Serial: 9009899650740120186
+# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4
+# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4
+# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93
-----BEGIN CERTIFICATE-----
-MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
-IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
-BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
-aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
-9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMTk1NFoXDTE5MDYy
-NjAwMTk1NFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
-azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
-YXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
-Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
-cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDOOnHK5avIWZJV16vY
-dA757tn2VUdZZUcOBVXc65g2PFxTXdMwzzjsvUGJ7SVCCSRrCl6zfN1SLUzm1NZ9
-WlmpZdRJEy0kTRxQb7XBhVQ7/nHk01xC+YDgkRoKWzk2Z/M/VXwbP7RfZHM047QS
-v4dk+NoS/zcnwbNDu+97bi5p9wIDAQABMA0GCSqGSIb3DQEBBQUAA4GBADt/UG9v
-UJSZSWI4OB9L+KXIPqeCgfYrx+jFzug6EILLGACOTb2oWH+heQC1u+mNr0HZDzTu
-IYEZoDJJKPTEjlbVUjP9UNV+mWwD5MlM/Mtsq2azSiGM5bUMMj4QssxsodyamEwC
-W/POuZ6lcg5Ktz885hZo+L7tdEy8W9ViH0Pd
+MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE
+BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ
+IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0
+MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV
+BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w
+HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF
+AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj
+Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj
+TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u
+KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj
+qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm
+MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12
+ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP
+zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk
+L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC
+jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA
+HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC
+AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB
+/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg
+p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm
+DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5
+COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry
+L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf
+JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg
+IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io
+2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV
+09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ
+XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq
+T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe
+MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g==
-----END CERTIFICATE-----
-# Issuer: CN=NetLock Expressz (Class C) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok
-# Subject: CN=NetLock Expressz (Class C) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok
-# Label: "NetLock Express (Class C) Root"
-# Serial: 104
-# MD5 Fingerprint: 4f:eb:f1:f0:70:c2:80:63:5d:58:9f:da:12:3c:a9:c4
-# SHA1 Fingerprint: e3:92:51:2f:0a:cf:f5:05:df:f6:de:06:7f:75:37:e1:65:ea:57:4b
-# SHA256 Fingerprint: 0b:5e:ed:4e:84:64:03:cf:55:e0:65:84:84:40:ed:2a:82:75:8b:f5:b9:aa:1f:25:3d:46:13:cf:a0:80:ff:3f
+# Issuer: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Subject: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Label: "TrustCor RootCert CA-1"
+# Serial: 15752444095811006489
+# MD5 Fingerprint: 6e:85:f1:dc:1a:00:d3:22:d5:b2:b2:ac:6b:37:05:45
+# SHA1 Fingerprint: ff:bd:cd:e7:82:c8:43:5e:3c:6f:26:86:5c:ca:a8:3a:45:5b:c3:0a
+# SHA256 Fingerprint: d4:0e:9c:86:cd:8f:e4:68:c1:77:69:59:f4:9e:a7:74:fa:54:86:84:b6:c4:06:f3:90:92:61:f4:dc:e2:57:5c
-----BEGIN CERTIFICATE-----
-MIIFTzCCBLigAwIBAgIBaDANBgkqhkiG9w0BAQQFADCBmzELMAkGA1UEBhMCSFUx
-ETAPBgNVBAcTCEJ1ZGFwZXN0MScwJQYDVQQKEx5OZXRMb2NrIEhhbG96YXRiaXp0
-b25zYWdpIEtmdC4xGjAYBgNVBAsTEVRhbnVzaXR2YW55a2lhZG9rMTQwMgYDVQQD
-EytOZXRMb2NrIEV4cHJlc3N6IChDbGFzcyBDKSBUYW51c2l0dmFueWtpYWRvMB4X
-DTk5MDIyNTE0MDgxMVoXDTE5MDIyMDE0MDgxMVowgZsxCzAJBgNVBAYTAkhVMREw
-DwYDVQQHEwhCdWRhcGVzdDEnMCUGA1UEChMeTmV0TG9jayBIYWxvemF0Yml6dG9u
-c2FnaSBLZnQuMRowGAYDVQQLExFUYW51c2l0dmFueWtpYWRvazE0MDIGA1UEAxMr
-TmV0TG9jayBFeHByZXNzeiAoQ2xhc3MgQykgVGFudXNpdHZhbnlraWFkbzCBnzAN
-BgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA6+ywbGGKIyWvYCDj2Z/8kwvbXY2wobNA
-OoLO/XXgeDIDhlqGlZHtU/qdQPzm6N3ZW3oDvV3zOwzDUXmbrVWg6dADEK8KuhRC
-2VImESLH0iDMgqSaqf64gXadarfSNnU+sYYJ9m5tfk63euyucYT2BDMIJTLrdKwW
-RMbkQJMdf60CAwEAAaOCAp8wggKbMBIGA1UdEwEB/wQIMAYBAf8CAQQwDgYDVR0P
-AQH/BAQDAgAGMBEGCWCGSAGG+EIBAQQEAwIABzCCAmAGCWCGSAGG+EIBDQSCAlEW
-ggJNRklHWUVMRU0hIEV6ZW4gdGFudXNpdHZhbnkgYSBOZXRMb2NrIEtmdC4gQWx0
-YWxhbm9zIFN6b2xnYWx0YXRhc2kgRmVsdGV0ZWxlaWJlbiBsZWlydCBlbGphcmFz
-b2sgYWxhcGphbiBrZXN6dWx0LiBBIGhpdGVsZXNpdGVzIGZvbHlhbWF0YXQgYSBO
-ZXRMb2NrIEtmdC4gdGVybWVrZmVsZWxvc3NlZy1iaXp0b3NpdGFzYSB2ZWRpLiBB
-IGRpZ2l0YWxpcyBhbGFpcmFzIGVsZm9nYWRhc2FuYWsgZmVsdGV0ZWxlIGF6IGVs
-b2lydCBlbGxlbm9yemVzaSBlbGphcmFzIG1lZ3RldGVsZS4gQXogZWxqYXJhcyBs
-ZWlyYXNhIG1lZ3RhbGFsaGF0byBhIE5ldExvY2sgS2Z0LiBJbnRlcm5ldCBob25s
-YXBqYW4gYSBodHRwczovL3d3dy5uZXRsb2NrLm5ldC9kb2NzIGNpbWVuIHZhZ3kg
-a2VyaGV0byBheiBlbGxlbm9yemVzQG5ldGxvY2submV0IGUtbWFpbCBjaW1lbi4g
-SU1QT1JUQU5UISBUaGUgaXNzdWFuY2UgYW5kIHRoZSB1c2Ugb2YgdGhpcyBjZXJ0
-aWZpY2F0ZSBpcyBzdWJqZWN0IHRvIHRoZSBOZXRMb2NrIENQUyBhdmFpbGFibGUg
-YXQgaHR0cHM6Ly93d3cubmV0bG9jay5uZXQvZG9jcyBvciBieSBlLW1haWwgYXQg
-Y3BzQG5ldGxvY2submV0LjANBgkqhkiG9w0BAQQFAAOBgQAQrX/XDDKACtiG8XmY
-ta3UzbM2xJZIwVzNmtkFLp++UOv0JhQQLdRmF/iewSf98e3ke0ugbLWrmldwpu2g
-pO0u9f38vf5NNwgMvOOWgyL1SRt/Syu0VMGAfJlOHdCM7tCs5ZL6dVb+ZKATj7i4
-Fp1hBWeAyNDYpQcCNJgEjTME1A==
+MIIEMDCCAxigAwIBAgIJANqb7HHzA7AZMA0GCSqGSIb3DQEBCwUAMIGkMQswCQYD
+VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk
+MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U
+cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRydXN0Q29y
+IFJvb3RDZXJ0IENBLTEwHhcNMTYwMjA0MTIzMjE2WhcNMjkxMjMxMTcyMzE2WjCB
+pDELMAkGA1UEBhMCUEExDzANBgNVBAgMBlBhbmFtYTEUMBIGA1UEBwwLUGFuYW1h
+IENpdHkxJDAiBgNVBAoMG1RydXN0Q29yIFN5c3RlbXMgUy4gZGUgUi5MLjEnMCUG
+A1UECwweVHJ1c3RDb3IgQ2VydGlmaWNhdGUgQXV0aG9yaXR5MR8wHQYDVQQDDBZU
+cnVzdENvciBSb290Q2VydCBDQS0xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB
+CgKCAQEAv463leLCJhJrMxnHQFgKq1mqjQCj/IDHUHuO1CAmujIS2CNUSSUQIpid
+RtLByZ5OGy4sDjjzGiVoHKZaBeYei0i/mJZ0PmnK6bV4pQa81QBeCQryJ3pS/C3V
+seq0iWEk8xoT26nPUu0MJLq5nux+AHT6k61sKZKuUbS701e/s/OojZz0JEsq1pme
+9J7+wH5COucLlVPat2gOkEz7cD+PSiyU8ybdY2mplNgQTsVHCJCZGxdNuWxu72CV
+EY4hgLW9oHPY0LJ3xEXqWib7ZnZ2+AYfYW0PVcWDtxBWcgYHpfOxGgMFZA6dWorW
+hnAbJN7+KIor0Gqw/Hqi3LJ5DotlDwIDAQABo2MwYTAdBgNVHQ4EFgQU7mtJPHo/
+DeOxCbeKyKsZn3MzUOcwHwYDVR0jBBgwFoAU7mtJPHo/DeOxCbeKyKsZn3MzUOcw
+DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQAD
+ggEBACUY1JGPE+6PHh0RU9otRCkZoB5rMZ5NDp6tPVxBb5UrJKF5mDo4Nvu7Zp5I
+/5CQ7z3UuJu0h3U/IJvOcs+hVcFNZKIZBqEHMwwLKeXx6quj7LUKdJDHfXLy11yf
+ke+Ri7fc7Waiz45mO7yfOgLgJ90WmMCV1Aqk5IGadZQ1nJBfiDcGrVmVCrDRZ9MZ
+yonnMlo2HD6CqFqTvsbQZJG2z9m2GM/bftJlo6bEjhcxwft+dtvTheNYsnd6djts
+L1Ac59v2Z3kf9YKVmgenFK+P3CghZwnS1k1aHBkcjndcw5QkPTJrS37UeJSDvjdN
+zl/HHk484IkzlQsPpTLWPFp5LBk=
-----END CERTIFICATE-----
-# Issuer: CN=NetLock Uzleti (Class B) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok
-# Subject: CN=NetLock Uzleti (Class B) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok
-# Label: "NetLock Business (Class B) Root"
-# Serial: 105
-# MD5 Fingerprint: 39:16:aa:b9:6a:41:e1:14:69:df:9e:6c:3b:72:dc:b6
-# SHA1 Fingerprint: 87:9f:4b:ee:05:df:98:58:3b:e3:60:d6:33:e7:0d:3f:fe:98:71:af
-# SHA256 Fingerprint: 39:df:7b:68:2b:7b:93:8f:84:71:54:81:cc:de:8d:60:d8:f2:2e:c5:98:87:7d:0a:aa:c1:2b:59:18:2b:03:12
+# Issuer: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Subject: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Label: "TrustCor RootCert CA-2"
+# Serial: 2711694510199101698
+# MD5 Fingerprint: a2:e1:f8:18:0b:ba:45:d5:c7:41:2a:bb:37:52:45:64
+# SHA1 Fingerprint: b8:be:6d:cb:56:f1:55:b9:63:d4:12:ca:4e:06:34:c7:94:b2:1c:c0
+# SHA256 Fingerprint: 07:53:e9:40:37:8c:1b:d5:e3:83:6e:39:5d:ae:a5:cb:83:9e:50:46:f1:bd:0e:ae:19:51:cf:10:fe:c7:c9:65
-----BEGIN CERTIFICATE-----
-MIIFSzCCBLSgAwIBAgIBaTANBgkqhkiG9w0BAQQFADCBmTELMAkGA1UEBhMCSFUx
-ETAPBgNVBAcTCEJ1ZGFwZXN0MScwJQYDVQQKEx5OZXRMb2NrIEhhbG96YXRiaXp0
-b25zYWdpIEtmdC4xGjAYBgNVBAsTEVRhbnVzaXR2YW55a2lhZG9rMTIwMAYDVQQD
-EylOZXRMb2NrIFV6bGV0aSAoQ2xhc3MgQikgVGFudXNpdHZhbnlraWFkbzAeFw05
-OTAyMjUxNDEwMjJaFw0xOTAyMjAxNDEwMjJaMIGZMQswCQYDVQQGEwJIVTERMA8G
-A1UEBxMIQnVkYXBlc3QxJzAlBgNVBAoTHk5ldExvY2sgSGFsb3phdGJpenRvbnNh
-Z2kgS2Z0LjEaMBgGA1UECxMRVGFudXNpdHZhbnlraWFkb2sxMjAwBgNVBAMTKU5l
-dExvY2sgVXpsZXRpIChDbGFzcyBCKSBUYW51c2l0dmFueWtpYWRvMIGfMA0GCSqG
-SIb3DQEBAQUAA4GNADCBiQKBgQCx6gTsIKAjwo84YM/HRrPVG/77uZmeBNwcf4xK
-gZjupNTKihe5In+DCnVMm8Bp2GQ5o+2So/1bXHQawEfKOml2mrriRBf8TKPV/riX
-iK+IA4kfpPIEPsgHC+b5sy96YhQJRhTKZPWLgLViqNhr1nGTLbO/CVRY7QbrqHvc
-Q7GhaQIDAQABo4ICnzCCApswEgYDVR0TAQH/BAgwBgEB/wIBBDAOBgNVHQ8BAf8E
-BAMCAAYwEQYJYIZIAYb4QgEBBAQDAgAHMIICYAYJYIZIAYb4QgENBIICURaCAk1G
-SUdZRUxFTSEgRXplbiB0YW51c2l0dmFueSBhIE5ldExvY2sgS2Z0LiBBbHRhbGFu
-b3MgU3pvbGdhbHRhdGFzaSBGZWx0ZXRlbGVpYmVuIGxlaXJ0IGVsamFyYXNvayBh
-bGFwamFuIGtlc3p1bHQuIEEgaGl0ZWxlc2l0ZXMgZm9seWFtYXRhdCBhIE5ldExv
-Y2sgS2Z0LiB0ZXJtZWtmZWxlbG9zc2VnLWJpenRvc2l0YXNhIHZlZGkuIEEgZGln
-aXRhbGlzIGFsYWlyYXMgZWxmb2dhZGFzYW5hayBmZWx0ZXRlbGUgYXogZWxvaXJ0
-IGVsbGVub3J6ZXNpIGVsamFyYXMgbWVndGV0ZWxlLiBBeiBlbGphcmFzIGxlaXJh
-c2EgbWVndGFsYWxoYXRvIGEgTmV0TG9jayBLZnQuIEludGVybmV0IGhvbmxhcGph
-biBhIGh0dHBzOi8vd3d3Lm5ldGxvY2submV0L2RvY3MgY2ltZW4gdmFneSBrZXJo
-ZXRvIGF6IGVsbGVub3J6ZXNAbmV0bG9jay5uZXQgZS1tYWlsIGNpbWVuLiBJTVBP
-UlRBTlQhIFRoZSBpc3N1YW5jZSBhbmQgdGhlIHVzZSBvZiB0aGlzIGNlcnRpZmlj
-YXRlIGlzIHN1YmplY3QgdG8gdGhlIE5ldExvY2sgQ1BTIGF2YWlsYWJsZSBhdCBo
-dHRwczovL3d3dy5uZXRsb2NrLm5ldC9kb2NzIG9yIGJ5IGUtbWFpbCBhdCBjcHNA
-bmV0bG9jay5uZXQuMA0GCSqGSIb3DQEBBAUAA4GBAATbrowXr/gOkDFOzT4JwG06
-sPgzTEdM43WIEJessDgVkcYplswhwG08pXTP2IKlOcNl40JwuyKQ433bNXbhoLXa
-n3BukxowOR0w2y7jfLKRstE3Kfq51hdcR0/jHTjrn9V7lagonhVK0dHQKwCXoOKS
-NitjrFgBazMpUIaD8QFI
+MIIGLzCCBBegAwIBAgIIJaHfyjPLWQIwDQYJKoZIhvcNAQELBQAwgaQxCzAJBgNV
+BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw
+IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy
+dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEfMB0GA1UEAwwWVHJ1c3RDb3Ig
+Um9vdENlcnQgQ0EtMjAeFw0xNjAyMDQxMjMyMjNaFw0zNDEyMzExNzI2MzlaMIGk
+MQswCQYDVQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEg
+Q2l0eTEkMCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYD
+VQQLDB5UcnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRy
+dXN0Q29yIFJvb3RDZXJ0IENBLTIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCnIG7CKqJiJJWQdsg4foDSq8GbZQWU9MEKENUCrO2fk8eHyLAnK0IMPQo+
+QVqedd2NyuCb7GgypGmSaIwLgQ5WoD4a3SwlFIIvl9NkRvRUqdw6VC0xK5mC8tkq
+1+9xALgxpL56JAfDQiDyitSSBBtlVkxs1Pu2YVpHI7TYabS3OtB0PAx1oYxOdqHp
+2yqlO/rOsP9+aij9JxzIsekp8VduZLTQwRVtDr4uDkbIXvRR/u8OYzo7cbrPb1nK
+DOObXUm4TOJXsZiKQlecdu/vvdFoqNL0Cbt3Nb4lggjEFixEIFapRBF37120Hape
+az6LMvYHL1cEksr1/p3C6eizjkxLAjHZ5DxIgif3GIJ2SDpxsROhOdUuxTTCHWKF
+3wP+TfSvPd9cW436cOGlfifHhi5qjxLGhF5DUVCcGZt45vz27Ud+ez1m7xMTiF88
+oWP7+ayHNZ/zgp6kPwqcMWmLmaSISo5uZk3vFsQPeSghYA2FFn3XVDjxklb9tTNM
+g9zXEJ9L/cb4Qr26fHMC4P99zVvh1Kxhe1fVSntb1IVYJ12/+CtgrKAmrhQhJ8Z3
+mjOAPF5GP/fDsaOGM8boXg25NSyqRsGFAnWAoOsk+xWq5Gd/bnc/9ASKL3x74xdh
+8N0JqSDIvgmk0H5Ew7IwSjiqqewYmgeCK9u4nBit2uBGF6zPXQIDAQABo2MwYTAd
+BgNVHQ4EFgQU2f4hQG6UnrybPZx9mCAZ5YwwYrIwHwYDVR0jBBgwFoAU2f4hQG6U
+nrybPZx9mCAZ5YwwYrIwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYw
+DQYJKoZIhvcNAQELBQADggIBAJ5Fngw7tu/hOsh80QA9z+LqBrWyOrsGS2h60COX
+dKcs8AjYeVrXWoSK2BKaG9l9XE1wxaX5q+WjiYndAfrs3fnpkpfbsEZC89NiqpX+
+MWcUaViQCqoL7jcjx1BRtPV+nuN79+TMQjItSQzL/0kMmx40/W5ulop5A7Zv2wnL
+/V9lFDfhOPXzYRZY5LVtDQsEGz9QLX+zx3oaFoBg+Iof6Rsqxvm6ARppv9JYx1RX
+CI/hOWB3S6xZhBqI8d3LT3jX5+EzLfzuQfogsL7L9ziUwOHQhQ+77Sxzq+3+knYa
+ZH9bDTMJBzN7Bj8RpFxwPIXAz+OQqIN3+tvmxYxoZxBnpVIt8MSZj3+/0WvitUfW
+2dCFmU2Umw9Lje4AWkcdEQOsQRivh7dvDDqPys/cA8GiCcjl/YBeyGBCARsaU1q7
+N6a3vLqE6R5sGtRk2tRD/pOLS/IseRYQ1JMLiI+h2IYURpFHmygk71dSTlxCnKr3
+Sewn6EAes6aJInKc9Q0ztFijMDvd1GpUk74aTfOTlPf8hAs/hCBcNANExdqtvArB
+As8e5ZTZ845b2EzwnexhF7sUMlQMAimTHpKG9n/v55IFDlndmQguLvqcAFLTxWYp
+5KeXRKQOKIETNcX2b2TmQcTVL8w0RSXPQQCWPUouwpaYT05KnJe32x+SMsj/D1Fu
+1uwJ
-----END CERTIFICATE-----
-# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 3 Policy Validation Authority
-# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 3 Policy Validation Authority
-# Label: "RSA Root Certificate 1"
-# Serial: 1
-# MD5 Fingerprint: a2:6f:53:b7:ee:40:db:4a:68:e7:fa:18:d9:10:4b:72
-# SHA1 Fingerprint: 69:bd:8c:f4:9c:d3:00:fb:59:2e:17:93:ca:55:6a:f3:ec:aa:35:fb
-# SHA256 Fingerprint: bc:23:f9:8a:31:3c:b9:2d:e3:bb:fc:3a:5a:9f:44:61:ac:39:49:4c:4a:e1:5a:9e:9d:f1:31:e9:9b:73:01:9a
+# Issuer: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Subject: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Label: "TrustCor ECA-1"
+# Serial: 9548242946988625984
+# MD5 Fingerprint: 27:92:23:1d:0a:f5:40:7c:e9:e6:6b:9d:d8:f5:e7:6c
+# SHA1 Fingerprint: 58:d1:df:95:95:67:6b:63:c0:f0:5b:1c:17:4d:8b:84:0b:c8:78:bd
+# SHA256 Fingerprint: 5a:88:5d:b1:9c:01:d9:12:c5:75:93:88:93:8c:af:bb:df:03:1a:b2:d4:8e:91:ee:15:58:9b:42:97:1d:03:9c
-----BEGIN CERTIFICATE-----
-MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
-IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
-BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
-aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
-9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMjIzM1oXDTE5MDYy
-NjAwMjIzM1owgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
-azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
-YXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
-Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
-cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDjmFGWHOjVsQaBalfD
-cnWTq8+epvzzFlLWLU2fNUSoLgRNB0mKOCn1dzfnt6td3zZxFJmP3MKS8edgkpfs
-2Ejcv8ECIMYkpChMMFp2bbFc893enhBxoYjHW5tBbcqwuI4V7q0zK89HBFx1cQqY
-JJgpp0lZpd34t0NiYfPT4tBVPwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFa7AliE
-Zwgs3x/be0kz9dNnnfS0ChCzycUs4pJqcXgn8nCDQtM+z6lU9PHYkhaM0QTLS6vJ
-n0WuPIqpsHEzXcjFV9+vqDWzf4mH6eglkrh/hXqu1rweN1gqZ8mRzyqBPu3GOd/A
-PhmcGcwTTYJBtYze4D1gCCAPRX5ron+jjBXu
+MIIEIDCCAwigAwIBAgIJAISCLF8cYtBAMA0GCSqGSIb3DQEBCwUAMIGcMQswCQYD
+VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk
+MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U
+cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxFzAVBgNVBAMMDlRydXN0Q29y
+IEVDQS0xMB4XDTE2MDIwNDEyMzIzM1oXDTI5MTIzMTE3MjgwN1owgZwxCzAJBgNV
+BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw
+IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy
+dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEXMBUGA1UEAwwOVHJ1c3RDb3Ig
+RUNBLTEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPj+ARtZ+odnbb
+3w9U73NjKYKtR8aja+3+XzP4Q1HpGjORMRegdMTUpwHmspI+ap3tDvl0mEDTPwOA
+BoJA6LHip1GnHYMma6ve+heRK9jGrB6xnhkB1Zem6g23xFUfJ3zSCNV2HykVh0A5
+3ThFEXXQmqc04L/NyFIduUd+Dbi7xgz2c1cWWn5DkR9VOsZtRASqnKmcp0yJF4Ou
+owReUoCLHhIlERnXDH19MURB6tuvsBzvgdAsxZohmz3tQjtQJvLsznFhBmIhVE5/
+wZ0+fyCMgMsq2JdiyIMzkX2woloPV+g7zPIlstR8L+xNxqE6FXrntl019fZISjZF
+ZtS6mFjBAgMBAAGjYzBhMB0GA1UdDgQWBBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAf
+BgNVHSMEGDAWgBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAPBgNVHRMBAf8EBTADAQH/
+MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAQEABT41XBVwm8nHc2Fv
+civUwo/yQ10CzsSUuZQRg2dd4mdsdXa/uwyqNsatR5Nj3B5+1t4u/ukZMjgDfxT2
+AHMsWbEhBuH7rBiVDKP/mZb3Kyeb1STMHd3BOuCYRLDE5D53sXOpZCz2HAF8P11F
+hcCF5yWPldwX8zyfGm6wyuMdKulMY/okYWLW2n62HGz1Ah3UKt1VkOsqEUc8Ll50
+soIipX1TH0XsJ5F95yIW6MBoNtjG8U+ARDL54dHRHareqKucBK+tIA5kmE2la8BI
+WJZpTdwHjFGTot+fDz2LYLSCjaoITmJF4PkL0uDgPFveXHEnJcLmA4GLEFPjx1Wi
+tJ/X5g==
-----END CERTIFICATE-----
-# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority
-# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority
-# Label: "ValiCert Class 1 VA"
-# Serial: 1
-# MD5 Fingerprint: 65:58:ab:15:ad:57:6c:1e:a8:a7:b5:69:ac:bf:ff:eb
-# SHA1 Fingerprint: e5:df:74:3c:b6:01:c4:9b:98:43:dc:ab:8c:e8:6a:81:10:9f:e4:8e
-# SHA256 Fingerprint: f4:c1:49:55:1a:30:13:a3:5b:c7:bf:fe:17:a7:f3:44:9b:c1:ab:5b:5a:0a:e7:4b:06:c2:3b:90:00:4c:01:04
+# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation
+# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation
+# Label: "SSL.com Root Certification Authority RSA"
+# Serial: 8875640296558310041
+# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29
+# SHA1 Fingerprint: b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb
+# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69
-----BEGIN CERTIFICATE-----
-MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
-IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
-BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
-aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
-9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNTIyMjM0OFoXDTE5MDYy
-NTIyMjM0OFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
-azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
-YXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
-Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
-cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDYWYJ6ibiWuqYvaG9Y
-LqdUHAZu9OqNSLwxlBfw8068srg1knaw0KWlAdcAAxIiGQj4/xEjm84H9b9pGib+
-TunRf50sQB1ZaG6m+FiwnRqP0z/x3BkGgagO4DrdyFNFCQbmD3DD+kCmDuJWBQ8Y
-TfwggtFzVXSNdnKgHZ0dwN0/cQIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFBoPUn0
-LBwGlN+VYH+Wexf+T3GtZMjdd9LvWVXoP+iOBSoh8gfStadS/pyxtuJbdxdA6nLW
-I8sogTLDAHkY7FkXicnGah5xyf23dKUlRWnFSKsZ4UWKJWsZ7uW7EvV/96aNUcPw
-nXS3qT6gpf+2SQMT2iLM7XGCK5nPOrf1LXLI
+MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE
+BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK
+DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz
+OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv
+dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv
+bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN
+AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R
+xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX
+qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC
+C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3
+6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh
+/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF
+YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E
+JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc
+US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8
+ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm
++Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi
+M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV
+HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G
+A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV
+cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc
+Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs
+PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/
+q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0
+cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr
+a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I
+H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y
+K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu
+nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf
+oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY
+Ic2wBlX7Jz9TkHCpBB5XJ7k=
-----END CERTIFICATE-----
-# Issuer: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc.
-# Subject: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc.
-# Label: "Equifax Secure eBusiness CA 1"
-# Serial: 4
-# MD5 Fingerprint: 64:9c:ef:2e:44:fc:c6:8f:52:07:d0:51:73:8f:cb:3d
-# SHA1 Fingerprint: da:40:18:8b:91:89:a3:ed:ee:ae:da:97:fe:2f:9d:f5:b7:d1:8a:41
-# SHA256 Fingerprint: cf:56:ff:46:a4:a1:86:10:9d:d9:65:84:b5:ee:b5:8a:51:0c:42:75:b0:e5:f9:4f:40:bb:ae:86:5e:19:f6:73
+# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation
+# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation
+# Label: "SSL.com Root Certification Authority ECC"
+# Serial: 8495723813297216424
+# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e
+# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a
+# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65
-----BEGIN CERTIFICATE-----
-MIICgjCCAeugAwIBAgIBBDANBgkqhkiG9w0BAQQFADBTMQswCQYDVQQGEwJVUzEc
-MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEmMCQGA1UEAxMdRXF1aWZheCBT
-ZWN1cmUgZUJ1c2luZXNzIENBLTEwHhcNOTkwNjIxMDQwMDAwWhcNMjAwNjIxMDQw
-MDAwWjBTMQswCQYDVQQGEwJVUzEcMBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5j
-LjEmMCQGA1UEAxMdRXF1aWZheCBTZWN1cmUgZUJ1c2luZXNzIENBLTEwgZ8wDQYJ
-KoZIhvcNAQEBBQADgY0AMIGJAoGBAM4vGbwXt3fek6lfWg0XTzQaDJj0ItlZ1MRo
-RvC0NcWFAyDGr0WlIVFFQesWWDYyb+JQYmT5/VGcqiTZ9J2DKocKIdMSODRsjQBu
-WqDZQu4aIZX5UkxVWsUPOE9G+m34LjXWHXzr4vCwdYDIqROsvojvOm6rXyo4YgKw
-Env+j6YDAgMBAAGjZjBkMBEGCWCGSAGG+EIBAQQEAwIABzAPBgNVHRMBAf8EBTAD
-AQH/MB8GA1UdIwQYMBaAFEp4MlIR21kWNl7fwRQ2QGpHfEyhMB0GA1UdDgQWBBRK
-eDJSEdtZFjZe38EUNkBqR3xMoTANBgkqhkiG9w0BAQQFAAOBgQB1W6ibAxHm6VZM
-zfmpTMANmvPMZWnmJXbMWbfWVMMdzZmsGd20hdXgPfxiIKeES1hl8eL5lSE/9dR+
-WB5Hh1Q+WKG1tfgq73HnvMP2sUlG4tega+VWeponmHxGYhTnyfxuAxJ5gDgdSIKN
-/Bf+KpYrtWKmpj29f5JZzVoqgrI3eQ==
+MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T
+U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0
+aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz
+WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0
+b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS
+b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB
+BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI
+7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg
+CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud
+EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD
+VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T
+kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+
+gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl
-----END CERTIFICATE-----
-# Issuer: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc.
-# Subject: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc.
-# Label: "Equifax Secure Global eBusiness CA"
-# Serial: 1
-# MD5 Fingerprint: 8f:5d:77:06:27:c4:98:3c:5b:93:78:e7:d7:7d:9b:cc
-# SHA1 Fingerprint: 7e:78:4a:10:1c:82:65:cc:2d:e1:f1:6d:47:b4:40:ca:d9:0a:19:45
-# SHA256 Fingerprint: 5f:0b:62:ea:b5:e3:53:ea:65:21:65:16:58:fb:b6:53:59:f4:43:28:0a:4a:fb:d1:04:d7:7d:10:f9:f0:4c:07
+# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation
+# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation
+# Label: "SSL.com EV Root Certification Authority RSA R2"
+# Serial: 6248227494352943350
+# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95
+# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a
+# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c
-----BEGIN CERTIFICATE-----
-MIICkDCCAfmgAwIBAgIBATANBgkqhkiG9w0BAQQFADBaMQswCQYDVQQGEwJVUzEc
-MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEtMCsGA1UEAxMkRXF1aWZheCBT
-ZWN1cmUgR2xvYmFsIGVCdXNpbmVzcyBDQS0xMB4XDTk5MDYyMTA0MDAwMFoXDTIw
-MDYyMTA0MDAwMFowWjELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0VxdWlmYXggU2Vj
-dXJlIEluYy4xLTArBgNVBAMTJEVxdWlmYXggU2VjdXJlIEdsb2JhbCBlQnVzaW5l
-c3MgQ0EtMTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAuucXkAJlsTRVPEnC
-UdXfp9E3j9HngXNBUmCbnaEXJnitx7HoJpQytd4zjTov2/KaelpzmKNc6fuKcxtc
-58O/gGzNqfTWK8D3+ZmqY6KxRwIP1ORROhI8bIpaVIRw28HFkM9yRcuoWcDNM50/
-o5brhTMhHD4ePmBudpxnhcXIw2ECAwEAAaNmMGQwEQYJYIZIAYb4QgEBBAQDAgAH
-MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUvqigdHJQa0S3ySPY+6j/s1dr
-aGwwHQYDVR0OBBYEFL6ooHRyUGtEt8kj2Puo/7NXa2hsMA0GCSqGSIb3DQEBBAUA
-A4GBADDiAVGqx+pf2rnQZQ8w1j7aDRRJbpGTJxQx78T3LUX47Me/okENI7SS+RkA
-Z70Br83gcfxaz2TE4JaY0KNA4gGK7ycH8WUBikQtBmV1UsCGECAhX2xrD2yuCRyv
-8qIYNMR1pHMc8Y3c7635s3a0kr/clRAevsvIO1qEYBlWlKlV
+MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV
+BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE
+CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy
+dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy
+MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G
+A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD
+DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq
+M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf
+OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa
+4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9
+HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR
+aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA
+b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ
+Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV
+PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO
+pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu
+UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY
+MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV
+HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4
+9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW
+s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5
+Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg
+cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM
+79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz
+/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt
+ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm
+Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK
+QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ
+w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi
+S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07
+mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w==
-----END CERTIFICATE-----
-# Issuer: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division
-# Subject: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division
-# Label: "Thawte Premium Server CA"
-# Serial: 1
-# MD5 Fingerprint: 06:9f:69:79:16:66:90:02:1b:8c:8c:a2:c3:07:6f:3a
-# SHA1 Fingerprint: 62:7f:8d:78:27:65:63:99:d2:7d:7f:90:44:c9:fe:b3:f3:3e:fa:9a
-# SHA256 Fingerprint: ab:70:36:36:5c:71:54:aa:29:c2:c2:9f:5d:41:91:16:3b:16:2a:22:25:01:13:57:d5:6d:07:ff:a7:bc:1f:72
+# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation
+# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation
+# Label: "SSL.com EV Root Certification Authority ECC"
+# Serial: 3182246526754555285
+# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90
+# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d
+# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8
-----BEGIN CERTIFICATE-----
-MIIDJzCCApCgAwIBAgIBATANBgkqhkiG9w0BAQQFADCBzjELMAkGA1UEBhMCWkEx
-FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD
-VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv
-biBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFByZW1pdW0gU2Vy
-dmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNlcnZlckB0aGF3dGUuY29t
-MB4XDTk2MDgwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgc4xCzAJBgNVBAYTAlpB
-MRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEdMBsG
-A1UEChMUVGhhd3RlIENvbnN1bHRpbmcgY2MxKDAmBgNVBAsTH0NlcnRpZmljYXRp
-b24gU2VydmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQcmVtaXVtIFNl
-cnZlciBDQTEoMCYGCSqGSIb3DQEJARYZcHJlbWl1bS1zZXJ2ZXJAdGhhd3RlLmNv
-bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA0jY2aovXwlue2oFBYo847kkE
-VdbQ7xwblRZH7xhINTpS9CtqBo87L+pW46+GjZ4X9560ZXUCTe/LCaIhUdib0GfQ
-ug2SBhRz1JPLlyoAnFxODLz6FVL88kRu2hFKbgifLy3j+ao6hnO2RlNYyIkFvYMR
-uHM/qgeN9EJN50CdHDcCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG
-9w0BAQQFAAOBgQAmSCwWwlj66BZ0DKqqX1Q/8tfJeGBeXm43YyJ3Nn6yF8Q0ufUI
-hfzJATj/Tb7yFkJD57taRvvBxhEf8UqwKEbJw8RCfbz6q1lu1bdRiBHjpIUZa4JM
-pAwSremkrj/xw0llmozFyD4lt5SZu5IycQfwhl7tUCemDaYj+bvLpgcUQg==
+MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T
+U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx
+NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv
+dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv
+bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49
+AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA
+VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku
+WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP
+MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX
+5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ
+ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg
+h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg==
-----END CERTIFICATE-----
-# Issuer: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division
-# Subject: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division
-# Label: "Thawte Server CA"
-# Serial: 1
-# MD5 Fingerprint: c5:70:c4:a2:ed:53:78:0c:c8:10:53:81:64:cb:d0:1d
-# SHA1 Fingerprint: 23:e5:94:94:51:95:f2:41:48:03:b4:d5:64:d2:a3:a3:f5:d8:8b:8c
-# SHA256 Fingerprint: b4:41:0b:73:e2:e6:ea:ca:47:fb:c4:2f:8f:a4:01:8a:f4:38:1d:c5:4c:fa:a8:44:50:46:1e:ed:09:45:4d:e9
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6
+# Label: "GlobalSign Root CA - R6"
+# Serial: 1417766617973444989252670301619537
+# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae
+# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1
+# SHA256 Fingerprint: 2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69
-----BEGIN CERTIFICATE-----
-MIIDEzCCAnygAwIBAgIBATANBgkqhkiG9w0BAQQFADCBxDELMAkGA1UEBhMCWkEx
-FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD
-VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv
-biBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEm
-MCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wHhcNOTYwODAx
-MDAwMDAwWhcNMjAxMjMxMjM1OTU5WjCBxDELMAkGA1UEBhMCWkExFTATBgNVBAgT
-DFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3
-dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNl
-cyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEmMCQGCSqGSIb3
-DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQAD
-gY0AMIGJAoGBANOkUG7I/1Zr5s9dtuoMaHVHoqrC2oQl/Kj0R1HahbUgdJSGHg91
-yekIYfUGbTBuFRkC6VLAYttNmZ7iagxEOM3+vuNkCXDF/rFrKbYvScg71CcEJRCX
-L+eQbcAoQpnXTEPew/UhbVSfXcNY4cDk2VuwuNy0e982OsK1ZiIS1ocNAgMBAAGj
-EzARMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEEBQADgYEAB/pMaVz7lcxG
-7oWDTSEwjsrZqG9JGubaUeNgcGyEYRGhGshIPllDfU+VPaGLtwtimHp1it2ITk6e
-QNuozDJ0uW8NxuOzRAvZim+aKZuZGCg70eNAKJpaPNW15yAbi8qkq43pUdniTCxZ
-qdq5snUb9kLy78fyGPmJvKP/iiMucEc=
+MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg
+MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh
+bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx
+MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET
+MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ
+KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI
+xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k
+ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD
+aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw
+LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw
+1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX
+k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2
+SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h
+bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n
+WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY
+rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce
+MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD
+AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu
+bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN
+nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt
+Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61
+55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj
+vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf
+cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz
+oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp
+nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs
+pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v
+JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R
+8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4
+5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA=
-----END CERTIFICATE-----
-# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority
-# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority
-# Label: "Verisign Class 3 Public Primary Certification Authority"
-# Serial: 149843929435818692848040365716851702463
-# MD5 Fingerprint: 10:fc:63:5d:f6:26:3e:0d:f3:25:be:5f:79:cd:67:67
-# SHA1 Fingerprint: 74:2c:31:92:e6:07:e4:24:eb:45:49:54:2b:e1:bb:c5:3e:61:74:e2
-# SHA256 Fingerprint: e7:68:56:34:ef:ac:f6:9a:ce:93:9a:6b:25:5b:7b:4f:ab:ef:42:93:5b:50:a2:65:ac:b5:cb:60:27:e4:4e:70
+# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed
+# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed
+# Label: "OISTE WISeKey Global Root GC CA"
+# Serial: 44084345621038548146064804565436152554
+# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23
+# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31
+# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d
-----BEGIN CERTIFICATE-----
-MIICPDCCAaUCEHC65B0Q2Sk0tjjKewPMur8wDQYJKoZIhvcNAQECBQAwXzELMAkG
-A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz
-cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2
-MDEyOTAwMDAwMFoXDTI4MDgwMTIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV
-BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt
-YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN
-ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE
-BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is
-I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G
-CSqGSIb3DQEBAgUAA4GBALtMEivPLCYATxQT3ab7/AoRhIzzKBxnki98tsX63/Do
-lbwdj2wsqFHMc9ikwFPwTtYmwHYBV4GSXiHx0bH/59AhWM1pF+NEHJwZRDmJXNyc
-AA9WjQKZ7aKQRUzkuxCkPfAyAw7xzvjoyVGM5mKf5p/AfbdynMk2OmufTqj/ZA1k
+MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw
+CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91
+bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg
+Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ
+BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu
+ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS
+b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni
+eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W
+p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E
+BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T
+rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV
+57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg
+Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9
-----END CERTIFICATE-----
-# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority
-# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority
-# Label: "Verisign Class 3 Public Primary Certification Authority"
-# Serial: 80507572722862485515306429940691309246
-# MD5 Fingerprint: ef:5a:f1:33:ef:f1:cd:bb:51:02:ee:12:14:4b:96:c4
-# SHA1 Fingerprint: a1:db:63:93:91:6f:17:e4:18:55:09:40:04:15:c7:02:40:b0:ae:6b
-# SHA256 Fingerprint: a4:b6:b3:99:6f:c2:f3:06:b3:fd:86:81:bd:63:41:3d:8c:50:09:cc:4f:a3:29:c2:cc:f0:e2:fa:1b:14:03:05
+# Issuer: CN=GTS Root R1 O=Google Trust Services LLC
+# Subject: CN=GTS Root R1 O=Google Trust Services LLC
+# Label: "GTS Root R1"
+# Serial: 146587175971765017618439757810265552097
+# MD5 Fingerprint: 82:1a:ef:d4:d2:4a:f2:9f:e2:3d:97:06:14:70:72:85
+# SHA1 Fingerprint: e1:c9:50:e6:ef:22:f8:4c:56:45:72:8b:92:20:60:d7:d5:a7:a3:e8
+# SHA256 Fingerprint: 2a:57:54:71:e3:13:40:bc:21:58:1c:bd:2c:f1:3e:15:84:63:20:3e:ce:94:bc:f9:d3:cc:19:6b:f0:9a:54:72
-----BEGIN CERTIFICATE-----
-MIICPDCCAaUCEDyRMcsf9tAbDpq40ES/Er4wDQYJKoZIhvcNAQEFBQAwXzELMAkG
-A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz
-cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2
-MDEyOTAwMDAwMFoXDTI4MDgwMjIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV
-BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt
-YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN
-ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE
-BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is
-I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G
-CSqGSIb3DQEBBQUAA4GBABByUqkFFBkyCEHwxWsKzH4PIRnN5GfcX6kb5sroc50i
-2JhucwNhkcV8sEVAbkSdjbCxlnRhLQ2pRdKkkirWmnWXbj9T/UWZYB2oK0z5XqcJ
-2HUw19JlYD1n1khVdWk/kfVIC0dpImmClr7JyDiGSnoscxlIaU5rfGW/D/xwzoiQ
+MIIFWjCCA0KgAwIBAgIQbkepxUtHDA3sM9CJuRz04TANBgkqhkiG9w0BAQwFADBH
+MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM
+QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy
+MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl
+cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEB
+AQUAA4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaM
+f/vo27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vX
+mX7wCl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7
+zUjwTcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0P
+fyblqAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtc
+vfaHszVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4
+Zor8Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUsp
+zBmkMiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOO
+Rc92wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYW
+k70paDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+
+DVrNVjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgF
+lQIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
+HQ4EFgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBADiW
+Cu49tJYeX++dnAsznyvgyv3SjgofQXSlfKqE1OXyHuY3UjKcC9FhHb8owbZEKTV1
+d5iyfNm9dKyKaOOpMQkpAWBz40d8U6iQSifvS9efk+eCNs6aaAyC58/UEBZvXw6Z
+XPYfcX3v73svfuo21pdwCxXu11xWajOl40k4DLh9+42FpLFZXvRq4d2h9mREruZR
+gyFmxhE+885H7pwoHyXa/6xmld01D1zvICxi/ZG6qcz8WpyTgYMpl0p8WnK0OdC3
+d8t5/Wk6kjftbjhlRn7pYL15iJdfOBL07q9bgsiG1eGZbYwE8na6SfZu6W0eX6Dv
+J4J2QPim01hcDyxC2kLGe4g0x8HYRZvBPsVhHdljUEn2NIVq4BjFbkerQUIpm/Zg
+DdIx02OYI5NaAIFItO/Nis3Jz5nu2Z6qNuFoS3FJFDYoOj0dzpqPJeaAcWErtXvM
++SUWgeExX6GjfhaknBZqlxi9dnKlC54dNuYvoS++cJEPqOba+MSSQGwlfnuzCdyy
+F62ARPBopY+Udf90WuioAnwMCeKpSwughQtiue+hMZL77/ZRBIls6Kl0obsXs7X9
+SQ98POyDGCBDTtWTurQ0sR8WNh8M5mQ5Fkzc4P4dyKliPUDqysU0ArSuiYgzNdws
+E3PYJ/HQcu51OyLemGhmW/HGY0dVHLqlCFF1pkgl
-----END CERTIFICATE-----
-# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network
-# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network
-# Label: "Verisign Class 3 Public Primary Certification Authority - G2"
-# Serial: 167285380242319648451154478808036881606
-# MD5 Fingerprint: a2:33:9b:4c:74:78:73:d4:6c:e7:c1:f3:8d:cb:5c:e9
-# SHA1 Fingerprint: 85:37:1c:a6:e5:50:14:3d:ce:28:03:47:1b:de:3a:09:e8:f8:77:0f
-# SHA256 Fingerprint: 83:ce:3c:12:29:68:8a:59:3d:48:5f:81:97:3c:0f:91:95:43:1e:da:37:cc:5e:36:43:0e:79:c7:a8:88:63:8b
+# Issuer: CN=GTS Root R2 O=Google Trust Services LLC
+# Subject: CN=GTS Root R2 O=Google Trust Services LLC
+# Label: "GTS Root R2"
+# Serial: 146587176055767053814479386953112547951
+# MD5 Fingerprint: 44:ed:9a:0e:a4:09:3b:00:f2:ae:4c:a3:c6:61:b0:8b
+# SHA1 Fingerprint: d2:73:96:2a:2a:5e:39:9f:73:3f:e1:c7:1e:64:3f:03:38:34:fc:4d
+# SHA256 Fingerprint: c4:5d:7b:b0:8e:6d:67:e6:2e:42:35:11:0b:56:4e:5f:78:fd:92:ef:05:8c:84:0a:ea:4e:64:55:d7:58:5c:60
-----BEGIN CERTIFICATE-----
-MIIDAjCCAmsCEH3Z/gfPqB63EHln+6eJNMYwDQYJKoZIhvcNAQEFBQAwgcExCzAJ
-BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh
-c3MgMyBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy
-MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp
-emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X
-DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw
-FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMyBQdWJsaWMg
-UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo
-YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5
-MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB
-AQUAA4GNADCBiQKBgQDMXtERXVxp0KvTuWpMmR9ZmDCOFoUgRm1HP9SFIIThbbP4
-pO0M8RcPO/mn+SXXwc+EY/J8Y8+iR/LGWzOOZEAEaMGAuWQcRXfH2G71lSk8UOg0
-13gfqLptQ5GVj0VXXn7F+8qkBOvqlzdUMG+7AUcyM83cV5tkaWH4mx0ciU9cZwID
-AQABMA0GCSqGSIb3DQEBBQUAA4GBAFFNzb5cy5gZnBWyATl4Lk0PZ3BwmcYQWpSk
-U01UbSuvDV1Ai2TT1+7eVmGSX6bEHRBhNtMsJzzoKQm5EWR0zLVznxxIqbxhAe7i
-F6YM40AIOw7n60RzKprxaZLvcRTDOaxxp5EJb+RxBrO6WVcmeQD2+A2iMzAo1KpY
-oJ2daZH9
+MIIFWjCCA0KgAwIBAgIQbkepxlqz5yDFMJo/aFLybzANBgkqhkiG9w0BAQwFADBH
+MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM
+QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy
+MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl
+cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEB
+AQUAA4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3Lv
+CvptnfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3Kg
+GjSY6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9Bu
+XvAuMC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOd
+re7kRXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXu
+PuWgf9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1
+mKPV+3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K
+8YzodDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqj
+x5RWIr9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsR
+nTKaG73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0
+kzCqgc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9Ok
+twIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
+HQ4EFgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBALZp
+8KZ3/p7uC4Gt4cCpx/k1HUCCq+YEtN/L9x0Pg/B+E02NjO7jMyLDOfxA325BS0JT
+vhaI8dI4XsRomRyYUpOM52jtG2pzegVATX9lO9ZY8c6DR2Dj/5epnGB3GFW1fgiT
+z9D2PGcDFWEJ+YF59exTpJ/JjwGLc8R3dtyDovUMSRqodt6Sm2T4syzFJ9MHwAiA
+pJiS4wGWAqoC7o87xdFtCjMwc3i5T1QWvwsHoaRc5svJXISPD+AVdyx+Jn7axEvb
+pxZ3B7DNdehyQtaVhJ2Gg/LkkM0JR9SLA3DaWsYDQvTtN6LwG1BUSw7YhN4ZKJmB
+R64JGz9I0cNv4rBgF/XuIwKl2gBbbZCr7qLpGzvpx0QnRY5rn/WkhLx3+WuXrD5R
+RaIRpsyF7gpo8j5QOHokYh4XIDdtak23CZvJ/KRY9bb7nE4Yu5UC56GtmwfuNmsk
+0jmGwZODUNKBRqhfYlcsu2xkiAhu7xNUX90txGdj08+JN7+dIPT7eoOboB6BAFDC
+5AwiWVIQ7UNWhwD4FFKnHYuTjKJNRn8nxnGbJN7k2oaLDX5rIMHAnuFl2GqjpuiF
+izoHCBy69Y9Vmhh1fuXsgWbRIXOhNUQLgD1bnF5vKheW0YMjiGZt5obicDIvUiLn
+yOd/xCxgXS/Dr55FBcOEArf9LAhST4Ldo/DUhgkC
-----END CERTIFICATE-----
-# Issuer: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc.
-# Subject: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc.
-# Label: "GTE CyberTrust Global Root"
-# Serial: 421
-# MD5 Fingerprint: ca:3d:d3:68:f1:03:5c:d0:32:fa:b8:2b:59:e8:5a:db
-# SHA1 Fingerprint: 97:81:79:50:d8:1c:96:70:cc:34:d8:09:cf:79:44:31:36:7e:f4:74
-# SHA256 Fingerprint: a5:31:25:18:8d:21:10:aa:96:4b:02:c7:b7:c6:da:32:03:17:08:94:e5:fb:71:ff:fb:66:67:d5:e6:81:0a:36
+# Issuer: CN=GTS Root R3 O=Google Trust Services LLC
+# Subject: CN=GTS Root R3 O=Google Trust Services LLC
+# Label: "GTS Root R3"
+# Serial: 146587176140553309517047991083707763997
+# MD5 Fingerprint: 1a:79:5b:6b:04:52:9c:5d:c7:74:33:1b:25:9a:f9:25
+# SHA1 Fingerprint: 30:d4:24:6f:07:ff:db:91:89:8a:0b:e9:49:66:11:eb:8c:5e:46:e5
+# SHA256 Fingerprint: 15:d5:b8:77:46:19:ea:7d:54:ce:1c:a6:d0:b0:c4:03:e0:37:a9:17:f1:31:e8:a0:4e:1e:6b:7a:71:ba:bc:e5
-----BEGIN CERTIFICATE-----
-MIICWjCCAcMCAgGlMA0GCSqGSIb3DQEBBAUAMHUxCzAJBgNVBAYTAlVTMRgwFgYD
-VQQKEw9HVEUgQ29ycG9yYXRpb24xJzAlBgNVBAsTHkdURSBDeWJlclRydXN0IFNv
-bHV0aW9ucywgSW5jLjEjMCEGA1UEAxMaR1RFIEN5YmVyVHJ1c3QgR2xvYmFsIFJv
-b3QwHhcNOTgwODEzMDAyOTAwWhcNMTgwODEzMjM1OTAwWjB1MQswCQYDVQQGEwJV
-UzEYMBYGA1UEChMPR1RFIENvcnBvcmF0aW9uMScwJQYDVQQLEx5HVEUgQ3liZXJU
-cnVzdCBTb2x1dGlvbnMsIEluYy4xIzAhBgNVBAMTGkdURSBDeWJlclRydXN0IEds
-b2JhbCBSb290MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCVD6C28FCc6HrH
-iM3dFw4usJTQGz0O9pTAipTHBsiQl8i4ZBp6fmw8U+E3KHNgf7KXUwefU/ltWJTS
-r41tiGeA5u2ylc9yMcqlHHK6XALnZELn+aks1joNrI1CqiQBOeacPwGFVw1Yh0X4
-04Wqk2kmhXBIgD8SFcd5tB8FLztimQIDAQABMA0GCSqGSIb3DQEBBAUAA4GBAG3r
-GwnpXtlR22ciYaQqPEh346B8pt5zohQDhT37qw4wxYMWM4ETCJ57NE7fQMh017l9
-3PR2VX2bY1QY6fDq81yx2YtCHrnAlU66+tXifPVoYb+O7AWXX1uw16OFNMQkpw0P
-lZPvy5TYnh+dXIVtx6quTx8itc2VrbqnzPmrC3p/
+MIICDDCCAZGgAwIBAgIQbkepx2ypcyRAiQ8DVd2NHTAKBggqhkjOPQQDAzBHMQsw
+CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU
+MBIGA1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw
+MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp
+Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQA
+IgNiAAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout
+736GjOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2A
+DDL24CejQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud
+DgQWBBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEAgFuk
+fCPAlaUs3L6JbyO5o91lAFJekazInXJ0glMLfalAvWhgxeG4VDvBNhcl2MG9AjEA
+njWSdIUlUfUk7GRSJFClH9voy8l27OyCbvWFGFPouOOaKaqW04MjyaR7YbPMAuhd
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTS Root R4 O=Google Trust Services LLC
+# Subject: CN=GTS Root R4 O=Google Trust Services LLC
+# Label: "GTS Root R4"
+# Serial: 146587176229350439916519468929765261721
+# MD5 Fingerprint: 5d:b6:6a:c4:60:17:24:6a:1a:99:a8:4b:ee:5e:b4:26
+# SHA1 Fingerprint: 2a:1d:60:27:d9:4a:b1:0a:1c:4d:91:5c:cd:33:a0:cb:3e:2d:54:cb
+# SHA256 Fingerprint: 71:cc:a5:39:1f:9e:79:4b:04:80:25:30:b3:63:e1:21:da:8a:30:43:bb:26:66:2f:ea:4d:ca:7f:c9:51:a4:bd
+-----BEGIN CERTIFICATE-----
+MIICCjCCAZGgAwIBAgIQbkepyIuUtui7OyrYorLBmTAKBggqhkjOPQQDAzBHMQsw
+CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU
+MBIGA1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw
+MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp
+Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQA
+IgNiAATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzu
+hXyiQHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/l
+xKvRHYqjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud
+DgQWBBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNnADBkAjBqUFJ0
+CMRw3J5QdCHojXohw0+WbhXRIjVhLfoIN+4Zba3bssx9BzT1YBkstTTZbyACMANx
+sbqjYAuG7ZoIapVon+Kz4ZNkfF6Tpt95LY2F45TPI11xzPKwTdb+mciUqXWi4w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=UCA Global G2 Root O=UniTrust
+# Subject: CN=UCA Global G2 Root O=UniTrust
+# Label: "UCA Global G2 Root"
+# Serial: 124779693093741543919145257850076631279
+# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8
+# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a
+# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c
+-----BEGIN CERTIFICATE-----
+MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9
+MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH
+bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x
+CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds
+b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr
+b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9
+kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm
+VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R
+VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc
+C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj
+tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY
+D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv
+j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl
+NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6
+iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP
+O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/
+BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV
+ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj
+L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5
+1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl
+1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU
+b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV
+PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj
+y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb
+EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg
+DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI
++Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy
+YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX
+UB+K+wb1whnw0A==
+-----END CERTIFICATE-----
+
+# Issuer: CN=UCA Extended Validation Root O=UniTrust
+# Subject: CN=UCA Extended Validation Root O=UniTrust
+# Label: "UCA Extended Validation Root"
+# Serial: 106100277556486529736699587978573607008
+# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2
+# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a
+# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24
+-----BEGIN CERTIFICATE-----
+MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH
+MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF
+eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx
+MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV
+BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB
+AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog
+D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS
+sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop
+O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk
+sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi
+c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj
+VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz
+KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/
+TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G
+sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs
+1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD
+fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T
+AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN
+l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR
+ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ
+VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5
+c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp
+4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s
+t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj
+2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO
+vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C
+xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx
+cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM
+fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036
+# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036
+# Label: "Certigna Root CA"
+# Serial: 269714418870597844693661054334862075617
+# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77
+# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43
+# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68
+-----BEGIN CERTIFICATE-----
+MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw
+WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw
+MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x
+MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD
+VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX
+BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw
+ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO
+ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M
+CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu
+I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm
+TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh
+C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf
+ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz
+IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT
+Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k
+JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5
+hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB
+GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE
+FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of
+1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov
+L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo
+dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr
+aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq
+hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L
+6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG
+HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6
+0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB
+lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi
+o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1
+gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v
+faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63
+Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh
+jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw
+3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI
+# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI
+# Label: "emSign Root CA - G1"
+# Serial: 235931866688319308814040
+# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac
+# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c
+# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67
+-----BEGIN CERTIFICATE-----
+MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD
+VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU
+ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH
+MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO
+MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv
+Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN
+BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz
+f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO
+8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq
+d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM
+tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt
+Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB
+o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD
+AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x
+PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM
+wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d
+GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH
+6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby
+RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx
+iN66zB+Afko=
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI
+# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI
+# Label: "emSign ECC Root CA - G3"
+# Serial: 287880440101571086945156
+# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40
+# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1
+# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b
+-----BEGIN CERTIFICATE-----
+MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG
+EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo
+bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g
+RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ
+TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s
+b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw
+djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0
+WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS
+fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB
+zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq
+hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB
+CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD
++JbNR6iC8hZVdyR+EhCVBCyj
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI
+# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI
+# Label: "emSign Root CA - C1"
+# Serial: 825510296613316004955058
+# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68
+# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01
+# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f
+-----BEGIN CERTIFICATE-----
+MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG
+A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg
+SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw
+MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln
+biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v
+dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ
+BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ
+HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH
+3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH
+GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c
+xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1
+aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq
+TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL
+BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87
+/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4
+kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG
+YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT
++xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo
+WXzhriKi4gp6D/piq1JM4fHfyr6DDUI=
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI
+# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI
+# Label: "emSign ECC Root CA - C3"
+# Serial: 582948710642506000014504
+# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5
+# SHA1 Fingerprint: b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66
+# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3
+-----BEGIN CERTIFICATE-----
+MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG
+EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx
+IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw
+MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln
+biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND
+IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci
+MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti
+sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O
+BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB
+Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c
+3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J
+0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post
+# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post
+# Label: "Hongkong Post Root CA 3"
+# Serial: 46170865288971385588281144162979347873371282084
+# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0
+# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02
+# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6
+-----BEGIN CERTIFICATE-----
+MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL
+BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ
+SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n
+a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5
+NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT
+CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u
+Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO
+dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI
+VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV
+9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY
+2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY
+vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt
+bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb
+x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+
+l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK
+TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj
+Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP
+BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e
+i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw
+DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG
+7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk
+MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr
+gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk
+GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS
+3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm
+Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+
+l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c
+JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP
+L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa
+LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG
+mpv0
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - G4"
+# Serial: 289383649854506086828220374796556676440
+# MD5 Fingerprint: 89:53:f1:83:23:b7:7c:8e:05:f1:8c:71:38:4e:1f:88
+# SHA1 Fingerprint: 14:88:4e:86:26:37:b0:26:af:59:62:5c:40:77:ec:35:29:ba:96:01
+# SHA256 Fingerprint: db:35:17:d1:f6:73:2a:2d:5a:b9:7c:53:3e:c7:07:79:ee:32:70:a6:2f:b4:ac:42:38:37:24:60:e6:f0:1e:88
+-----BEGIN CERTIFICATE-----
+MIIGSzCCBDOgAwIBAgIRANm1Q3+vqTkPAAAAAFVlrVgwDQYJKoZIhvcNAQELBQAw
+gb4xCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQL
+Ex9TZWUgd3d3LmVudHJ1c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykg
+MjAxNSBFbnRydXN0LCBJbmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAw
+BgNVBAMTKUVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0
+MB4XDTE1MDUyNzExMTExNloXDTM3MTIyNzExNDExNlowgb4xCzAJBgNVBAYTAlVT
+MRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1
+c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxNSBFbnRydXN0LCBJ
+bmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAwBgNVBAMTKUVudHJ1c3Qg
+Um9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0MIICIjANBgkqhkiG9w0B
+AQEFAAOCAg8AMIICCgKCAgEAsewsQu7i0TD/pZJH4i3DumSXbcr3DbVZwbPLqGgZ
+2K+EbTBwXX7zLtJTmeH+H17ZSK9dE43b/2MzTdMAArzE+NEGCJR5WIoV3imz/f3E
+T+iq4qA7ec2/a0My3dl0ELn39GjUu9CH1apLiipvKgS1sqbHoHrmSKvS0VnM1n4j
+5pds8ELl3FFLFUHtSUrJ3hCX1nbB76W1NhSXNdh4IjVS70O92yfbYVaCNNzLiGAM
+C1rlLAHGVK/XqsEQe9IFWrhAnoanw5CGAlZSCXqc0ieCU0plUmr1POeo8pyvi73T
+DtTUXm6Hnmo9RR3RXRv06QqsYJn7ibT/mCzPfB3pAqoEmh643IhuJbNsZvc8kPNX
+wbMv9W3y+8qh+CmdRouzavbmZwe+LGcKKh9asj5XxNMhIWNlUpEbsZmOeX7m640A
+2Vqq6nPopIICR5b+W45UYaPrL0swsIsjdXJ8ITzI9vF01Bx7owVV7rtNOzK+mndm
+nqxpkCIHH2E6lr7lmk/MBTwoWdPBDFSoWWG9yHJM6Nyfh3+9nEg2XpWjDrk4JFX8
+dWbrAuMINClKxuMrLzOg2qOGpRKX/YAr2hRC45K9PvJdXmd0LhyIRyk0X+IyqJwl
+N4y6mACXi0mWHv0liqzc2thddG5msP9E36EYxr5ILzeUePiVSj9/E15dWf10hkNj
+c0kCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD
+VR0OBBYEFJ84xFYjwznooHFs6FRM5Og6sb9nMA0GCSqGSIb3DQEBCwUAA4ICAQAS
+5UKme4sPDORGpbZgQIeMJX6tuGguW8ZAdjwD+MlZ9POrYs4QjbRaZIxowLByQzTS
+Gwv2LFPSypBLhmb8qoMi9IsabyZIrHZ3CL/FmFz0Jomee8O5ZDIBf9PD3Vht7LGr
+hFV0d4QEJ1JrhkzO3bll/9bGXp+aEJlLdWr+aumXIOTkdnrG0CSqkM0gkLpHZPt/
+B7NTeLUKYvJzQ85BK4FqLoUWlFPUa19yIqtRLULVAJyZv967lDtX/Zr1hstWO1uI
+AeV8KEsD+UmDfLJ/fOPtjqF/YFOOVZ1QNBIPt5d7bIdKROf1beyAN/BYGW5KaHbw
+H5Lk6rWS02FREAutp9lfx1/cH6NcjKF+m7ee01ZvZl4HliDtC3T7Zk6LERXpgUl+
+b7DUUH8i119lAg2m9IUe2K4GS0qn0jFmwvjO5QimpAKWRGhXxNUzzxkvFMSUHHuk
+2fCfDrGA4tGeEWSpiBE6doLlYsKA2KSD7ZPvfC+QsDJMlhVoSFLUmQjAJOgc47Ol
+IQ6SwJAfzyBfyjs4x7dtOvPmRLgOMWuIjnDrnBdSqEGULoe256YSxXXfW8AKbnuk
+5F6G+TaU33fD6Q3AOfF5u0aOq0NZJ7cguyPpVkAh7DE9ZapD8j3fcEThuk0mEDuY
+n/PIjhs4ViFqUZPTkcpG2om3PVODLAgfi49T3f+sHw==
-----END CERTIFICATE-----
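The hunks above retire old roots (Thawte, VeriSign, GTE CyberTrust) from the vendored certifi CA bundle and add current ones (GlobalSign R6, OISTE WISeKey, GTS R1-R4, UCA, Certigna, emSign, Hongkong Post CA 3, Entrust G4). A minimal sketch of how the updated bundle is consumed, assuming the standard certifi API, where `certifi.where()` returns the path to this cacert.pem:

```
import ssl
import urllib.request

import certifi

# Trust exactly the roots shipped in the vendored cacert.pem.
context = ssl.create_default_context(cafile=certifi.where())

# Any TLS client can then verify servers against the updated bundle;
# the URL here is illustrative only.
with urllib.request.urlopen("https://example.com", context=context) as resp:
    print(resp.status)
```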
diff --git a/venv/Lib/site-packages/grpc/_cython/_cygrpc/__init__.py b/venv/Lib/site-packages/grpc/_cython/_cygrpc/__init__.py
new file mode 100644
index 000000000..5fb4f3c3c
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/_cython/_cygrpc/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2015 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/venv/Lib/site-packages/grpc/_cython/_cygrpc/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/grpc/_cython/_cygrpc/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..efa969b55
Binary files /dev/null and b/venv/Lib/site-packages/grpc/_cython/_cygrpc/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/_cython/cygrpc.cp36-win32.pyd b/venv/Lib/site-packages/grpc/_cython/cygrpc.cp36-win32.pyd
new file mode 100644
index 000000000..b279d0e6b
Binary files /dev/null and b/venv/Lib/site-packages/grpc/_cython/cygrpc.cp36-win32.pyd differ
diff --git a/venv/Lib/site-packages/grpc/_grpcio_metadata.py b/venv/Lib/site-packages/grpc/_grpcio_metadata.py
new file mode 100644
index 000000000..25796623b
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/_grpcio_metadata.py
@@ -0,0 +1 @@
+__version__ = """1.32.0"""
\ No newline at end of file
diff --git a/venv/Lib/site-packages/grpc/_interceptor.py b/venv/Lib/site-packages/grpc/_interceptor.py
new file mode 100644
index 000000000..ee63cb314
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/_interceptor.py
@@ -0,0 +1,562 @@
+# Copyright 2017 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Implementation of gRPC Python interceptors."""
+
+import collections
+import sys
+
+import grpc
+
+
+class _ServicePipeline(object):
+
+ def __init__(self, interceptors):
+ self.interceptors = tuple(interceptors)
+
+ def _continuation(self, thunk, index):
+ return lambda context: self._intercept_at(thunk, index, context)
+
+ def _intercept_at(self, thunk, index, context):
+ if index < len(self.interceptors):
+ interceptor = self.interceptors[index]
+ thunk = self._continuation(thunk, index + 1)
+ return interceptor.intercept_service(thunk, context)
+ else:
+ return thunk(context)
+
+ def execute(self, thunk, context):
+ return self._intercept_at(thunk, 0, context)
+
+
+def service_pipeline(interceptors):
+ return _ServicePipeline(interceptors) if interceptors else None
+
+
+class _ClientCallDetails(
+ collections.namedtuple('_ClientCallDetails',
+ ('method', 'timeout', 'metadata', 'credentials',
+ 'wait_for_ready', 'compression')),
+ grpc.ClientCallDetails):
+ pass
+
+
+def _unwrap_client_call_details(call_details, default_details):
+ try:
+ method = call_details.method
+ except AttributeError:
+ method = default_details.method
+
+ try:
+ timeout = call_details.timeout
+ except AttributeError:
+ timeout = default_details.timeout
+
+ try:
+ metadata = call_details.metadata
+ except AttributeError:
+ metadata = default_details.metadata
+
+ try:
+ credentials = call_details.credentials
+ except AttributeError:
+ credentials = default_details.credentials
+
+ try:
+ wait_for_ready = call_details.wait_for_ready
+ except AttributeError:
+ wait_for_ready = default_details.wait_for_ready
+
+ try:
+ compression = call_details.compression
+ except AttributeError:
+ compression = default_details.compression
+
+ return method, timeout, metadata, credentials, wait_for_ready, compression
+
+
+class _FailureOutcome(grpc.RpcError, grpc.Future, grpc.Call): # pylint: disable=too-many-ancestors
+
+ def __init__(self, exception, traceback):
+ super(_FailureOutcome, self).__init__()
+ self._exception = exception
+ self._traceback = traceback
+
+ def initial_metadata(self):
+ return None
+
+ def trailing_metadata(self):
+ return None
+
+ def code(self):
+ return grpc.StatusCode.INTERNAL
+
+ def details(self):
+ return 'Exception raised while intercepting the RPC'
+
+ def cancel(self):
+ return False
+
+ def cancelled(self):
+ return False
+
+ def is_active(self):
+ return False
+
+ def time_remaining(self):
+ return None
+
+ def running(self):
+ return False
+
+ def done(self):
+ return True
+
+ def result(self, ignored_timeout=None):
+ raise self._exception
+
+ def exception(self, ignored_timeout=None):
+ return self._exception
+
+ def traceback(self, ignored_timeout=None):
+ return self._traceback
+
+ def add_callback(self, unused_callback):
+ return False
+
+ def add_done_callback(self, fn):
+ fn(self)
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ raise self._exception
+
+ def next(self):
+ return self.__next__()
+
+
+class _UnaryOutcome(grpc.Call, grpc.Future):
+
+ def __init__(self, response, call):
+ self._response = response
+ self._call = call
+
+ def initial_metadata(self):
+ return self._call.initial_metadata()
+
+ def trailing_metadata(self):
+ return self._call.trailing_metadata()
+
+ def code(self):
+ return self._call.code()
+
+ def details(self):
+ return self._call.details()
+
+ def is_active(self):
+ return self._call.is_active()
+
+ def time_remaining(self):
+ return self._call.time_remaining()
+
+ def cancel(self):
+ return self._call.cancel()
+
+ def add_callback(self, callback):
+ return self._call.add_callback(callback)
+
+ def cancelled(self):
+ return False
+
+ def running(self):
+ return False
+
+ def done(self):
+ return True
+
+ def result(self, ignored_timeout=None):
+ return self._response
+
+ def exception(self, ignored_timeout=None):
+ return None
+
+ def traceback(self, ignored_timeout=None):
+ return None
+
+ def add_done_callback(self, fn):
+ fn(self)
+
+
+class _UnaryUnaryMultiCallable(grpc.UnaryUnaryMultiCallable):
+
+ def __init__(self, thunk, method, interceptor):
+ self._thunk = thunk
+ self._method = method
+ self._interceptor = interceptor
+
+ def __call__(self,
+ request,
+ timeout=None,
+ metadata=None,
+ credentials=None,
+ wait_for_ready=None,
+ compression=None):
+ response, ignored_call = self._with_call(request,
+ timeout=timeout,
+ metadata=metadata,
+ credentials=credentials,
+ wait_for_ready=wait_for_ready,
+ compression=compression)
+ return response
+
+ def _with_call(self,
+ request,
+ timeout=None,
+ metadata=None,
+ credentials=None,
+ wait_for_ready=None,
+ compression=None):
+ client_call_details = _ClientCallDetails(self._method, timeout,
+ metadata, credentials,
+ wait_for_ready, compression)
+
+ def continuation(new_details, request):
+ (new_method, new_timeout, new_metadata, new_credentials,
+ new_wait_for_ready,
+ new_compression) = (_unwrap_client_call_details(
+ new_details, client_call_details))
+ try:
+ response, call = self._thunk(new_method).with_call(
+ request,
+ timeout=new_timeout,
+ metadata=new_metadata,
+ credentials=new_credentials,
+ wait_for_ready=new_wait_for_ready,
+ compression=new_compression)
+ return _UnaryOutcome(response, call)
+ except grpc.RpcError as rpc_error:
+ return rpc_error
+ except Exception as exception: # pylint:disable=broad-except
+ return _FailureOutcome(exception, sys.exc_info()[2])
+
+ call = self._interceptor.intercept_unary_unary(continuation,
+ client_call_details,
+ request)
+ return call.result(), call
+
+ def with_call(self,
+ request,
+ timeout=None,
+ metadata=None,
+ credentials=None,
+ wait_for_ready=None,
+ compression=None):
+ return self._with_call(request,
+ timeout=timeout,
+ metadata=metadata,
+ credentials=credentials,
+ wait_for_ready=wait_for_ready,
+ compression=compression)
+
+ def future(self,
+ request,
+ timeout=None,
+ metadata=None,
+ credentials=None,
+ wait_for_ready=None,
+ compression=None):
+ client_call_details = _ClientCallDetails(self._method, timeout,
+ metadata, credentials,
+ wait_for_ready, compression)
+
+ def continuation(new_details, request):
+ (new_method, new_timeout, new_metadata, new_credentials,
+ new_wait_for_ready,
+ new_compression) = (_unwrap_client_call_details(
+ new_details, client_call_details))
+ return self._thunk(new_method).future(
+ request,
+ timeout=new_timeout,
+ metadata=new_metadata,
+ credentials=new_credentials,
+ wait_for_ready=new_wait_for_ready,
+ compression=new_compression)
+
+ try:
+ return self._interceptor.intercept_unary_unary(
+ continuation, client_call_details, request)
+ except Exception as exception: # pylint:disable=broad-except
+ return _FailureOutcome(exception, sys.exc_info()[2])
+
+
+class _UnaryStreamMultiCallable(grpc.UnaryStreamMultiCallable):
+
+ def __init__(self, thunk, method, interceptor):
+ self._thunk = thunk
+ self._method = method
+ self._interceptor = interceptor
+
+ def __call__(self,
+ request,
+ timeout=None,
+ metadata=None,
+ credentials=None,
+ wait_for_ready=None,
+ compression=None):
+ client_call_details = _ClientCallDetails(self._method, timeout,
+ metadata, credentials,
+ wait_for_ready, compression)
+
+ def continuation(new_details, request):
+ (new_method, new_timeout, new_metadata, new_credentials,
+ new_wait_for_ready,
+ new_compression) = (_unwrap_client_call_details(
+ new_details, client_call_details))
+ return self._thunk(new_method)(request,
+ timeout=new_timeout,
+ metadata=new_metadata,
+ credentials=new_credentials,
+ wait_for_ready=new_wait_for_ready,
+ compression=new_compression)
+
+ try:
+ return self._interceptor.intercept_unary_stream(
+ continuation, client_call_details, request)
+ except Exception as exception: # pylint:disable=broad-except
+ return _FailureOutcome(exception, sys.exc_info()[2])
+
+
+class _StreamUnaryMultiCallable(grpc.StreamUnaryMultiCallable):
+
+ def __init__(self, thunk, method, interceptor):
+ self._thunk = thunk
+ self._method = method
+ self._interceptor = interceptor
+
+ def __call__(self,
+ request_iterator,
+ timeout=None,
+ metadata=None,
+ credentials=None,
+ wait_for_ready=None,
+ compression=None):
+ response, ignored_call = self._with_call(request_iterator,
+ timeout=timeout,
+ metadata=metadata,
+ credentials=credentials,
+ wait_for_ready=wait_for_ready,
+ compression=compression)
+ return response
+
+ def _with_call(self,
+ request_iterator,
+ timeout=None,
+ metadata=None,
+ credentials=None,
+ wait_for_ready=None,
+ compression=None):
+ client_call_details = _ClientCallDetails(self._method, timeout,
+ metadata, credentials,
+ wait_for_ready, compression)
+
+ def continuation(new_details, request_iterator):
+ (new_method, new_timeout, new_metadata, new_credentials,
+ new_wait_for_ready,
+ new_compression) = (_unwrap_client_call_details(
+ new_details, client_call_details))
+ try:
+ response, call = self._thunk(new_method).with_call(
+ request_iterator,
+ timeout=new_timeout,
+ metadata=new_metadata,
+ credentials=new_credentials,
+ wait_for_ready=new_wait_for_ready,
+ compression=new_compression)
+ return _UnaryOutcome(response, call)
+ except grpc.RpcError as rpc_error:
+ return rpc_error
+ except Exception as exception: # pylint:disable=broad-except
+ return _FailureOutcome(exception, sys.exc_info()[2])
+
+ call = self._interceptor.intercept_stream_unary(continuation,
+ client_call_details,
+ request_iterator)
+ return call.result(), call
+
+ def with_call(self,
+ request_iterator,
+ timeout=None,
+ metadata=None,
+ credentials=None,
+ wait_for_ready=None,
+ compression=None):
+ return self._with_call(request_iterator,
+ timeout=timeout,
+ metadata=metadata,
+ credentials=credentials,
+ wait_for_ready=wait_for_ready,
+ compression=compression)
+
+ def future(self,
+ request_iterator,
+ timeout=None,
+ metadata=None,
+ credentials=None,
+ wait_for_ready=None,
+ compression=None):
+ client_call_details = _ClientCallDetails(self._method, timeout,
+ metadata, credentials,
+ wait_for_ready, compression)
+
+ def continuation(new_details, request_iterator):
+ (new_method, new_timeout, new_metadata, new_credentials,
+ new_wait_for_ready,
+ new_compression) = (_unwrap_client_call_details(
+ new_details, client_call_details))
+ return self._thunk(new_method).future(
+ request_iterator,
+ timeout=new_timeout,
+ metadata=new_metadata,
+ credentials=new_credentials,
+ wait_for_ready=new_wait_for_ready,
+ compression=new_compression)
+
+ try:
+ return self._interceptor.intercept_stream_unary(
+ continuation, client_call_details, request_iterator)
+ except Exception as exception: # pylint:disable=broad-except
+ return _FailureOutcome(exception, sys.exc_info()[2])
+
+
+class _StreamStreamMultiCallable(grpc.StreamStreamMultiCallable):
+
+ def __init__(self, thunk, method, interceptor):
+ self._thunk = thunk
+ self._method = method
+ self._interceptor = interceptor
+
+ def __call__(self,
+ request_iterator,
+ timeout=None,
+ metadata=None,
+ credentials=None,
+ wait_for_ready=None,
+ compression=None):
+ client_call_details = _ClientCallDetails(self._method, timeout,
+ metadata, credentials,
+ wait_for_ready, compression)
+
+ def continuation(new_details, request_iterator):
+ (new_method, new_timeout, new_metadata, new_credentials,
+ new_wait_for_ready,
+ new_compression) = (_unwrap_client_call_details(
+ new_details, client_call_details))
+ return self._thunk(new_method)(request_iterator,
+ timeout=new_timeout,
+ metadata=new_metadata,
+ credentials=new_credentials,
+ wait_for_ready=new_wait_for_ready,
+ compression=new_compression)
+
+ try:
+ return self._interceptor.intercept_stream_stream(
+ continuation, client_call_details, request_iterator)
+ except Exception as exception: # pylint:disable=broad-except
+ return _FailureOutcome(exception, sys.exc_info()[2])
+
+
+class _Channel(grpc.Channel):
+
+ def __init__(self, channel, interceptor):
+ self._channel = channel
+ self._interceptor = interceptor
+
+ def subscribe(self, callback, try_to_connect=False):
+ self._channel.subscribe(callback, try_to_connect=try_to_connect)
+
+ def unsubscribe(self, callback):
+ self._channel.unsubscribe(callback)
+
+ def unary_unary(self,
+ method,
+ request_serializer=None,
+ response_deserializer=None):
+ thunk = lambda m: self._channel.unary_unary(m, request_serializer,
+ response_deserializer)
+ if isinstance(self._interceptor, grpc.UnaryUnaryClientInterceptor):
+ return _UnaryUnaryMultiCallable(thunk, method, self._interceptor)
+ else:
+ return thunk(method)
+
+ def unary_stream(self,
+ method,
+ request_serializer=None,
+ response_deserializer=None):
+ thunk = lambda m: self._channel.unary_stream(m, request_serializer,
+ response_deserializer)
+ if isinstance(self._interceptor, grpc.UnaryStreamClientInterceptor):
+ return _UnaryStreamMultiCallable(thunk, method, self._interceptor)
+ else:
+ return thunk(method)
+
+ def stream_unary(self,
+ method,
+ request_serializer=None,
+ response_deserializer=None):
+ thunk = lambda m: self._channel.stream_unary(m, request_serializer,
+ response_deserializer)
+ if isinstance(self._interceptor, grpc.StreamUnaryClientInterceptor):
+ return _StreamUnaryMultiCallable(thunk, method, self._interceptor)
+ else:
+ return thunk(method)
+
+ def stream_stream(self,
+ method,
+ request_serializer=None,
+ response_deserializer=None):
+ thunk = lambda m: self._channel.stream_stream(m, request_serializer,
+ response_deserializer)
+ if isinstance(self._interceptor, grpc.StreamStreamClientInterceptor):
+ return _StreamStreamMultiCallable(thunk, method, self._interceptor)
+ else:
+ return thunk(method)
+
+ def _close(self):
+ self._channel.close()
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self._close()
+ return False
+
+ def close(self):
+ self._channel.close()
+
+
+def intercept_channel(channel, *interceptors):
+ for interceptor in reversed(list(interceptors)):
+ if not isinstance(interceptor, grpc.UnaryUnaryClientInterceptor) and \
+ not isinstance(interceptor, grpc.UnaryStreamClientInterceptor) and \
+ not isinstance(interceptor, grpc.StreamUnaryClientInterceptor) and \
+ not isinstance(interceptor, grpc.StreamStreamClientInterceptor):
+            raise TypeError('interceptor must be '
+                            'grpc.UnaryUnaryClientInterceptor or '
+                            'grpc.UnaryStreamClientInterceptor or '
+                            'grpc.StreamUnaryClientInterceptor or '
+                            'grpc.StreamStreamClientInterceptor')
+ channel = _Channel(channel, interceptor)
+ return channel
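`intercept_channel` above folds the interceptors right-to-left, so the first interceptor passed ends up as the outermost wrapper and runs first. A minimal sketch of a client-side interceptor wired through it, assuming a hypothetical `LoggingInterceptor` and a server at `localhost:50051` (both illustrative, not part of this diff):

```
import grpc


class LoggingInterceptor(grpc.UnaryUnaryClientInterceptor):
    """Hypothetical interceptor: log the method, then delegate unchanged."""

    def intercept_unary_unary(self, continuation, client_call_details, request):
        print('calling', client_call_details.method)
        # continuation() invokes the next interceptor, or the real channel.
        return continuation(client_call_details, request)


channel = grpc.insecure_channel('localhost:50051')
intercepted = grpc.intercept_channel(channel, LoggingInterceptor())
```

Because `_Channel` dispatches on `isinstance` checks, a unary-unary interceptor like this one never sees streaming calls; those pass straight through to the wrapped channel.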
diff --git a/venv/Lib/site-packages/grpc/_plugin_wrapping.py b/venv/Lib/site-packages/grpc/_plugin_wrapping.py
new file mode 100644
index 000000000..e3bfa9091
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/_plugin_wrapping.py
@@ -0,0 +1,101 @@
+# Copyright 2015 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import collections
+import logging
+import threading
+
+import grpc
+from grpc import _common
+from grpc._cython import cygrpc
+
+_LOGGER = logging.getLogger(__name__)
+
+
+class _AuthMetadataContext(
+ collections.namedtuple('AuthMetadataContext', (
+ 'service_url',
+ 'method_name',
+ )), grpc.AuthMetadataContext):
+ pass
+
+
+class _CallbackState(object):
+
+ def __init__(self):
+ self.lock = threading.Lock()
+ self.called = False
+ self.exception = None
+
+
+class _AuthMetadataPluginCallback(grpc.AuthMetadataPluginCallback):
+
+ def __init__(self, state, callback):
+ self._state = state
+ self._callback = callback
+
+ def __call__(self, metadata, error):
+ with self._state.lock:
+ if self._state.exception is None:
+ if self._state.called:
+ raise RuntimeError(
+ 'AuthMetadataPluginCallback invoked more than once!')
+ else:
+ self._state.called = True
+ else:
+ raise RuntimeError(
+ 'AuthMetadataPluginCallback raised exception "{}"!'.format(
+ self._state.exception))
+ if error is None:
+ self._callback(metadata, cygrpc.StatusCode.ok, None)
+ else:
+ self._callback(None, cygrpc.StatusCode.internal,
+ _common.encode(str(error)))
+
+
+class _Plugin(object):
+
+ def __init__(self, metadata_plugin):
+ self._metadata_plugin = metadata_plugin
+
+ def __call__(self, service_url, method_name, callback):
+ context = _AuthMetadataContext(_common.decode(service_url),
+ _common.decode(method_name))
+ callback_state = _CallbackState()
+ try:
+ self._metadata_plugin(
+ context, _AuthMetadataPluginCallback(callback_state, callback))
+ except Exception as exception: # pylint: disable=broad-except
+ _LOGGER.exception(
+                'AuthMetadataPlugin "%s" raised exception!',
+ self._metadata_plugin)
+ with callback_state.lock:
+ callback_state.exception = exception
+ if callback_state.called:
+ return
+ callback(None, cygrpc.StatusCode.internal,
+ _common.encode(str(exception)))
+
+
+def metadata_plugin_call_credentials(metadata_plugin, name):
+ if name is None:
+ try:
+ effective_name = metadata_plugin.__name__
+ except AttributeError:
+ effective_name = metadata_plugin.__class__.__name__
+ else:
+ effective_name = name
+ return grpc.CallCredentials(
+ cygrpc.MetadataPluginCallCredentials(_Plugin(metadata_plugin),
+ _common.encode(effective_name)))
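`metadata_plugin_call_credentials` above is normally reached through the public `grpc.metadata_call_credentials` helper. A minimal sketch, assuming a hypothetical plugin that attaches a static bearer token (the token and target are illustrative):

```
import grpc


class StaticTokenAuth(grpc.AuthMetadataPlugin):
    """Hypothetical plugin: add an authorization header to every call."""

    def __call__(self, context, callback):
        # callback(metadata, error) is wrapped by _AuthMetadataPluginCallback.
        callback((('authorization', 'Bearer my-token'),), None)


call_creds = grpc.metadata_call_credentials(StaticTokenAuth(), name='static-token')
channel_creds = grpc.composite_channel_credentials(
    grpc.ssl_channel_credentials(), call_creds)
channel = grpc.secure_channel('localhost:50051', channel_creds)
```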
diff --git a/venv/Lib/site-packages/grpc/_runtime_protos.py b/venv/Lib/site-packages/grpc/_runtime_protos.py
new file mode 100644
index 000000000..7f555ccd9
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/_runtime_protos.py
@@ -0,0 +1,171 @@
+# Copyright 2020 The gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+
+_REQUIRED_SYMBOLS = ("_protos", "_services", "_protos_and_services")
+
+
+def _uninstalled_protos(*args, **kwargs):
+ raise NotImplementedError(
+ "Install the grpcio-tools package (1.32.0+) to use the protos function."
+ )
+
+
+def _uninstalled_services(*args, **kwargs):
+ raise NotImplementedError(
+ "Install the grpcio-tools package (1.32.0+) to use the services function."
+ )
+
+
+def _uninstalled_protos_and_services(*args, **kwargs):
+ raise NotImplementedError(
+ "Install the grpcio-tools package (1.32.0+) to use the protos_and_services function."
+ )
+
+
+def _interpreter_version_protos(*args, **kwargs):
+ raise NotImplementedError(
+ "The protos function is only on available on Python 3.X interpreters.")
+
+
+def _interpreter_version_services(*args, **kwargs):
+ raise NotImplementedError(
+ "The services function is only on available on Python 3.X interpreters."
+ )
+
+
+def _interpreter_version_protos_and_services(*args, **kwargs):
+ raise NotImplementedError(
+ "The protos_and_services function is only on available on Python 3.X interpreters."
+ )
+
+
+def protos(protobuf_path): # pylint: disable=unused-argument
+ """Returns a module generated by the indicated .proto file.
+
+ THIS IS AN EXPERIMENTAL API.
+
+ Use this function to retrieve classes corresponding to message
+ definitions in the .proto file.
+
+ To inspect the contents of the returned module, use the dir function.
+ For example:
+
+ ```
+ protos = grpc.protos("foo.proto")
+ print(dir(protos))
+ ```
+
+ The returned module object corresponds to the _pb2.py file generated
+ by protoc. The path is expected to be relative to an entry on sys.path
+    and all transitive dependencies of the file should also be resolvable
+ from an entry on sys.path.
+
+ To completely disable the machinery behind this function, set the
+ GRPC_PYTHON_DISABLE_DYNAMIC_STUBS environment variable to "true".
+
+ Args:
+ protobuf_path: The path to the .proto file on the filesystem. This path
+      must be resolvable from an entry on sys.path and so must all of its
+ transitive dependencies.
+
+ Returns:
+ A module object corresponding to the message code for the indicated
+ .proto file. Equivalent to a generated _pb2.py file.
+ """
+
+
+def services(protobuf_path): # pylint: disable=unused-argument
+ """Returns a module generated by the indicated .proto file.
+
+ THIS IS AN EXPERIMENTAL API.
+
+ Use this function to retrieve classes and functions corresponding to
+ service definitions in the .proto file, including both stub and servicer
+ definitions.
+
+ To inspect the contents of the returned module, use the dir function.
+ For example:
+
+ ```
+ services = grpc.services("foo.proto")
+ print(dir(services))
+ ```
+
+ The returned module object corresponds to the _pb2_grpc.py file generated
+ by protoc. The path is expected to be relative to an entry on sys.path
+    and all transitive dependencies of the file should also be resolvable
+ from an entry on sys.path.
+
+ To completely disable the machinery behind this function, set the
+ GRPC_PYTHON_DISABLE_DYNAMIC_STUBS environment variable to "true".
+
+ Args:
+ protobuf_path: The path to the .proto file on the filesystem. This path
+      must be resolvable from an entry on sys.path and so must all of its
+ transitive dependencies.
+
+ Returns:
+ A module object corresponding to the stub/service code for the indicated
+ .proto file. Equivalent to a generated _pb2_grpc.py file.
+ """
+
+
+def protos_and_services(protobuf_path): # pylint: disable=unused-argument
+ """Returns a 2-tuple of modules corresponding to protos and services.
+
+ THIS IS AN EXPERIMENTAL API.
+
+ The return value of this function is equivalent to a call to protos and a
+ call to services.
+
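+    To inspect the contents of the returned modules, use the dir function.
+    For example:
+
+    ```
+    protos, services = grpc.protos_and_services("foo.proto")
+    print(dir(protos), dir(services))
+    ```
+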
+ To completely disable the machinery behind this function, set the
+ GRPC_PYTHON_DISABLE_DYNAMIC_STUBS environment variable to "true".
+
+ Args:
+ protobuf_path: The path to the .proto file on the filesystem. This path
+      must be resolvable from an entry on sys.path and so must all of its
+ transitive dependencies.
+
+ Returns:
+ A 2-tuple of module objects corresponding to (protos(path), services(path)).
+ """
+
+
+if sys.version_info < (3, 5, 0):
+ protos = _interpreter_version_protos
+ services = _interpreter_version_services
+ protos_and_services = _interpreter_version_protos_and_services
+else:
+ try:
+ import grpc_tools # pylint: disable=unused-import
+ except ImportError as e:
+ # NOTE: It's possible that we're encountering a transitive ImportError, so
+ # we check for that and re-raise if so.
+ if "grpc_tools" not in e.args[0]:
+ raise
+ protos = _uninstalled_protos
+ services = _uninstalled_services
+ protos_and_services = _uninstalled_protos_and_services
+ else:
+ import grpc_tools.protoc # pylint: disable=unused-import
+ if all(hasattr(grpc_tools.protoc, sym) for sym in _REQUIRED_SYMBOLS):
+ from grpc_tools.protoc import _protos as protos # pylint: disable=unused-import
+ from grpc_tools.protoc import _services as services # pylint: disable=unused-import
+ from grpc_tools.protoc import _protos_and_services as protos_and_services # pylint: disable=unused-import
+ else:
+ protos = _uninstalled_protos
+ services = _uninstalled_services
+ protos_and_services = _uninstalled_protos_and_services
diff --git a/venv/Lib/site-packages/grpc/_server.py b/venv/Lib/site-packages/grpc/_server.py
new file mode 100644
index 000000000..48ff74399
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/_server.py
@@ -0,0 +1,995 @@
+# Copyright 2016 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Service-side implementation of gRPC Python."""
+
+import collections
+import enum
+import logging
+import threading
+import time
+
+from concurrent import futures
+import six
+
+import grpc
+from grpc import _common
+from grpc import _compression
+from grpc import _interceptor
+from grpc._cython import cygrpc
+
+_LOGGER = logging.getLogger(__name__)
+
+_SHUTDOWN_TAG = 'shutdown'
+_REQUEST_CALL_TAG = 'request_call'
+
+_RECEIVE_CLOSE_ON_SERVER_TOKEN = 'receive_close_on_server'
+_SEND_INITIAL_METADATA_TOKEN = 'send_initial_metadata'
+_RECEIVE_MESSAGE_TOKEN = 'receive_message'
+_SEND_MESSAGE_TOKEN = 'send_message'
+_SEND_INITIAL_METADATA_AND_SEND_MESSAGE_TOKEN = (
+ 'send_initial_metadata * send_message')
+_SEND_STATUS_FROM_SERVER_TOKEN = 'send_status_from_server'
+_SEND_INITIAL_METADATA_AND_SEND_STATUS_FROM_SERVER_TOKEN = (
+ 'send_initial_metadata * send_status_from_server')
+
+_OPEN = 'open'
+_CLOSED = 'closed'
+_CANCELLED = 'cancelled'
+
+_EMPTY_FLAGS = 0
+
+_DEALLOCATED_SERVER_CHECK_PERIOD_S = 1.0
+_INF_TIMEOUT = 1e9
+
+
+def _serialized_request(request_event):
+ return request_event.batch_operations[0].message()
+
+
+def _application_code(code):
+ cygrpc_code = _common.STATUS_CODE_TO_CYGRPC_STATUS_CODE.get(code)
+ return cygrpc.StatusCode.unknown if cygrpc_code is None else cygrpc_code
+
+
+def _completion_code(state):
+ if state.code is None:
+ return cygrpc.StatusCode.ok
+ else:
+ return _application_code(state.code)
+
+
+def _abortion_code(state, code):
+ if state.code is None:
+ return code
+ else:
+ return _application_code(state.code)
+
+
+def _details(state):
+ return b'' if state.details is None else state.details
+
+
+class _HandlerCallDetails(
+ collections.namedtuple('_HandlerCallDetails', (
+ 'method',
+ 'invocation_metadata',
+ )), grpc.HandlerCallDetails):
+ pass
+
+
+class _RPCState(object):
+
+ def __init__(self):
+ self.condition = threading.Condition()
+ self.due = set()
+ self.request = None
+ self.client = _OPEN
+ self.initial_metadata_allowed = True
+ self.compression_algorithm = None
+ self.disable_next_compression = False
+ self.trailing_metadata = None
+ self.code = None
+ self.details = None
+ self.statused = False
+ self.rpc_errors = []
+ self.callbacks = []
+ self.aborted = False
+
+
+def _raise_rpc_error(state):
+ rpc_error = grpc.RpcError()
+ state.rpc_errors.append(rpc_error)
+ raise rpc_error
+
+
+def _possibly_finish_call(state, token):
+ state.due.remove(token)
+ if not _is_rpc_state_active(state) and not state.due:
+ callbacks = state.callbacks
+ state.callbacks = None
+ return state, callbacks
+ else:
+ return None, ()
+
+
+def _send_status_from_server(state, token):
+
+ def send_status_from_server(unused_send_status_from_server_event):
+ with state.condition:
+ return _possibly_finish_call(state, token)
+
+ return send_status_from_server
+
+
+def _get_initial_metadata(state, metadata):
+ with state.condition:
+ if state.compression_algorithm:
+ compression_metadata = (
+ _compression.compression_algorithm_to_metadata(
+ state.compression_algorithm),)
+ if metadata is None:
+ return compression_metadata
+ else:
+ return compression_metadata + tuple(metadata)
+ else:
+ return metadata
+
+
+def _get_initial_metadata_operation(state, metadata):
+ operation = cygrpc.SendInitialMetadataOperation(
+ _get_initial_metadata(state, metadata), _EMPTY_FLAGS)
+ return operation
+
+
+def _abort(state, call, code, details):
+ if state.client is not _CANCELLED:
+ effective_code = _abortion_code(state, code)
+ effective_details = details if state.details is None else state.details
+ if state.initial_metadata_allowed:
+ operations = (
+ _get_initial_metadata_operation(state, None),
+ cygrpc.SendStatusFromServerOperation(state.trailing_metadata,
+ effective_code,
+ effective_details,
+ _EMPTY_FLAGS),
+ )
+ token = _SEND_INITIAL_METADATA_AND_SEND_STATUS_FROM_SERVER_TOKEN
+ else:
+ operations = (cygrpc.SendStatusFromServerOperation(
+ state.trailing_metadata, effective_code, effective_details,
+ _EMPTY_FLAGS),)
+ token = _SEND_STATUS_FROM_SERVER_TOKEN
+ call.start_server_batch(operations,
+ _send_status_from_server(state, token))
+ state.statused = True
+ state.due.add(token)
+
+
+def _receive_close_on_server(state):
+
+ def receive_close_on_server(receive_close_on_server_event):
+ with state.condition:
+ if receive_close_on_server_event.batch_operations[0].cancelled():
+ state.client = _CANCELLED
+ elif state.client is _OPEN:
+ state.client = _CLOSED
+ state.condition.notify_all()
+ return _possibly_finish_call(state, _RECEIVE_CLOSE_ON_SERVER_TOKEN)
+
+ return receive_close_on_server
+
+
+def _receive_message(state, call, request_deserializer):
+
+ def receive_message(receive_message_event):
+ serialized_request = _serialized_request(receive_message_event)
+ if serialized_request is None:
+ with state.condition:
+ if state.client is _OPEN:
+ state.client = _CLOSED
+ state.condition.notify_all()
+ return _possibly_finish_call(state, _RECEIVE_MESSAGE_TOKEN)
+ else:
+ request = _common.deserialize(serialized_request,
+ request_deserializer)
+ with state.condition:
+ if request is None:
+ _abort(state, call, cygrpc.StatusCode.internal,
+ b'Exception deserializing request!')
+ else:
+ state.request = request
+ state.condition.notify_all()
+ return _possibly_finish_call(state, _RECEIVE_MESSAGE_TOKEN)
+
+ return receive_message
+
+
+def _send_initial_metadata(state):
+
+ def send_initial_metadata(unused_send_initial_metadata_event):
+ with state.condition:
+ return _possibly_finish_call(state, _SEND_INITIAL_METADATA_TOKEN)
+
+ return send_initial_metadata
+
+
+def _send_message(state, token):
+
+ def send_message(unused_send_message_event):
+ with state.condition:
+ state.condition.notify_all()
+ return _possibly_finish_call(state, token)
+
+ return send_message
+
+
+class _Context(grpc.ServicerContext):
+
+ def __init__(self, rpc_event, state, request_deserializer):
+ self._rpc_event = rpc_event
+ self._state = state
+ self._request_deserializer = request_deserializer
+
+ def is_active(self):
+ with self._state.condition:
+ return _is_rpc_state_active(self._state)
+
+ def time_remaining(self):
+ return max(self._rpc_event.call_details.deadline - time.time(), 0)
+
+ def cancel(self):
+ self._rpc_event.call.cancel()
+
+ def add_callback(self, callback):
+ with self._state.condition:
+ if self._state.callbacks is None:
+ return False
+ else:
+ self._state.callbacks.append(callback)
+ return True
+
+ def disable_next_message_compression(self):
+ with self._state.condition:
+ self._state.disable_next_compression = True
+
+ def invocation_metadata(self):
+ return self._rpc_event.invocation_metadata
+
+ def peer(self):
+ return _common.decode(self._rpc_event.call.peer())
+
+ def peer_identities(self):
+ return cygrpc.peer_identities(self._rpc_event.call)
+
+ def peer_identity_key(self):
+ id_key = cygrpc.peer_identity_key(self._rpc_event.call)
+ return id_key if id_key is None else _common.decode(id_key)
+
+ def auth_context(self):
+ return {
+ _common.decode(key): value for key, value in six.iteritems(
+ cygrpc.auth_context(self._rpc_event.call))
+ }
+
+ def set_compression(self, compression):
+ with self._state.condition:
+ self._state.compression_algorithm = compression
+
+ def send_initial_metadata(self, initial_metadata):
+ with self._state.condition:
+ if self._state.client is _CANCELLED:
+ _raise_rpc_error(self._state)
+ else:
+ if self._state.initial_metadata_allowed:
+ operation = _get_initial_metadata_operation(
+ self._state, initial_metadata)
+ self._rpc_event.call.start_server_batch(
+ (operation,), _send_initial_metadata(self._state))
+ self._state.initial_metadata_allowed = False
+ self._state.due.add(_SEND_INITIAL_METADATA_TOKEN)
+ else:
+ raise ValueError('Initial metadata no longer allowed!')
+
+ def set_trailing_metadata(self, trailing_metadata):
+ with self._state.condition:
+ self._state.trailing_metadata = trailing_metadata
+
+ def abort(self, code, details):
+ # treat OK like other invalid arguments: fail the RPC
+ if code == grpc.StatusCode.OK:
+ _LOGGER.error(
+ 'abort() called with StatusCode.OK; returning UNKNOWN')
+ code = grpc.StatusCode.UNKNOWN
+ details = ''
+ with self._state.condition:
+ self._state.code = code
+ self._state.details = _common.encode(details)
+ self._state.aborted = True
+ raise Exception()
+
+ def abort_with_status(self, status):
+ self._state.trailing_metadata = status.trailing_metadata
+ self.abort(status.code, status.details)
+
+ def set_code(self, code):
+ with self._state.condition:
+ self._state.code = code
+
+ def set_details(self, details):
+ with self._state.condition:
+ self._state.details = _common.encode(details)
+
+ def _finalize_state(self):
+ pass
+
+
+class _RequestIterator(object):
+
+ def __init__(self, state, call, request_deserializer):
+ self._state = state
+ self._call = call
+ self._request_deserializer = request_deserializer
+
+ def _raise_or_start_receive_message(self):
+ if self._state.client is _CANCELLED:
+ _raise_rpc_error(self._state)
+ elif not _is_rpc_state_active(self._state):
+ raise StopIteration()
+ else:
+ self._call.start_server_batch(
+ (cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),),
+ _receive_message(self._state, self._call,
+ self._request_deserializer))
+ self._state.due.add(_RECEIVE_MESSAGE_TOKEN)
+
+ def _look_for_request(self):
+ if self._state.client is _CANCELLED:
+ _raise_rpc_error(self._state)
+ elif (self._state.request is None and
+ _RECEIVE_MESSAGE_TOKEN not in self._state.due):
+ raise StopIteration()
+ else:
+ request = self._state.request
+ self._state.request = None
+ return request
+
+ def _next(self):
+ with self._state.condition:
+ self._raise_or_start_receive_message()
+ while True:
+ self._state.condition.wait()
+ request = self._look_for_request()
+ if request is not None:
+ return request
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ return self._next()
+
+ def next(self):
+ return self._next()
+
+
+def _unary_request(rpc_event, state, request_deserializer):
+
+ def unary_request():
+ with state.condition:
+ if not _is_rpc_state_active(state):
+ return None
+ else:
+ rpc_event.call.start_server_batch(
+ (cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),),
+ _receive_message(state, rpc_event.call,
+ request_deserializer))
+ state.due.add(_RECEIVE_MESSAGE_TOKEN)
+ while True:
+ state.condition.wait()
+ if state.request is None:
+ if state.client is _CLOSED:
+ details = '"{}" requires exactly one request message.'.format(
+ rpc_event.call_details.method)
+ _abort(state, rpc_event.call,
+ cygrpc.StatusCode.unimplemented,
+ _common.encode(details))
+ return None
+ elif state.client is _CANCELLED:
+ return None
+ else:
+ request = state.request
+ state.request = None
+ return request
+
+ return unary_request
+
+
+def _call_behavior(rpc_event,
+ state,
+ behavior,
+ argument,
+ request_deserializer,
+ send_response_callback=None):
+ from grpc import _create_servicer_context
+ with _create_servicer_context(rpc_event, state,
+ request_deserializer) as context:
+ try:
+ response_or_iterator = None
+ if send_response_callback is not None:
+ response_or_iterator = behavior(argument, context,
+ send_response_callback)
+ else:
+ response_or_iterator = behavior(argument, context)
+ return response_or_iterator, True
+ except Exception as exception: # pylint: disable=broad-except
+ with state.condition:
+ if state.aborted:
+ _abort(state, rpc_event.call, cygrpc.StatusCode.unknown,
+ b'RPC Aborted')
+ elif exception not in state.rpc_errors:
+ details = 'Exception calling application: {}'.format(
+ exception)
+ _LOGGER.exception(details)
+ _abort(state, rpc_event.call, cygrpc.StatusCode.unknown,
+ _common.encode(details))
+ return None, False
+
+
+def _take_response_from_response_iterator(rpc_event, state, response_iterator):
+ try:
+ return next(response_iterator), True
+ except StopIteration:
+ return None, True
+ except Exception as exception: # pylint: disable=broad-except
+ with state.condition:
+ if state.aborted:
+ _abort(state, rpc_event.call, cygrpc.StatusCode.unknown,
+ b'RPC Aborted')
+ elif exception not in state.rpc_errors:
+ details = 'Exception iterating responses: {}'.format(exception)
+ _LOGGER.exception(details)
+ _abort(state, rpc_event.call, cygrpc.StatusCode.unknown,
+ _common.encode(details))
+ return None, False
+
+
+def _serialize_response(rpc_event, state, response, response_serializer):
+ serialized_response = _common.serialize(response, response_serializer)
+ if serialized_response is None:
+ with state.condition:
+ _abort(state, rpc_event.call, cygrpc.StatusCode.internal,
+ b'Failed to serialize response!')
+ return None
+ else:
+ return serialized_response
+
+
+def _get_send_message_op_flags_from_state(state):
+ if state.disable_next_compression:
+ return cygrpc.WriteFlag.no_compress
+ else:
+ return _EMPTY_FLAGS
+
+
+def _reset_per_message_state(state):
+ with state.condition:
+ state.disable_next_compression = False
+
+
+def _send_response(rpc_event, state, serialized_response):
+ with state.condition:
+ if not _is_rpc_state_active(state):
+ return False
+ else:
+ if state.initial_metadata_allowed:
+ operations = (
+ _get_initial_metadata_operation(state, None),
+ cygrpc.SendMessageOperation(
+ serialized_response,
+ _get_send_message_op_flags_from_state(state)),
+ )
+ state.initial_metadata_allowed = False
+ token = _SEND_INITIAL_METADATA_AND_SEND_MESSAGE_TOKEN
+ else:
+ operations = (cygrpc.SendMessageOperation(
+ serialized_response,
+ _get_send_message_op_flags_from_state(state)),)
+ token = _SEND_MESSAGE_TOKEN
+ rpc_event.call.start_server_batch(operations,
+ _send_message(state, token))
+ state.due.add(token)
+ _reset_per_message_state(state)
+ while True:
+ state.condition.wait()
+ if token not in state.due:
+ return _is_rpc_state_active(state)
+
+
+def _status(rpc_event, state, serialized_response):
+ with state.condition:
+ if state.client is not _CANCELLED:
+ code = _completion_code(state)
+ details = _details(state)
+ operations = [
+ cygrpc.SendStatusFromServerOperation(state.trailing_metadata,
+ code, details,
+ _EMPTY_FLAGS),
+ ]
+ if state.initial_metadata_allowed:
+ operations.append(_get_initial_metadata_operation(state, None))
+ if serialized_response is not None:
+ operations.append(
+ cygrpc.SendMessageOperation(
+ serialized_response,
+ _get_send_message_op_flags_from_state(state)))
+ rpc_event.call.start_server_batch(
+ operations,
+ _send_status_from_server(state, _SEND_STATUS_FROM_SERVER_TOKEN))
+ state.statused = True
+ _reset_per_message_state(state)
+ state.due.add(_SEND_STATUS_FROM_SERVER_TOKEN)
+
+
+def _unary_response_in_pool(rpc_event, state, behavior, argument_thunk,
+ request_deserializer, response_serializer):
+ cygrpc.install_context_from_request_call_event(rpc_event)
+ try:
+ argument = argument_thunk()
+ if argument is not None:
+ response, proceed = _call_behavior(rpc_event, state, behavior,
+ argument, request_deserializer)
+ if proceed:
+ serialized_response = _serialize_response(
+ rpc_event, state, response, response_serializer)
+ if serialized_response is not None:
+ _status(rpc_event, state, serialized_response)
+ finally:
+ cygrpc.uninstall_context()
+
+
+def _stream_response_in_pool(rpc_event, state, behavior, argument_thunk,
+ request_deserializer, response_serializer):
+ cygrpc.install_context_from_request_call_event(rpc_event)
+
+ def send_response(response):
+ if response is None:
+ _status(rpc_event, state, None)
+ else:
+ serialized_response = _serialize_response(rpc_event, state,
+ response,
+ response_serializer)
+ if serialized_response is not None:
+ _send_response(rpc_event, state, serialized_response)
+
+ try:
+ argument = argument_thunk()
+ if argument is not None:
+ if hasattr(behavior, 'experimental_non_blocking'
+ ) and behavior.experimental_non_blocking:
+ _call_behavior(rpc_event,
+ state,
+ behavior,
+ argument,
+ request_deserializer,
+ send_response_callback=send_response)
+ else:
+ response_iterator, proceed = _call_behavior(
+ rpc_event, state, behavior, argument, request_deserializer)
+ if proceed:
+ _send_message_callback_to_blocking_iterator_adapter(
+ rpc_event, state, send_response, response_iterator)
+ finally:
+ cygrpc.uninstall_context()
+
+
+def _is_rpc_state_active(state):
+ return state.client is not _CANCELLED and not state.statused
+
+
+def _send_message_callback_to_blocking_iterator_adapter(rpc_event, state,
+ send_response_callback,
+ response_iterator):
+ while True:
+ response, proceed = _take_response_from_response_iterator(
+ rpc_event, state, response_iterator)
+ if proceed:
+ send_response_callback(response)
+ if not _is_rpc_state_active(state):
+ break
+ else:
+ break
+
+
+def _select_thread_pool_for_behavior(behavior, default_thread_pool):
+ if hasattr(behavior, 'experimental_thread_pool') and isinstance(
+ behavior.experimental_thread_pool, futures.ThreadPoolExecutor):
+ return behavior.experimental_thread_pool
+ else:
+ return default_thread_pool
+
+
+def _handle_unary_unary(rpc_event, state, method_handler, default_thread_pool):
+ unary_request = _unary_request(rpc_event, state,
+ method_handler.request_deserializer)
+ thread_pool = _select_thread_pool_for_behavior(method_handler.unary_unary,
+ default_thread_pool)
+ return thread_pool.submit(_unary_response_in_pool, rpc_event, state,
+ method_handler.unary_unary, unary_request,
+ method_handler.request_deserializer,
+ method_handler.response_serializer)
+
+
+def _handle_unary_stream(rpc_event, state, method_handler, default_thread_pool):
+ unary_request = _unary_request(rpc_event, state,
+ method_handler.request_deserializer)
+ thread_pool = _select_thread_pool_for_behavior(method_handler.unary_stream,
+ default_thread_pool)
+ return thread_pool.submit(_stream_response_in_pool, rpc_event, state,
+ method_handler.unary_stream, unary_request,
+ method_handler.request_deserializer,
+ method_handler.response_serializer)
+
+
+def _handle_stream_unary(rpc_event, state, method_handler, default_thread_pool):
+ request_iterator = _RequestIterator(state, rpc_event.call,
+ method_handler.request_deserializer)
+ thread_pool = _select_thread_pool_for_behavior(method_handler.stream_unary,
+ default_thread_pool)
+ return thread_pool.submit(_unary_response_in_pool, rpc_event, state,
+ method_handler.stream_unary,
+ lambda: request_iterator,
+ method_handler.request_deserializer,
+ method_handler.response_serializer)
+
+
+def _handle_stream_stream(rpc_event, state, method_handler,
+ default_thread_pool):
+ request_iterator = _RequestIterator(state, rpc_event.call,
+ method_handler.request_deserializer)
+ thread_pool = _select_thread_pool_for_behavior(method_handler.stream_stream,
+ default_thread_pool)
+ return thread_pool.submit(_stream_response_in_pool, rpc_event, state,
+ method_handler.stream_stream,
+ lambda: request_iterator,
+ method_handler.request_deserializer,
+ method_handler.response_serializer)
+
+
+def _find_method_handler(rpc_event, generic_handlers, interceptor_pipeline):
+
+ def query_handlers(handler_call_details):
+ for generic_handler in generic_handlers:
+ method_handler = generic_handler.service(handler_call_details)
+ if method_handler is not None:
+ return method_handler
+ return None
+
+ handler_call_details = _HandlerCallDetails(
+ _common.decode(rpc_event.call_details.method),
+ rpc_event.invocation_metadata)
+
+ if interceptor_pipeline is not None:
+ return interceptor_pipeline.execute(query_handlers,
+ handler_call_details)
+ else:
+ return query_handlers(handler_call_details)
+
+
+def _reject_rpc(rpc_event, status, details):
+ rpc_state = _RPCState()
+ operations = (
+ _get_initial_metadata_operation(rpc_state, None),
+ cygrpc.ReceiveCloseOnServerOperation(_EMPTY_FLAGS),
+ cygrpc.SendStatusFromServerOperation(None, status, details,
+ _EMPTY_FLAGS),
+ )
+ rpc_event.call.start_server_batch(operations, lambda ignored_event: (
+ rpc_state,
+ (),
+ ))
+ return rpc_state
+
+
+def _handle_with_method_handler(rpc_event, method_handler, thread_pool):
+ state = _RPCState()
+ with state.condition:
+ rpc_event.call.start_server_batch(
+ (cygrpc.ReceiveCloseOnServerOperation(_EMPTY_FLAGS),),
+ _receive_close_on_server(state))
+ state.due.add(_RECEIVE_CLOSE_ON_SERVER_TOKEN)
+ if method_handler.request_streaming:
+ if method_handler.response_streaming:
+ return state, _handle_stream_stream(rpc_event, state,
+ method_handler, thread_pool)
+ else:
+ return state, _handle_stream_unary(rpc_event, state,
+ method_handler, thread_pool)
+ else:
+ if method_handler.response_streaming:
+ return state, _handle_unary_stream(rpc_event, state,
+ method_handler, thread_pool)
+ else:
+ return state, _handle_unary_unary(rpc_event, state,
+ method_handler, thread_pool)
+
+
+def _handle_call(rpc_event, generic_handlers, interceptor_pipeline, thread_pool,
+ concurrency_exceeded):
+ if not rpc_event.success:
+ return None, None
+ if rpc_event.call_details.method is not None:
+ try:
+ method_handler = _find_method_handler(rpc_event, generic_handlers,
+ interceptor_pipeline)
+ except Exception as exception: # pylint: disable=broad-except
+ details = 'Exception servicing handler: {}'.format(exception)
+ _LOGGER.exception(details)
+ return _reject_rpc(rpc_event, cygrpc.StatusCode.unknown,
+ b'Error in service handler!'), None
+ if method_handler is None:
+ return _reject_rpc(rpc_event, cygrpc.StatusCode.unimplemented,
+ b'Method not found!'), None
+ elif concurrency_exceeded:
+ return _reject_rpc(rpc_event, cygrpc.StatusCode.resource_exhausted,
+ b'Concurrent RPC limit exceeded!'), None
+ else:
+ return _handle_with_method_handler(rpc_event, method_handler,
+ thread_pool)
+ else:
+ return None, None
+
+
+@enum.unique
+class _ServerStage(enum.Enum):
+ STOPPED = 'stopped'
+ STARTED = 'started'
+ GRACE = 'grace'
+
+
+class _ServerState(object):
+
+ # pylint: disable=too-many-arguments
+ def __init__(self, completion_queue, server, generic_handlers,
+ interceptor_pipeline, thread_pool, maximum_concurrent_rpcs):
+ self.lock = threading.RLock()
+ self.completion_queue = completion_queue
+ self.server = server
+ self.generic_handlers = list(generic_handlers)
+ self.interceptor_pipeline = interceptor_pipeline
+ self.thread_pool = thread_pool
+ self.stage = _ServerStage.STOPPED
+ self.termination_event = threading.Event()
+ self.shutdown_events = [self.termination_event]
+ self.maximum_concurrent_rpcs = maximum_concurrent_rpcs
+ self.active_rpc_count = 0
+
+ # TODO(https://github.com/grpc/grpc/issues/6597): eliminate these fields.
+ self.rpc_states = set()
+ self.due = set()
+
+ # A "volatile" flag to interrupt the daemon serving thread
+ self.server_deallocated = False
+
+
+def _add_generic_handlers(state, generic_handlers):
+ with state.lock:
+ state.generic_handlers.extend(generic_handlers)
+
+
+def _add_insecure_port(state, address):
+ with state.lock:
+ return state.server.add_http2_port(address)
+
+
+def _add_secure_port(state, address, server_credentials):
+ with state.lock:
+ return state.server.add_http2_port(address,
+ server_credentials._credentials)
+
+
+def _request_call(state):
+ state.server.request_call(state.completion_queue, state.completion_queue,
+ _REQUEST_CALL_TAG)
+ state.due.add(_REQUEST_CALL_TAG)
+
+
+# TODO(https://github.com/grpc/grpc/issues/6597): delete this function.
+def _stop_serving(state):
+ if not state.rpc_states and not state.due:
+ state.server.destroy()
+ for shutdown_event in state.shutdown_events:
+ shutdown_event.set()
+ state.stage = _ServerStage.STOPPED
+ return True
+ else:
+ return False
+
+
+def _on_call_completed(state):
+ with state.lock:
+ state.active_rpc_count -= 1
+
+
+def _process_event_and_continue(state, event):
+ should_continue = True
+ if event.tag is _SHUTDOWN_TAG:
+ with state.lock:
+ state.due.remove(_SHUTDOWN_TAG)
+ if _stop_serving(state):
+ should_continue = False
+ elif event.tag is _REQUEST_CALL_TAG:
+ with state.lock:
+ state.due.remove(_REQUEST_CALL_TAG)
+ concurrency_exceeded = (
+ state.maximum_concurrent_rpcs is not None and
+ state.active_rpc_count >= state.maximum_concurrent_rpcs)
+ rpc_state, rpc_future = _handle_call(event, state.generic_handlers,
+ state.interceptor_pipeline,
+ state.thread_pool,
+ concurrency_exceeded)
+ if rpc_state is not None:
+ state.rpc_states.add(rpc_state)
+ if rpc_future is not None:
+ state.active_rpc_count += 1
+ rpc_future.add_done_callback(
+ lambda unused_future: _on_call_completed(state))
+ if state.stage is _ServerStage.STARTED:
+ _request_call(state)
+ elif _stop_serving(state):
+ should_continue = False
+ else:
+ rpc_state, callbacks = event.tag(event)
+ for callback in callbacks:
+ try:
+ callback()
+ except Exception: # pylint: disable=broad-except
+ _LOGGER.exception('Exception calling callback!')
+ if rpc_state is not None:
+ with state.lock:
+ state.rpc_states.remove(rpc_state)
+ if _stop_serving(state):
+ should_continue = False
+ return should_continue
+
+
+def _serve(state):
+ while True:
+ timeout = time.time() + _DEALLOCATED_SERVER_CHECK_PERIOD_S
+ event = state.completion_queue.poll(timeout)
+ if state.server_deallocated:
+ _begin_shutdown_once(state)
+ if event.completion_type != cygrpc.CompletionType.queue_timeout:
+ if not _process_event_and_continue(state, event):
+ return
+ # We want to force the deletion of the previous event
+ # ~before~ we poll again; if the event has a reference
+ # to a shutdown Call object, this can induce spinlock.
+ event = None
+
+
+def _begin_shutdown_once(state):
+ with state.lock:
+ if state.stage is _ServerStage.STARTED:
+ state.server.shutdown(state.completion_queue, _SHUTDOWN_TAG)
+ state.stage = _ServerStage.GRACE
+ state.due.add(_SHUTDOWN_TAG)
+
+
+def _stop(state, grace):
+ with state.lock:
+ if state.stage is _ServerStage.STOPPED:
+ shutdown_event = threading.Event()
+ shutdown_event.set()
+ return shutdown_event
+ else:
+ _begin_shutdown_once(state)
+ shutdown_event = threading.Event()
+ state.shutdown_events.append(shutdown_event)
+ if grace is None:
+ state.server.cancel_all_calls()
+ else:
+
+ def cancel_all_calls_after_grace():
+ shutdown_event.wait(timeout=grace)
+ with state.lock:
+ state.server.cancel_all_calls()
+
+ thread = threading.Thread(target=cancel_all_calls_after_grace)
+ thread.start()
+ return shutdown_event
+ shutdown_event.wait()
+ return shutdown_event
+
+
+def _start(state):
+ with state.lock:
+ if state.stage is not _ServerStage.STOPPED:
+ raise ValueError('Cannot start already-started server!')
+ state.server.start()
+ state.stage = _ServerStage.STARTED
+ _request_call(state)
+
+ thread = threading.Thread(target=_serve, args=(state,))
+ thread.daemon = True
+ thread.start()
+
+
+def _validate_generic_rpc_handlers(generic_rpc_handlers):
+ for generic_rpc_handler in generic_rpc_handlers:
+ service_attribute = getattr(generic_rpc_handler, 'service', None)
+ if service_attribute is None:
+ raise AttributeError(
+ '"{}" must conform to grpc.GenericRpcHandler type but does '
+ 'not have "service" method!'.format(generic_rpc_handler))
+
+
+def _augment_options(base_options, compression):
+ compression_option = _compression.create_channel_option(compression)
+ return tuple(base_options) + compression_option
+
+
+class _Server(grpc.Server):
+
+ # pylint: disable=too-many-arguments
+ def __init__(self, thread_pool, generic_handlers, interceptors, options,
+ maximum_concurrent_rpcs, compression):
+ completion_queue = cygrpc.CompletionQueue()
+ server = cygrpc.Server(_augment_options(options, compression))
+ server.register_completion_queue(completion_queue)
+ self._state = _ServerState(completion_queue, server, generic_handlers,
+ _interceptor.service_pipeline(interceptors),
+ thread_pool, maximum_concurrent_rpcs)
+
+ def add_generic_rpc_handlers(self, generic_rpc_handlers):
+ _validate_generic_rpc_handlers(generic_rpc_handlers)
+ _add_generic_handlers(self._state, generic_rpc_handlers)
+
+ def add_insecure_port(self, address):
+ return _common.validate_port_binding_result(
+ address, _add_insecure_port(self._state, _common.encode(address)))
+
+ def add_secure_port(self, address, server_credentials):
+ return _common.validate_port_binding_result(
+ address,
+ _add_secure_port(self._state, _common.encode(address),
+ server_credentials))
+
+ def start(self):
+ _start(self._state)
+
+ def wait_for_termination(self, timeout=None):
+ # NOTE(https://bugs.python.org/issue35935)
+ # Remove this workaround once threading.Event.wait() is working with
+ # CTRL+C across platforms.
+ return _common.wait(self._state.termination_event.wait,
+ self._state.termination_event.is_set,
+ timeout=timeout)
+
+ def stop(self, grace):
+ return _stop(self._state, grace)
+
+ def __del__(self):
+ if hasattr(self, '_state'):
+            # We cannot grab a lock in __del__(), so set a flag to signal the
+ # serving daemon thread (if it exists) to initiate shutdown.
+ self._state.server_deallocated = True
+
+
+def create_server(thread_pool, generic_rpc_handlers, interceptors, options,
+ maximum_concurrent_rpcs, compression):
+ _validate_generic_rpc_handlers(generic_rpc_handlers)
+ return _Server(thread_pool, generic_rpc_handlers, interceptors, options,
+ maximum_concurrent_rpcs, compression)
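+
+
+# Illustrative usage sketch (not part of this module): the public grpc.server()
+# factory is the intended entry point to create_server() above. The port and
+# worker count below are placeholders.
+#
+#   from concurrent import futures
+#   import grpc
+#
+#   server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
+#   server.add_insecure_port('[::]:50051')  # binding result is validated
+#   server.start()
+#   server.wait_for_termination()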
diff --git a/venv/Lib/site-packages/grpc/_simple_stubs.py b/venv/Lib/site-packages/grpc/_simple_stubs.py
new file mode 100644
index 000000000..baa7ae5db
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/_simple_stubs.py
@@ -0,0 +1,493 @@
+# Copyright 2020 The gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Functions that obviate explicit stubs and explicit channels."""
+
+import collections
+import datetime
+import os
+import logging
+import threading
+from typing import (Any, AnyStr, Callable, Dict, Iterator, Optional, Sequence,
+ Tuple, TypeVar, Union)
+
+import grpc
+from grpc.experimental import experimental_api
+
+RequestType = TypeVar('RequestType')
+ResponseType = TypeVar('ResponseType')
+
+OptionsType = Sequence[Tuple[str, str]]
+CacheKey = Tuple[str, OptionsType, Optional[grpc.ChannelCredentials], Optional[
+ grpc.Compression]]
+
+_LOGGER = logging.getLogger(__name__)
+
+_EVICTION_PERIOD_KEY = "GRPC_PYTHON_MANAGED_CHANNEL_EVICTION_SECONDS"
+if _EVICTION_PERIOD_KEY in os.environ:
+ _EVICTION_PERIOD = datetime.timedelta(
+ seconds=float(os.environ[_EVICTION_PERIOD_KEY]))
+ _LOGGER.debug("Setting managed channel eviction period to %s",
+ _EVICTION_PERIOD)
+else:
+ _EVICTION_PERIOD = datetime.timedelta(minutes=10)
+
+_MAXIMUM_CHANNELS_KEY = "GRPC_PYTHON_MANAGED_CHANNEL_MAXIMUM"
+if _MAXIMUM_CHANNELS_KEY in os.environ:
+ _MAXIMUM_CHANNELS = int(os.environ[_MAXIMUM_CHANNELS_KEY])
+ _LOGGER.debug("Setting maximum managed channels to %d", _MAXIMUM_CHANNELS)
+else:
+ _MAXIMUM_CHANNELS = 2**8
+
+_DEFAULT_TIMEOUT_KEY = "GRPC_PYTHON_DEFAULT_TIMEOUT_SECONDS"
+if _DEFAULT_TIMEOUT_KEY in os.environ:
+ _DEFAULT_TIMEOUT = float(os.environ[_DEFAULT_TIMEOUT_KEY])
+ _LOGGER.debug("Setting default timeout seconds to %f", _DEFAULT_TIMEOUT)
+else:
+ _DEFAULT_TIMEOUT = 60.0
+
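+# Illustrative sketch: the knobs above are read once at import time, so they
+# must be set before grpc is first imported. The values are placeholders.
+#
+#   import os
+#   os.environ["GRPC_PYTHON_MANAGED_CHANNEL_MAXIMUM"] = "64"
+#   os.environ["GRPC_PYTHON_DEFAULT_TIMEOUT_SECONDS"] = "30"
+#   import grpc  # configuration takes effect on this first import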
+
+def _create_channel(target: str, options: Sequence[Tuple[str, str]],
+ channel_credentials: Optional[grpc.ChannelCredentials],
+ compression: Optional[grpc.Compression]) -> grpc.Channel:
+ if channel_credentials is grpc.experimental.insecure_channel_credentials():
+ _LOGGER.debug(f"Creating insecure channel with options '{options}' " +
+ f"and compression '{compression}'")
+ return grpc.insecure_channel(target,
+ options=options,
+ compression=compression)
+ else:
+ _LOGGER.debug(
+ f"Creating secure channel with credentials '{channel_credentials}', "
+ + f"options '{options}' and compression '{compression}'")
+ return grpc.secure_channel(target,
+ credentials=channel_credentials,
+ options=options,
+ compression=compression)
+
+
+class ChannelCache:
+ # NOTE(rbellevi): Untyped due to reference cycle.
+ _singleton = None
+ _lock: threading.RLock = threading.RLock()
+ _condition: threading.Condition = threading.Condition(lock=_lock)
+ _eviction_ready: threading.Event = threading.Event()
+
+ _mapping: Dict[CacheKey, Tuple[grpc.Channel, datetime.datetime]]
+ _eviction_thread: threading.Thread
+
+ def __init__(self):
+ self._mapping = collections.OrderedDict()
+ self._eviction_thread = threading.Thread(
+ target=ChannelCache._perform_evictions, daemon=True)
+ self._eviction_thread.start()
+
+ @staticmethod
+ def get():
+ with ChannelCache._lock:
+ if ChannelCache._singleton is None:
+ ChannelCache._singleton = ChannelCache()
+ ChannelCache._eviction_ready.wait()
+ return ChannelCache._singleton
+
+ def _evict_locked(self, key: CacheKey):
+ channel, _ = self._mapping.pop(key)
+ _LOGGER.debug("Evicting channel %s with configuration %s.", channel,
+ key)
+ channel.close()
+ del channel
+
+ @staticmethod
+ def _perform_evictions():
+ while True:
+ with ChannelCache._lock:
+ ChannelCache._eviction_ready.set()
+ if not ChannelCache._singleton._mapping:
+ ChannelCache._condition.wait()
+ elif len(ChannelCache._singleton._mapping) > _MAXIMUM_CHANNELS:
+ key = next(iter(ChannelCache._singleton._mapping.keys()))
+ ChannelCache._singleton._evict_locked(key)
+ # And immediately reevaluate.
+ else:
+ key, (_, eviction_time) = next(
+ iter(ChannelCache._singleton._mapping.items()))
+ now = datetime.datetime.now()
+ if eviction_time <= now:
+ ChannelCache._singleton._evict_locked(key)
+ continue
+ else:
+ time_to_eviction = (eviction_time - now).total_seconds()
+ # NOTE: We aim to *eventually* coalesce to a state in
+ # which no overdue channels are in the cache and the
+                        # length of the cache is no longer than
+                        # _MAXIMUM_CHANNELS.
+ # We tolerate momentary states in which these two
+ # criteria are not met.
+ ChannelCache._condition.wait(timeout=time_to_eviction)
+
+ def get_channel(self, target: str, options: Sequence[Tuple[str, str]],
+ channel_credentials: Optional[grpc.ChannelCredentials],
+ insecure: bool,
+ compression: Optional[grpc.Compression]) -> grpc.Channel:
+ if insecure and channel_credentials:
+ raise ValueError("The insecure option is mutually exclusive with " +
+ "the channel_credentials option. Please use one " +
+ "or the other.")
+ if insecure:
+ channel_credentials = grpc.experimental.insecure_channel_credentials(
+ )
+ elif channel_credentials is None:
+ _LOGGER.debug("Defaulting to SSL channel credentials.")
+ channel_credentials = grpc.ssl_channel_credentials()
+ key = (target, options, channel_credentials, compression)
+ with self._lock:
+ channel_data = self._mapping.get(key, None)
+ if channel_data is not None:
+ channel = channel_data[0]
+ self._mapping.pop(key)
+ self._mapping[key] = (channel, datetime.datetime.now() +
+ _EVICTION_PERIOD)
+ return channel
+ else:
+ channel = _create_channel(target, options, channel_credentials,
+ compression)
+ self._mapping[key] = (channel, datetime.datetime.now() +
+ _EVICTION_PERIOD)
+ if len(self._mapping) == 1 or len(
+ self._mapping) >= _MAXIMUM_CHANNELS:
+ self._condition.notify()
+ return channel
+
+ def _test_only_channel_count(self) -> int:
+ with self._lock:
+ return len(self._mapping)
+
+
+@experimental_api
+def unary_unary(
+ request: RequestType,
+ target: str,
+ method: str,
+ request_serializer: Optional[Callable[[Any], bytes]] = None,
+ response_deserializer: Optional[Callable[[bytes], Any]] = None,
+ options: Sequence[Tuple[AnyStr, AnyStr]] = (),
+ channel_credentials: Optional[grpc.ChannelCredentials] = None,
+ insecure: bool = False,
+ call_credentials: Optional[grpc.CallCredentials] = None,
+ compression: Optional[grpc.Compression] = None,
+ wait_for_ready: Optional[bool] = None,
+ timeout: Optional[float] = _DEFAULT_TIMEOUT,
+ metadata: Optional[Sequence[Tuple[str, Union[str, bytes]]]] = None
+) -> ResponseType:
+ """Invokes a unary-unary RPC without an explicitly specified channel.
+
+ THIS IS AN EXPERIMENTAL API.
+
+ This is backed by a per-process cache of channels. Channels are evicted
+    from the cache after a fixed period by a background thread. Channels will
+    also be evicted if more than a configured maximum accumulate.
+
+ The default eviction period is 10 minutes. One may set the environment
+ variable "GRPC_PYTHON_MANAGED_CHANNEL_EVICTION_SECONDS" to configure this.
+
+ The default maximum number of channels is 256. One may set the
+ environment variable "GRPC_PYTHON_MANAGED_CHANNEL_MAXIMUM" to configure
+ this.
+
+ Args:
+      request: The request value for the RPC.
+ target: The server address.
+ method: The name of the RPC method.
+      request_serializer: Optional :term:`serializer` for serializing the request
+        message. If None is passed, the request goes unserialized.
+      response_deserializer: Optional :term:`deserializer` for deserializing the response
+        message. If None is passed, the response goes undeserialized.
+ options: An optional list of key-value pairs (:term:`channel_arguments` in gRPC Core
+ runtime) to configure the channel.
+ channel_credentials: A credential applied to the whole channel, e.g. the
+ return value of grpc.ssl_channel_credentials() or
+ grpc.insecure_channel_credentials().
+ insecure: If True, specifies channel_credentials as
+ :term:`grpc.insecure_channel_credentials()`. This option is mutually
+ exclusive with the `channel_credentials` option.
+ call_credentials: A call credential applied to each call individually,
+ e.g. the output of grpc.metadata_call_credentials() or
+ grpc.access_token_call_credentials().
+ compression: An optional value indicating the compression method to be
+ used over the lifetime of the channel, e.g. grpc.Compression.Gzip.
+ wait_for_ready: An optional flag indicating whether the RPC should fail
+ immediately if the connection is not ready at the time the RPC is
+ invoked, or if it should wait until the connection to the server
+ becomes ready. When using this option, the user will likely also want
+ to set a timeout. Defaults to True.
+ timeout: An optional duration of time in seconds to allow for the RPC,
+ after which an exception will be raised. If timeout is unspecified,
+ defaults to a timeout controlled by the
+ GRPC_PYTHON_DEFAULT_TIMEOUT_SECONDS environment variable. If that is
+ unset, defaults to 60 seconds. Supply a value of None to indicate that
+ no timeout should be enforced.
+ metadata: Optional metadata to send to the server.
+
+ Returns:
+ The response to the RPC.
+ """
+ channel = ChannelCache.get().get_channel(target, options,
+ channel_credentials, insecure,
+ compression)
+ multicallable = channel.unary_unary(method, request_serializer,
+ response_deserializer)
+ wait_for_ready = wait_for_ready if wait_for_ready is not None else True
+ return multicallable(request,
+ metadata=metadata,
+ wait_for_ready=wait_for_ready,
+ credentials=call_credentials,
+ timeout=timeout)
+
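+# Illustrative sketch (assumed names): invoking a unary-unary RPC without an
+# explicit channel or stub. "helloworld_pb2" and the method path below are
+# hypothetical placeholders; the function is re-exported as
+# grpc.experimental.unary_unary in grpcio 1.32+.
+#
+#   reply = grpc.experimental.unary_unary(
+#       helloworld_pb2.HelloRequest(name='you'),
+#       'localhost:50051',
+#       '/helloworld.Greeter/SayHello',
+#       request_serializer=helloworld_pb2.HelloRequest.SerializeToString,
+#       response_deserializer=helloworld_pb2.HelloReply.FromString,
+#       insecure=True)
+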
+
+@experimental_api
+def unary_stream(
+ request: RequestType,
+ target: str,
+ method: str,
+ request_serializer: Optional[Callable[[Any], bytes]] = None,
+ response_deserializer: Optional[Callable[[bytes], Any]] = None,
+ options: Sequence[Tuple[AnyStr, AnyStr]] = (),
+ channel_credentials: Optional[grpc.ChannelCredentials] = None,
+ insecure: bool = False,
+ call_credentials: Optional[grpc.CallCredentials] = None,
+ compression: Optional[grpc.Compression] = None,
+ wait_for_ready: Optional[bool] = None,
+ timeout: Optional[float] = _DEFAULT_TIMEOUT,
+ metadata: Optional[Sequence[Tuple[str, Union[str, bytes]]]] = None
+) -> Iterator[ResponseType]:
+ """Invokes a unary-stream RPC without an explicitly specified channel.
+
+ THIS IS AN EXPERIMENTAL API.
+
+ This is backed by a per-process cache of channels. Channels are evicted
+    from the cache after a fixed period by a background thread. Channels will
+    also be evicted if more than a configured maximum accumulate.
+
+ The default eviction period is 10 minutes. One may set the environment
+ variable "GRPC_PYTHON_MANAGED_CHANNEL_EVICTION_SECONDS" to configure this.
+
+ The default maximum number of channels is 256. One may set the
+ environment variable "GRPC_PYTHON_MANAGED_CHANNEL_MAXIMUM" to configure
+ this.
+
+ Args:
+      request: The request value for the RPC.
+ target: The server address.
+ method: The name of the RPC method.
+      request_serializer: Optional :term:`serializer` for serializing the request
+        message. If None is passed, the request goes unserialized.
+      response_deserializer: Optional :term:`deserializer` for deserializing the response
+        message. If None is passed, the response goes undeserialized.
+ options: An optional list of key-value pairs (:term:`channel_arguments` in gRPC Core
+ runtime) to configure the channel.
+ channel_credentials: A credential applied to the whole channel, e.g. the
+ return value of grpc.ssl_channel_credentials().
+ insecure: If True, specifies channel_credentials as
+ :term:`grpc.insecure_channel_credentials()`. This option is mutually
+ exclusive with the `channel_credentials` option.
+ call_credentials: A call credential applied to each call individually,
+ e.g. the output of grpc.metadata_call_credentials() or
+ grpc.access_token_call_credentials().
+ compression: An optional value indicating the compression method to be
+ used over the lifetime of the channel, e.g. grpc.Compression.Gzip.
+ wait_for_ready: An optional flag indicating whether the RPC should fail
+ immediately if the connection is not ready at the time the RPC is
+ invoked, or if it should wait until the connection to the server
+ becomes ready. When using this option, the user will likely also want
+ to set a timeout. Defaults to True.
+ timeout: An optional duration of time in seconds to allow for the RPC,
+ after which an exception will be raised. If timeout is unspecified,
+ defaults to a timeout controlled by the
+ GRPC_PYTHON_DEFAULT_TIMEOUT_SECONDS environment variable. If that is
+ unset, defaults to 60 seconds. Supply a value of None to indicate that
+ no timeout should be enforced.
+ metadata: Optional metadata to send to the server.
+
+ Returns:
+ An iterator of responses.
+ """
+ channel = ChannelCache.get().get_channel(target, options,
+ channel_credentials, insecure,
+ compression)
+ multicallable = channel.unary_stream(method, request_serializer,
+ response_deserializer)
+ wait_for_ready = wait_for_ready if wait_for_ready is not None else True
+ return multicallable(request,
+ metadata=metadata,
+ wait_for_ready=wait_for_ready,
+ credentials=call_credentials,
+ timeout=timeout)
+
+
+@experimental_api
+def stream_unary(
+ request_iterator: Iterator[RequestType],
+ target: str,
+ method: str,
+ request_serializer: Optional[Callable[[Any], bytes]] = None,
+ response_deserializer: Optional[Callable[[bytes], Any]] = None,
+ options: Sequence[Tuple[AnyStr, AnyStr]] = (),
+ channel_credentials: Optional[grpc.ChannelCredentials] = None,
+ insecure: bool = False,
+ call_credentials: Optional[grpc.CallCredentials] = None,
+ compression: Optional[grpc.Compression] = None,
+ wait_for_ready: Optional[bool] = None,
+ timeout: Optional[float] = _DEFAULT_TIMEOUT,
+ metadata: Optional[Sequence[Tuple[str, Union[str, bytes]]]] = None
+) -> ResponseType:
+ """Invokes a stream-unary RPC without an explicitly specified channel.
+
+ THIS IS AN EXPERIMENTAL API.
+
+ This is backed by a per-process cache of channels. Channels are evicted
+    from the cache after a fixed period by a background thread. Channels will
+    also be evicted if more than a configured maximum accumulate.
+
+ The default eviction period is 10 minutes. One may set the environment
+ variable "GRPC_PYTHON_MANAGED_CHANNEL_EVICTION_SECONDS" to configure this.
+
+ The default maximum number of channels is 256. One may set the
+ environment variable "GRPC_PYTHON_MANAGED_CHANNEL_MAXIMUM" to configure
+ this.
+
+ Args:
+ request_iterator: An iterator that yields request values for the RPC.
+ target: The server address.
+ method: The name of the RPC method.
+      request_serializer: Optional :term:`serializer` for serializing the request
+        message. If None is passed, the request goes unserialized.
+      response_deserializer: Optional :term:`deserializer` for deserializing the response
+        message. If None is passed, the response goes undeserialized.
+ options: An optional list of key-value pairs (:term:`channel_arguments` in gRPC Core
+ runtime) to configure the channel.
+      channel_credentials: A credential applied to the whole channel, e.g. the
+        return value of grpc.ssl_channel_credentials().
+      insecure: If True, specifies channel_credentials as
+        :term:`grpc.insecure_channel_credentials()`. This option is mutually
+        exclusive with the `channel_credentials` option.
+      call_credentials: A call credential applied to each call individually,
+        e.g. the output of grpc.metadata_call_credentials() or
+        grpc.access_token_call_credentials().
+ compression: An optional value indicating the compression method to be
+ used over the lifetime of the channel, e.g. grpc.Compression.Gzip.
+ wait_for_ready: An optional flag indicating whether the RPC should fail
+ immediately if the connection is not ready at the time the RPC is
+ invoked, or if it should wait until the connection to the server
+ becomes ready. When using this option, the user will likely also want
+ to set a timeout. Defaults to True.
+ timeout: An optional duration of time in seconds to allow for the RPC,
+ after which an exception will be raised. If timeout is unspecified,
+ defaults to a timeout controlled by the
+ GRPC_PYTHON_DEFAULT_TIMEOUT_SECONDS environment variable. If that is
+ unset, defaults to 60 seconds. Supply a value of None to indicate that
+ no timeout should be enforced.
+ metadata: Optional metadata to send to the server.
+
+ Returns:
+ The response to the RPC.
+ """
+ channel = ChannelCache.get().get_channel(target, options,
+ channel_credentials, insecure,
+ compression)
+ multicallable = channel.stream_unary(method, request_serializer,
+ response_deserializer)
+ wait_for_ready = wait_for_ready if wait_for_ready is not None else True
+ return multicallable(request_iterator,
+ metadata=metadata,
+ wait_for_ready=wait_for_ready,
+ credentials=call_credentials,
+ timeout=timeout)
+
+
+@experimental_api
+def stream_stream(
+ request_iterator: Iterator[RequestType],
+ target: str,
+ method: str,
+ request_serializer: Optional[Callable[[Any], bytes]] = None,
+ response_deserializer: Optional[Callable[[bytes], Any]] = None,
+ options: Sequence[Tuple[AnyStr, AnyStr]] = (),
+ channel_credentials: Optional[grpc.ChannelCredentials] = None,
+ insecure: bool = False,
+ call_credentials: Optional[grpc.CallCredentials] = None,
+ compression: Optional[grpc.Compression] = None,
+ wait_for_ready: Optional[bool] = None,
+ timeout: Optional[float] = _DEFAULT_TIMEOUT,
+ metadata: Optional[Sequence[Tuple[str, Union[str, bytes]]]] = None
+) -> Iterator[ResponseType]:
+ """Invokes a stream-stream RPC without an explicitly specified channel.
+
+ THIS IS AN EXPERIMENTAL API.
+
+ This is backed by a per-process cache of channels. Channels are evicted
+    from the cache after a fixed period by a background thread. Channels will
+    also be evicted if more than a configured maximum accumulate.
+
+ The default eviction period is 10 minutes. One may set the environment
+ variable "GRPC_PYTHON_MANAGED_CHANNEL_EVICTION_SECONDS" to configure this.
+
+ The default maximum number of channels is 256. One may set the
+ environment variable "GRPC_PYTHON_MANAGED_CHANNEL_MAXIMUM" to configure
+ this.
+
+ Args:
+ request_iterator: An iterator that yields request values for the RPC.
+ target: The server address.
+ method: The name of the RPC method.
+      request_serializer: Optional :term:`serializer` for serializing the request
+        message. If None is passed, the request goes unserialized.
+      response_deserializer: Optional :term:`deserializer` for deserializing the response
+        message. If None is passed, the response goes undeserialized.
+ options: An optional list of key-value pairs (:term:`channel_arguments` in gRPC Core
+ runtime) to configure the channel.
+      channel_credentials: A credential applied to the whole channel, e.g. the
+        return value of grpc.ssl_channel_credentials().
+      insecure: If True, specifies channel_credentials as
+        :term:`grpc.insecure_channel_credentials()`. This option is mutually
+        exclusive with the `channel_credentials` option.
+      call_credentials: A call credential applied to each call individually,
+        e.g. the output of grpc.metadata_call_credentials() or
+        grpc.access_token_call_credentials().
+ compression: An optional value indicating the compression method to be
+ used over the lifetime of the channel, e.g. grpc.Compression.Gzip.
+ wait_for_ready: An optional flag indicating whether the RPC should fail
+ immediately if the connection is not ready at the time the RPC is
+ invoked, or if it should wait until the connection to the server
+ becomes ready. When using this option, the user will likely also want
+ to set a timeout. Defaults to True.
+ timeout: An optional duration of time in seconds to allow for the RPC,
+ after which an exception will be raised. If timeout is unspecified,
+ defaults to a timeout controlled by the
+ GRPC_PYTHON_DEFAULT_TIMEOUT_SECONDS environment variable. If that is
+ unset, defaults to 60 seconds. Supply a value of None to indicate that
+ no timeout should be enforced.
+ metadata: Optional metadata to send to the server.
+
+ Returns:
+ An iterator of responses.
+ """
+ channel = ChannelCache.get().get_channel(target, options,
+ channel_credentials, insecure,
+ compression)
+ multicallable = channel.stream_stream(method, request_serializer,
+ response_deserializer)
+ wait_for_ready = wait_for_ready if wait_for_ready is not None else True
+ return multicallable(request_iterator,
+ metadata=metadata,
+ wait_for_ready=wait_for_ready,
+ credentials=call_credentials,
+ timeout=timeout)
diff --git a/venv/Lib/site-packages/grpc/_utilities.py b/venv/Lib/site-packages/grpc/_utilities.py
new file mode 100644
index 000000000..c48aaf60a
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/_utilities.py
@@ -0,0 +1,169 @@
+# Copyright 2015 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Internal utilities for gRPC Python."""
+
+import collections
+import threading
+import time
+import logging
+
+import six
+
+import grpc
+from grpc import _common
+
+_LOGGER = logging.getLogger(__name__)
+
+_DONE_CALLBACK_EXCEPTION_LOG_MESSAGE = (
+ 'Exception calling connectivity future "done" callback!')
+
+
+class RpcMethodHandler(
+ collections.namedtuple('_RpcMethodHandler', (
+ 'request_streaming',
+ 'response_streaming',
+ 'request_deserializer',
+ 'response_serializer',
+ 'unary_unary',
+ 'unary_stream',
+ 'stream_unary',
+ 'stream_stream',
+ )), grpc.RpcMethodHandler):
+ pass
+
+
+class DictionaryGenericHandler(grpc.ServiceRpcHandler):
+
+ def __init__(self, service, method_handlers):
+ self._name = service
+ self._method_handlers = {
+ _common.fully_qualified_method(service, method): method_handler
+ for method, method_handler in six.iteritems(method_handlers)
+ }
+
+ def service_name(self):
+ return self._name
+
+ def service(self, handler_call_details):
+ return self._method_handlers.get(handler_call_details.method)
+
+
+class _ChannelReadyFuture(grpc.Future):
+
+ def __init__(self, channel):
+ self._condition = threading.Condition()
+ self._channel = channel
+
+ self._matured = False
+ self._cancelled = False
+ self._done_callbacks = []
+
+ def _block(self, timeout):
+ until = None if timeout is None else time.time() + timeout
+ with self._condition:
+ while True:
+ if self._cancelled:
+ raise grpc.FutureCancelledError()
+ elif self._matured:
+ return
+ else:
+ if until is None:
+ self._condition.wait()
+ else:
+ remaining = until - time.time()
+ if remaining < 0:
+ raise grpc.FutureTimeoutError()
+ else:
+ self._condition.wait(timeout=remaining)
+
+ def _update(self, connectivity):
+ with self._condition:
+ if (not self._cancelled and
+ connectivity is grpc.ChannelConnectivity.READY):
+ self._matured = True
+ self._channel.unsubscribe(self._update)
+ self._condition.notify_all()
+ done_callbacks = tuple(self._done_callbacks)
+ self._done_callbacks = None
+ else:
+ return
+
+ for done_callback in done_callbacks:
+ try:
+ done_callback(self)
+ except Exception: # pylint: disable=broad-except
+ _LOGGER.exception(_DONE_CALLBACK_EXCEPTION_LOG_MESSAGE)
+
+ def cancel(self):
+ with self._condition:
+ if not self._matured:
+ self._cancelled = True
+ self._channel.unsubscribe(self._update)
+ self._condition.notify_all()
+ done_callbacks = tuple(self._done_callbacks)
+ self._done_callbacks = None
+ else:
+ return False
+
+ for done_callback in done_callbacks:
+ try:
+ done_callback(self)
+ except Exception: # pylint: disable=broad-except
+ _LOGGER.exception(_DONE_CALLBACK_EXCEPTION_LOG_MESSAGE)
+
+ return True
+
+ def cancelled(self):
+ with self._condition:
+ return self._cancelled
+
+ def running(self):
+ with self._condition:
+ return not self._cancelled and not self._matured
+
+ def done(self):
+ with self._condition:
+ return self._cancelled or self._matured
+
+ def result(self, timeout=None):
+ self._block(timeout)
+
+ def exception(self, timeout=None):
+ self._block(timeout)
+
+ def traceback(self, timeout=None):
+ self._block(timeout)
+
+ def add_done_callback(self, fn):
+ with self._condition:
+ if not self._cancelled and not self._matured:
+ self._done_callbacks.append(fn)
+ return
+
+ fn(self)
+
+ def start(self):
+ with self._condition:
+ self._channel.subscribe(self._update, try_to_connect=True)
+
+ def __del__(self):
+ with self._condition:
+ if not self._cancelled and not self._matured:
+ self._channel.unsubscribe(self._update)
+
+
+def channel_ready_future(channel):
+ ready_future = _ChannelReadyFuture(channel)
+ ready_future.start()
+ return ready_future
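A brief sketch of how the channel_ready_future helper above (exposed publicly as grpc.channel_ready_future) is typically used to block until a channel connects; the address is a placeholder.

import grpc

channel = grpc.insecure_channel('localhost:50051')  # placeholder address
try:
    # Block for up to 5 seconds while the channel reaches READY.
    grpc.channel_ready_future(channel).result(timeout=5)
except grpc.FutureTimeoutError:
    print('channel did not become ready in time')
else:
    print('channel is ready')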
diff --git a/venv/Lib/site-packages/grpc/aio/__init__.py b/venv/Lib/site-packages/grpc/aio/__init__.py
new file mode 100644
index 000000000..2933aa5a4
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/aio/__init__.py
@@ -0,0 +1,81 @@
+# Copyright 2019 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""gRPC's Asynchronous Python API.
+
+gRPC Async API objects may only be used on the thread on which they were
+created. AsyncIO doesn't provide thread safety for most of its APIs.
+"""
+
+from typing import Any, Optional, Sequence, Tuple
+
+import grpc
+from grpc._cython.cygrpc import (init_grpc_aio, shutdown_grpc_aio, EOF,
+ AbortError, BaseError, InternalError,
+ UsageError)
+
+from ._base_call import (Call, RpcContext, StreamStreamCall, StreamUnaryCall,
+ UnaryStreamCall, UnaryUnaryCall)
+from ._base_channel import (Channel, StreamStreamMultiCallable,
+ StreamUnaryMultiCallable, UnaryStreamMultiCallable,
+ UnaryUnaryMultiCallable)
+from ._call import AioRpcError
+from ._interceptor import (ClientCallDetails, ClientInterceptor,
+ InterceptedUnaryUnaryCall,
+ UnaryUnaryClientInterceptor,
+ UnaryStreamClientInterceptor,
+ StreamUnaryClientInterceptor,
+ StreamStreamClientInterceptor, ServerInterceptor)
+from ._server import server
+from ._base_server import Server, ServicerContext
+from ._typing import ChannelArgumentType
+from ._channel import insecure_channel, secure_channel
+from ._metadata import Metadata
+
+################################### __all__ #################################
+
+__all__ = (
+ 'init_grpc_aio',
+ 'shutdown_grpc_aio',
+ 'AioRpcError',
+ 'RpcContext',
+ 'Call',
+ 'UnaryUnaryCall',
+ 'UnaryStreamCall',
+ 'StreamUnaryCall',
+ 'StreamStreamCall',
+ 'Channel',
+ 'UnaryUnaryMultiCallable',
+ 'UnaryStreamMultiCallable',
+ 'StreamUnaryMultiCallable',
+ 'StreamStreamMultiCallable',
+ 'ClientCallDetails',
+ 'ClientInterceptor',
+ 'UnaryStreamClientInterceptor',
+ 'UnaryUnaryClientInterceptor',
+ 'StreamUnaryClientInterceptor',
+ 'StreamStreamClientInterceptor',
+ 'InterceptedUnaryUnaryCall',
+ 'ServerInterceptor',
+ 'insecure_channel',
+ 'server',
+ 'Server',
+ 'ServicerContext',
+ 'EOF',
+ 'secure_channel',
+ 'AbortError',
+ 'BaseError',
+ 'UsageError',
+ 'InternalError',
+ 'Metadata',
+)
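A minimal client-side sketch using the names exported above. The address and method path are placeholder assumptions, raw bytes stand in for real protobuf messages, and run_until_complete is used so the sketch also works on the Python 3.6 interpreter this venv targets.

import asyncio
from grpc import aio

async def main():
    async with aio.insecure_channel('localhost:50051') as channel:
        hello = channel.unary_unary('/demo.Echo/Hello')  # placeholder method
        reply = await hello(b'ping')  # bytes in, bytes out (no serializers)
        print(reply)

asyncio.get_event_loop().run_until_complete(main())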
diff --git a/venv/Lib/site-packages/grpc/aio/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/grpc/aio/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..5cf57d5af
Binary files /dev/null and b/venv/Lib/site-packages/grpc/aio/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/aio/__pycache__/_base_call.cpython-36.pyc b/venv/Lib/site-packages/grpc/aio/__pycache__/_base_call.cpython-36.pyc
new file mode 100644
index 000000000..99d71be21
Binary files /dev/null and b/venv/Lib/site-packages/grpc/aio/__pycache__/_base_call.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/aio/__pycache__/_base_channel.cpython-36.pyc b/venv/Lib/site-packages/grpc/aio/__pycache__/_base_channel.cpython-36.pyc
new file mode 100644
index 000000000..20ee02c27
Binary files /dev/null and b/venv/Lib/site-packages/grpc/aio/__pycache__/_base_channel.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/aio/__pycache__/_base_server.cpython-36.pyc b/venv/Lib/site-packages/grpc/aio/__pycache__/_base_server.cpython-36.pyc
new file mode 100644
index 000000000..b3d681821
Binary files /dev/null and b/venv/Lib/site-packages/grpc/aio/__pycache__/_base_server.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/aio/__pycache__/_call.cpython-36.pyc b/venv/Lib/site-packages/grpc/aio/__pycache__/_call.cpython-36.pyc
new file mode 100644
index 000000000..edcdc78c3
Binary files /dev/null and b/venv/Lib/site-packages/grpc/aio/__pycache__/_call.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/aio/__pycache__/_channel.cpython-36.pyc b/venv/Lib/site-packages/grpc/aio/__pycache__/_channel.cpython-36.pyc
new file mode 100644
index 000000000..2d5db0839
Binary files /dev/null and b/venv/Lib/site-packages/grpc/aio/__pycache__/_channel.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/aio/__pycache__/_interceptor.cpython-36.pyc b/venv/Lib/site-packages/grpc/aio/__pycache__/_interceptor.cpython-36.pyc
new file mode 100644
index 000000000..81a369d50
Binary files /dev/null and b/venv/Lib/site-packages/grpc/aio/__pycache__/_interceptor.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/aio/__pycache__/_metadata.cpython-36.pyc b/venv/Lib/site-packages/grpc/aio/__pycache__/_metadata.cpython-36.pyc
new file mode 100644
index 000000000..be7d31b30
Binary files /dev/null and b/venv/Lib/site-packages/grpc/aio/__pycache__/_metadata.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/aio/__pycache__/_server.cpython-36.pyc b/venv/Lib/site-packages/grpc/aio/__pycache__/_server.cpython-36.pyc
new file mode 100644
index 000000000..74125cb67
Binary files /dev/null and b/venv/Lib/site-packages/grpc/aio/__pycache__/_server.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/aio/__pycache__/_typing.cpython-36.pyc b/venv/Lib/site-packages/grpc/aio/__pycache__/_typing.cpython-36.pyc
new file mode 100644
index 000000000..2343485ae
Binary files /dev/null and b/venv/Lib/site-packages/grpc/aio/__pycache__/_typing.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/aio/__pycache__/_utils.cpython-36.pyc b/venv/Lib/site-packages/grpc/aio/__pycache__/_utils.cpython-36.pyc
new file mode 100644
index 000000000..b48a642ee
Binary files /dev/null and b/venv/Lib/site-packages/grpc/aio/__pycache__/_utils.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/aio/_base_call.py b/venv/Lib/site-packages/grpc/aio/_base_call.py
new file mode 100644
index 000000000..4ccbb3be1
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/aio/_base_call.py
@@ -0,0 +1,244 @@
+# Copyright 2019 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Abstract base classes for client-side Call objects.
+
+ Call objects represent the RPC itself and offer methods to access / modify
+its information. They also offer methods to manipulate the life-cycle of the
+RPC, e.g. cancellation.
+"""
+
+from abc import ABCMeta, abstractmethod
+from typing import AsyncIterable, Awaitable, Generic, Optional, Union
+
+import grpc
+
+from ._typing import (DoneCallbackType, EOFType, RequestType, ResponseType)
+from ._metadata import Metadata
+
+__all__ = 'RpcContext', 'Call', 'UnaryUnaryCall', 'UnaryStreamCall'
+
+
+class RpcContext(metaclass=ABCMeta):
+ """Provides RPC-related information and action."""
+
+ @abstractmethod
+ def cancelled(self) -> bool:
+ """Return True if the RPC is cancelled.
+
+ The RPC is cancelled when the cancellation was requested with cancel().
+
+ Returns:
+ A bool indicating whether the RPC is cancelled or not.
+ """
+
+ @abstractmethod
+ def done(self) -> bool:
+ """Return True if the RPC is done.
+
+ An RPC is done if the RPC is completed, cancelled or aborted.
+
+ Returns:
+ A bool indicating whether the RPC is done.
+ """
+
+ @abstractmethod
+ def time_remaining(self) -> Optional[float]:
+ """Describes the length of allowed time remaining for the RPC.
+
+ Returns:
+ A nonnegative float indicating the length of allowed time in seconds
+ remaining for the RPC to complete before it is considered to have
+ timed out, or None if no deadline was specified for the RPC.
+ """
+
+ @abstractmethod
+ def cancel(self) -> bool:
+ """Cancels the RPC.
+
+ Idempotent and has no effect if the RPC has already terminated.
+
+ Returns:
+ A bool indicating whether the cancellation was performed.
+ """
+
+ @abstractmethod
+ def add_done_callback(self, callback: DoneCallbackType) -> None:
+ """Registers a callback to be called on RPC termination.
+
+ Args:
+ callback: A callable object that will be called with the call object as
+ its only argument.
+ """
+
+
+class Call(RpcContext, metaclass=ABCMeta):
+ """The abstract base class of an RPC on the client-side."""
+
+ @abstractmethod
+ async def initial_metadata(self) -> Metadata:
+ """Accesses the initial metadata sent by the server.
+
+ Returns:
+ The initial :term:`metadata`.
+ """
+
+ @abstractmethod
+ async def trailing_metadata(self) -> Metadata:
+ """Accesses the trailing metadata sent by the server.
+
+ Returns:
+ The trailing :term:`metadata`.
+ """
+
+ @abstractmethod
+ async def code(self) -> grpc.StatusCode:
+ """Accesses the status code sent by the server.
+
+ Returns:
+ The StatusCode value for the RPC.
+ """
+
+ @abstractmethod
+ async def details(self) -> str:
+ """Accesses the details sent by the server.
+
+ Returns:
+ The details string of the RPC.
+ """
+
+ @abstractmethod
+ async def wait_for_connection(self) -> None:
+ """Waits until connected to peer and raises aio.AioRpcError if failed.
+
+ This is an EXPERIMENTAL method.
+
+ This method ensures the RPC has been successfully connected. Otherwise,
+ an AioRpcError will be raised to explain the reason for the connection
+ failure.
+
+ This method is recommended for building retry mechanisms.
+ """
+
+
+class UnaryUnaryCall(Generic[RequestType, ResponseType],
+ Call,
+ metaclass=ABCMeta):
+ """The abstract base class of an unary-unary RPC on the client-side."""
+
+ @abstractmethod
+ def __await__(self) -> Awaitable[ResponseType]:
+ """Await the response message to be ready.
+
+ Returns:
+ The response message of the RPC.
+ """
+
+
+class UnaryStreamCall(Generic[RequestType, ResponseType],
+ Call,
+ metaclass=ABCMeta):
+
+ @abstractmethod
+ def __aiter__(self) -> AsyncIterable[ResponseType]:
+ """Returns the async iterable representation that yields messages.
+
+ Under the hood, it calls the "read" method.
+
+ Returns:
+ An async iterable object that yields messages.
+ """
+
+ @abstractmethod
+ async def read(self) -> Union[EOFType, ResponseType]:
+ """Reads one message from the stream.
+
+ Read operations must be serialized when called from multiple
+ coroutines.
+
+ Returns:
+ A response message, or a `grpc.aio.EOF` to indicate the end of the
+ stream.
+ """
+
+
+class StreamUnaryCall(Generic[RequestType, ResponseType],
+ Call,
+ metaclass=ABCMeta):
+
+ @abstractmethod
+ async def write(self, request: RequestType) -> None:
+ """Writes one message to the stream.
+
+ Raises:
+ An RpcError exception if the write failed.
+ """
+
+ @abstractmethod
+ async def done_writing(self) -> None:
+ """Notifies server that the client is done sending messages.
+
+ After done_writing is called, any additional invocation to the write
+ function will fail. This function is idempotent.
+ """
+
+ @abstractmethod
+ def __await__(self) -> Awaitable[ResponseType]:
+ """Await the response message to be ready.
+
+ Returns:
+ The response message of the stream.
+ """
+
+
+class StreamStreamCall(Generic[RequestType, ResponseType],
+ Call,
+ metaclass=ABCMeta):
+
+ @abstractmethod
+ def __aiter__(self) -> AsyncIterable[ResponseType]:
+ """Returns the async iterable representation that yields messages.
+
+ Under the hood, it calls the "read" method.
+
+ Returns:
+ An async iterable object that yields messages.
+ """
+
+ @abstractmethod
+ async def read(self) -> Union[EOFType, ResponseType]:
+ """Reads one message from the stream.
+
+ Read operations must be serialized when called from multiple
+ coroutines.
+
+ Returns:
+ A response message, or a `grpc.aio.EOF` to indicate the end of the
+ stream.
+ """
+
+ @abstractmethod
+ async def write(self, request: RequestType) -> None:
+ """Writes one message to the stream.
+
+ Raises:
+ An RpcError exception if the write failed.
+ """
+
+ @abstractmethod
+ async def done_writing(self) -> None:
+ """Notifies server that the client is done sending messages.
+
+ After done_writing is called, any additional invocation to the write
+ function will fail. This function is idempotent.
+ """
diff --git a/venv/Lib/site-packages/grpc/aio/_base_channel.py b/venv/Lib/site-packages/grpc/aio/_base_channel.py
new file mode 100644
index 000000000..4b4ea1355
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/aio/_base_channel.py
@@ -0,0 +1,347 @@
+# Copyright 2020 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Abstract base classes for Channel objects and Multicallable objects."""
+
+import abc
+from typing import Any, Optional
+
+import grpc
+
+from . import _base_call
+from ._typing import (DeserializingFunction, RequestIterableType,
+ SerializingFunction)
+from ._metadata import Metadata
+
+
+class UnaryUnaryMultiCallable(abc.ABC):
+ """Enables asynchronous invocation of a unary-call RPC."""
+
+ @abc.abstractmethod
+ def __call__(self,
+ request: Any,
+ *,
+ timeout: Optional[float] = None,
+ metadata: Optional[Metadata] = None,
+ credentials: Optional[grpc.CallCredentials] = None,
+ wait_for_ready: Optional[bool] = None,
+ compression: Optional[grpc.Compression] = None
+ ) -> _base_call.UnaryUnaryCall:
+ """Asynchronously invokes the underlying RPC.
+
+ Args:
+ request: The request value for the RPC.
+ timeout: An optional duration of time in seconds to allow
+ for the RPC.
+ metadata: Optional :term:`metadata` to be transmitted to the
+ service-side of the RPC.
+ credentials: An optional CallCredentials for the RPC. Only valid for
+ secure Channel.
+ wait_for_ready: This is an EXPERIMENTAL argument. An optional
+ flag to enable :term:`wait_for_ready` mechanism.
+ compression: An element of grpc.Compression, e.g.
+ grpc.Compression.Gzip. This is an EXPERIMENTAL option.
+
+ Returns:
+ A UnaryUnaryCall object.
+
+ Raises:
+ RpcError: Indicates that the RPC terminated with non-OK status. The
+ raised RpcError will also be a Call for the RPC affording the RPC's
+ metadata, status code, and details.
+ """
+
+
+class UnaryStreamMultiCallable(abc.ABC):
+ """Enables asynchronous invocation of a server-streaming RPC."""
+
+ @abc.abstractmethod
+ def __call__(self,
+ request: Any,
+ *,
+ timeout: Optional[float] = None,
+ metadata: Optional[Metadata] = None,
+ credentials: Optional[grpc.CallCredentials] = None,
+ wait_for_ready: Optional[bool] = None,
+ compression: Optional[grpc.Compression] = None
+ ) -> _base_call.UnaryStreamCall:
+ """Asynchronously invokes the underlying RPC.
+
+ Args:
+ request: The request value for the RPC.
+ timeout: An optional duration of time in seconds to allow
+ for the RPC.
+ metadata: Optional :term:`metadata` to be transmitted to the
+ service-side of the RPC.
+ credentials: An optional CallCredentials for the RPC. Only valid for
+ secure Channel.
+ wait_for_ready: This is an EXPERIMENTAL argument. An optional
+ flag to enable :term:`wait_for_ready` mechanism.
+ compression: An element of grpc.Compression, e.g.
+ grpc.Compression.Gzip. This is an EXPERIMENTAL option.
+
+ Returns:
+ A UnaryStreamCall object.
+
+ Raises:
+ RpcError: Indicates that the RPC terminated with non-OK status. The
+ raised RpcError will also be a Call for the RPC affording the RPC's
+ metadata, status code, and details.
+ """
+
+
+class StreamUnaryMultiCallable(abc.ABC):
+ """Enables asynchronous invocation of a client-streaming RPC."""
+
+ @abc.abstractmethod
+ def __call__(self,
+ request_iterator: Optional[RequestIterableType] = None,
+ timeout: Optional[float] = None,
+ metadata: Optional[Metadata] = None,
+ credentials: Optional[grpc.CallCredentials] = None,
+ wait_for_ready: Optional[bool] = None,
+ compression: Optional[grpc.Compression] = None
+ ) -> _base_call.StreamUnaryCall:
+ """Asynchronously invokes the underlying RPC.
+
+ Args:
+ request_iterator: An optional async iterable or iterable of request
+ messages for the RPC.
+ timeout: An optional duration of time in seconds to allow
+ for the RPC.
+ metadata: Optional :term:`metadata` to be transmitted to the
+ service-side of the RPC.
+ credentials: An optional CallCredentials for the RPC. Only valid for
+ secure Channel.
+ wait_for_ready: This is an EXPERIMENTAL argument. An optional
+ flag to enable :term:`wait_for_ready` mechanism.
+ compression: An element of grpc.Compression, e.g.
+ grpc.Compression.Gzip. This is an EXPERIMENTAL option.
+
+ Returns:
+ A StreamUnaryCall object.
+
+ Raises:
+ RpcError: Indicates that the RPC terminated with non-OK status. The
+ raised RpcError will also be a Call for the RPC affording the RPC's
+ metadata, status code, and details.
+ """
+
+
+class StreamStreamMultiCallable(abc.ABC):
+ """Enables asynchronous invocation of a bidirectional-streaming RPC."""
+
+ @abc.abstractmethod
+ def __call__(self,
+ request_iterator: Optional[RequestIterableType] = None,
+ timeout: Optional[float] = None,
+ metadata: Optional[Metadata] = None,
+ credentials: Optional[grpc.CallCredentials] = None,
+ wait_for_ready: Optional[bool] = None,
+ compression: Optional[grpc.Compression] = None
+ ) -> _base_call.StreamStreamCall:
+ """Asynchronously invokes the underlying RPC.
+
+ Args:
+ request_iterator: An optional async iterable or iterable of request
+ messages for the RPC.
+ timeout: An optional duration of time in seconds to allow
+ for the RPC.
+ metadata: Optional :term:`metadata` to be transmitted to the
+ service-side of the RPC.
+ credentials: An optional CallCredentials for the RPC. Only valid for
+ secure Channel.
+ wait_for_ready: This is an EXPERIMENTAL argument. An optional
+ flag to enable :term:`wait_for_ready` mechanism.
+ compression: An element of grpc.Compression, e.g.
+ grpc.Compression.Gzip. This is an EXPERIMENTAL option.
+
+ Returns:
+ A StreamStreamCall object.
+
+ Raises:
+ RpcError: Indicates that the RPC terminated with non-OK status. The
+ raised RpcError will also be a Call for the RPC affording the RPC's
+ metadata, status code, and details.
+ """
+
+
+class Channel(abc.ABC):
+ """Enables asynchronous RPC invocation as a client.
+
+ Channel objects implement the asynchronous context manager (i.e. async
+ with) protocol, although entering and exiting them multiple times is
+ not supported.
+ """
+
+ @abc.abstractmethod
+ async def __aenter__(self):
+ """Starts an asynchronous context manager.
+
+ Returns:
+ The Channel object that was instantiated.
+ """
+
+ @abc.abstractmethod
+ async def __aexit__(self, exc_type, exc_val, exc_tb):
+ """Finishes the asynchronous context manager by closing the channel.
+
+ Still active RPCs will be cancelled.
+ """
+
+ @abc.abstractmethod
+ async def close(self, grace: Optional[float] = None):
+ """Closes this Channel and releases all resources held by it.
+
+ This method immediately stops the channel from executing new RPCs in
+ all cases.
+
+ If a grace period is specified, this method waits until all active
+ RPCs are finished; once the grace period is reached, the ones that have
+ not terminated are cancelled. If a grace period is not specified
+ (by passing None for grace), all existing RPCs are cancelled immediately.
+
+ This method is idempotent.
+ """
+
+ @abc.abstractmethod
+ def get_state(self,
+ try_to_connect: bool = False) -> grpc.ChannelConnectivity:
+ """Checks the connectivity state of a channel.
+
+ This is an EXPERIMENTAL API.
+
+ If the channel reaches a stable connectivity state, it is guaranteed
+ that the return value of this function will eventually converge to that
+ state.
+
+ Args:
+ try_to_connect: a bool indicating whether the Channel should try to
+ connect to the peer or not.
+
+ Returns: A ChannelConnectivity object.
+ """
+
+ @abc.abstractmethod
+ async def wait_for_state_change(
+ self,
+ last_observed_state: grpc.ChannelConnectivity,
+ ) -> None:
+ """Waits for a change in connectivity state.
+
+ This is an EXPERIMENTAL API.
+
+ The function blocks until there is a change in the channel connectivity
+ state from the "last_observed_state". If the state is already
+ different, this function will return immediately.
+
+ There is an inherent race between the invocation of
+ "Channel.wait_for_state_change" and "Channel.get_state". The state can
+ change arbitrarily many times during the race, so there is no way to
+ observe every state transition.
+
+ If there is a need to set a timeout for this function, please refer to
+ "asyncio.wait_for".
+
+ Args:
+ last_observed_state: A grpc.ChannelConnectivity object representing
+ the last known state.
+ """
+
+ @abc.abstractmethod
+ async def channel_ready(self) -> None:
+ """Creates a coroutine that blocks until the Channel is READY."""
+
+ @abc.abstractmethod
+ def unary_unary(
+ self,
+ method: str,
+ request_serializer: Optional[SerializingFunction] = None,
+ response_deserializer: Optional[DeserializingFunction] = None
+ ) -> UnaryUnaryMultiCallable:
+ """Creates a UnaryUnaryMultiCallable for a unary-unary method.
+
+ Args:
+ method: The name of the RPC method.
+ request_serializer: Optional :term:`serializer` for serializing the request
+ message. If None is passed, the request goes unserialized.
+ response_deserializer: Optional :term:`deserializer` for deserializing the
+ response message. If None is passed, the response goes
+ undeserialized.
+
+ Returns:
+ A UnaryUnaryMultiCallable value for the named unary-unary method.
+ """
+
+ @abc.abstractmethod
+ def unary_stream(
+ self,
+ method: str,
+ request_serializer: Optional[SerializingFunction] = None,
+ response_deserializer: Optional[DeserializingFunction] = None
+ ) -> UnaryStreamMultiCallable:
+ """Creates a UnaryStreamMultiCallable for a unary-stream method.
+
+ Args:
+ method: The name of the RPC method.
+ request_serializer: Optional :term:`serializer` for serializing the request
+ message. If None is passed, the request goes unserialized.
+ response_deserializer: Optional :term:`deserializer` for deserializing the
+ response message. If None is passed, the response goes
+ undeserialized.
+
+ Returns:
+ A UnaryStreamMultiCallable value for the named unary-stream method.
+ """
+
+ @abc.abstractmethod
+ def stream_unary(
+ self,
+ method: str,
+ request_serializer: Optional[SerializingFunction] = None,
+ response_deserializer: Optional[DeserializingFunction] = None
+ ) -> StreamUnaryMultiCallable:
+ """Creates a StreamUnaryMultiCallable for a stream-unary method.
+
+ Args:
+ method: The name of the RPC method.
+ request_serializer: Optional :term:`serializer` for serializing the request
+ message. If None is passed, the request goes unserialized.
+ response_deserializer: Optional :term:`deserializer` for deserializing the
+ response message. If None is passed, the response goes
+ undeserialized.
+
+ Returns:
+ A StreamUnaryMultiCallable value for the named stream-unary method.
+ """
+
+ @abc.abstractmethod
+ def stream_stream(
+ self,
+ method: str,
+ request_serializer: Optional[SerializingFunction] = None,
+ response_deserializer: Optional[DeserializingFunction] = None
+ ) -> StreamStreamMultiCallable:
+ """Creates a StreamStreamMultiCallable for a stream-stream method.
+
+ Args:
+ method: The name of the RPC method.
+ request_serializer: Optional :term:`serializer` for serializing the request
+ message. If None is passed, the request goes unserialized.
+ response_deserializer: Optional :term:`deserializer` for deserializing the
+ response message. If None is passed, the response goes
+ undeserialized.
+
+ Returns:
+ A StreamStreamMultiCallable value for the named stream-stream method.
+ """
diff --git a/venv/Lib/site-packages/grpc/aio/_base_server.py b/venv/Lib/site-packages/grpc/aio/_base_server.py
new file mode 100644
index 000000000..926c86517
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/aio/_base_server.py
@@ -0,0 +1,294 @@
+# Copyright 2020 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Abstract base classes for server-side classes."""
+
+import abc
+from typing import Generic, Mapping, Optional, Iterable, Sequence
+
+import grpc
+
+from ._typing import RequestType, ResponseType
+from ._metadata import Metadata
+
+
+class Server(abc.ABC):
+ """Serves RPCs."""
+
+ @abc.abstractmethod
+ def add_generic_rpc_handlers(
+ self,
+ generic_rpc_handlers: Sequence[grpc.GenericRpcHandler]) -> None:
+ """Registers GenericRpcHandlers with this Server.
+
+ This method is only safe to call before the server is started.
+
+ Args:
+ generic_rpc_handlers: A sequence of GenericRpcHandlers that will be
+ used to service RPCs.
+ """
+
+ @abc.abstractmethod
+ def add_insecure_port(self, address: str) -> int:
+ """Opens an insecure port for accepting RPCs.
+
+ A port is a communication endpoint used by networking protocols such
+ as TCP and UDP. To date, only TCP is supported.
+
+ This method may only be called before starting the server.
+
+ Args:
+ address: The address for which to open a port. If the port is 0,
+ or not specified in the address, then the gRPC runtime will choose a port.
+
+ Returns:
+ An integer port on which the server will accept RPC requests.
+ """
+
+ @abc.abstractmethod
+ def add_secure_port(self, address: str,
+ server_credentials: grpc.ServerCredentials) -> int:
+ """Opens a secure port for accepting RPCs.
+
+ A port is a communication endpoint used by networking protocols such
+ as TCP and UDP. To date, only TCP is supported.
+
+ This method may only be called before starting the server.
+
+ Args:
+ address: The address for which to open a port.
+ If the port is 0, or not specified in the address, then the gRPC
+ runtime will choose a port.
+ server_credentials: A ServerCredentials object.
+
+ Returns:
+ An integer port on which the server will accept RPC requests.
+ """
+
+ @abc.abstractmethod
+ async def start(self) -> None:
+ """Starts this Server.
+
+ This method may only be called once (i.e. it is not idempotent).
+ """
+
+ @abc.abstractmethod
+ async def stop(self, grace: Optional[float]) -> None:
+ """Stops this Server.
+
+ This method immediately stops the server from servicing new RPCs in
+ all cases.
+
+ If a grace period is specified, this method returns immediately and all
+ RPCs active at the end of the grace period are aborted. If a grace
+ period is not specified (by passing None for grace), all existing RPCs
+ are aborted immediately and this method blocks until the last RPC
+ handler terminates.
+
+ This method is idempotent and may be called at any time. Passing a
+ smaller grace value in a subsequent call will have the effect of
+ stopping the Server sooner (passing None will have the effect of
+ stopping the server immediately). Passing a larger grace value in a
+ subsequent call will not have the effect of stopping the server later
+ (i.e. the most restrictive grace value is used).
+
+ Args:
+ grace: A duration of time in seconds or None.
+ """
+
+ @abc.abstractmethod
+ async def wait_for_termination(self,
+ timeout: Optional[float] = None) -> bool:
+ """Continues current coroutine once the server stops.
+
+ This is an EXPERIMENTAL API.
+
+ The wait will not consume computational resources while blocking, and
+ it will block until one of the two following conditions is met:
+
+ 1) The server is stopped or terminated;
+ 2) A timeout occurs if timeout is not `None`.
+
+ The timeout argument works in the same way as `threading.Event.wait()`.
+ https://docs.python.org/3/library/threading.html#threading.Event.wait
+
+ Args:
+ timeout: A floating point number specifying a timeout for the
+ operation in seconds.
+
+ Returns:
+ A bool indicating whether the operation timed out.
+ """
+
+
+class ServicerContext(Generic[RequestType, ResponseType], abc.ABC):
+ """A context object passed to method implementations."""
+
+ @abc.abstractmethod
+ async def read(self) -> RequestType:
+ """Reads one message from the RPC.
+
+ Only one read operation is allowed simultaneously.
+
+ Returns:
+ A request message of the RPC.
+
+ Raises:
+ An RpcError exception if the read failed.
+ """
+
+ @abc.abstractmethod
+ async def write(self, message: ResponseType) -> None:
+ """Writes one message to the RPC.
+
+ Only one write operation is allowed simultaneously.
+
+ Raises:
+ An RpcError exception if the write failed.
+ """
+
+ @abc.abstractmethod
+ async def send_initial_metadata(self, initial_metadata: Metadata) -> None:
+ """Sends the initial metadata value to the client.
+
+ This method need not be called by implementations if they have no
+ metadata to add to what the gRPC runtime will transmit.
+
+ Args:
+ initial_metadata: The initial :term:`metadata`.
+ """
+
+ @abc.abstractmethod
+ async def abort(self, code: grpc.StatusCode, details: str,
+ trailing_metadata: Metadata) -> None:
+ """Raises an exception to terminate the RPC with a non-OK status.
+
+ The code and details passed as arguments will supersede any existing
+ ones.
+
+ Args:
+ code: A StatusCode object to be sent to the client.
+ It must not be StatusCode.OK.
+ details: A UTF-8-encodable string to be sent to the client upon
+ termination of the RPC.
+ trailing_metadata: A sequence of tuples representing the trailing
+ :term:`metadata`.
+
+ Raises:
+ Exception: An exception is always raised to signal the abortion of
+ the RPC to the gRPC runtime.
+ """
+
+ @abc.abstractmethod
+ async def set_trailing_metadata(self, trailing_metadata: Metadata) -> None:
+ """Sends the trailing metadata for the RPC.
+
+ This method need not be called by implementations if they have no
+ metadata to add to what the gRPC runtime will transmit.
+
+ Args:
+ trailing_metadata: The trailing :term:`metadata`.
+ """
+
+ @abc.abstractmethod
+ def invocation_metadata(self) -> Optional[Metadata]:
+ """Accesses the metadata from the sent by the client.
+
+ Returns:
+ The invocation :term:`metadata`.
+ """
+
+ @abc.abstractmethod
+ def set_code(self, code: grpc.StatusCode) -> None:
+ """Sets the value to be used as status code upon RPC completion.
+
+ This method need not be called by method implementations if they wish
+ the gRPC runtime to determine the status code of the RPC.
+
+ Args:
+ code: A StatusCode object to be sent to the client.
+ """
+
+ @abc.abstractmethod
+ def set_details(self, details: str) -> None:
+ """Sets the value to be used the as detail string upon RPC completion.
+
+ This method need not be called by method implementations if they have
+ no details to transmit.
+
+ Args:
+ details: A UTF-8-encodable string to be sent to the client upon
+ termination of the RPC.
+ """
+
+ @abc.abstractmethod
+ def set_compression(self, compression: grpc.Compression) -> None:
+ """Set the compression algorithm to be used for the entire call.
+
+ This is an EXPERIMENTAL method.
+
+ Args:
+ compression: An element of grpc.Compression, e.g.
+ grpc.Compression.Gzip.
+ """
+
+ @abc.abstractmethod
+ def disable_next_message_compression(self) -> None:
+ """Disables compression for the next response message.
+
+ This is an EXPERIMENTAL method.
+
+ This method will override any compression configuration set during
+ server creation or set on the call.
+ """
+
+ @abc.abstractmethod
+ def peer(self) -> str:
+ """Identifies the peer that invoked the RPC being serviced.
+
+ Returns:
+ A string identifying the peer that invoked the RPC being serviced.
+ The string format is determined by gRPC runtime.
+ """
+
+ @abc.abstractmethod
+ def peer_identities(self) -> Optional[Iterable[bytes]]:
+ """Gets one or more peer identity(s).
+
+ Equivalent to
+ servicer_context.auth_context().get(servicer_context.peer_identity_key())
+
+ Returns:
+ An iterable of the identities, or None if the call is not
+ authenticated. Each identity is returned as a raw bytes type.
+ """
+
+ @abc.abstractmethod
+ def peer_identity_key(self) -> Optional[str]:
+ """The auth property used to identify the peer.
+
+ For example, "x509_common_name" or "x509_subject_alternative_name" are
+ used to identify an SSL peer.
+
+ Returns:
+ The auth property (string) that indicates the
+ peer identity, or None if the call is not authenticated.
+ """
+
+ @abc.abstractmethod
+ def auth_context(self) -> Mapping[str, Iterable[bytes]]:
+ """Gets the auth context for the call.
+
+ Returns:
+ A map of strings to an iterable of bytes for each auth property.
+ """
diff --git a/venv/Lib/site-packages/grpc/aio/_call.py b/venv/Lib/site-packages/grpc/aio/_call.py
new file mode 100644
index 000000000..ba229f35c
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/aio/_call.py
@@ -0,0 +1,629 @@
+# Copyright 2019 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Invocation-side implementation of gRPC Asyncio Python."""
+
+import asyncio
+import enum
+import inspect
+import logging
+from functools import partial
+from typing import AsyncIterable, Optional, Tuple
+
+import grpc
+from grpc import _common
+from grpc._cython import cygrpc
+
+from . import _base_call
+from ._metadata import Metadata
+from ._typing import (DeserializingFunction, DoneCallbackType, MetadatumType,
+ RequestIterableType, RequestType, ResponseType,
+ SerializingFunction)
+
+__all__ = 'AioRpcError', 'Call', 'UnaryUnaryCall', 'UnaryStreamCall'
+
+_LOCAL_CANCELLATION_DETAILS = 'Locally cancelled by application!'
+_GC_CANCELLATION_DETAILS = 'Cancelled upon garbage collection!'
+_RPC_ALREADY_FINISHED_DETAILS = 'RPC already finished.'
+_RPC_HALF_CLOSED_DETAILS = 'RPC is half closed after calling "done_writing".'
+_API_STYLE_ERROR = 'The iterator and read/write APIs may not be mixed on a single RPC.'
+
+_OK_CALL_REPRESENTATION = ('<{} of RPC that terminated with:\n'
+ '\tstatus = {}\n'
+ '\tdetails = "{}"\n'
+ '>')
+
+_NON_OK_CALL_REPRESENTATION = ('<{} of RPC that terminated with:\n'
+ '\tstatus = {}\n'
+ '\tdetails = "{}"\n'
+ '\tdebug_error_string = "{}"\n'
+ '>')
+
+_LOGGER = logging.getLogger(__name__)
+
+
+class AioRpcError(grpc.RpcError):
+ """An implementation of RpcError to be used by the asynchronous API.
+
+ The raised RpcError is a snapshot of the final status of the RPC; its
+ values are already determined, so its methods no longer need to be
+ coroutines.
+ """
+
+ _code: grpc.StatusCode
+ _details: Optional[str]
+ _initial_metadata: Optional[Metadata]
+ _trailing_metadata: Optional[Metadata]
+ _debug_error_string: Optional[str]
+
+ def __init__(self,
+ code: grpc.StatusCode,
+ initial_metadata: Metadata,
+ trailing_metadata: Metadata,
+ details: Optional[str] = None,
+ debug_error_string: Optional[str] = None) -> None:
+ """Constructor.
+
+ Args:
+ code: The status code with which the RPC has been finalized.
+ initial_metadata: Optional initial metadata that could be sent by the
+ Server.
+ trailing_metadata: Optional metadata that could be sent by the Server.
+ details: Optional details explaining the reason for the error.
+ debug_error_string: Optional string with debugging information about
+ the error.
+ """
+
+ super().__init__(self)
+ self._code = code
+ self._details = details
+ self._initial_metadata = initial_metadata
+ self._trailing_metadata = trailing_metadata
+ self._debug_error_string = debug_error_string
+
+ def code(self) -> grpc.StatusCode:
+ """Accesses the status code sent by the server.
+
+ Returns:
+ The `grpc.StatusCode` status code.
+ """
+ return self._code
+
+ def details(self) -> Optional[str]:
+ """Accesses the details sent by the server.
+
+ Returns:
+ The description of the error.
+ """
+ return self._details
+
+ def initial_metadata(self) -> Metadata:
+ """Accesses the initial metadata sent by the server.
+
+ Returns:
+ The initial metadata received.
+ """
+ return self._initial_metadata
+
+ def trailing_metadata(self) -> Metadata:
+ """Accesses the trailing metadata sent by the server.
+
+ Returns:
+ The trailing metadata received.
+ """
+ return self._trailing_metadata
+
+ def debug_error_string(self) -> str:
+ """Accesses the debug error string sent by the server.
+
+ Returns:
+ The debug error string received.
+ """
+ return self._debug_error_string
+
+ def _repr(self) -> str:
+ """Assembles the error string for the RPC error."""
+ return _NON_OK_CALL_REPRESENTATION.format(self.__class__.__name__,
+ self._code, self._details,
+ self._debug_error_string)
+
+ def __repr__(self) -> str:
+ return self._repr()
+
+ def __str__(self) -> str:
+ return self._repr()
+
+
+def _create_rpc_error(initial_metadata: Metadata,
+ status: cygrpc.AioRpcStatus) -> AioRpcError:
+ return AioRpcError(
+ _common.CYGRPC_STATUS_CODE_TO_STATUS_CODE[status.code()],
+ Metadata.from_tuple(initial_metadata),
+ Metadata.from_tuple(status.trailing_metadata()),
+ details=status.details(),
+ debug_error_string=status.debug_error_string(),
+ )
+
+
+class Call:
+ """Base implementation of client RPC Call object.
+
+ Implements logic around final status, metadata and cancellation.
+ """
+ _loop: asyncio.AbstractEventLoop
+ _code: grpc.StatusCode
+ _cython_call: cygrpc._AioCall
+ _metadata: Tuple[MetadatumType]
+ _request_serializer: SerializingFunction
+ _response_deserializer: DeserializingFunction
+
+ def __init__(self, cython_call: cygrpc._AioCall, metadata: Metadata,
+ request_serializer: SerializingFunction,
+ response_deserializer: DeserializingFunction,
+ loop: asyncio.AbstractEventLoop) -> None:
+ self._loop = loop
+ self._cython_call = cython_call
+ self._metadata = tuple(metadata)
+ self._request_serializer = request_serializer
+ self._response_deserializer = response_deserializer
+
+ def __del__(self) -> None:
+ # The '_cython_call' object might be destructed before Call object
+ if hasattr(self, '_cython_call'):
+ if not self._cython_call.done():
+ self._cancel(_GC_CANCELLATION_DETAILS)
+
+ def cancelled(self) -> bool:
+ return self._cython_call.cancelled()
+
+ def _cancel(self, details: str) -> bool:
+ """Forwards the application cancellation reasoning."""
+ if not self._cython_call.done():
+ self._cython_call.cancel(details)
+ return True
+ else:
+ return False
+
+ def cancel(self) -> bool:
+ return self._cancel(_LOCAL_CANCELLATION_DETAILS)
+
+ def done(self) -> bool:
+ return self._cython_call.done()
+
+ def add_done_callback(self, callback: DoneCallbackType) -> None:
+ cb = partial(callback, self)
+ self._cython_call.add_done_callback(cb)
+
+ def time_remaining(self) -> Optional[float]:
+ return self._cython_call.time_remaining()
+
+ async def initial_metadata(self) -> Metadata:
+ raw_metadata_tuple = await self._cython_call.initial_metadata()
+ return Metadata.from_tuple(raw_metadata_tuple)
+
+ async def trailing_metadata(self) -> Metadata:
+ raw_metadata_tuple = (await
+ self._cython_call.status()).trailing_metadata()
+ return Metadata.from_tuple(raw_metadata_tuple)
+
+ async def code(self) -> grpc.StatusCode:
+ cygrpc_code = (await self._cython_call.status()).code()
+ return _common.CYGRPC_STATUS_CODE_TO_STATUS_CODE[cygrpc_code]
+
+ async def details(self) -> str:
+ return (await self._cython_call.status()).details()
+
+ async def debug_error_string(self) -> str:
+ return (await self._cython_call.status()).debug_error_string()
+
+ async def _raise_for_status(self) -> None:
+ if self._cython_call.is_locally_cancelled():
+ raise asyncio.CancelledError()
+ code = await self.code()
+ if code != grpc.StatusCode.OK:
+ raise _create_rpc_error(await self.initial_metadata(), await
+ self._cython_call.status())
+
+ def _repr(self) -> str:
+ return repr(self._cython_call)
+
+ def __repr__(self) -> str:
+ return self._repr()
+
+ def __str__(self) -> str:
+ return self._repr()
+
+
+class _APIStyle(enum.IntEnum):
+ UNKNOWN = 0
+ ASYNC_GENERATOR = 1
+ READER_WRITER = 2
+
+
+class _UnaryResponseMixin(Call):
+ _call_response: asyncio.Task
+
+ def _init_unary_response_mixin(self, response_task: asyncio.Task):
+ self._call_response = response_task
+
+ def cancel(self) -> bool:
+ if super().cancel():
+ self._call_response.cancel()
+ return True
+ else:
+ return False
+
+ def __await__(self) -> ResponseType:
+ """Wait till the ongoing RPC request finishes."""
+ try:
+ response = yield from self._call_response
+ except asyncio.CancelledError:
+ # Even if we caught all other CancelledError, there is still
+ # this corner case. If the application cancels immediately after
+ # the Call object is created, we will observe this
+ # `CancelledError`.
+ if not self.cancelled():
+ self.cancel()
+ raise
+
+ # NOTE(lidiz) If we raise RpcError in the task and the user doesn't
+ # 'await' it, AsyncIO will log 'Task exception was never retrieved'.
+ # Instead, if we move the exception raising here, the spam stops.
+ # Unfortunately, there can only be one 'yield from' in '__await__'. So,
+ # we need to access the private instance variable.
+ if response is cygrpc.EOF:
+ if self._cython_call.is_locally_cancelled():
+ raise asyncio.CancelledError()
+ else:
+ raise _create_rpc_error(self._cython_call._initial_metadata,
+ self._cython_call._status)
+ else:
+ return response
+
+
+class _StreamResponseMixin(Call):
+ _message_aiter: AsyncIterable[ResponseType]
+ _preparation: asyncio.Task
+ _response_style: _APIStyle
+
+ def _init_stream_response_mixin(self, preparation: asyncio.Task):
+ self._message_aiter = None
+ self._preparation = preparation
+ self._response_style = _APIStyle.UNKNOWN
+
+ def _update_response_style(self, style: _APIStyle):
+ if self._response_style is _APIStyle.UNKNOWN:
+ self._response_style = style
+ elif self._response_style is not style:
+ raise cygrpc.UsageError(_API_STYLE_ERROR)
+
+ def cancel(self) -> bool:
+ if super().cancel():
+ self._preparation.cancel()
+ return True
+ else:
+ return False
+
+ async def _fetch_stream_responses(self) -> ResponseType:
+ message = await self._read()
+ while message is not cygrpc.EOF:
+ yield message
+ message = await self._read()
+
+ # If the read operation failed, Core should explain why.
+ await self._raise_for_status()
+
+ def __aiter__(self) -> AsyncIterable[ResponseType]:
+ self._update_response_style(_APIStyle.ASYNC_GENERATOR)
+ if self._message_aiter is None:
+ self._message_aiter = self._fetch_stream_responses()
+ return self._message_aiter
+
+ async def _read(self) -> ResponseType:
+ # Wait for the request to be sent
+ await self._preparation
+
+ # Reads response message from Core
+ try:
+ raw_response = await self._cython_call.receive_serialized_message()
+ except asyncio.CancelledError:
+ if not self.cancelled():
+ self.cancel()
+ await self._raise_for_status()
+
+ if raw_response is cygrpc.EOF:
+ return cygrpc.EOF
+ else:
+ return _common.deserialize(raw_response,
+ self._response_deserializer)
+
+ async def read(self) -> ResponseType:
+ if self.done():
+ await self._raise_for_status()
+ return cygrpc.EOF
+ self._update_response_style(_APIStyle.READER_WRITER)
+
+ response_message = await self._read()
+
+ if response_message is cygrpc.EOF:
+ # If the read operation failed, Core should explain why.
+ await self._raise_for_status()
+ return response_message
+
+
+class _StreamRequestMixin(Call):
+ _metadata_sent: asyncio.Event
+ _done_writing_flag: bool
+ _async_request_poller: Optional[asyncio.Task]
+ _request_style: _APIStyle
+
+ def _init_stream_request_mixin(
+ self, request_iterator: Optional[RequestIterableType]):
+ self._metadata_sent = asyncio.Event(loop=self._loop)
+ self._done_writing_flag = False
+
+ # If the user passed in an iterator (async or sync), create a consumer Task.
+ if request_iterator is not None:
+ self._async_request_poller = self._loop.create_task(
+ self._consume_request_iterator(request_iterator))
+ self._request_style = _APIStyle.ASYNC_GENERATOR
+ else:
+ self._async_request_poller = None
+ self._request_style = _APIStyle.READER_WRITER
+
+ def _raise_for_different_style(self, style: _APIStyle):
+ if self._request_style is not style:
+ raise cygrpc.UsageError(_API_STYLE_ERROR)
+
+ def cancel(self) -> bool:
+ if super().cancel():
+ if self._async_request_poller is not None:
+ self._async_request_poller.cancel()
+ return True
+ else:
+ return False
+
+ def _metadata_sent_observer(self):
+ self._metadata_sent.set()
+
+ async def _consume_request_iterator(self,
+ request_iterator: RequestIterableType
+ ) -> None:
+ try:
+ if inspect.isasyncgen(request_iterator) or hasattr(
+ request_iterator, '__aiter__'):
+ async for request in request_iterator:
+ await self._write(request)
+ else:
+ for request in request_iterator:
+ await self._write(request)
+
+ await self._done_writing()
+ except AioRpcError as rpc_error:
+ # RPC status should be exposed through other APIs. Exceptions raised
+ # within this Task won't be retrieved by another coroutine. It's
+ # better to suppress the error than spam the user's screen.
+ _LOGGER.debug('Exception while consuming the request_iterator: %s',
+ rpc_error)
+
+ async def _write(self, request: RequestType) -> None:
+ if self.done():
+ raise asyncio.InvalidStateError(_RPC_ALREADY_FINISHED_DETAILS)
+ if self._done_writing_flag:
+ raise asyncio.InvalidStateError(_RPC_HALF_CLOSED_DETAILS)
+ if not self._metadata_sent.is_set():
+ await self._metadata_sent.wait()
+ if self.done():
+ await self._raise_for_status()
+
+ serialized_request = _common.serialize(request,
+ self._request_serializer)
+ try:
+ await self._cython_call.send_serialized_message(serialized_request)
+ except asyncio.CancelledError:
+ if not self.cancelled():
+ self.cancel()
+ await self._raise_for_status()
+
+ async def _done_writing(self) -> None:
+ if self.done():
+ # If the RPC is finished, do nothing.
+ return
+ if not self._done_writing_flag:
+ # If the done-writing signal hasn't been sent yet, try to send it.
+ self._done_writing_flag = True
+ try:
+ await self._cython_call.send_receive_close()
+ except asyncio.CancelledError:
+ if not self.cancelled():
+ self.cancel()
+ await self._raise_for_status()
+
+ async def write(self, request: RequestType) -> None:
+ self._raise_for_different_style(_APIStyle.READER_WRITER)
+ await self._write(request)
+
+ async def done_writing(self) -> None:
+ """Signal peer that client is done writing.
+
+ This method is idempotent.
+ """
+ self._raise_for_different_style(_APIStyle.READER_WRITER)
+ await self._done_writing()
+
+ async def wait_for_connection(self) -> None:
+ await self._metadata_sent.wait()
+ if self.done():
+ await self._raise_for_status()
+
+
+class UnaryUnaryCall(_UnaryResponseMixin, Call, _base_call.UnaryUnaryCall):
+ """Object for managing unary-unary RPC calls.
+
+ Returned when an instance of `UnaryUnaryMultiCallable` object is called.
+ """
+ _request: RequestType
+ _invocation_task: asyncio.Task
+
+ # pylint: disable=too-many-arguments
+ def __init__(self, request: RequestType, deadline: Optional[float],
+ metadata: Metadata,
+ credentials: Optional[grpc.CallCredentials],
+ wait_for_ready: Optional[bool], channel: cygrpc.AioChannel,
+ method: bytes, request_serializer: SerializingFunction,
+ response_deserializer: DeserializingFunction,
+ loop: asyncio.AbstractEventLoop) -> None:
+ super().__init__(
+ channel.call(method, deadline, credentials, wait_for_ready),
+ metadata, request_serializer, response_deserializer, loop)
+ self._request = request
+ self._invocation_task = loop.create_task(self._invoke())
+ self._init_unary_response_mixin(self._invocation_task)
+
+ async def _invoke(self) -> ResponseType:
+ serialized_request = _common.serialize(self._request,
+ self._request_serializer)
+
+ # NOTE(lidiz) asyncio.CancelledError is not a good transport for status,
+ # because the asyncio.Task class does not cache the exception object.
+ # https://github.com/python/cpython/blob/edad4d89e357c92f70c0324b937845d652b20afd/Lib/asyncio/tasks.py#L785
+ try:
+ serialized_response = await self._cython_call.unary_unary(
+ serialized_request, self._metadata)
+ except asyncio.CancelledError:
+ if not self.cancelled():
+ self.cancel()
+
+ if self._cython_call.is_ok():
+ return _common.deserialize(serialized_response,
+ self._response_deserializer)
+ else:
+ return cygrpc.EOF
+
+ async def wait_for_connection(self) -> None:
+ await self._invocation_task
+ if self.done():
+ await self._raise_for_status()
+
+
+class UnaryStreamCall(_StreamResponseMixin, Call, _base_call.UnaryStreamCall):
+ """Object for managing unary-stream RPC calls.
+
+ Returned when an instance of `UnaryStreamMultiCallable` object is called.
+ """
+ _request: RequestType
+ _send_unary_request_task: asyncio.Task
+
+ # pylint: disable=too-many-arguments
+ def __init__(self, request: RequestType, deadline: Optional[float],
+ metadata: Metadata,
+ credentials: Optional[grpc.CallCredentials],
+ wait_for_ready: Optional[bool], channel: cygrpc.AioChannel,
+ method: bytes, request_serializer: SerializingFunction,
+ response_deserializer: DeserializingFunction,
+ loop: asyncio.AbstractEventLoop) -> None:
+ super().__init__(
+ channel.call(method, deadline, credentials, wait_for_ready),
+ metadata, request_serializer, response_deserializer, loop)
+ self._request = request
+ self._send_unary_request_task = loop.create_task(
+ self._send_unary_request())
+ self._init_stream_response_mixin(self._send_unary_request_task)
+
+ async def _send_unary_request(self) -> ResponseType:
+ serialized_request = _common.serialize(self._request,
+ self._request_serializer)
+ try:
+ await self._cython_call.initiate_unary_stream(
+ serialized_request, self._metadata)
+ except asyncio.CancelledError:
+ if not self.cancelled():
+ self.cancel()
+ raise
+
+ async def wait_for_connection(self) -> None:
+ await self._send_unary_request_task
+ if self.done():
+ await self._raise_for_status()
+
+
+class StreamUnaryCall(_StreamRequestMixin, _UnaryResponseMixin, Call,
+ _base_call.StreamUnaryCall):
+ """Object for managing stream-unary RPC calls.
+
+ Returned when an instance of `StreamUnaryMultiCallable` object is called.
+ """
+
+ # pylint: disable=too-many-arguments
+ def __init__(self, request_iterator: Optional[RequestIterableType],
+ deadline: Optional[float], metadata: Metadata,
+ credentials: Optional[grpc.CallCredentials],
+ wait_for_ready: Optional[bool], channel: cygrpc.AioChannel,
+ method: bytes, request_serializer: SerializingFunction,
+ response_deserializer: DeserializingFunction,
+ loop: asyncio.AbstractEventLoop) -> None:
+ super().__init__(
+ channel.call(method, deadline, credentials, wait_for_ready),
+ metadata, request_serializer, response_deserializer, loop)
+
+ self._init_stream_request_mixin(request_iterator)
+ self._init_unary_response_mixin(loop.create_task(self._conduct_rpc()))
+
+ async def _conduct_rpc(self) -> ResponseType:
+ try:
+ serialized_response = await self._cython_call.stream_unary(
+ self._metadata, self._metadata_sent_observer)
+ except asyncio.CancelledError:
+ if not self.cancelled():
+ self.cancel()
+
+ if self._cython_call.is_ok():
+ return _common.deserialize(serialized_response,
+ self._response_deserializer)
+ else:
+ return cygrpc.EOF
+
+
+class StreamStreamCall(_StreamRequestMixin, _StreamResponseMixin, Call,
+ _base_call.StreamStreamCall):
+ """Object for managing stream-stream RPC calls.
+
+ Returned when an instance of `StreamStreamMultiCallable` object is called.
+ """
+ _initializer: asyncio.Task
+
+ # pylint: disable=too-many-arguments
+ def __init__(self, request_iterator: Optional[RequestIterableType],
+ deadline: Optional[float], metadata: Metadata,
+ credentials: Optional[grpc.CallCredentials],
+ wait_for_ready: Optional[bool], channel: cygrpc.AioChannel,
+ method: bytes, request_serializer: SerializingFunction,
+ response_deserializer: DeserializingFunction,
+ loop: asyncio.AbstractEventLoop) -> None:
+ super().__init__(
+ channel.call(method, deadline, credentials, wait_for_ready),
+ metadata, request_serializer, response_deserializer, loop)
+ self._initializer = self._loop.create_task(self._prepare_rpc())
+ self._init_stream_request_mixin(request_iterator)
+ self._init_stream_response_mixin(self._initializer)
+
+ async def _prepare_rpc(self):
+ """This method prepares the RPC for receiving/sending messages.
+
+ All other operations around the stream should only happen after the
+ completion of this method.
+ """
+ try:
+ await self._cython_call.initiate_stream_stream(
+ self._metadata, self._metadata_sent_observer)
+ except asyncio.CancelledError:
+ if not self.cancelled():
+ self.cancel()
+ # No need to raise RpcError here, because no one will `await` this task.
diff --git a/venv/Lib/site-packages/grpc/aio/_channel.py b/venv/Lib/site-packages/grpc/aio/_channel.py
new file mode 100644
index 000000000..3af346770
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/aio/_channel.py
@@ -0,0 +1,469 @@
+# Copyright 2019 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Invocation-side implementation of gRPC Asyncio Python."""
+
+import asyncio
+import sys
+from typing import Any, Iterable, Optional, Sequence, List
+
+import grpc
+from grpc import _common, _compression, _grpcio_metadata
+from grpc._cython import cygrpc
+
+from . import _base_call, _base_channel
+from ._call import (StreamStreamCall, StreamUnaryCall, UnaryStreamCall,
+ UnaryUnaryCall)
+from ._interceptor import (
+ InterceptedUnaryUnaryCall, InterceptedUnaryStreamCall,
+ InterceptedStreamUnaryCall, InterceptedStreamStreamCall, ClientInterceptor,
+ UnaryUnaryClientInterceptor, UnaryStreamClientInterceptor,
+ StreamUnaryClientInterceptor, StreamStreamClientInterceptor)
+from ._metadata import Metadata
+from ._typing import (ChannelArgumentType, DeserializingFunction,
+ SerializingFunction, RequestIterableType)
+from ._utils import _timeout_to_deadline
+
+_USER_AGENT = 'grpc-python-asyncio/{}'.format(_grpcio_metadata.__version__)
+
+if sys.version_info[1] < 7:
+
+ def _all_tasks() -> Iterable[asyncio.Task]:
+ return asyncio.Task.all_tasks()
+else:
+
+ def _all_tasks() -> Iterable[asyncio.Task]:
+ return asyncio.all_tasks()
+
+
+def _augment_channel_arguments(base_options: ChannelArgumentType,
+ compression: Optional[grpc.Compression]):
+ compression_channel_argument = _compression.create_channel_option(
+ compression)
+ user_agent_channel_argument = ((
+ cygrpc.ChannelArgKey.primary_user_agent_string,
+ _USER_AGENT,
+ ),)
+ return tuple(base_options
+ ) + compression_channel_argument + user_agent_channel_argument
+
+
+class _BaseMultiCallable:
+ """Base class of all multi callable objects.
+
+ Handles the initialization logic and stores common attributes.
+ """
+ _loop: asyncio.AbstractEventLoop
+ _channel: cygrpc.AioChannel
+ _method: bytes
+ _request_serializer: SerializingFunction
+ _response_deserializer: DeserializingFunction
+ _interceptors: Optional[Sequence[ClientInterceptor]]
+
+ # pylint: disable=too-many-arguments
+ def __init__(
+ self,
+ channel: cygrpc.AioChannel,
+ method: bytes,
+ request_serializer: SerializingFunction,
+ response_deserializer: DeserializingFunction,
+ interceptors: Optional[Sequence[ClientInterceptor]],
+ loop: asyncio.AbstractEventLoop,
+ ) -> None:
+ self._loop = loop
+ self._channel = channel
+ self._method = method
+ self._request_serializer = request_serializer
+ self._response_deserializer = response_deserializer
+ self._interceptors = interceptors
+
+ @staticmethod
+ def _init_metadata(metadata: Optional[Metadata] = None,
+ compression: Optional[grpc.Compression] = None
+ ) -> Metadata:
+ """Based on the provided values for or initialise the final
+ metadata, as it should be used for the current call.
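+
+        A rough sketch of the intended behaviour (the exact entry added
+        for compression is an internal detail of `_compression`)::
+
+            md = _BaseMultiCallable._init_metadata(
+                Metadata(('k', 'v')), grpc.Compression.Gzip)
+            # md keeps ('k', 'v') and gains a compression-related entry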
+ """
+ metadata = metadata or Metadata()
+ if compression:
+ metadata = Metadata(
+ *_compression.augment_metadata(metadata, compression))
+ return metadata
+
+
+class UnaryUnaryMultiCallable(_BaseMultiCallable,
+ _base_channel.UnaryUnaryMultiCallable):
+
+ def __call__(self,
+ request: Any,
+ *,
+ timeout: Optional[float] = None,
+ metadata: Optional[Metadata] = None,
+ credentials: Optional[grpc.CallCredentials] = None,
+ wait_for_ready: Optional[bool] = None,
+ compression: Optional[grpc.Compression] = None
+ ) -> _base_call.UnaryUnaryCall:
+
+ metadata = self._init_metadata(metadata, compression)
+ if not self._interceptors:
+ call = UnaryUnaryCall(request, _timeout_to_deadline(timeout),
+ metadata, credentials, wait_for_ready,
+ self._channel, self._method,
+ self._request_serializer,
+ self._response_deserializer, self._loop)
+ else:
+ call = InterceptedUnaryUnaryCall(
+ self._interceptors, request, timeout, metadata, credentials,
+ wait_for_ready, self._channel, self._method,
+ self._request_serializer, self._response_deserializer,
+ self._loop)
+
+ return call
+
+
+class UnaryStreamMultiCallable(_BaseMultiCallable,
+ _base_channel.UnaryStreamMultiCallable):
+
+ def __call__(self,
+ request: Any,
+ *,
+ timeout: Optional[float] = None,
+ metadata: Optional[Metadata] = None,
+ credentials: Optional[grpc.CallCredentials] = None,
+ wait_for_ready: Optional[bool] = None,
+ compression: Optional[grpc.Compression] = None
+ ) -> _base_call.UnaryStreamCall:
+
+ metadata = self._init_metadata(metadata, compression)
+ deadline = _timeout_to_deadline(timeout)
+
+ if not self._interceptors:
+ call = UnaryStreamCall(request, deadline, metadata, credentials,
+ wait_for_ready, self._channel, self._method,
+ self._request_serializer,
+ self._response_deserializer, self._loop)
+ else:
+ call = InterceptedUnaryStreamCall(
+ self._interceptors, request, deadline, metadata, credentials,
+ wait_for_ready, self._channel, self._method,
+ self._request_serializer, self._response_deserializer,
+ self._loop)
+
+ return call
+
+
+class StreamUnaryMultiCallable(_BaseMultiCallable,
+ _base_channel.StreamUnaryMultiCallable):
+
+ def __call__(self,
+ request_iterator: Optional[RequestIterableType] = None,
+ timeout: Optional[float] = None,
+ metadata: Optional[Metadata] = None,
+ credentials: Optional[grpc.CallCredentials] = None,
+ wait_for_ready: Optional[bool] = None,
+ compression: Optional[grpc.Compression] = None
+ ) -> _base_call.StreamUnaryCall:
+
+ metadata = self._init_metadata(metadata, compression)
+ deadline = _timeout_to_deadline(timeout)
+
+ if not self._interceptors:
+ call = StreamUnaryCall(request_iterator, deadline, metadata,
+ credentials, wait_for_ready, self._channel,
+ self._method, self._request_serializer,
+ self._response_deserializer, self._loop)
+ else:
+ call = InterceptedStreamUnaryCall(
+ self._interceptors, request_iterator, deadline, metadata,
+ credentials, wait_for_ready, self._channel, self._method,
+ self._request_serializer, self._response_deserializer,
+ self._loop)
+
+ return call
+
+
+class StreamStreamMultiCallable(_BaseMultiCallable,
+ _base_channel.StreamStreamMultiCallable):
+
+ def __call__(self,
+ request_iterator: Optional[RequestIterableType] = None,
+ timeout: Optional[float] = None,
+ metadata: Optional[Metadata] = None,
+ credentials: Optional[grpc.CallCredentials] = None,
+ wait_for_ready: Optional[bool] = None,
+ compression: Optional[grpc.Compression] = None
+ ) -> _base_call.StreamStreamCall:
+
+ metadata = self._init_metadata(metadata, compression)
+ deadline = _timeout_to_deadline(timeout)
+
+ if not self._interceptors:
+ call = StreamStreamCall(request_iterator, deadline, metadata,
+ credentials, wait_for_ready, self._channel,
+ self._method, self._request_serializer,
+ self._response_deserializer, self._loop)
+ else:
+ call = InterceptedStreamStreamCall(
+ self._interceptors, request_iterator, deadline, metadata,
+ credentials, wait_for_ready, self._channel, self._method,
+ self._request_serializer, self._response_deserializer,
+ self._loop)
+
+ return call
+
+
+class Channel(_base_channel.Channel):
+ _loop: asyncio.AbstractEventLoop
+ _channel: cygrpc.AioChannel
+ _unary_unary_interceptors: List[UnaryUnaryClientInterceptor]
+ _unary_stream_interceptors: List[UnaryStreamClientInterceptor]
+ _stream_unary_interceptors: List[StreamUnaryClientInterceptor]
+ _stream_stream_interceptors: List[StreamStreamClientInterceptor]
+
+ def __init__(self, target: str, options: ChannelArgumentType,
+ credentials: Optional[grpc.ChannelCredentials],
+ compression: Optional[grpc.Compression],
+ interceptors: Optional[Sequence[ClientInterceptor]]):
+ """Constructor.
+
+ Args:
+ target: The target to which to connect.
+ options: Configuration options for the channel.
+ credentials: A cygrpc.ChannelCredentials or None.
+ compression: An optional value indicating the compression method to be
+ used over the lifetime of the channel.
+ interceptors: An optional list of interceptors that would be used for
+ intercepting any RPC executed with that channel.
+ """
+ self._unary_unary_interceptors = []
+ self._unary_stream_interceptors = []
+ self._stream_unary_interceptors = []
+ self._stream_stream_interceptors = []
+
+ if interceptors is not None:
+ for interceptor in interceptors:
+ if isinstance(interceptor, UnaryUnaryClientInterceptor):
+ self._unary_unary_interceptors.append(interceptor)
+ elif isinstance(interceptor, UnaryStreamClientInterceptor):
+ self._unary_stream_interceptors.append(interceptor)
+ elif isinstance(interceptor, StreamUnaryClientInterceptor):
+ self._stream_unary_interceptors.append(interceptor)
+ elif isinstance(interceptor, StreamStreamClientInterceptor):
+ self._stream_stream_interceptors.append(interceptor)
+ else:
+ raise ValueError(
+ "Interceptor {} must be ".format(interceptor) +
+ "{} or ".format(UnaryUnaryClientInterceptor.__name__) +
+ "{} or ".format(UnaryStreamClientInterceptor.__name__) +
+ "{} or ".format(StreamUnaryClientInterceptor.__name__) +
+ "{}. ".format(StreamStreamClientInterceptor.__name__))
+
+ self._loop = cygrpc.get_working_loop()
+ self._channel = cygrpc.AioChannel(
+ _common.encode(target),
+ _augment_channel_arguments(options, compression), credentials,
+ self._loop)
+
+ async def __aenter__(self):
+ return self
+
+ async def __aexit__(self, exc_type, exc_val, exc_tb):
+ await self._close(None)
+
+ async def _close(self, grace): # pylint: disable=too-many-branches
+ if self._channel.closed():
+ return
+
+ # No new calls will be accepted by the Cython channel.
+ self._channel.closing()
+
+ # Iterate through running tasks
+ tasks = _all_tasks()
+ calls = []
+ call_tasks = []
+ for task in tasks:
+ try:
+ stack = task.get_stack(limit=1)
+ except AttributeError as attribute_error:
+ # NOTE(lidiz) tl;dr: If the Task is created with a CPython
+ # object, it will trigger AttributeError.
+ #
+ # In the global finalizer, the event loop schedules
+ # a CPython PyAsyncGenAThrow object.
+ # https://github.com/python/cpython/blob/00e45877e33d32bb61aa13a2033e3bba370bda4d/Lib/asyncio/base_events.py#L484
+ #
+ # However, the PyAsyncGenAThrow object is written in C and
+ # failed to include the normal Python frame objects. Hence,
+ # this exception is a false negative, and it is safe to ignore
+ # the failure. It is fixed by https://github.com/python/cpython/pull/18669,
+ # but not available until 3.9 or 3.8.3. So, we have to keep it
+ # for a while.
+ # TODO(lidiz) drop this hack after 3.8 deprecation
+ if 'frame' in str(attribute_error):
+ continue
+ else:
+ raise
+
+ # If the Task is created by a C-extension, the stack will be empty.
+ if not stack:
+ continue
+
+ # Locate ones created by `aio.Call`.
+ frame = stack[0]
+ candidate = frame.f_locals.get('self')
+ if candidate:
+ if isinstance(candidate, _base_call.Call):
+ if hasattr(candidate, '_channel'):
+ # For intercepted Call object
+ if candidate._channel is not self._channel:
+ continue
+ elif hasattr(candidate, '_cython_call'):
+ # For normal Call object
+ if candidate._cython_call._channel is not self._channel:
+ continue
+ else:
+ # Unidentified Call object
+ raise cygrpc.InternalError(
+ f'Unrecognized call object: {candidate}')
+
+ calls.append(candidate)
+ call_tasks.append(task)
+
+ # If needed, try to wait for them to finish.
+ # Call objects are not always awaitables.
+ if grace and call_tasks:
+ await asyncio.wait(call_tasks, timeout=grace, loop=self._loop)
+
+ # Time to cancel existing calls.
+ for call in calls:
+ call.cancel()
+
+ # Destroy the channel
+ self._channel.close()
+
+ async def close(self, grace: Optional[float] = None):
+ await self._close(grace)
+
+ def get_state(self,
+ try_to_connect: bool = False) -> grpc.ChannelConnectivity:
+ result = self._channel.check_connectivity_state(try_to_connect)
+ return _common.CYGRPC_CONNECTIVITY_STATE_TO_CHANNEL_CONNECTIVITY[result]
+
+ async def wait_for_state_change(
+ self,
+ last_observed_state: grpc.ChannelConnectivity,
+ ) -> None:
+ assert await self._channel.watch_connectivity_state(
+ last_observed_state.value[0], None)
+
+ async def channel_ready(self) -> None:
+ state = self.get_state(try_to_connect=True)
+ while state != grpc.ChannelConnectivity.READY:
+ await self.wait_for_state_change(state)
+ state = self.get_state(try_to_connect=True)
+
+ def unary_unary(
+ self,
+ method: str,
+ request_serializer: Optional[SerializingFunction] = None,
+ response_deserializer: Optional[DeserializingFunction] = None
+ ) -> UnaryUnaryMultiCallable:
+ return UnaryUnaryMultiCallable(self._channel, _common.encode(method),
+ request_serializer,
+ response_deserializer,
+ self._unary_unary_interceptors,
+ self._loop)
+
+ def unary_stream(
+ self,
+ method: str,
+ request_serializer: Optional[SerializingFunction] = None,
+ response_deserializer: Optional[DeserializingFunction] = None
+ ) -> UnaryStreamMultiCallable:
+ return UnaryStreamMultiCallable(self._channel, _common.encode(method),
+ request_serializer,
+ response_deserializer,
+ self._unary_stream_interceptors,
+ self._loop)
+
+ def stream_unary(
+ self,
+ method: str,
+ request_serializer: Optional[SerializingFunction] = None,
+ response_deserializer: Optional[DeserializingFunction] = None
+ ) -> StreamUnaryMultiCallable:
+ return StreamUnaryMultiCallable(self._channel, _common.encode(method),
+ request_serializer,
+ response_deserializer,
+ self._stream_unary_interceptors,
+ self._loop)
+
+ def stream_stream(
+ self,
+ method: str,
+ request_serializer: Optional[SerializingFunction] = None,
+ response_deserializer: Optional[DeserializingFunction] = None
+ ) -> StreamStreamMultiCallable:
+ return StreamStreamMultiCallable(self._channel, _common.encode(method),
+ request_serializer,
+ response_deserializer,
+ self._stream_stream_interceptors,
+ self._loop)
+
+
+def insecure_channel(
+ target: str,
+ options: Optional[ChannelArgumentType] = None,
+ compression: Optional[grpc.Compression] = None,
+ interceptors: Optional[Sequence[ClientInterceptor]] = None):
+ """Creates an insecure asynchronous Channel to a server.
+
+ Args:
+      target: The server address.
+ options: An optional list of key-value pairs (:term:`channel_arguments`
+ in gRPC Core runtime) to configure the channel.
+ compression: An optional value indicating the compression method to be
+ used over the lifetime of the channel. This is an EXPERIMENTAL option.
+ interceptors: An optional sequence of interceptors that will be executed for
+ any call executed with this channel.
+
+ Returns:
+ A Channel.
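+
+    Example (a minimal sketch; the target address and method path are
+    illustrative)::
+
+        async with aio.insecure_channel('localhost:50051') as channel:
+            multicallable = channel.unary_unary('/pkg.Service/Method')
+            response = await multicallable(b'serialized-request')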
+ """
+ return Channel(target, () if options is None else options, None,
+ compression, interceptors)
+
+
+def secure_channel(target: str,
+ credentials: grpc.ChannelCredentials,
+ options: Optional[ChannelArgumentType] = None,
+ compression: Optional[grpc.Compression] = None,
+ interceptors: Optional[Sequence[ClientInterceptor]] = None):
+ """Creates a secure asynchronous Channel to a server.
+
+ Args:
+ target: The server address.
+ credentials: A ChannelCredentials instance.
+ options: An optional list of key-value pairs (:term:`channel_arguments`
+ in gRPC Core runtime) to configure the channel.
+ compression: An optional value indicating the compression method to be
+ used over the lifetime of the channel. This is an EXPERIMENTAL option.
+ interceptors: An optional sequence of interceptors that will be executed for
+ any call executed with this channel.
+
+ Returns:
+ An aio.Channel.
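+
+    Example (a minimal sketch; the target address is illustrative)::
+
+        creds = grpc.ssl_channel_credentials()
+        async with aio.secure_channel('example.com:443', creds) as channel:
+            await channel.channel_ready()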
+ """
+ return Channel(target, () if options is None else options,
+ credentials._credentials, compression, interceptors)
diff --git a/venv/Lib/site-packages/grpc/aio/_interceptor.py b/venv/Lib/site-packages/grpc/aio/_interceptor.py
new file mode 100644
index 000000000..80e9625c5
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/aio/_interceptor.py
@@ -0,0 +1,987 @@
+# Copyright 2019 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Interceptors implementation of gRPC Asyncio Python."""
+import asyncio
+import collections
+import functools
+from abc import ABCMeta, abstractmethod
+from typing import Callable, Optional, Iterator, Sequence, Union, Awaitable, AsyncIterable
+
+import grpc
+from grpc._cython import cygrpc
+
+from . import _base_call
+from ._call import UnaryUnaryCall, UnaryStreamCall, StreamUnaryCall, StreamStreamCall, AioRpcError
+from ._call import _RPC_ALREADY_FINISHED_DETAILS, _RPC_HALF_CLOSED_DETAILS
+from ._call import _API_STYLE_ERROR
+from ._utils import _timeout_to_deadline
+from ._typing import (RequestType, SerializingFunction, DeserializingFunction,
+ ResponseType, DoneCallbackType, RequestIterableType,
+ ResponseIterableType)
+from ._metadata import Metadata
+
+_LOCAL_CANCELLATION_DETAILS = 'Locally cancelled by application!'
+
+
+class ServerInterceptor(metaclass=ABCMeta):
+ """Affords intercepting incoming RPCs on the service-side.
+
+ This is an EXPERIMENTAL API.
+ """
+
+ @abstractmethod
+ async def intercept_service(
+ self, continuation: Callable[[grpc.HandlerCallDetails], Awaitable[
+ grpc.RpcMethodHandler]],
+ handler_call_details: grpc.HandlerCallDetails
+ ) -> grpc.RpcMethodHandler:
+ """Intercepts incoming RPCs before handing them over to a handler.
+
+ Args:
+ continuation: A function that takes a HandlerCallDetails and
+ proceeds to invoke the next interceptor in the chain, if any,
+ or the RPC handler lookup logic, with the call details passed
+ as an argument, and returns an RpcMethodHandler instance if
+ the RPC is considered serviced, or None otherwise.
+ handler_call_details: A HandlerCallDetails describing the RPC.
+
+ Returns:
+ An RpcMethodHandler with which the RPC may be serviced if the
+ interceptor chooses to service this RPC, or None otherwise.
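+
+        A pass-through sketch (the class name is illustrative)::
+
+            class NoOpInterceptor(ServerInterceptor):
+
+                async def intercept_service(self, continuation,
+                                            handler_call_details):
+                    return await continuation(handler_call_details)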
+ """
+
+
+class ClientCallDetails(
+ collections.namedtuple(
+ 'ClientCallDetails',
+ ('method', 'timeout', 'metadata', 'credentials', 'wait_for_ready')),
+ grpc.ClientCallDetails):
+ """Describes an RPC to be invoked.
+
+ This is an EXPERIMENTAL API.
+
+ Args:
+ method: The method name of the RPC.
+ timeout: An optional duration of time in seconds to allow for the RPC.
+ metadata: Optional metadata to be transmitted to the service-side of
+ the RPC.
+ credentials: An optional CallCredentials for the RPC.
+ wait_for_ready: This is an EXPERIMENTAL argument. An optional
+ flag to enable :term:`wait_for_ready` mechanism.
+ """
+
+ method: str
+ timeout: Optional[float]
+ metadata: Optional[Metadata]
+ credentials: Optional[grpc.CallCredentials]
+ wait_for_ready: Optional[bool]
+
+
+class ClientInterceptor(metaclass=ABCMeta):
+ """Base class used for all Aio Client Interceptor classes"""
+
+
+class UnaryUnaryClientInterceptor(ClientInterceptor, metaclass=ABCMeta):
+ """Affords intercepting unary-unary invocations."""
+
+ @abstractmethod
+ async def intercept_unary_unary(
+ self, continuation: Callable[[ClientCallDetails, RequestType],
+ UnaryUnaryCall],
+ client_call_details: ClientCallDetails,
+ request: RequestType) -> Union[UnaryUnaryCall, ResponseType]:
+ """Intercepts a unary-unary invocation asynchronously.
+
+ Args:
+ continuation: A coroutine that proceeds with the invocation by
+ executing the next interceptor in the chain or invoking the
+ actual RPC on the underlying Channel. It is the interceptor's
+ responsibility to call it if it decides to move the RPC forward.
+ The interceptor can use
+ `call = await continuation(client_call_details, request)`
+ to continue with the RPC. `continuation` returns the call to the
+ RPC.
+ client_call_details: A ClientCallDetails object describing the
+ outgoing RPC.
+ request: The request value for the RPC.
+
+ Returns:
+ An object with the RPC response.
+
+ Raises:
+ AioRpcError: Indicating that the RPC terminated with non-OK status.
+ asyncio.CancelledError: Indicating that the RPC was canceled.
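+
+        A minimal illustrative sketch (the class name and the 3.0 second
+        timeout are made-up values)::
+
+            class TimeoutInterceptor(UnaryUnaryClientInterceptor):
+
+                async def intercept_unary_unary(self, continuation,
+                                                client_call_details,
+                                                request):
+                    new_details = ClientCallDetails(
+                        client_call_details.method, 3.0,
+                        client_call_details.metadata,
+                        client_call_details.credentials,
+                        client_call_details.wait_for_ready)
+                    return await continuation(new_details, request)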
+ """
+
+
+class UnaryStreamClientInterceptor(ClientInterceptor, metaclass=ABCMeta):
+ """Affords intercepting unary-stream invocations."""
+
+ @abstractmethod
+ async def intercept_unary_stream(
+ self, continuation: Callable[[ClientCallDetails, RequestType],
+ UnaryStreamCall],
+ client_call_details: ClientCallDetails, request: RequestType
+ ) -> Union[ResponseIterableType, UnaryStreamCall]:
+ """Intercepts a unary-stream invocation asynchronously.
+
+        The function can return the call object or an asynchronous
+        iterator. If an asynchronous iterator is returned, it becomes
+        the source of the reads done by the caller.
+
+ Args:
+ continuation: A coroutine that proceeds with the invocation by
+ executing the next interceptor in the chain or invoking the
+ actual RPC on the underlying Channel. It is the interceptor's
+ responsibility to call it if it decides to move the RPC forward.
+ The interceptor can use
+ `call = await continuation(client_call_details, request)`
+ to continue with the RPC. `continuation` returns the call to the
+ RPC.
+ client_call_details: A ClientCallDetails object describing the
+ outgoing RPC.
+ request: The request value for the RPC.
+
+ Returns:
+ The RPC Call or an asynchronous iterator.
+
+ Raises:
+ AioRpcError: Indicating that the RPC terminated with non-OK status.
+ asyncio.CancelledError: Indicating that the RPC was canceled.
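+
+        A sketch of wrapping the response stream with an asynchronous
+        generator (names are illustrative)::
+
+            async def intercept_unary_stream(self, continuation,
+                                             client_call_details, request):
+                call = await continuation(client_call_details, request)
+
+                async def _relay():
+                    async for response in call:
+                        yield response
+
+                return _relay()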
+ """
+
+
+class StreamUnaryClientInterceptor(ClientInterceptor, metaclass=ABCMeta):
+ """Affords intercepting stream-unary invocations."""
+
+ @abstractmethod
+ async def intercept_stream_unary(
+ self,
+ continuation: Callable[[ClientCallDetails, RequestType],
+ UnaryStreamCall],
+ client_call_details: ClientCallDetails,
+ request_iterator: RequestIterableType,
+ ) -> StreamUnaryCall:
+ """Intercepts a stream-unary invocation asynchronously.
+
+        Within the interceptor, call methods like `write` should be used
+        carefully, and even awaiting the call should be done with care,
+        since the caller could be expecting an untouched call, for
+        example in order to start writing messages to it.
+
+ Args:
+ continuation: A coroutine that proceeds with the invocation by
+ executing the next interceptor in the chain or invoking the
+ actual RPC on the underlying Channel. It is the interceptor's
+ responsibility to call it if it decides to move the RPC forward.
+ The interceptor can use
+ `call = await continuation(client_call_details, request_iterator)`
+ to continue with the RPC. `continuation` returns the call to the
+ RPC.
+ client_call_details: A ClientCallDetails object describing the
+ outgoing RPC.
+ request_iterator: The request iterator that will produce requests
+ for the RPC.
+
+ Returns:
+ The RPC Call.
+
+ Raises:
+ AioRpcError: Indicating that the RPC terminated with non-OK status.
+ asyncio.CancelledError: Indicating that the RPC was canceled.
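+
+        A pass-through sketch that simply forwards the request iterator::
+
+            async def intercept_stream_unary(self, continuation,
+                                             client_call_details,
+                                             request_iterator):
+                return await continuation(client_call_details,
+                                          request_iterator)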
+ """
+
+
+class StreamStreamClientInterceptor(ClientInterceptor, metaclass=ABCMeta):
+ """Affords intercepting stream-stream invocations."""
+
+ @abstractmethod
+ async def intercept_stream_stream(
+ self,
+ continuation: Callable[[ClientCallDetails, RequestType],
+ UnaryStreamCall],
+ client_call_details: ClientCallDetails,
+ request_iterator: RequestIterableType,
+ ) -> Union[ResponseIterableType, StreamStreamCall]:
+ """Intercepts a stream-stream invocation asynchronously.
+
+        Within the interceptor, call methods like `write` should be used
+        carefully, and even awaiting the call should be done with care,
+        since the caller could be expecting an untouched call, for
+        example in order to start writing messages to it.
+
+        The function can return the call object or an asynchronous
+        iterator. If an asynchronous iterator is returned, it becomes
+        the source of the reads done by the caller.
+
+ Args:
+ continuation: A coroutine that proceeds with the invocation by
+ executing the next interceptor in the chain or invoking the
+ actual RPC on the underlying Channel. It is the interceptor's
+ responsibility to call it if it decides to move the RPC forward.
+ The interceptor can use
+ `call = await continuation(client_call_details, request_iterator)`
+ to continue with the RPC. `continuation` returns the call to the
+ RPC.
+ client_call_details: A ClientCallDetails object describing the
+ outgoing RPC.
+ request_iterator: The request iterator that will produce requests
+ for the RPC.
+
+ Returns:
+ The RPC Call or an asynchronous iterator.
+
+ Raises:
+ AioRpcError: Indicating that the RPC terminated with non-OK status.
+ asyncio.CancelledError: Indicating that the RPC was canceled.
+ """
+
+
+class InterceptedCall:
+ """Base implementation for all intercepted call arities.
+
+    Interceptors might have some work to do before the RPC invocation, with
+    the ability to change the invocation parameters, and some work to do
+    after the RPC invocation, with access to the wrapped call
+    (e.g. a `UnaryUnaryCall`).
+
+    It also handles early and late cancellations: when the RPC has not even
+    started and execution is still held by the interceptors, or when the
+    RPC has finished but execution is still held by the interceptors.
+
+    Once the RPC is finally executed, all methods are delegated to the
+    intercepted call, which is the same call object that was returned to
+    the interceptors.
+
+    As the base class for all intercepted calls, it implements the logic
+    around final status, metadata and cancellation.
+ """
+
+ _interceptors_task: asyncio.Task
+ _pending_add_done_callbacks: Sequence[DoneCallbackType]
+
+ def __init__(self, interceptors_task: asyncio.Task) -> None:
+ self._interceptors_task = interceptors_task
+ self._pending_add_done_callbacks = []
+ self._interceptors_task.add_done_callback(
+ self._fire_or_add_pending_done_callbacks)
+
+ def __del__(self):
+ self.cancel()
+
+ def _fire_or_add_pending_done_callbacks(self,
+ interceptors_task: asyncio.Task
+ ) -> None:
+
+ if not self._pending_add_done_callbacks:
+ return
+
+ call_completed = False
+
+ try:
+ call = interceptors_task.result()
+ if call.done():
+ call_completed = True
+ except (AioRpcError, asyncio.CancelledError):
+ call_completed = True
+
+ if call_completed:
+ for callback in self._pending_add_done_callbacks:
+ callback(self)
+ else:
+ for callback in self._pending_add_done_callbacks:
+ callback = functools.partial(self._wrap_add_done_callback,
+ callback)
+ call.add_done_callback(callback)
+
+ self._pending_add_done_callbacks = []
+
+ def _wrap_add_done_callback(self, callback: DoneCallbackType,
+ unused_call: _base_call.Call) -> None:
+ callback(self)
+
+ def cancel(self) -> bool:
+ if not self._interceptors_task.done():
+            # The intercepted call is not yet available; try to cancel
+            # the pending task using the generic asyncio cancellation
+            # mechanism.
+ return self._interceptors_task.cancel()
+
+ try:
+ call = self._interceptors_task.result()
+ except AioRpcError:
+ return False
+ except asyncio.CancelledError:
+ return False
+
+ return call.cancel()
+
+ def cancelled(self) -> bool:
+ if not self._interceptors_task.done():
+ return False
+
+ try:
+ call = self._interceptors_task.result()
+ except AioRpcError as err:
+ return err.code() == grpc.StatusCode.CANCELLED
+ except asyncio.CancelledError:
+ return True
+
+ return call.cancelled()
+
+ def done(self) -> bool:
+ if not self._interceptors_task.done():
+ return False
+
+ try:
+ call = self._interceptors_task.result()
+ except (AioRpcError, asyncio.CancelledError):
+ return True
+
+ return call.done()
+
+ def add_done_callback(self, callback: DoneCallbackType) -> None:
+ if not self._interceptors_task.done():
+ self._pending_add_done_callbacks.append(callback)
+ return
+
+ try:
+ call = self._interceptors_task.result()
+ except (AioRpcError, asyncio.CancelledError):
+ callback(self)
+ return
+
+ if call.done():
+ callback(self)
+ else:
+ callback = functools.partial(self._wrap_add_done_callback, callback)
+ call.add_done_callback(callback)
+
+ def time_remaining(self) -> Optional[float]:
+ raise NotImplementedError()
+
+ async def initial_metadata(self) -> Optional[Metadata]:
+ try:
+ call = await self._interceptors_task
+ except AioRpcError as err:
+ return err.initial_metadata()
+ except asyncio.CancelledError:
+ return None
+
+ return await call.initial_metadata()
+
+ async def trailing_metadata(self) -> Optional[Metadata]:
+ try:
+ call = await self._interceptors_task
+ except AioRpcError as err:
+ return err.trailing_metadata()
+ except asyncio.CancelledError:
+ return None
+
+ return await call.trailing_metadata()
+
+ async def code(self) -> grpc.StatusCode:
+ try:
+ call = await self._interceptors_task
+ except AioRpcError as err:
+ return err.code()
+ except asyncio.CancelledError:
+ return grpc.StatusCode.CANCELLED
+
+ return await call.code()
+
+ async def details(self) -> str:
+ try:
+ call = await self._interceptors_task
+ except AioRpcError as err:
+ return err.details()
+ except asyncio.CancelledError:
+ return _LOCAL_CANCELLATION_DETAILS
+
+ return await call.details()
+
+ async def debug_error_string(self) -> Optional[str]:
+ try:
+ call = await self._interceptors_task
+ except AioRpcError as err:
+ return err.debug_error_string()
+ except asyncio.CancelledError:
+ return ''
+
+ return await call.debug_error_string()
+
+ async def wait_for_connection(self) -> None:
+ call = await self._interceptors_task
+ return await call.wait_for_connection()
+
+
+class _InterceptedUnaryResponseMixin:
+
+ def __await__(self):
+ call = yield from self._interceptors_task.__await__()
+ response = yield from call.__await__()
+ return response
+
+
+class _InterceptedStreamResponseMixin:
+ _response_aiter: Optional[AsyncIterable[ResponseType]]
+
+ def _init_stream_response_mixin(self) -> None:
+        # Initialized lazily; otherwise, if the iterator is never
+        # consumed, asyncio emits a logging warning.
+ self._response_aiter = None
+
+ async def _wait_for_interceptor_task_response_iterator(self
+ ) -> ResponseType:
+ call = await self._interceptors_task
+ async for response in call:
+ yield response
+
+ def __aiter__(self) -> AsyncIterable[ResponseType]:
+ if self._response_aiter is None:
+            self._response_aiter = (
+                self._wait_for_interceptor_task_response_iterator())
+ return self._response_aiter
+
+ async def read(self) -> ResponseType:
+ if self._response_aiter is None:
+            self._response_aiter = (
+                self._wait_for_interceptor_task_response_iterator())
+ return await self._response_aiter.asend(None)
+
+
+class _InterceptedStreamRequestMixin:
+
+ _write_to_iterator_async_gen: Optional[AsyncIterable[RequestType]]
+ _write_to_iterator_queue: Optional[asyncio.Queue]
+
+ _FINISH_ITERATOR_SENTINEL = object()
+
+ def _init_stream_request_mixin(
+ self, request_iterator: Optional[RequestIterableType]
+ ) -> RequestIterableType:
+
+ if request_iterator is None:
+            # We provide our own request iterator which acts as a proxy
+            # for the future writes that will be done by the caller.
+ self._write_to_iterator_queue = asyncio.Queue(maxsize=1)
+            self._write_to_iterator_async_gen = (
+                self._proxy_writes_as_request_iterator())
+ request_iterator = self._write_to_iterator_async_gen
+ else:
+ self._write_to_iterator_queue = None
+
+ return request_iterator
+
+ async def _proxy_writes_as_request_iterator(self):
+ await self._interceptors_task
+
+ while True:
+ value = await self._write_to_iterator_queue.get()
+ if value is _InterceptedStreamRequestMixin._FINISH_ITERATOR_SENTINEL:
+ break
+ yield value
+
+ async def write(self, request: RequestType) -> None:
+        # If no queue was created, it means that requests
+        # are expected through an iterator provided
+        # by the caller.
+ if self._write_to_iterator_queue is None:
+ raise cygrpc.UsageError(_API_STYLE_ERROR)
+
+ try:
+ call = await self._interceptors_task
+ except (asyncio.CancelledError, AioRpcError):
+ raise asyncio.InvalidStateError(_RPC_ALREADY_FINISHED_DETAILS)
+
+ if call.done():
+ raise asyncio.InvalidStateError(_RPC_ALREADY_FINISHED_DETAILS)
+ elif call._done_writing_flag:
+ raise asyncio.InvalidStateError(_RPC_HALF_CLOSED_DETAILS)
+
+        # The write might never complete since the call could finish
+        # abruptly, so we give up on the first awaitable that finishes.
+ _, _ = await asyncio.wait(
+ (self._write_to_iterator_queue.put(request), call.code()),
+ return_when=asyncio.FIRST_COMPLETED)
+
+ if call.done():
+ raise asyncio.InvalidStateError(_RPC_ALREADY_FINISHED_DETAILS)
+
+ async def done_writing(self) -> None:
+ """Signal peer that client is done writing.
+
+ This method is idempotent.
+ """
+        # If no queue was created, it means that requests
+        # are expected through an iterator provided
+        # by the caller.
+ if self._write_to_iterator_queue is None:
+ raise cygrpc.UsageError(_API_STYLE_ERROR)
+
+ try:
+ call = await self._interceptors_task
+ except asyncio.CancelledError:
+ raise asyncio.InvalidStateError(_RPC_ALREADY_FINISHED_DETAILS)
+
+        # The write might never complete since the call could finish
+        # abruptly, so we give up on the first awaitable that finishes.
+ _, _ = await asyncio.wait((self._write_to_iterator_queue.put(
+ _InterceptedStreamRequestMixin._FINISH_ITERATOR_SENTINEL),
+ call.code()),
+ return_when=asyncio.FIRST_COMPLETED)
+
+
+class InterceptedUnaryUnaryCall(_InterceptedUnaryResponseMixin, InterceptedCall,
+ _base_call.UnaryUnaryCall):
+ """Used for running a `UnaryUnaryCall` wrapped by interceptors.
+
+    The `__await__` method is proxied to the intercepted call only once
+    the interceptor task is finished.
+ """
+
+ _loop: asyncio.AbstractEventLoop
+ _channel: cygrpc.AioChannel
+
+ # pylint: disable=too-many-arguments
+ def __init__(self, interceptors: Sequence[UnaryUnaryClientInterceptor],
+ request: RequestType, timeout: Optional[float],
+ metadata: Metadata,
+ credentials: Optional[grpc.CallCredentials],
+ wait_for_ready: Optional[bool], channel: cygrpc.AioChannel,
+ method: bytes, request_serializer: SerializingFunction,
+ response_deserializer: DeserializingFunction,
+ loop: asyncio.AbstractEventLoop) -> None:
+ self._loop = loop
+ self._channel = channel
+ interceptors_task = loop.create_task(
+ self._invoke(interceptors, method, timeout, metadata, credentials,
+ wait_for_ready, request, request_serializer,
+ response_deserializer))
+ super().__init__(interceptors_task)
+
+ # pylint: disable=too-many-arguments
+ async def _invoke(self, interceptors: Sequence[UnaryUnaryClientInterceptor],
+ method: bytes, timeout: Optional[float],
+ metadata: Optional[Metadata],
+ credentials: Optional[grpc.CallCredentials],
+ wait_for_ready: Optional[bool], request: RequestType,
+ request_serializer: SerializingFunction,
+ response_deserializer: DeserializingFunction
+ ) -> UnaryUnaryCall:
+ """Run the RPC call wrapped in interceptors"""
+
+ async def _run_interceptor(
+ interceptors: Iterator[UnaryUnaryClientInterceptor],
+ client_call_details: ClientCallDetails,
+ request: RequestType) -> _base_call.UnaryUnaryCall:
+
+ interceptor = next(interceptors, None)
+
+ if interceptor:
+ continuation = functools.partial(_run_interceptor, interceptors)
+
+ call_or_response = await interceptor.intercept_unary_unary(
+ continuation, client_call_details, request)
+
+ if isinstance(call_or_response, _base_call.UnaryUnaryCall):
+ return call_or_response
+ else:
+ return UnaryUnaryCallResponse(call_or_response)
+
+ else:
+ return UnaryUnaryCall(
+ request, _timeout_to_deadline(client_call_details.timeout),
+ client_call_details.metadata,
+ client_call_details.credentials,
+ client_call_details.wait_for_ready, self._channel,
+ client_call_details.method, request_serializer,
+ response_deserializer, self._loop)
+
+ client_call_details = ClientCallDetails(method, timeout, metadata,
+ credentials, wait_for_ready)
+ return await _run_interceptor(iter(interceptors), client_call_details,
+ request)
+
+ def time_remaining(self) -> Optional[float]:
+ raise NotImplementedError()
+
+
+class InterceptedUnaryStreamCall(_InterceptedStreamResponseMixin,
+ InterceptedCall, _base_call.UnaryStreamCall):
+ """Used for running a `UnaryStreamCall` wrapped by interceptors."""
+
+ _loop: asyncio.AbstractEventLoop
+ _channel: cygrpc.AioChannel
+    _last_returned_call_from_interceptors: Optional[_base_call.UnaryStreamCall]
+
+ # pylint: disable=too-many-arguments
+ def __init__(self, interceptors: Sequence[UnaryStreamClientInterceptor],
+ request: RequestType, timeout: Optional[float],
+ metadata: Metadata,
+ credentials: Optional[grpc.CallCredentials],
+ wait_for_ready: Optional[bool], channel: cygrpc.AioChannel,
+ method: bytes, request_serializer: SerializingFunction,
+ response_deserializer: DeserializingFunction,
+ loop: asyncio.AbstractEventLoop) -> None:
+ self._loop = loop
+ self._channel = channel
+ self._init_stream_response_mixin()
+ self._last_returned_call_from_interceptors = None
+ interceptors_task = loop.create_task(
+ self._invoke(interceptors, method, timeout, metadata, credentials,
+ wait_for_ready, request, request_serializer,
+ response_deserializer))
+ super().__init__(interceptors_task)
+
+ # pylint: disable=too-many-arguments
+ async def _invoke(self, interceptors: Sequence[UnaryUnaryClientInterceptor],
+ method: bytes, timeout: Optional[float],
+ metadata: Optional[Metadata],
+ credentials: Optional[grpc.CallCredentials],
+ wait_for_ready: Optional[bool], request: RequestType,
+ request_serializer: SerializingFunction,
+ response_deserializer: DeserializingFunction
+ ) -> UnaryStreamCall:
+ """Run the RPC call wrapped in interceptors"""
+
+ async def _run_interceptor(
+ interceptors: Iterator[UnaryStreamClientInterceptor],
+ client_call_details: ClientCallDetails,
+ request: RequestType,
+ ) -> _base_call.UnaryUnaryCall:
+
+ interceptor = next(interceptors, None)
+
+ if interceptor:
+ continuation = functools.partial(_run_interceptor, interceptors)
+
+ call_or_response_iterator = await interceptor.intercept_unary_stream(
+ continuation, client_call_details, request)
+
+ if isinstance(call_or_response_iterator,
+ _base_call.UnaryStreamCall):
+ self._last_returned_call_from_interceptors = call_or_response_iterator
+ else:
+ self._last_returned_call_from_interceptors = UnaryStreamCallResponseIterator(
+ self._last_returned_call_from_interceptors,
+ call_or_response_iterator)
+ return self._last_returned_call_from_interceptors
+ else:
+ self._last_returned_call_from_interceptors = UnaryStreamCall(
+ request, _timeout_to_deadline(client_call_details.timeout),
+ client_call_details.metadata,
+ client_call_details.credentials,
+ client_call_details.wait_for_ready, self._channel,
+ client_call_details.method, request_serializer,
+ response_deserializer, self._loop)
+
+ return self._last_returned_call_from_interceptors
+
+ client_call_details = ClientCallDetails(method, timeout, metadata,
+ credentials, wait_for_ready)
+ return await _run_interceptor(iter(interceptors), client_call_details,
+ request)
+
+ def time_remaining(self) -> Optional[float]:
+ raise NotImplementedError()
+
+
+class InterceptedStreamUnaryCall(_InterceptedUnaryResponseMixin,
+ _InterceptedStreamRequestMixin,
+ InterceptedCall, _base_call.StreamUnaryCall):
+ """Used for running a `StreamUnaryCall` wrapped by interceptors.
+
+    The `__await__` method is proxied to the intercepted call only once
+    the interceptor task is finished.
+ """
+
+ _loop: asyncio.AbstractEventLoop
+ _channel: cygrpc.AioChannel
+
+ # pylint: disable=too-many-arguments
+ def __init__(self, interceptors: Sequence[StreamUnaryClientInterceptor],
+ request_iterator: Optional[RequestIterableType],
+ timeout: Optional[float], metadata: Metadata,
+ credentials: Optional[grpc.CallCredentials],
+ wait_for_ready: Optional[bool], channel: cygrpc.AioChannel,
+ method: bytes, request_serializer: SerializingFunction,
+ response_deserializer: DeserializingFunction,
+ loop: asyncio.AbstractEventLoop) -> None:
+ self._loop = loop
+ self._channel = channel
+ request_iterator = self._init_stream_request_mixin(request_iterator)
+ interceptors_task = loop.create_task(
+ self._invoke(interceptors, method, timeout, metadata, credentials,
+ wait_for_ready, request_iterator, request_serializer,
+ response_deserializer))
+ super().__init__(interceptors_task)
+
+ # pylint: disable=too-many-arguments
+ async def _invoke(
+ self, interceptors: Sequence[StreamUnaryClientInterceptor],
+ method: bytes, timeout: Optional[float],
+ metadata: Optional[Metadata],
+ credentials: Optional[grpc.CallCredentials],
+ wait_for_ready: Optional[bool],
+ request_iterator: RequestIterableType,
+ request_serializer: SerializingFunction,
+ response_deserializer: DeserializingFunction) -> StreamUnaryCall:
+ """Run the RPC call wrapped in interceptors"""
+
+ async def _run_interceptor(
+ interceptors: Iterator[UnaryUnaryClientInterceptor],
+ client_call_details: ClientCallDetails,
+ request_iterator: RequestIterableType
+ ) -> _base_call.StreamUnaryCall:
+
+ interceptor = next(interceptors, None)
+
+ if interceptor:
+ continuation = functools.partial(_run_interceptor, interceptors)
+
+ return await interceptor.intercept_stream_unary(
+ continuation, client_call_details, request_iterator)
+ else:
+ return StreamUnaryCall(
+ request_iterator,
+ _timeout_to_deadline(client_call_details.timeout),
+ client_call_details.metadata,
+ client_call_details.credentials,
+ client_call_details.wait_for_ready, self._channel,
+ client_call_details.method, request_serializer,
+ response_deserializer, self._loop)
+
+ client_call_details = ClientCallDetails(method, timeout, metadata,
+ credentials, wait_for_ready)
+ return await _run_interceptor(iter(interceptors), client_call_details,
+ request_iterator)
+
+ def time_remaining(self) -> Optional[float]:
+ raise NotImplementedError()
+
+
+class InterceptedStreamStreamCall(_InterceptedStreamResponseMixin,
+ _InterceptedStreamRequestMixin,
+ InterceptedCall, _base_call.StreamStreamCall):
+ """Used for running a `StreamStreamCall` wrapped by interceptors."""
+
+ _loop: asyncio.AbstractEventLoop
+ _channel: cygrpc.AioChannel
+    _last_returned_call_from_interceptors: Optional[_base_call.StreamStreamCall]
+
+ # pylint: disable=too-many-arguments
+ def __init__(self, interceptors: Sequence[StreamStreamClientInterceptor],
+ request_iterator: Optional[RequestIterableType],
+ timeout: Optional[float], metadata: Metadata,
+ credentials: Optional[grpc.CallCredentials],
+ wait_for_ready: Optional[bool], channel: cygrpc.AioChannel,
+ method: bytes, request_serializer: SerializingFunction,
+ response_deserializer: DeserializingFunction,
+ loop: asyncio.AbstractEventLoop) -> None:
+ self._loop = loop
+ self._channel = channel
+ self._init_stream_response_mixin()
+ request_iterator = self._init_stream_request_mixin(request_iterator)
+ self._last_returned_call_from_interceptors = None
+ interceptors_task = loop.create_task(
+ self._invoke(interceptors, method, timeout, metadata, credentials,
+ wait_for_ready, request_iterator, request_serializer,
+ response_deserializer))
+ super().__init__(interceptors_task)
+
+ # pylint: disable=too-many-arguments
+ async def _invoke(
+ self, interceptors: Sequence[StreamStreamClientInterceptor],
+ method: bytes, timeout: Optional[float],
+ metadata: Optional[Metadata],
+ credentials: Optional[grpc.CallCredentials],
+ wait_for_ready: Optional[bool],
+ request_iterator: RequestIterableType,
+ request_serializer: SerializingFunction,
+ response_deserializer: DeserializingFunction) -> StreamStreamCall:
+ """Run the RPC call wrapped in interceptors"""
+
+ async def _run_interceptor(
+ interceptors: Iterator[StreamStreamClientInterceptor],
+ client_call_details: ClientCallDetails,
+ request_iterator: RequestIterableType
+ ) -> _base_call.StreamStreamCall:
+
+ interceptor = next(interceptors, None)
+
+ if interceptor:
+ continuation = functools.partial(_run_interceptor, interceptors)
+
+ call_or_response_iterator = await interceptor.intercept_stream_stream(
+ continuation, client_call_details, request_iterator)
+
+ if isinstance(call_or_response_iterator,
+ _base_call.StreamStreamCall):
+ self._last_returned_call_from_interceptors = call_or_response_iterator
+ else:
+ self._last_returned_call_from_interceptors = StreamStreamCallResponseIterator(
+ self._last_returned_call_from_interceptors,
+ call_or_response_iterator)
+ return self._last_returned_call_from_interceptors
+ else:
+ self._last_returned_call_from_interceptors = StreamStreamCall(
+ request_iterator,
+ _timeout_to_deadline(client_call_details.timeout),
+ client_call_details.metadata,
+ client_call_details.credentials,
+ client_call_details.wait_for_ready, self._channel,
+ client_call_details.method, request_serializer,
+ response_deserializer, self._loop)
+ return self._last_returned_call_from_interceptors
+
+ client_call_details = ClientCallDetails(method, timeout, metadata,
+ credentials, wait_for_ready)
+ return await _run_interceptor(iter(interceptors), client_call_details,
+ request_iterator)
+
+ def time_remaining(self) -> Optional[float]:
+ raise NotImplementedError()
+
+
+class UnaryUnaryCallResponse(_base_call.UnaryUnaryCall):
+ """Final UnaryUnaryCall class finished with a response."""
+ _response: ResponseType
+
+ def __init__(self, response: ResponseType) -> None:
+ self._response = response
+
+ def cancel(self) -> bool:
+ return False
+
+ def cancelled(self) -> bool:
+ return False
+
+ def done(self) -> bool:
+ return True
+
+ def add_done_callback(self, unused_callback) -> None:
+ raise NotImplementedError()
+
+ def time_remaining(self) -> Optional[float]:
+ raise NotImplementedError()
+
+ async def initial_metadata(self) -> Optional[Metadata]:
+ return None
+
+ async def trailing_metadata(self) -> Optional[Metadata]:
+ return None
+
+ async def code(self) -> grpc.StatusCode:
+ return grpc.StatusCode.OK
+
+ async def details(self) -> str:
+ return ''
+
+ async def debug_error_string(self) -> Optional[str]:
+ return None
+
+ def __await__(self):
+ if False: # pylint: disable=using-constant-test
+ # This code path is never used, but a yield statement is needed
+ # for telling the interpreter that __await__ is a generator.
+ yield None
+ return self._response
+
+ async def wait_for_connection(self) -> None:
+ pass
+
+
+class _StreamCallResponseIterator:
+
+ _call: Union[_base_call.UnaryStreamCall, _base_call.StreamStreamCall]
+ _response_iterator: AsyncIterable[ResponseType]
+
+ def __init__(self, call: Union[_base_call.UnaryStreamCall, _base_call.
+ StreamStreamCall],
+ response_iterator: AsyncIterable[ResponseType]) -> None:
+ self._response_iterator = response_iterator
+ self._call = call
+
+ def cancel(self) -> bool:
+ return self._call.cancel()
+
+ def cancelled(self) -> bool:
+ return self._call.cancelled()
+
+ def done(self) -> bool:
+ return self._call.done()
+
+ def add_done_callback(self, callback) -> None:
+ self._call.add_done_callback(callback)
+
+ def time_remaining(self) -> Optional[float]:
+ return self._call.time_remaining()
+
+ async def initial_metadata(self) -> Optional[Metadata]:
+ return await self._call.initial_metadata()
+
+ async def trailing_metadata(self) -> Optional[Metadata]:
+ return await self._call.trailing_metadata()
+
+ async def code(self) -> grpc.StatusCode:
+ return await self._call.code()
+
+ async def details(self) -> str:
+ return await self._call.details()
+
+ async def debug_error_string(self) -> Optional[str]:
+ return await self._call.debug_error_string()
+
+ def __aiter__(self):
+ return self._response_iterator.__aiter__()
+
+ async def wait_for_connection(self) -> None:
+ return await self._call.wait_for_connection()
+
+
+class UnaryStreamCallResponseIterator(_StreamCallResponseIterator,
+ _base_call.UnaryStreamCall):
+ """UnaryStreamCall class wich uses an alternative response iterator."""
+
+ async def read(self) -> ResponseType:
+        # Behind the scenes everything goes through the
+        # async iterator. So this path should not be reached.
+ raise NotImplementedError()
+
+
+class StreamStreamCallResponseIterator(_StreamCallResponseIterator,
+ _base_call.StreamStreamCall):
+ """StreamStreamCall class wich uses an alternative response iterator."""
+
+ async def read(self) -> ResponseType:
+        # Behind the scenes everything goes through the
+        # async iterator. So this path should not be reached.
+ raise NotImplementedError()
+
+ async def write(self, request: RequestType) -> None:
+        # Behind the scenes everything goes through the
+ # async iterator provided by the InterceptedStreamStreamCall.
+ # So this path should not be reached.
+ raise NotImplementedError()
+
+ async def done_writing(self) -> None:
+        # Behind the scenes everything goes through the
+ # async iterator provided by the InterceptedStreamStreamCall.
+ # So this path should not be reached.
+ raise NotImplementedError()
+
+ @property
+ def _done_writing_flag(self) -> bool:
+ return self._call._done_writing_flag
diff --git a/venv/Lib/site-packages/grpc/aio/_metadata.py b/venv/Lib/site-packages/grpc/aio/_metadata.py
new file mode 100644
index 000000000..3c7d92440
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/aio/_metadata.py
@@ -0,0 +1,119 @@
+# Copyright 2020 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Implementation of the metadata abstraction for gRPC Asyncio Python."""
+from typing import List, Tuple, Iterator, Any, Union
+from collections import abc, OrderedDict
+
+MetadataKey = str
+MetadataValue = Union[str, bytes]
+
+
+class Metadata(abc.Mapping):
+ """Metadata abstraction for the asynchronous calls and interceptors.
+
+ The metadata is a mapping from str -> List[str]
+
+ Traits
+ * Multiple entries are allowed for the same key
+ * The order of the values by key is preserved
+    * Getting an element by key retrieves the first mapped value
+    * Supports an immutable view of the data
+    * Allows partial mutation of the data without recreating the object from scratch.
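+
+    A small usage sketch (keys and values are illustrative)::
+
+        md = Metadata(('k1', 'v1'), ('k1', 'v2'))
+        md['k1']          # 'v1', the first value for the key
+        md.get_all('k1')  # ['v1', 'v2']
+        md.add('k2', 'v3')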
+ """
+
+ def __init__(self, *args: Tuple[MetadataKey, MetadataValue]) -> None:
+ self._metadata = OrderedDict()
+ for md_key, md_value in args:
+ self.add(md_key, md_value)
+
+ @classmethod
+ def from_tuple(cls, raw_metadata: tuple):
+ if raw_metadata:
+ return cls(*raw_metadata)
+ return cls()
+
+ def add(self, key: MetadataKey, value: MetadataValue) -> None:
+ self._metadata.setdefault(key, [])
+ self._metadata[key].append(value)
+
+ def __len__(self) -> int:
+ """Return the total number of elements that there are in the metadata,
+ including multiple values for the same key.
+ """
+ return sum(map(len, self._metadata.values()))
+
+ def __getitem__(self, key: MetadataKey) -> MetadataValue:
+ """When calling [], the first element of all those
+ mapped for is returned.
+ """
+ try:
+ return self._metadata[key][0]
+ except (ValueError, IndexError) as e:
+ raise KeyError("{0!r}".format(key)) from e
+
+ def __setitem__(self, key: MetadataKey, value: MetadataValue) -> None:
+ """Calling metadata[] =
+ Maps to the first instance of .
+ """
+ if key not in self:
+ self._metadata[key] = [value]
+ else:
+ current_values = self.get_all(key)
+ self._metadata[key] = [value, *current_values[1:]]
+
+ def __delitem__(self, key: MetadataKey) -> None:
+ """``del metadata[]`` deletes the first mapping for ."""
+ current_values = self.get_all(key)
+ if not current_values:
+ raise KeyError(repr(key))
+ self._metadata[key] = current_values[1:]
+
+ def delete_all(self, key: MetadataKey) -> None:
+ """Delete all mappings for ."""
+ del self._metadata[key]
+
+ def __iter__(self) -> Iterator[Tuple[MetadataKey, MetadataValue]]:
+ for key, values in self._metadata.items():
+ for value in values:
+ yield (key, value)
+
+ def get_all(self, key: MetadataKey) -> List[MetadataValue]:
+ """For compatibility with other Metadata abstraction objects (like in Java),
+        this returns all the values stored under the desired key.
+ """
+ return self._metadata.get(key, [])
+
+ def set_all(self, key: MetadataKey, values: List[MetadataValue]) -> None:
+ self._metadata[key] = values
+
+ def __contains__(self, key: MetadataKey) -> bool:
+ return key in self._metadata
+
+ def __eq__(self, other: Any) -> bool:
+ if isinstance(other, self.__class__):
+ return self._metadata == other._metadata
+ if isinstance(other, tuple):
+ return tuple(self) == other
+ return NotImplemented # pytype: disable=bad-return-type
+
+ def __add__(self, other: Any) -> 'Metadata':
+ if isinstance(other, self.__class__):
+ return Metadata(*(tuple(self) + tuple(other)))
+ if isinstance(other, tuple):
+ return Metadata(*(tuple(self) + other))
+ return NotImplemented # pytype: disable=bad-return-type
+
+ def __repr__(self) -> str:
+ view = tuple(self)
+ return "{0}({1!r})".format(self.__class__.__name__, view)
diff --git a/venv/Lib/site-packages/grpc/aio/_server.py b/venv/Lib/site-packages/grpc/aio/_server.py
new file mode 100644
index 000000000..4d7cb7589
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/aio/_server.py
@@ -0,0 +1,209 @@
+# Copyright 2019 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Server-side implementation of gRPC Asyncio Python."""
+
+from concurrent.futures import Executor
+from typing import Any, Optional, Sequence
+
+import grpc
+from grpc import _common, _compression
+from grpc._cython import cygrpc
+
+from . import _base_server
+from ._typing import ChannelArgumentType
+from ._interceptor import ServerInterceptor
+
+
+def _augment_channel_arguments(base_options: ChannelArgumentType,
+ compression: Optional[grpc.Compression]):
+ compression_option = _compression.create_channel_option(compression)
+ return tuple(base_options) + compression_option
+
+
+class Server(_base_server.Server):
+ """Serves RPCs."""
+
+ def __init__(self, thread_pool: Optional[Executor],
+ generic_handlers: Optional[Sequence[grpc.GenericRpcHandler]],
+ interceptors: Optional[Sequence[Any]],
+ options: ChannelArgumentType,
+ maximum_concurrent_rpcs: Optional[int],
+ compression: Optional[grpc.Compression]):
+ self._loop = cygrpc.get_working_loop()
+ if interceptors:
+ invalid_interceptors = [
+ interceptor for interceptor in interceptors
+ if not isinstance(interceptor, ServerInterceptor)
+ ]
+ if invalid_interceptors:
+ raise ValueError(
+ 'Interceptor must be ServerInterceptor, the '
+ f'following are invalid: {invalid_interceptors}')
+ self._server = cygrpc.AioServer(
+ self._loop, thread_pool, generic_handlers, interceptors,
+ _augment_channel_arguments(options, compression),
+ maximum_concurrent_rpcs)
+
+ def add_generic_rpc_handlers(
+ self,
+ generic_rpc_handlers: Sequence[grpc.GenericRpcHandler]) -> None:
+ """Registers GenericRpcHandlers with this Server.
+
+ This method is only safe to call before the server is started.
+
+ Args:
+ generic_rpc_handlers: A sequence of GenericRpcHandlers that will be
+ used to service RPCs.
+ """
+ self._server.add_generic_rpc_handlers(generic_rpc_handlers)
+
+ def add_insecure_port(self, address: str) -> int:
+ """Opens an insecure port for accepting RPCs.
+
+ This method may only be called before starting the server.
+
+ Args:
+ address: The address for which to open a port. If the port is 0,
+ or not specified in the address, then the gRPC runtime will choose a port.
+
+ Returns:
+ An integer port on which the server will accept RPC requests.
+ """
+ return _common.validate_port_binding_result(
+ address, self._server.add_insecure_port(_common.encode(address)))
+
+ def add_secure_port(self, address: str,
+ server_credentials: grpc.ServerCredentials) -> int:
+ """Opens a secure port for accepting RPCs.
+
+ This method may only be called before starting the server.
+
+ Args:
+ address: The address for which to open a port.
+ If the port is 0, or not specified in the address, then the gRPC
+ runtime will choose a port.
+ server_credentials: A ServerCredentials object.
+
+ Returns:
+ An integer port on which the server will accept RPC requests.
+ """
+ return _common.validate_port_binding_result(
+ address,
+ self._server.add_secure_port(_common.encode(address),
+ server_credentials))
+
+ async def start(self) -> None:
+ """Starts this Server.
+
+ This method may only be called once (i.e. it is not idempotent).
+ """
+ await self._server.start()
+
+ async def stop(self, grace: Optional[float]) -> None:
+ """Stops this Server.
+
+ This method immediately stops the server from servicing new RPCs in
+ all cases.
+
+ If a grace period is specified, this method returns immediately and all
+ RPCs active at the end of the grace period are aborted. If a grace
+ period is not specified (by passing None for grace), all existing RPCs
+ are aborted immediately and this method blocks until the last RPC
+ handler terminates.
+
+ This method is idempotent and may be called at any time. Passing a
+ smaller grace value in a subsequent call will have the effect of
+ stopping the Server sooner (passing None will have the effect of
+ stopping the server immediately). Passing a larger grace value in a
+ subsequent call will not have the effect of stopping the server later
+ (i.e. the most restrictive grace value is used).
+
+ Args:
+ grace: A duration of time in seconds or None.
+ """
+ await self._server.shutdown(grace)
+
+ async def wait_for_termination(self,
+ timeout: Optional[float] = None) -> bool:
+ """Block current coroutine until the server stops.
+
+ This is an EXPERIMENTAL API.
+
+ The wait will not consume computational resources during blocking, and
+ it will block until one of the two following conditions is met:
+
+ 1) The server is stopped or terminated;
+ 2) A timeout occurs if timeout is not `None`.
+
+ The timeout argument works in the same way as `threading.Event.wait()`.
+ https://docs.python.org/3/library/threading.html#threading.Event.wait
+
+ Args:
+ timeout: A floating point number specifying a timeout for the
+ operation in seconds.
+
+ Returns:
+ A bool that indicates whether the operation timed out.
+ """
+ return await self._server.wait_for_termination(timeout)
+
+ def __del__(self):
+ """Schedules a graceful shutdown in current event loop.
+
+ The Cython AioServer doesn't hold a ref-count to this class. It should
+ be safe to slightly extend the underlying Cython object's life span.
+ """
+ if hasattr(self, '_server'):
+ if self._server.is_running():
+ cygrpc.schedule_coro_threadsafe(
+ self._server.shutdown(None),
+ self._loop,
+ )
+
+
+def server(migration_thread_pool: Optional[Executor] = None,
+ handlers: Optional[Sequence[grpc.GenericRpcHandler]] = None,
+ interceptors: Optional[Sequence[Any]] = None,
+ options: Optional[ChannelArgumentType] = None,
+ maximum_concurrent_rpcs: Optional[int] = None,
+ compression: Optional[grpc.Compression] = None):
+ """Creates a Server with which RPCs can be serviced.
+
+ Args:
+ migration_thread_pool: A futures.ThreadPoolExecutor to be used by the
+ Server to execute non-AsyncIO RPC handlers for migration purposes.
+ handlers: An optional list of GenericRpcHandlers used for executing RPCs.
+ More handlers may be added by calling add_generic_rpc_handlers any time
+ before the server is started.
+ interceptors: An optional list of ServerInterceptor objects that observe
+ and optionally manipulate the incoming RPCs before handing them over to
+ handlers. The interceptors are given control in the order they are
+ specified. This is an EXPERIMENTAL API.
+ options: An optional list of key-value pairs (:term:`channel_arguments` in gRPC runtime)
+ to configure the channel.
+ maximum_concurrent_rpcs: The maximum number of concurrent RPCs this server
+ will service before returning RESOURCE_EXHAUSTED status, or None to
+ indicate no limit.
+ compression: An element of grpc.Compression, e.g.
+ grpc.Compression.Gzip. This compression algorithm will be used for the
+ lifetime of the server unless overridden by set_compression. This is an
+ EXPERIMENTAL option.
+
+ Returns:
+ A Server object.
+ """
+ return Server(migration_thread_pool, () if handlers is None else handlers,
+ () if interceptors is None else interceptors,
+ () if options is None else options, maximum_concurrent_rpcs,
+ compression)
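A hedged end-to-end sketch of the server() factory above. Only grpc.aio APIs shown in this file are used; the servicer registration line relies on hypothetical generated names and is left commented out.

import asyncio
import grpc

async def serve() -> None:
    server = grpc.aio.server()                      # defaults match the signature above
    # add_DemoServicer_to_server(DemoServicer(), server)  # hypothetical generated helper
    port = server.add_insecure_port('[::]:50051')   # port 0 would let the runtime pick one
    await server.start()
    print('serving on port', port)
    await server.wait_for_termination()

if __name__ == '__main__':
    asyncio.run(serve())
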
diff --git a/venv/Lib/site-packages/grpc/aio/_typing.py b/venv/Lib/site-packages/grpc/aio/_typing.py
new file mode 100644
index 000000000..7e2e8da8a
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/aio/_typing.py
@@ -0,0 +1,32 @@
+# Copyright 2019 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Common types for gRPC Async API"""
+
+from typing import (Any, AsyncIterable, Callable, Iterable, Sequence, Tuple,
+ TypeVar, Union)
+
+from grpc._cython.cygrpc import EOF
+from ._metadata import Metadata, MetadataKey, MetadataValue
+
+RequestType = TypeVar('RequestType')
+ResponseType = TypeVar('ResponseType')
+SerializingFunction = Callable[[Any], bytes]
+DeserializingFunction = Callable[[bytes], Any]
+MetadatumType = Tuple[MetadataKey, MetadataValue]
+MetadataType = Metadata
+ChannelArgumentType = Sequence[Tuple[str, Any]]
+EOFType = type(EOF)
+DoneCallbackType = Callable[[Any], None]
+RequestIterableType = Union[Iterable[Any], AsyncIterable[Any]]
+ResponseIterableType = AsyncIterable[Any]
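These aliases are internal, but a short sketch shows the shapes they describe; the bindings below are illustrative only and import from the private module shown above.

from grpc.aio._typing import (DeserializingFunction, MetadatumType,
                              SerializingFunction)

serialize: SerializingFunction = lambda msg: bytes(msg)  # Any -> bytes
deserialize: DeserializingFunction = lambda raw: raw     # bytes -> Any
entry: MetadatumType = ('trace-id', 'abc123')            # a single (key, value) pair
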
diff --git a/venv/Lib/site-packages/grpc/aio/_utils.py b/venv/Lib/site-packages/grpc/aio/_utils.py
new file mode 100644
index 000000000..e5772dce2
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/aio/_utils.py
@@ -0,0 +1,22 @@
+# Copyright 2019 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Internal utilities used by the gRPC Aio module."""
+import time
+from typing import Optional
+
+
+def _timeout_to_deadline(timeout: Optional[float]) -> Optional[float]:
+ if timeout is None:
+ return None
+ return time.time() + timeout
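A two-line sketch of the helper's contract (it is private, so the import below is for illustration only): a relative timeout becomes an absolute time.time()-based deadline, and None passes through unchanged.

import time
from grpc.aio._utils import _timeout_to_deadline  # private helper shown above

assert _timeout_to_deadline(None) is None
deadline = _timeout_to_deadline(5.0)
assert 0 < deadline - time.time() <= 5.0
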
diff --git a/venv/Lib/site-packages/grpc/beta/__init__.py b/venv/Lib/site-packages/grpc/beta/__init__.py
new file mode 100644
index 000000000..5fb4f3c3c
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/beta/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2015 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/venv/Lib/site-packages/grpc/beta/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/grpc/beta/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..bccfe55be
Binary files /dev/null and b/venv/Lib/site-packages/grpc/beta/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/beta/__pycache__/_client_adaptations.cpython-36.pyc b/venv/Lib/site-packages/grpc/beta/__pycache__/_client_adaptations.cpython-36.pyc
new file mode 100644
index 000000000..6d79851cc
Binary files /dev/null and b/venv/Lib/site-packages/grpc/beta/__pycache__/_client_adaptations.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/beta/__pycache__/_metadata.cpython-36.pyc b/venv/Lib/site-packages/grpc/beta/__pycache__/_metadata.cpython-36.pyc
new file mode 100644
index 000000000..7c9ec218a
Binary files /dev/null and b/venv/Lib/site-packages/grpc/beta/__pycache__/_metadata.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/beta/__pycache__/_server_adaptations.cpython-36.pyc b/venv/Lib/site-packages/grpc/beta/__pycache__/_server_adaptations.cpython-36.pyc
new file mode 100644
index 000000000..dc48c7088
Binary files /dev/null and b/venv/Lib/site-packages/grpc/beta/__pycache__/_server_adaptations.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/beta/__pycache__/implementations.cpython-36.pyc b/venv/Lib/site-packages/grpc/beta/__pycache__/implementations.cpython-36.pyc
new file mode 100644
index 000000000..324713d51
Binary files /dev/null and b/venv/Lib/site-packages/grpc/beta/__pycache__/implementations.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/beta/__pycache__/interfaces.cpython-36.pyc b/venv/Lib/site-packages/grpc/beta/__pycache__/interfaces.cpython-36.pyc
new file mode 100644
index 000000000..e7f8241f0
Binary files /dev/null and b/venv/Lib/site-packages/grpc/beta/__pycache__/interfaces.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/beta/__pycache__/utilities.cpython-36.pyc b/venv/Lib/site-packages/grpc/beta/__pycache__/utilities.cpython-36.pyc
new file mode 100644
index 000000000..c492db27e
Binary files /dev/null and b/venv/Lib/site-packages/grpc/beta/__pycache__/utilities.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/beta/_client_adaptations.py b/venv/Lib/site-packages/grpc/beta/_client_adaptations.py
new file mode 100644
index 000000000..652ae0ea1
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/beta/_client_adaptations.py
@@ -0,0 +1,706 @@
+# Copyright 2016 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Translates gRPC's client-side API into gRPC's client-side Beta API."""
+
+import grpc
+from grpc import _common
+from grpc.beta import _metadata
+from grpc.beta import interfaces
+from grpc.framework.common import cardinality
+from grpc.framework.foundation import future
+from grpc.framework.interfaces.face import face
+
+# pylint: disable=too-many-arguments,too-many-locals,unused-argument
+
+_STATUS_CODE_TO_ABORTION_KIND_AND_ABORTION_ERROR_CLASS = {
+ grpc.StatusCode.CANCELLED:
+ (face.Abortion.Kind.CANCELLED, face.CancellationError),
+ grpc.StatusCode.UNKNOWN:
+ (face.Abortion.Kind.REMOTE_FAILURE, face.RemoteError),
+ grpc.StatusCode.DEADLINE_EXCEEDED:
+ (face.Abortion.Kind.EXPIRED, face.ExpirationError),
+ grpc.StatusCode.UNIMPLEMENTED:
+ (face.Abortion.Kind.LOCAL_FAILURE, face.LocalError),
+}
+
+
+def _effective_metadata(metadata, metadata_transformer):
+ non_none_metadata = () if metadata is None else metadata
+ if metadata_transformer is None:
+ return non_none_metadata
+ else:
+ return metadata_transformer(non_none_metadata)
+
+
+def _credentials(grpc_call_options):
+ return None if grpc_call_options is None else grpc_call_options.credentials
+
+
+def _abortion(rpc_error_call):
+ code = rpc_error_call.code()
+ pair = _STATUS_CODE_TO_ABORTION_KIND_AND_ABORTION_ERROR_CLASS.get(code)
+ error_kind = face.Abortion.Kind.LOCAL_FAILURE if pair is None else pair[0]
+ return face.Abortion(error_kind, rpc_error_call.initial_metadata(),
+ rpc_error_call.trailing_metadata(), code,
+ rpc_error_call.details())
+
+
+def _abortion_error(rpc_error_call):
+ code = rpc_error_call.code()
+ pair = _STATUS_CODE_TO_ABORTION_KIND_AND_ABORTION_ERROR_CLASS.get(code)
+ exception_class = face.AbortionError if pair is None else pair[1]
+ return exception_class(rpc_error_call.initial_metadata(),
+ rpc_error_call.trailing_metadata(), code,
+ rpc_error_call.details())
+
+
+class _InvocationProtocolContext(interfaces.GRPCInvocationContext):
+
+ def disable_next_request_compression(self):
+ pass # TODO(https://github.com/grpc/grpc/issues/4078): design, implement.
+
+
+class _Rendezvous(future.Future, face.Call):
+
+ def __init__(self, response_future, response_iterator, call):
+ self._future = response_future
+ self._iterator = response_iterator
+ self._call = call
+
+ def cancel(self):
+ return self._call.cancel()
+
+ def cancelled(self):
+ return self._future.cancelled()
+
+ def running(self):
+ return self._future.running()
+
+ def done(self):
+ return self._future.done()
+
+ def result(self, timeout=None):
+ try:
+ return self._future.result(timeout=timeout)
+ except grpc.RpcError as rpc_error_call:
+ raise _abortion_error(rpc_error_call)
+ except grpc.FutureTimeoutError:
+ raise future.TimeoutError()
+ except grpc.FutureCancelledError:
+ raise future.CancelledError()
+
+ def exception(self, timeout=None):
+ try:
+ rpc_error_call = self._future.exception(timeout=timeout)
+ if rpc_error_call is None:
+ return None
+ else:
+ return _abortion_error(rpc_error_call)
+ except grpc.FutureTimeoutError:
+ raise future.TimeoutError()
+ except grpc.FutureCancelledError:
+ raise future.CancelledError()
+
+ def traceback(self, timeout=None):
+ try:
+ return self._future.traceback(timeout=timeout)
+ except grpc.FutureTimeoutError:
+ raise future.TimeoutError()
+ except grpc.FutureCancelledError:
+ raise future.CancelledError()
+
+ def add_done_callback(self, fn):
+ self._future.add_done_callback(lambda ignored_callback: fn(self))
+
+ def __iter__(self):
+ return self
+
+ def _next(self):
+ try:
+ return next(self._iterator)
+ except grpc.RpcError as rpc_error_call:
+ raise _abortion_error(rpc_error_call)
+
+ def __next__(self):
+ return self._next()
+
+ def next(self):
+ return self._next()
+
+ def is_active(self):
+ return self._call.is_active()
+
+ def time_remaining(self):
+ return self._call.time_remaining()
+
+ def add_abortion_callback(self, abortion_callback):
+
+ def done_callback():
+ if self.code() is not grpc.StatusCode.OK:
+ abortion_callback(_abortion(self._call))
+
+ registered = self._call.add_callback(done_callback)
+ return None if registered else done_callback()
+
+ def protocol_context(self):
+ return _InvocationProtocolContext()
+
+ def initial_metadata(self):
+ return _metadata.beta(self._call.initial_metadata())
+
+ def terminal_metadata(self):
+ return _metadata.beta(self._call.terminal_metadata())
+
+ def code(self):
+ return self._call.code()
+
+ def details(self):
+ return self._call.details()
+
+
+def _blocking_unary_unary(channel, group, method, timeout, with_call,
+ protocol_options, metadata, metadata_transformer,
+ request, request_serializer, response_deserializer):
+ try:
+ multi_callable = channel.unary_unary(
+ _common.fully_qualified_method(group, method),
+ request_serializer=request_serializer,
+ response_deserializer=response_deserializer)
+ effective_metadata = _effective_metadata(metadata, metadata_transformer)
+ if with_call:
+ response, call = multi_callable.with_call(
+ request,
+ timeout=timeout,
+ metadata=_metadata.unbeta(effective_metadata),
+ credentials=_credentials(protocol_options))
+ return response, _Rendezvous(None, None, call)
+ else:
+ return multi_callable(request,
+ timeout=timeout,
+ metadata=_metadata.unbeta(effective_metadata),
+ credentials=_credentials(protocol_options))
+ except grpc.RpcError as rpc_error_call:
+ raise _abortion_error(rpc_error_call)
+
+
+def _future_unary_unary(channel, group, method, timeout, protocol_options,
+ metadata, metadata_transformer, request,
+ request_serializer, response_deserializer):
+ multi_callable = channel.unary_unary(
+ _common.fully_qualified_method(group, method),
+ request_serializer=request_serializer,
+ response_deserializer=response_deserializer)
+ effective_metadata = _effective_metadata(metadata, metadata_transformer)
+ response_future = multi_callable.future(
+ request,
+ timeout=timeout,
+ metadata=_metadata.unbeta(effective_metadata),
+ credentials=_credentials(protocol_options))
+ return _Rendezvous(response_future, None, response_future)
+
+
+def _unary_stream(channel, group, method, timeout, protocol_options, metadata,
+ metadata_transformer, request, request_serializer,
+ response_deserializer):
+ multi_callable = channel.unary_stream(
+ _common.fully_qualified_method(group, method),
+ request_serializer=request_serializer,
+ response_deserializer=response_deserializer)
+ effective_metadata = _effective_metadata(metadata, metadata_transformer)
+ response_iterator = multi_callable(
+ request,
+ timeout=timeout,
+ metadata=_metadata.unbeta(effective_metadata),
+ credentials=_credentials(protocol_options))
+ return _Rendezvous(None, response_iterator, response_iterator)
+
+
+def _blocking_stream_unary(channel, group, method, timeout, with_call,
+ protocol_options, metadata, metadata_transformer,
+ request_iterator, request_serializer,
+ response_deserializer):
+ try:
+ multi_callable = channel.stream_unary(
+ _common.fully_qualified_method(group, method),
+ request_serializer=request_serializer,
+ response_deserializer=response_deserializer)
+ effective_metadata = _effective_metadata(metadata, metadata_transformer)
+ if with_call:
+ response, call = multi_callable.with_call(
+ request_iterator,
+ timeout=timeout,
+ metadata=_metadata.unbeta(effective_metadata),
+ credentials=_credentials(protocol_options))
+ return response, _Rendezvous(None, None, call)
+ else:
+ return multi_callable(request_iterator,
+ timeout=timeout,
+ metadata=_metadata.unbeta(effective_metadata),
+ credentials=_credentials(protocol_options))
+ except grpc.RpcError as rpc_error_call:
+ raise _abortion_error(rpc_error_call)
+
+
+def _future_stream_unary(channel, group, method, timeout, protocol_options,
+ metadata, metadata_transformer, request_iterator,
+ request_serializer, response_deserializer):
+ multi_callable = channel.stream_unary(
+ _common.fully_qualified_method(group, method),
+ request_serializer=request_serializer,
+ response_deserializer=response_deserializer)
+ effective_metadata = _effective_metadata(metadata, metadata_transformer)
+ response_future = multi_callable.future(
+ request_iterator,
+ timeout=timeout,
+ metadata=_metadata.unbeta(effective_metadata),
+ credentials=_credentials(protocol_options))
+ return _Rendezvous(response_future, None, response_future)
+
+
+def _stream_stream(channel, group, method, timeout, protocol_options, metadata,
+ metadata_transformer, request_iterator, request_serializer,
+ response_deserializer):
+ multi_callable = channel.stream_stream(
+ _common.fully_qualified_method(group, method),
+ request_serializer=request_serializer,
+ response_deserializer=response_deserializer)
+ effective_metadata = _effective_metadata(metadata, metadata_transformer)
+ response_iterator = multi_callable(
+ request_iterator,
+ timeout=timeout,
+ metadata=_metadata.unbeta(effective_metadata),
+ credentials=_credentials(protocol_options))
+ return _Rendezvous(None, response_iterator, response_iterator)
+
+
+class _UnaryUnaryMultiCallable(face.UnaryUnaryMultiCallable):
+
+ def __init__(self, channel, group, method, metadata_transformer,
+ request_serializer, response_deserializer):
+ self._channel = channel
+ self._group = group
+ self._method = method
+ self._metadata_transformer = metadata_transformer
+ self._request_serializer = request_serializer
+ self._response_deserializer = response_deserializer
+
+ def __call__(self,
+ request,
+ timeout,
+ metadata=None,
+ with_call=False,
+ protocol_options=None):
+ return _blocking_unary_unary(self._channel, self._group, self._method,
+ timeout, with_call, protocol_options,
+ metadata, self._metadata_transformer,
+ request, self._request_serializer,
+ self._response_deserializer)
+
+ def future(self, request, timeout, metadata=None, protocol_options=None):
+ return _future_unary_unary(self._channel, self._group, self._method,
+ timeout, protocol_options, metadata,
+ self._metadata_transformer, request,
+ self._request_serializer,
+ self._response_deserializer)
+
+ def event(self,
+ request,
+ receiver,
+ abortion_callback,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ raise NotImplementedError()
+
+
+class _UnaryStreamMultiCallable(face.UnaryStreamMultiCallable):
+
+ def __init__(self, channel, group, method, metadata_transformer,
+ request_serializer, response_deserializer):
+ self._channel = channel
+ self._group = group
+ self._method = method
+ self._metadata_transformer = metadata_transformer
+ self._request_serializer = request_serializer
+ self._response_deserializer = response_deserializer
+
+ def __call__(self, request, timeout, metadata=None, protocol_options=None):
+ return _unary_stream(self._channel, self._group, self._method, timeout,
+ protocol_options, metadata,
+ self._metadata_transformer, request,
+ self._request_serializer,
+ self._response_deserializer)
+
+ def event(self,
+ request,
+ receiver,
+ abortion_callback,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ raise NotImplementedError()
+
+
+class _StreamUnaryMultiCallable(face.StreamUnaryMultiCallable):
+
+ def __init__(self, channel, group, method, metadata_transformer,
+ request_serializer, response_deserializer):
+ self._channel = channel
+ self._group = group
+ self._method = method
+ self._metadata_transformer = metadata_transformer
+ self._request_serializer = request_serializer
+ self._response_deserializer = response_deserializer
+
+ def __call__(self,
+ request_iterator,
+ timeout,
+ metadata=None,
+ with_call=False,
+ protocol_options=None):
+ return _blocking_stream_unary(self._channel, self._group, self._method,
+ timeout, with_call, protocol_options,
+ metadata, self._metadata_transformer,
+ request_iterator,
+ self._request_serializer,
+ self._response_deserializer)
+
+ def future(self,
+ request_iterator,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ return _future_stream_unary(self._channel, self._group, self._method,
+ timeout, protocol_options, metadata,
+ self._metadata_transformer,
+ request_iterator, self._request_serializer,
+ self._response_deserializer)
+
+ def event(self,
+ receiver,
+ abortion_callback,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ raise NotImplementedError()
+
+
+class _StreamStreamMultiCallable(face.StreamStreamMultiCallable):
+
+ def __init__(self, channel, group, method, metadata_transformer,
+ request_serializer, response_deserializer):
+ self._channel = channel
+ self._group = group
+ self._method = method
+ self._metadata_transformer = metadata_transformer
+ self._request_serializer = request_serializer
+ self._response_deserializer = response_deserializer
+
+ def __call__(self,
+ request_iterator,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ return _stream_stream(self._channel, self._group, self._method, timeout,
+ protocol_options, metadata,
+ self._metadata_transformer, request_iterator,
+ self._request_serializer,
+ self._response_deserializer)
+
+ def event(self,
+ receiver,
+ abortion_callback,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ raise NotImplementedError()
+
+
+class _GenericStub(face.GenericStub):
+
+ def __init__(self, channel, metadata_transformer, request_serializers,
+ response_deserializers):
+ self._channel = channel
+ self._metadata_transformer = metadata_transformer
+ self._request_serializers = request_serializers or {}
+ self._response_deserializers = response_deserializers or {}
+
+ def blocking_unary_unary(self,
+ group,
+ method,
+ request,
+ timeout,
+ metadata=None,
+ with_call=None,
+ protocol_options=None):
+ request_serializer = self._request_serializers.get((
+ group,
+ method,
+ ))
+ response_deserializer = self._response_deserializers.get((
+ group,
+ method,
+ ))
+ return _blocking_unary_unary(self._channel, group, method, timeout,
+ with_call, protocol_options, metadata,
+ self._metadata_transformer, request,
+ request_serializer, response_deserializer)
+
+ def future_unary_unary(self,
+ group,
+ method,
+ request,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ request_serializer = self._request_serializers.get((
+ group,
+ method,
+ ))
+ response_deserializer = self._response_deserializers.get((
+ group,
+ method,
+ ))
+ return _future_unary_unary(self._channel, group, method, timeout,
+ protocol_options, metadata,
+ self._metadata_transformer, request,
+ request_serializer, response_deserializer)
+
+ def inline_unary_stream(self,
+ group,
+ method,
+ request,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ request_serializer = self._request_serializers.get((
+ group,
+ method,
+ ))
+ response_deserializer = self._response_deserializers.get((
+ group,
+ method,
+ ))
+ return _unary_stream(self._channel, group, method, timeout,
+ protocol_options, metadata,
+ self._metadata_transformer, request,
+ request_serializer, response_deserializer)
+
+ def blocking_stream_unary(self,
+ group,
+ method,
+ request_iterator,
+ timeout,
+ metadata=None,
+ with_call=None,
+ protocol_options=None):
+ request_serializer = self._request_serializers.get((
+ group,
+ method,
+ ))
+ response_deserializer = self._response_deserializers.get((
+ group,
+ method,
+ ))
+ return _blocking_stream_unary(self._channel, group, method, timeout,
+ with_call, protocol_options, metadata,
+ self._metadata_transformer,
+ request_iterator, request_serializer,
+ response_deserializer)
+
+ def future_stream_unary(self,
+ group,
+ method,
+ request_iterator,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ request_serializer = self._request_serializers.get((
+ group,
+ method,
+ ))
+ response_deserializer = self._response_deserializers.get((
+ group,
+ method,
+ ))
+ return _future_stream_unary(self._channel, group, method, timeout,
+ protocol_options, metadata,
+ self._metadata_transformer,
+ request_iterator, request_serializer,
+ response_deserializer)
+
+ def inline_stream_stream(self,
+ group,
+ method,
+ request_iterator,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ request_serializer = self._request_serializers.get((
+ group,
+ method,
+ ))
+ response_deserializer = self._response_deserializers.get((
+ group,
+ method,
+ ))
+ return _stream_stream(self._channel, group, method, timeout,
+ protocol_options, metadata,
+ self._metadata_transformer, request_iterator,
+ request_serializer, response_deserializer)
+
+ def event_unary_unary(self,
+ group,
+ method,
+ request,
+ receiver,
+ abortion_callback,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ raise NotImplementedError()
+
+ def event_unary_stream(self,
+ group,
+ method,
+ request,
+ receiver,
+ abortion_callback,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ raise NotImplementedError()
+
+ def event_stream_unary(self,
+ group,
+ method,
+ receiver,
+ abortion_callback,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ raise NotImplementedError()
+
+ def event_stream_stream(self,
+ group,
+ method,
+ receiver,
+ abortion_callback,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ raise NotImplementedError()
+
+ def unary_unary(self, group, method):
+ request_serializer = self._request_serializers.get((
+ group,
+ method,
+ ))
+ response_deserializer = self._response_deserializers.get((
+ group,
+ method,
+ ))
+ return _UnaryUnaryMultiCallable(self._channel, group, method,
+ self._metadata_transformer,
+ request_serializer,
+ response_deserializer)
+
+ def unary_stream(self, group, method):
+ request_serializer = self._request_serializers.get((
+ group,
+ method,
+ ))
+ response_deserializer = self._response_deserializers.get((
+ group,
+ method,
+ ))
+ return _UnaryStreamMultiCallable(self._channel, group, method,
+ self._metadata_transformer,
+ request_serializer,
+ response_deserializer)
+
+ def stream_unary(self, group, method):
+ request_serializer = self._request_serializers.get((
+ group,
+ method,
+ ))
+ response_deserializer = self._response_deserializers.get((
+ group,
+ method,
+ ))
+ return _StreamUnaryMultiCallable(self._channel, group, method,
+ self._metadata_transformer,
+ request_serializer,
+ response_deserializer)
+
+ def stream_stream(self, group, method):
+ request_serializer = self._request_serializers.get((
+ group,
+ method,
+ ))
+ response_deserializer = self._response_deserializers.get((
+ group,
+ method,
+ ))
+ return _StreamStreamMultiCallable(self._channel, group, method,
+ self._metadata_transformer,
+ request_serializer,
+ response_deserializer)
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ return False
+
+
+class _DynamicStub(face.DynamicStub):
+
+ def __init__(self, backing_generic_stub, group, cardinalities):
+ self._generic_stub = backing_generic_stub
+ self._group = group
+ self._cardinalities = cardinalities
+
+ def __getattr__(self, attr):
+ method_cardinality = self._cardinalities.get(attr)
+ if method_cardinality is cardinality.Cardinality.UNARY_UNARY:
+ return self._generic_stub.unary_unary(self._group, attr)
+ elif method_cardinality is cardinality.Cardinality.UNARY_STREAM:
+ return self._generic_stub.unary_stream(self._group, attr)
+ elif method_cardinality is cardinality.Cardinality.STREAM_UNARY:
+ return self._generic_stub.stream_unary(self._group, attr)
+ elif method_cardinality is cardinality.Cardinality.STREAM_STREAM:
+ return self._generic_stub.stream_stream(self._group, attr)
+ else:
+ raise AttributeError('_DynamicStub object has no attribute "%s"!' %
+ attr)
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ return False
+
+
+def generic_stub(channel, host, metadata_transformer, request_serializers,
+ response_deserializers):
+ return _GenericStub(channel, metadata_transformer, request_serializers,
+ response_deserializers)
+
+
+def dynamic_stub(channel, service, cardinalities, host, metadata_transformer,
+ request_serializers, response_deserializers):
+ return _DynamicStub(
+ _GenericStub(channel, metadata_transformer, request_serializers,
+ response_deserializers), service, cardinalities)
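A hedged sketch of the error translation this module performs. The service and method names are illustrative, and no serializers are registered, so the request must already be bytes; the point is that a grpc.RpcError raised by the native API surfaces to Beta callers as one of the face.* exceptions mapped above.

import grpc
from grpc.beta import implementations
from grpc.framework.interfaces.face import face

channel = implementations.insecure_channel('localhost', 50051)
stub = implementations.generic_stub(channel)
try:
    response = stub.blocking_unary_unary(
        'demo.Demo', 'Ping', b'request-bytes', timeout=1.0)
except face.ExpirationError:       # grpc.StatusCode.DEADLINE_EXCEEDED
    print('deadline exceeded')
except face.AbortionError as err:  # any other non-OK status
    print('aborted:', err.code, err.details)
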
diff --git a/venv/Lib/site-packages/grpc/beta/_metadata.py b/venv/Lib/site-packages/grpc/beta/_metadata.py
new file mode 100644
index 000000000..b7c853528
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/beta/_metadata.py
@@ -0,0 +1,52 @@
+# Copyright 2017 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""API metadata conversion utilities."""
+
+import collections
+
+_Metadatum = collections.namedtuple('_Metadatum', (
+ 'key',
+ 'value',
+))
+
+
+def _beta_metadatum(key, value):
+ beta_key = key if isinstance(key, (bytes,)) else key.encode('ascii')
+ beta_value = value if isinstance(value, (bytes,)) else value.encode('ascii')
+ return _Metadatum(beta_key, beta_value)
+
+
+def _metadatum(beta_key, beta_value):
+ key = beta_key if isinstance(beta_key, (str,)) else beta_key.decode('utf8')
+ if isinstance(beta_value, (str,)) or key[-4:] == '-bin':
+ value = beta_value
+ else:
+ value = beta_value.decode('utf8')
+ return _Metadatum(key, value)
+
+
+def beta(metadata):
+ if metadata is None:
+ return ()
+ else:
+ return tuple(_beta_metadatum(key, value) for key, value in metadata)
+
+
+def unbeta(beta_metadata):
+ if beta_metadata is None:
+ return ()
+ else:
+ return tuple(
+ _metadatum(beta_key, beta_value)
+ for beta_key, beta_value in beta_metadata)
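A quick sketch of the round trip these helpers perform (assuming ASCII text and non "-bin" keys): beta() encodes to bytes-valued _Metadatum pairs, and unbeta() decodes back to text. The module is internal; the import is for illustration only.

from grpc.beta import _metadata  # internal module shown above

native = (('user-agent', 'demo/1.0'),)
beta_md = _metadata.beta(native)   # (_Metadatum(b'user-agent', b'demo/1.0'),)
assert beta_md[0].key == b'user-agent'

round_tripped = _metadata.unbeta(beta_md)
assert round_tripped == (('user-agent', 'demo/1.0'),)
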
diff --git a/venv/Lib/site-packages/grpc/beta/_server_adaptations.py b/venv/Lib/site-packages/grpc/beta/_server_adaptations.py
new file mode 100644
index 000000000..8843a3c55
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/beta/_server_adaptations.py
@@ -0,0 +1,385 @@
+# Copyright 2016 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Translates gRPC's server-side API into gRPC's server-side Beta API."""
+
+import collections
+import threading
+
+import grpc
+from grpc import _common
+from grpc.beta import _metadata
+from grpc.beta import interfaces
+from grpc.framework.common import cardinality
+from grpc.framework.common import style
+from grpc.framework.foundation import abandonment
+from grpc.framework.foundation import logging_pool
+from grpc.framework.foundation import stream
+from grpc.framework.interfaces.face import face
+
+# pylint: disable=too-many-return-statements
+
+_DEFAULT_POOL_SIZE = 8
+
+
+class _ServerProtocolContext(interfaces.GRPCServicerContext):
+
+ def __init__(self, servicer_context):
+ self._servicer_context = servicer_context
+
+ def peer(self):
+ return self._servicer_context.peer()
+
+ def disable_next_response_compression(self):
+ pass # TODO(https://github.com/grpc/grpc/issues/4078): design, implement.
+
+
+class _FaceServicerContext(face.ServicerContext):
+
+ def __init__(self, servicer_context):
+ self._servicer_context = servicer_context
+
+ def is_active(self):
+ return self._servicer_context.is_active()
+
+ def time_remaining(self):
+ return self._servicer_context.time_remaining()
+
+ def add_abortion_callback(self, abortion_callback):
+ raise NotImplementedError(
+ 'add_abortion_callback no longer supported server-side!')
+
+ def cancel(self):
+ self._servicer_context.cancel()
+
+ def protocol_context(self):
+ return _ServerProtocolContext(self._servicer_context)
+
+ def invocation_metadata(self):
+ return _metadata.beta(self._servicer_context.invocation_metadata())
+
+ def initial_metadata(self, initial_metadata):
+ self._servicer_context.send_initial_metadata(
+ _metadata.unbeta(initial_metadata))
+
+ def terminal_metadata(self, terminal_metadata):
+ self._servicer_context.set_terminal_metadata(
+ _metadata.unbeta(terminal_metadata))
+
+ def code(self, code):
+ self._servicer_context.set_code(code)
+
+ def details(self, details):
+ self._servicer_context.set_details(details)
+
+
+def _adapt_unary_request_inline(unary_request_inline):
+
+ def adaptation(request, servicer_context):
+ return unary_request_inline(request,
+ _FaceServicerContext(servicer_context))
+
+ return adaptation
+
+
+def _adapt_stream_request_inline(stream_request_inline):
+
+ def adaptation(request_iterator, servicer_context):
+ return stream_request_inline(request_iterator,
+ _FaceServicerContext(servicer_context))
+
+ return adaptation
+
+
+class _Callback(stream.Consumer):
+
+ def __init__(self):
+ self._condition = threading.Condition()
+ self._values = []
+ self._terminated = False
+ self._cancelled = False
+
+ def consume(self, value):
+ with self._condition:
+ self._values.append(value)
+ self._condition.notify_all()
+
+ def terminate(self):
+ with self._condition:
+ self._terminated = True
+ self._condition.notify_all()
+
+ def consume_and_terminate(self, value):
+ with self._condition:
+ self._values.append(value)
+ self._terminated = True
+ self._condition.notify_all()
+
+ def cancel(self):
+ with self._condition:
+ self._cancelled = True
+ self._condition.notify_all()
+
+ def draw_one_value(self):
+ with self._condition:
+ while True:
+ if self._cancelled:
+ raise abandonment.Abandoned()
+ elif self._values:
+ return self._values.pop(0)
+ elif self._terminated:
+ return None
+ else:
+ self._condition.wait()
+
+ def draw_all_values(self):
+ with self._condition:
+ while True:
+ if self._cancelled:
+ raise abandonment.Abandoned()
+ elif self._terminated:
+ all_values = tuple(self._values)
+ self._values = None
+ return all_values
+ else:
+ self._condition.wait()
+
+
+def _run_request_pipe_thread(request_iterator, request_consumer,
+ servicer_context):
+ thread_joined = threading.Event()
+
+ def pipe_requests():
+ for request in request_iterator:
+ if not servicer_context.is_active() or thread_joined.is_set():
+ return
+ request_consumer.consume(request)
+ if not servicer_context.is_active() or thread_joined.is_set():
+ return
+ request_consumer.terminate()
+
+ request_pipe_thread = threading.Thread(target=pipe_requests)
+ request_pipe_thread.daemon = True
+ request_pipe_thread.start()
+
+
+def _adapt_unary_unary_event(unary_unary_event):
+
+ def adaptation(request, servicer_context):
+ callback = _Callback()
+ if not servicer_context.add_callback(callback.cancel):
+ raise abandonment.Abandoned()
+ unary_unary_event(request, callback.consume_and_terminate,
+ _FaceServicerContext(servicer_context))
+ return callback.draw_all_values()[0]
+
+ return adaptation
+
+
+def _adapt_unary_stream_event(unary_stream_event):
+
+ def adaptation(request, servicer_context):
+ callback = _Callback()
+ if not servicer_context.add_callback(callback.cancel):
+ raise abandonment.Abandoned()
+ unary_stream_event(request, callback,
+ _FaceServicerContext(servicer_context))
+ while True:
+ response = callback.draw_one_value()
+ if response is None:
+ return
+ else:
+ yield response
+
+ return adaptation
+
+
+def _adapt_stream_unary_event(stream_unary_event):
+
+ def adaptation(request_iterator, servicer_context):
+ callback = _Callback()
+ if not servicer_context.add_callback(callback.cancel):
+ raise abandonment.Abandoned()
+ request_consumer = stream_unary_event(
+ callback.consume_and_terminate,
+ _FaceServicerContext(servicer_context))
+ _run_request_pipe_thread(request_iterator, request_consumer,
+ servicer_context)
+ return callback.draw_all_values()[0]
+
+ return adaptation
+
+
+def _adapt_stream_stream_event(stream_stream_event):
+
+ def adaptation(request_iterator, servicer_context):
+ callback = _Callback()
+ if not servicer_context.add_callback(callback.cancel):
+ raise abandonment.Abandoned()
+ request_consumer = stream_stream_event(
+ callback, _FaceServicerContext(servicer_context))
+ _run_request_pipe_thread(request_iterator, request_consumer,
+ servicer_context)
+ while True:
+ response = callback.draw_one_value()
+ if response is None:
+ return
+ else:
+ yield response
+
+ return adaptation
+
+
+class _SimpleMethodHandler(
+ collections.namedtuple('_MethodHandler', (
+ 'request_streaming',
+ 'response_streaming',
+ 'request_deserializer',
+ 'response_serializer',
+ 'unary_unary',
+ 'unary_stream',
+ 'stream_unary',
+ 'stream_stream',
+ )), grpc.RpcMethodHandler):
+ pass
+
+
+def _simple_method_handler(implementation, request_deserializer,
+ response_serializer):
+ if implementation.style is style.Service.INLINE:
+ if implementation.cardinality is cardinality.Cardinality.UNARY_UNARY:
+ return _SimpleMethodHandler(
+ False, False, request_deserializer, response_serializer,
+ _adapt_unary_request_inline(implementation.unary_unary_inline),
+ None, None, None)
+ elif implementation.cardinality is cardinality.Cardinality.UNARY_STREAM:
+ return _SimpleMethodHandler(
+ False, True, request_deserializer, response_serializer, None,
+ _adapt_unary_request_inline(implementation.unary_stream_inline),
+ None, None)
+ elif implementation.cardinality is cardinality.Cardinality.STREAM_UNARY:
+ return _SimpleMethodHandler(
+ True, False, request_deserializer, response_serializer, None,
+ None,
+ _adapt_stream_request_inline(
+ implementation.stream_unary_inline), None)
+ elif implementation.cardinality is cardinality.Cardinality.STREAM_STREAM:
+ return _SimpleMethodHandler(
+ True, True, request_deserializer, response_serializer, None,
+ None, None,
+ _adapt_stream_request_inline(
+ implementation.stream_stream_inline))
+ elif implementation.style is style.Service.EVENT:
+ if implementation.cardinality is cardinality.Cardinality.UNARY_UNARY:
+ return _SimpleMethodHandler(
+ False, False, request_deserializer, response_serializer,
+ _adapt_unary_unary_event(implementation.unary_unary_event),
+ None, None, None)
+ elif implementation.cardinality is cardinality.Cardinality.UNARY_STREAM:
+ return _SimpleMethodHandler(
+ False, True, request_deserializer, response_serializer, None,
+ _adapt_unary_stream_event(implementation.unary_stream_event),
+ None, None)
+ elif implementation.cardinality is cardinality.Cardinality.STREAM_UNARY:
+ return _SimpleMethodHandler(
+ True, False, request_deserializer, response_serializer, None,
+ None,
+ _adapt_stream_unary_event(implementation.stream_unary_event),
+ None)
+ elif implementation.cardinality is cardinality.Cardinality.STREAM_STREAM:
+ return _SimpleMethodHandler(
+ True, True, request_deserializer, response_serializer, None,
+ None, None,
+ _adapt_stream_stream_event(implementation.stream_stream_event))
+ raise ValueError()
+
+
+def _flatten_method_pair_map(method_pair_map):
+ method_pair_map = method_pair_map or {}
+ flat_map = {}
+ for method_pair in method_pair_map:
+ method = _common.fully_qualified_method(method_pair[0], method_pair[1])
+ flat_map[method] = method_pair_map[method_pair]
+ return flat_map
+
+
+class _GenericRpcHandler(grpc.GenericRpcHandler):
+
+ def __init__(self, method_implementations, multi_method_implementation,
+ request_deserializers, response_serializers):
+ self._method_implementations = _flatten_method_pair_map(
+ method_implementations)
+ self._request_deserializers = _flatten_method_pair_map(
+ request_deserializers)
+ self._response_serializers = _flatten_method_pair_map(
+ response_serializers)
+ self._multi_method_implementation = multi_method_implementation
+
+ def service(self, handler_call_details):
+ method_implementation = self._method_implementations.get(
+ handler_call_details.method)
+ if method_implementation is not None:
+ return _simple_method_handler(
+ method_implementation,
+ self._request_deserializers.get(handler_call_details.method),
+ self._response_serializers.get(handler_call_details.method))
+ elif self._multi_method_implementation is None:
+ return None
+ else:
+ try:
+ return None #TODO(nathaniel): call the multimethod.
+ except face.NoSuchMethodError:
+ return None
+
+
+class _Server(interfaces.Server):
+
+ def __init__(self, grpc_server):
+ self._grpc_server = grpc_server
+
+ def add_insecure_port(self, address):
+ return self._grpc_server.add_insecure_port(address)
+
+ def add_secure_port(self, address, server_credentials):
+ return self._grpc_server.add_secure_port(address, server_credentials)
+
+ def start(self):
+ self._grpc_server.start()
+
+ def stop(self, grace):
+ return self._grpc_server.stop(grace)
+
+ def __enter__(self):
+ self._grpc_server.start()
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self._grpc_server.stop(None)
+ return False
+
+
+def server(service_implementations, multi_method_implementation,
+ request_deserializers, response_serializers, thread_pool,
+ thread_pool_size):
+ generic_rpc_handler = _GenericRpcHandler(service_implementations,
+ multi_method_implementation,
+ request_deserializers,
+ response_serializers)
+ if thread_pool is None:
+ effective_thread_pool = logging_pool.pool(
+ _DEFAULT_POOL_SIZE if thread_pool_size is None else thread_pool_size
+ )
+ else:
+ effective_thread_pool = thread_pool
+ return _Server(
+ grpc.server(effective_thread_pool, handlers=(generic_rpc_handler,)))
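A small sketch of the producer/consumer handshake _Callback implements (the class is private; the import is for illustration only): one thread feeds values in, and draw_all_values() blocks the reader until terminate() is signalled.

import threading
from grpc.beta import _server_adaptations

callback = _server_adaptations._Callback()

def produce():
    callback.consume('a')
    callback.consume_and_terminate('b')  # appends 'b', then unblocks the reader

threading.Thread(target=produce).start()
assert callback.draw_all_values() == ('a', 'b')
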
diff --git a/venv/Lib/site-packages/grpc/beta/implementations.py b/venv/Lib/site-packages/grpc/beta/implementations.py
new file mode 100644
index 000000000..c5507b543
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/beta/implementations.py
@@ -0,0 +1,310 @@
+# Copyright 2015-2016 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Entry points into the Beta API of gRPC Python."""
+
+# threading is referenced from specification in this module.
+import threading # pylint: disable=unused-import
+
+# interfaces, cardinality, and face are referenced from specification in this
+# module.
+import grpc
+from grpc import _auth
+from grpc.beta import _client_adaptations
+from grpc.beta import _metadata
+from grpc.beta import _server_adaptations
+from grpc.beta import interfaces # pylint: disable=unused-import
+from grpc.framework.common import cardinality # pylint: disable=unused-import
+from grpc.framework.interfaces.face import face # pylint: disable=unused-import
+
+# pylint: disable=too-many-arguments
+
+ChannelCredentials = grpc.ChannelCredentials
+ssl_channel_credentials = grpc.ssl_channel_credentials
+CallCredentials = grpc.CallCredentials
+
+
+def metadata_call_credentials(metadata_plugin, name=None):
+
+ def plugin(context, callback):
+
+ def wrapped_callback(beta_metadata, error):
+ callback(_metadata.unbeta(beta_metadata), error)
+
+ metadata_plugin(context, wrapped_callback)
+
+ return grpc.metadata_call_credentials(plugin, name=name)
+
+
+def google_call_credentials(credentials):
+ """Construct CallCredentials from GoogleCredentials.
+
+ Args:
+ credentials: A GoogleCredentials object from the oauth2client library.
+
+ Returns:
+ A CallCredentials object for use in a GRPCCallOptions object.
+ """
+ return metadata_call_credentials(_auth.GoogleCallCredentials(credentials))
+
+
+access_token_call_credentials = grpc.access_token_call_credentials
+composite_call_credentials = grpc.composite_call_credentials
+composite_channel_credentials = grpc.composite_channel_credentials
+
+
+class Channel(object):
+ """A channel to a remote host through which RPCs may be conducted.
+
+ Only the "subscribe" and "unsubscribe" methods are supported for application
+ use. This class' instance constructor and all other attributes are
+ unsupported.
+ """
+
+ def __init__(self, channel):
+ self._channel = channel
+
+ def subscribe(self, callback, try_to_connect=None):
+ """Subscribes to this Channel's connectivity.
+
+ Args:
+ callback: A callable to be invoked and passed an
+ interfaces.ChannelConnectivity identifying this Channel's connectivity.
+ The callable will be invoked immediately upon subscription and again for
+ every change to this Channel's connectivity thereafter until it is
+ unsubscribed.
+ try_to_connect: A boolean indicating whether or not this Channel should
+ attempt to connect if it is not already connected and ready to conduct
+ RPCs.
+ """
+ self._channel.subscribe(callback, try_to_connect=try_to_connect)
+
+ def unsubscribe(self, callback):
+ """Unsubscribes a callback from this Channel's connectivity.
+
+ Args:
+ callback: A callable previously registered with this Channel from having
+ been passed to its "subscribe" method.
+ """
+ self._channel.unsubscribe(callback)
+
+
+def insecure_channel(host, port):
+ """Creates an insecure Channel to a remote host.
+
+ Args:
+ host: The name of the remote host to which to connect.
+ port: The port of the remote host to which to connect.
+ If None only the 'host' part will be used.
+
+ Returns:
+ A Channel to the remote host through which RPCs may be conducted.
+ """
+ channel = grpc.insecure_channel(host if port is None else '%s:%d' %
+ (host, port))
+ return Channel(channel)
+
+
+def secure_channel(host, port, channel_credentials):
+ """Creates a secure Channel to a remote host.
+
+ Args:
+ host: The name of the remote host to which to connect.
+ port: The port of the remote host to which to connect.
+ If None only the 'host' part will be used.
+ channel_credentials: A ChannelCredentials.
+
+ Returns:
+ A secure Channel to the remote host through which RPCs may be conducted.
+ """
+ channel = grpc.secure_channel(
+ host if port is None else '%s:%d' % (host, port), channel_credentials)
+ return Channel(channel)
+
+
+class StubOptions(object):
+ """A value encapsulating the various options for creation of a Stub.
+
+ This class and its instances have no supported interface - it exists to define
+ the type of its instances and its instances exist to be passed to other
+ functions.
+ """
+
+ def __init__(self, host, request_serializers, response_deserializers,
+ metadata_transformer, thread_pool, thread_pool_size):
+ self.host = host
+ self.request_serializers = request_serializers
+ self.response_deserializers = response_deserializers
+ self.metadata_transformer = metadata_transformer
+ self.thread_pool = thread_pool
+ self.thread_pool_size = thread_pool_size
+
+
+_EMPTY_STUB_OPTIONS = StubOptions(None, None, None, None, None, None)
+
+
+def stub_options(host=None,
+ request_serializers=None,
+ response_deserializers=None,
+ metadata_transformer=None,
+ thread_pool=None,
+ thread_pool_size=None):
+ """Creates a StubOptions value to be passed at stub creation.
+
+ All parameters are optional and should always be passed by keyword.
+
+ Args:
+ host: A host string to set on RPC calls.
+ request_serializers: A dictionary from service name-method name pair to
+ request serialization behavior.
+ response_deserializers: A dictionary from service name-method name pair to
+ response deserialization behavior.
+ metadata_transformer: A callable that given a metadata object produces
+ another metadata object to be used in the underlying communication on the
+ wire.
+ thread_pool: A thread pool to use in stubs.
+ thread_pool_size: The size of thread pool to create for use in stubs;
+ ignored if thread_pool has been passed.
+
+ Returns:
+ A StubOptions value created from the passed parameters.
+ """
+ return StubOptions(host, request_serializers, response_deserializers,
+ metadata_transformer, thread_pool, thread_pool_size)
+
+
+def generic_stub(channel, options=None):
+ """Creates a face.GenericStub on which RPCs can be made.
+
+ Args:
+ channel: A Channel for use by the created stub.
+ options: A StubOptions customizing the created stub.
+
+ Returns:
+ A face.GenericStub on which RPCs can be made.
+ """
+ effective_options = _EMPTY_STUB_OPTIONS if options is None else options
+ return _client_adaptations.generic_stub(
+ channel._channel, # pylint: disable=protected-access
+ effective_options.host,
+ effective_options.metadata_transformer,
+ effective_options.request_serializers,
+ effective_options.response_deserializers)
+
+
+def dynamic_stub(channel, service, cardinalities, options=None):
+ """Creates a face.DynamicStub with which RPCs can be invoked.
+
+ Args:
+ channel: A Channel for the returned face.DynamicStub to use.
+ service: The package-qualified full name of the service.
+ cardinalities: A dictionary from RPC method name to cardinality.Cardinality
+ value identifying the cardinality of the RPC method.
+ options: An optional StubOptions value further customizing the functionality
+ of the returned face.DynamicStub.
+
+ Returns:
+ A face.DynamicStub with which RPCs can be invoked.
+ """
+ effective_options = _EMPTY_STUB_OPTIONS if options is None else options
+ return _client_adaptations.dynamic_stub(
+ channel._channel, # pylint: disable=protected-access
+ service,
+ cardinalities,
+ effective_options.host,
+ effective_options.metadata_transformer,
+ effective_options.request_serializers,
+ effective_options.response_deserializers)
+
+
+ServerCredentials = grpc.ServerCredentials
+ssl_server_credentials = grpc.ssl_server_credentials
+
+
+class ServerOptions(object):
+ """A value encapsulating the various options for creation of a Server.
+
+ This class and its instances have no supported interface - it exists to define
+ the type of its instances and its instances exist to be passed to other
+ functions.
+ """
+
+ def __init__(self, multi_method_implementation, request_deserializers,
+ response_serializers, thread_pool, thread_pool_size,
+ default_timeout, maximum_timeout):
+ self.multi_method_implementation = multi_method_implementation
+ self.request_deserializers = request_deserializers
+ self.response_serializers = response_serializers
+ self.thread_pool = thread_pool
+ self.thread_pool_size = thread_pool_size
+ self.default_timeout = default_timeout
+ self.maximum_timeout = maximum_timeout
+
+
+_EMPTY_SERVER_OPTIONS = ServerOptions(None, None, None, None, None, None, None)
+
+
+def server_options(multi_method_implementation=None,
+ request_deserializers=None,
+ response_serializers=None,
+ thread_pool=None,
+ thread_pool_size=None,
+ default_timeout=None,
+ maximum_timeout=None):
+ """Creates a ServerOptions value to be passed at server creation.
+
+ All parameters are optional and should always be passed by keyword.
+
+ Args:
+ multi_method_implementation: A face.MultiMethodImplementation to be called
+ to service an RPC if the server has no specific method implementation for
+ the name of the RPC for which service was requested.
+ request_deserializers: A dictionary from service name-method name pair to
+ request deserialization behavior.
+ response_serializers: A dictionary from service name-method name pair to
+ response serialization behavior.
+ thread_pool: A thread pool to use in stubs.
+ thread_pool_size: The size of thread pool to create for use in stubs;
+ ignored if thread_pool has been passed.
+ default_timeout: A duration in seconds to allow for RPC service when
+ servicing RPCs that did not include a timeout value when invoked.
+ maximum_timeout: A duration in seconds to allow for RPC service when
+ servicing RPCs no matter what timeout value was passed when the RPC was
+ invoked.
+
+ Returns:
+ A ServerOptions value created from the passed parameters.
+ """
+ return ServerOptions(multi_method_implementation, request_deserializers,
+ response_serializers, thread_pool, thread_pool_size,
+ default_timeout, maximum_timeout)
+
+
+def server(service_implementations, options=None):
+ """Creates an interfaces.Server with which RPCs can be serviced.
+
+ Args:
+ service_implementations: A dictionary from service name-method name pair to
+ face.MethodImplementation.
+ options: An optional ServerOptions value further customizing the
+ functionality of the returned Server.
+
+ Returns:
+ An interfaces.Server with which RPCs can be serviced.
+ """
+ effective_options = _EMPTY_SERVER_OPTIONS if options is None else options
+ return _server_adaptations.server(
+ service_implementations, effective_options.multi_method_implementation,
+ effective_options.request_deserializers,
+ effective_options.response_serializers, effective_options.thread_pool,
+ effective_options.thread_pool_size)
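A minimal sketch of standing up a Beta-API server from the entry points above. An empty service_implementations dictionary is used here to keep the sketch self-contained; a real caller would populate it with {(service, method): face.MethodImplementation} entries, typically via generated code.

from grpc.beta import implementations

server = implementations.server(
    service_implementations={},  # would map (service, method) pairs to implementations
    options=implementations.server_options(thread_pool_size=4))
port = server.add_insecure_port('[::]:0')  # port 0 lets the runtime pick a free port
server.start()
print('beta server listening on port', port)
server.stop(None)
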
diff --git a/venv/Lib/site-packages/grpc/beta/interfaces.py b/venv/Lib/site-packages/grpc/beta/interfaces.py
new file mode 100644
index 000000000..fb8266c74
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/beta/interfaces.py
@@ -0,0 +1,165 @@
+# Copyright 2015 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Constants and interfaces of the Beta API of gRPC Python."""
+
+import abc
+
+import six
+
+import grpc
+
+ChannelConnectivity = grpc.ChannelConnectivity
+# FATAL_FAILURE was a Beta-API name for SHUTDOWN
+ChannelConnectivity.FATAL_FAILURE = ChannelConnectivity.SHUTDOWN
+
+StatusCode = grpc.StatusCode
+
+
+class GRPCCallOptions(object):
+ """A value encapsulating gRPC-specific options passed on RPC invocation.
+
+ This class and its instances have no supported interface - it exists to
+ define the type of its instances and its instances exist to be passed to
+ other functions.
+ """
+
+ def __init__(self, disable_compression, subcall_of, credentials):
+ self.disable_compression = disable_compression
+ self.subcall_of = subcall_of
+ self.credentials = credentials
+
+
+def grpc_call_options(disable_compression=False, credentials=None):
+ """Creates a GRPCCallOptions value to be passed at RPC invocation.
+
+ All parameters are optional and should always be passed by keyword.
+
+ Args:
+ disable_compression: A boolean indicating whether or not compression should
+ be disabled for the request object of the RPC. Only valid for
+ request-unary RPCs.
+    credentials: A CallCredentials object to use for the invoked RPC.
+
+  Returns:
+    A GRPCCallOptions value for use in RPC invocation.
+  """
+ return GRPCCallOptions(disable_compression, None, credentials)
+
+
+GRPCAuthMetadataContext = grpc.AuthMetadataContext
+GRPCAuthMetadataPluginCallback = grpc.AuthMetadataPluginCallback
+GRPCAuthMetadataPlugin = grpc.AuthMetadataPlugin
+
+
+class GRPCServicerContext(six.with_metaclass(abc.ABCMeta)):
+ """Exposes gRPC-specific options and behaviors to code servicing RPCs."""
+
+ @abc.abstractmethod
+ def peer(self):
+ """Identifies the peer that invoked the RPC being serviced.
+
+ Returns:
+ A string identifying the peer that invoked the RPC being serviced.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def disable_next_response_compression(self):
+ """Disables compression of the next response passed by the application."""
+ raise NotImplementedError()
+
+
+class GRPCInvocationContext(six.with_metaclass(abc.ABCMeta)):
+ """Exposes gRPC-specific options and behaviors to code invoking RPCs."""
+
+ @abc.abstractmethod
+ def disable_next_request_compression(self):
+ """Disables compression of the next request passed by the application."""
+ raise NotImplementedError()
+
+
+class Server(six.with_metaclass(abc.ABCMeta)):
+ """Services RPCs."""
+
+ @abc.abstractmethod
+ def add_insecure_port(self, address):
+ """Reserves a port for insecure RPC service once this Server becomes active.
+
+        This method may only be called before this Server's start method has
+        been called.
+
+ Args:
+ address: The address for which to open a port.
+
+ Returns:
+ An integer port on which RPCs will be serviced after this link has been
+ started. This is typically the same number as the port number contained
+ in the passed address, but will likely be different if the port number
+ contained in the passed address was zero.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def add_secure_port(self, address, server_credentials):
+ """Reserves a port for secure RPC service after this Server becomes active.
+
+        This method may only be called before this Server's start method has
+        been called.
+
+ Args:
+ address: The address for which to open a port.
+ server_credentials: A ServerCredentials.
+
+ Returns:
+ An integer port on which RPCs will be serviced after this link has been
+ started. This is typically the same number as the port number contained
+ in the passed address, but will likely be different if the port number
+ contained in the passed address was zero.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def start(self):
+ """Starts this Server's service of RPCs.
+
+ This method may only be called while the server is not serving RPCs (i.e. it
+ is not idempotent).
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def stop(self, grace):
+ """Stops this Server's service of RPCs.
+
+ All calls to this method immediately stop service of new RPCs. When existing
+ RPCs are aborted is controlled by the grace period parameter passed to this
+ method.
+
+ This method may be called at any time and is idempotent. Passing a smaller
+ grace value than has been passed in a previous call will have the effect of
+ stopping the Server sooner. Passing a larger grace value than has been
+ passed in a previous call will not have the effect of stopping the server
+ later.
+
+ Args:
+ grace: A duration of time in seconds to allow existing RPCs to complete
+ before being aborted by this Server's stopping. May be zero for
+ immediate abortion of all in-progress RPCs.
+
+ Returns:
+ A threading.Event that will be set when this Server has completely
+ stopped. The returned event may not be set until after the full grace
+ period (if some ongoing RPC continues for the full length of the period)
+        or it may be set much sooner (such as if this Server had no RPCs underway
+ at the time it was stopped or if all RPCs that it had underway completed
+ very early in the grace period).
+ """
+ raise NotImplementedError()
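
A short sketch of the invocation-side values defined in this file; nothing
below needs a live server:

    from grpc.beta import interfaces

    # Disable request compression for a single request-unary invocation.
    call_options = interfaces.grpc_call_options(disable_compression=True)

    # FATAL_FAILURE is kept as the Beta-era alias for SHUTDOWN (see above).
    assert (interfaces.ChannelConnectivity.FATAL_FAILURE
            is interfaces.ChannelConnectivity.SHUTDOWN)
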
diff --git a/venv/Lib/site-packages/grpc/beta/utilities.py b/venv/Lib/site-packages/grpc/beta/utilities.py
new file mode 100644
index 000000000..fe3ce606c
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/beta/utilities.py
@@ -0,0 +1,149 @@
+# Copyright 2015 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Utilities for the gRPC Python Beta API."""
+
+import threading
+import time
+
+# implementations is referenced from specification in this module.
+from grpc.beta import implementations # pylint: disable=unused-import
+from grpc.beta import interfaces
+from grpc.framework.foundation import callable_util
+from grpc.framework.foundation import future
+
+_DONE_CALLBACK_EXCEPTION_LOG_MESSAGE = (
+ 'Exception calling connectivity future "done" callback!')
+
+
+class _ChannelReadyFuture(future.Future):
+
+ def __init__(self, channel):
+ self._condition = threading.Condition()
+ self._channel = channel
+
+ self._matured = False
+ self._cancelled = False
+ self._done_callbacks = []
+
+ def _block(self, timeout):
+ until = None if timeout is None else time.time() + timeout
+ with self._condition:
+ while True:
+ if self._cancelled:
+ raise future.CancelledError()
+ elif self._matured:
+ return
+ else:
+ if until is None:
+ self._condition.wait()
+ else:
+ remaining = until - time.time()
+ if remaining < 0:
+ raise future.TimeoutError()
+ else:
+ self._condition.wait(timeout=remaining)
+
+ def _update(self, connectivity):
+ with self._condition:
+ if (not self._cancelled and
+ connectivity is interfaces.ChannelConnectivity.READY):
+ self._matured = True
+ self._channel.unsubscribe(self._update)
+ self._condition.notify_all()
+ done_callbacks = tuple(self._done_callbacks)
+ self._done_callbacks = None
+ else:
+ return
+
+ for done_callback in done_callbacks:
+ callable_util.call_logging_exceptions(
+ done_callback, _DONE_CALLBACK_EXCEPTION_LOG_MESSAGE, self)
+
+ def cancel(self):
+ with self._condition:
+ if not self._matured:
+ self._cancelled = True
+ self._channel.unsubscribe(self._update)
+ self._condition.notify_all()
+ done_callbacks = tuple(self._done_callbacks)
+ self._done_callbacks = None
+ else:
+ return False
+
+ for done_callback in done_callbacks:
+ callable_util.call_logging_exceptions(
+ done_callback, _DONE_CALLBACK_EXCEPTION_LOG_MESSAGE, self)
+
+ return True
+
+ def cancelled(self):
+ with self._condition:
+ return self._cancelled
+
+ def running(self):
+ with self._condition:
+ return not self._cancelled and not self._matured
+
+ def done(self):
+ with self._condition:
+ return self._cancelled or self._matured
+
+ def result(self, timeout=None):
+ self._block(timeout)
+ return None
+
+ def exception(self, timeout=None):
+ self._block(timeout)
+ return None
+
+ def traceback(self, timeout=None):
+ self._block(timeout)
+ return None
+
+ def add_done_callback(self, fn):
+ with self._condition:
+ if not self._cancelled and not self._matured:
+ self._done_callbacks.append(fn)
+ return
+
+ fn(self)
+
+ def start(self):
+ with self._condition:
+ self._channel.subscribe(self._update, try_to_connect=True)
+
+ def __del__(self):
+ with self._condition:
+ if not self._cancelled and not self._matured:
+ self._channel.unsubscribe(self._update)
+
+
+def channel_ready_future(channel):
+ """Creates a future.Future tracking when an implementations.Channel is ready.
+
+ Cancelling the returned future.Future does not tell the given
+ implementations.Channel to abandon attempts it may have been making to
+  connect; cancelling merely deactivates the returned future.Future's
+ subscription to the given implementations.Channel's connectivity.
+
+ Args:
+ channel: An implementations.Channel.
+
+ Returns:
+ A future.Future that matures when the given Channel has connectivity
+ interfaces.ChannelConnectivity.READY.
+ """
+ ready_future = _ChannelReadyFuture(channel)
+ ready_future.start()
+ return ready_future
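
A sketch of waiting for channel readiness with this utility; the address is
illustrative, and no server needs to be listening there (the timeout branch
then applies):

    from grpc.beta import implementations
    from grpc.beta import utilities
    from grpc.framework.foundation import future

    channel = implementations.insecure_channel('localhost', 50051)
    ready = utilities.channel_ready_future(channel)
    try:
        ready.result(timeout=5)  # blocks until READY or raises future.TimeoutError
        print('channel is ready')
    except future.TimeoutError:
        ready.cancel()  # stop watching; the channel keeps its own retry behavior
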
diff --git a/venv/Lib/site-packages/grpc/experimental/__init__.py b/venv/Lib/site-packages/grpc/experimental/__init__.py
new file mode 100644
index 000000000..a4e2660fb
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/experimental/__init__.py
@@ -0,0 +1,127 @@
+# Copyright 2018 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""gRPC's experimental APIs.
+
+These APIs are subject to be removed during any minor version release.
+"""
+
+import copy
+import functools
+import sys
+import warnings
+
+import grpc
+
+_EXPERIMENTAL_APIS_USED = set()
+
+
+class ChannelOptions(object):
+ """Indicates a channel option unique to gRPC Python.
+
+ This enumeration is part of an EXPERIMENTAL API.
+
+ Attributes:
+ SingleThreadedUnaryStream: Perform unary-stream RPCs on a single thread.
+ """
+ SingleThreadedUnaryStream = "SingleThreadedUnaryStream"
+
+
+class UsageError(Exception):
+ """Raised by the gRPC library to indicate usage not allowed by the API."""
+
+
+_insecure_channel_credentials_sentinel = object()
+_insecure_channel_credentials = grpc.ChannelCredentials(
+ _insecure_channel_credentials_sentinel)
+
+
+def insecure_channel_credentials():
+ """Creates a ChannelCredentials for use with an insecure channel.
+
+ THIS IS AN EXPERIMENTAL API.
+
+    This is not for use with the secure_channel function. Instead, it should be
+ used with grpc.unary_unary, grpc.unary_stream, grpc.stream_unary, or
+ grpc.stream_stream.
+ """
+ return _insecure_channel_credentials
+
+
+class ExperimentalApiWarning(Warning):
+ """A warning that an API is experimental."""
+
+
+def _warn_experimental(api_name, stack_offset):
+ if api_name not in _EXPERIMENTAL_APIS_USED:
+ _EXPERIMENTAL_APIS_USED.add(api_name)
+ msg = ("'{}' is an experimental API. It is subject to change or ".
+ format(api_name) +
+ "removal between minor releases. Proceed with caution.")
+ warnings.warn(msg, ExperimentalApiWarning, stacklevel=2 + stack_offset)
+
+
+def experimental_api(f):
+
+ @functools.wraps(f)
+ def _wrapper(*args, **kwargs):
+ _warn_experimental(f.__name__, 1)
+ return f(*args, **kwargs)
+
+ return _wrapper
+
+
+def wrap_server_method_handler(wrapper, handler):
+ """Wraps the server method handler function.
+
+    The server implementation requires all server handlers to be wrapped as
+    RpcMethodHandler objects. This helper function eases the pain of writing
+    server handler wrappers.
+
+ Args:
+ wrapper: A wrapper function that takes in a method handler behavior
+ (the actual function) and returns a wrapped function.
+ handler: A RpcMethodHandler object to be wrapped.
+
+ Returns:
+ A newly created RpcMethodHandler.
+ """
+ if not handler:
+ return None
+
+ if not handler.request_streaming:
+ if not handler.response_streaming:
+ # NOTE(lidiz) _replace is a public API:
+ # https://docs.python.org/dev/library/collections.html
+ return handler._replace(unary_unary=wrapper(handler.unary_unary))
+ else:
+ return handler._replace(unary_stream=wrapper(handler.unary_stream))
+ else:
+ if not handler.response_streaming:
+ return handler._replace(stream_unary=wrapper(handler.stream_unary))
+ else:
+ return handler._replace(
+ stream_stream=wrapper(handler.stream_stream))
+
+
+__all__ = (
+ 'ChannelOptions',
+ 'ExperimentalApiWarning',
+ 'UsageError',
+ 'insecure_channel_credentials',
+ 'wrap_server_method_handler',
+)
+
+if sys.version_info > (3, 6):
+ from grpc._simple_stubs import unary_unary, unary_stream, stream_unary, stream_stream
+    __all__ = __all__ + ('unary_unary', 'unary_stream', 'stream_unary',
+                         'stream_stream')
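
A sketch of wrap_server_method_handler with a hypothetical logging wrapper;
_echo and _logging_wrapper are illustrative names, not part of the library:

    import grpc
    from grpc import experimental

    def _echo(request, context):
        return request

    def _logging_wrapper(behavior):
        def _wrapped(request, context):
            print('about to invoke the wrapped unary-unary behavior')
            return behavior(request, context)
        return _wrapped

    handler = grpc.unary_unary_rpc_method_handler(_echo)
    wrapped_handler = experimental.wrap_server_method_handler(
        _logging_wrapper, handler)
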
diff --git a/venv/Lib/site-packages/grpc/experimental/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/grpc/experimental/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..91c414c92
Binary files /dev/null and b/venv/Lib/site-packages/grpc/experimental/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/experimental/__pycache__/gevent.cpython-36.pyc b/venv/Lib/site-packages/grpc/experimental/__pycache__/gevent.cpython-36.pyc
new file mode 100644
index 000000000..641e70792
Binary files /dev/null and b/venv/Lib/site-packages/grpc/experimental/__pycache__/gevent.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/experimental/__pycache__/session_cache.cpython-36.pyc b/venv/Lib/site-packages/grpc/experimental/__pycache__/session_cache.cpython-36.pyc
new file mode 100644
index 000000000..56ac959f4
Binary files /dev/null and b/venv/Lib/site-packages/grpc/experimental/__pycache__/session_cache.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/experimental/aio/__init__.py b/venv/Lib/site-packages/grpc/experimental/aio/__init__.py
new file mode 100644
index 000000000..576cb8dcd
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/experimental/aio/__init__.py
@@ -0,0 +1,16 @@
+# Copyright 2020 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Alias of grpc.aio to keep backward compatibility."""
+
+from grpc.aio import *
diff --git a/venv/Lib/site-packages/grpc/experimental/aio/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/grpc/experimental/aio/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..3a8a0027a
Binary files /dev/null and b/venv/Lib/site-packages/grpc/experimental/aio/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/experimental/gevent.py b/venv/Lib/site-packages/grpc/experimental/gevent.py
new file mode 100644
index 000000000..159d612b4
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/experimental/gevent.py
@@ -0,0 +1,27 @@
+# Copyright 2018 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""gRPC's Python gEvent APIs."""
+
+from grpc._cython import cygrpc as _cygrpc
+
+
+def init_gevent():
+ """Patches gRPC's libraries to be compatible with gevent.
+
+    This must be called AFTER the Python standard library has been patched,
+    but BEFORE creating any gRPC objects.
+
+ In order for progress to be made, the application must drive the event loop.
+ """
+ _cygrpc.init_grpc_gevent()
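
The ordering init_gevent's docstring requires, spelled out as a sketch (the
channel target is illustrative):

    from gevent import monkey
    monkey.patch_all()  # 1. patch the standard library first

    from grpc.experimental import gevent as grpc_gevent
    grpc_gevent.init_gevent()  # 2. then patch gRPC

    import grpc
    channel = grpc.insecure_channel('localhost:50051')  # 3. only now create gRPC objects
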
diff --git a/venv/Lib/site-packages/grpc/experimental/session_cache.py b/venv/Lib/site-packages/grpc/experimental/session_cache.py
new file mode 100644
index 000000000..5c55f7c32
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/experimental/session_cache.py
@@ -0,0 +1,45 @@
+# Copyright 2018 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""gRPC's APIs for TLS Session Resumption support"""
+
+from grpc._cython import cygrpc as _cygrpc
+
+
+def ssl_session_cache_lru(capacity):
+ """Creates an SSLSessionCache with LRU replacement policy
+
+ Args:
+ capacity: Size of the cache
+
+ Returns:
+ An SSLSessionCache with LRU replacement policy that can be passed as a value for
+ the grpc.ssl_session_cache option to a grpc.Channel. SSL session caches are used
+ to store session tickets, which clients can present to resume previous TLS sessions
+ with a server.
+ """
+ return SSLSessionCache(_cygrpc.SSLSessionCacheLRU(capacity))
+
+
+class SSLSessionCache(object):
+ """An encapsulation of a session cache used for TLS session resumption.
+
+ Instances of this class can be passed to a Channel as values for the
+    grpc.ssl_session_cache option.
+ """
+
+ def __init__(self, cache):
+ self._cache = cache
+
+ def __int__(self):
+ return int(self._cache)
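
A sketch of plugging the cache into a channel through the grpc.ssl_session_cache
option named in the docstrings above; the target address is illustrative:

    import grpc
    from grpc.experimental import session_cache

    cache = session_cache.ssl_session_cache_lru(1024)  # room for 1024 sessions
    channel = grpc.secure_channel(
        'example.com:443',
        grpc.ssl_channel_credentials(),
        options=(('grpc.ssl_session_cache', int(cache)),))
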
diff --git a/venv/Lib/site-packages/grpc/framework/__init__.py b/venv/Lib/site-packages/grpc/framework/__init__.py
new file mode 100644
index 000000000..5fb4f3c3c
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/framework/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2015 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/venv/Lib/site-packages/grpc/framework/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/grpc/framework/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..ad5edeff7
Binary files /dev/null and b/venv/Lib/site-packages/grpc/framework/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/framework/common/__init__.py b/venv/Lib/site-packages/grpc/framework/common/__init__.py
new file mode 100644
index 000000000..5fb4f3c3c
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/framework/common/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2015 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/venv/Lib/site-packages/grpc/framework/common/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/grpc/framework/common/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..63d016d6d
Binary files /dev/null and b/venv/Lib/site-packages/grpc/framework/common/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/framework/common/__pycache__/cardinality.cpython-36.pyc b/venv/Lib/site-packages/grpc/framework/common/__pycache__/cardinality.cpython-36.pyc
new file mode 100644
index 000000000..67cec495e
Binary files /dev/null and b/venv/Lib/site-packages/grpc/framework/common/__pycache__/cardinality.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/framework/common/__pycache__/style.cpython-36.pyc b/venv/Lib/site-packages/grpc/framework/common/__pycache__/style.cpython-36.pyc
new file mode 100644
index 000000000..ca30be92f
Binary files /dev/null and b/venv/Lib/site-packages/grpc/framework/common/__pycache__/style.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/framework/common/cardinality.py b/venv/Lib/site-packages/grpc/framework/common/cardinality.py
new file mode 100644
index 000000000..c98735622
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/framework/common/cardinality.py
@@ -0,0 +1,26 @@
+# Copyright 2015 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Defines an enum for classifying RPC methods by streaming semantics."""
+
+import enum
+
+
+@enum.unique
+class Cardinality(enum.Enum):
+ """Describes the streaming semantics of an RPC method."""
+
+ UNARY_UNARY = 'request-unary/response-unary'
+ UNARY_STREAM = 'request-unary/response-streaming'
+ STREAM_UNARY = 'request-streaming/response-unary'
+ STREAM_STREAM = 'request-streaming/response-streaming'
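
For reference, the cardinalities mapping consumed by the Beta stub constructors
earlier in this patch is just a dictionary from method name to Cardinality
(the method names here are illustrative):

    from grpc.framework.common import cardinality

    cardinalities = {
        'Check': cardinality.Cardinality.UNARY_UNARY,
        'Watch': cardinality.Cardinality.UNARY_STREAM,
    }
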
diff --git a/venv/Lib/site-packages/grpc/framework/common/style.py b/venv/Lib/site-packages/grpc/framework/common/style.py
new file mode 100644
index 000000000..f6138d417
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/framework/common/style.py
@@ -0,0 +1,24 @@
+# Copyright 2015 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Defines an enum for classifying RPC methods by control flow semantics."""
+
+import enum
+
+
+@enum.unique
+class Service(enum.Enum):
+ """Describes the control flow style of RPC method implementation."""
+
+ INLINE = 'inline'
+ EVENT = 'event'
diff --git a/venv/Lib/site-packages/grpc/framework/foundation/__init__.py b/venv/Lib/site-packages/grpc/framework/foundation/__init__.py
new file mode 100644
index 000000000..5fb4f3c3c
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/framework/foundation/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2015 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/venv/Lib/site-packages/grpc/framework/foundation/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/grpc/framework/foundation/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..f2c706426
Binary files /dev/null and b/venv/Lib/site-packages/grpc/framework/foundation/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/framework/foundation/__pycache__/abandonment.cpython-36.pyc b/venv/Lib/site-packages/grpc/framework/foundation/__pycache__/abandonment.cpython-36.pyc
new file mode 100644
index 000000000..3fb254d39
Binary files /dev/null and b/venv/Lib/site-packages/grpc/framework/foundation/__pycache__/abandonment.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/framework/foundation/__pycache__/callable_util.cpython-36.pyc b/venv/Lib/site-packages/grpc/framework/foundation/__pycache__/callable_util.cpython-36.pyc
new file mode 100644
index 000000000..84fcd387e
Binary files /dev/null and b/venv/Lib/site-packages/grpc/framework/foundation/__pycache__/callable_util.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/framework/foundation/__pycache__/future.cpython-36.pyc b/venv/Lib/site-packages/grpc/framework/foundation/__pycache__/future.cpython-36.pyc
new file mode 100644
index 000000000..0d5cf7227
Binary files /dev/null and b/venv/Lib/site-packages/grpc/framework/foundation/__pycache__/future.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/framework/foundation/__pycache__/logging_pool.cpython-36.pyc b/venv/Lib/site-packages/grpc/framework/foundation/__pycache__/logging_pool.cpython-36.pyc
new file mode 100644
index 000000000..020beff75
Binary files /dev/null and b/venv/Lib/site-packages/grpc/framework/foundation/__pycache__/logging_pool.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/framework/foundation/__pycache__/stream.cpython-36.pyc b/venv/Lib/site-packages/grpc/framework/foundation/__pycache__/stream.cpython-36.pyc
new file mode 100644
index 000000000..06d972ae7
Binary files /dev/null and b/venv/Lib/site-packages/grpc/framework/foundation/__pycache__/stream.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/framework/foundation/__pycache__/stream_util.cpython-36.pyc b/venv/Lib/site-packages/grpc/framework/foundation/__pycache__/stream_util.cpython-36.pyc
new file mode 100644
index 000000000..66ecbdbd5
Binary files /dev/null and b/venv/Lib/site-packages/grpc/framework/foundation/__pycache__/stream_util.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/framework/foundation/abandonment.py b/venv/Lib/site-packages/grpc/framework/foundation/abandonment.py
new file mode 100644
index 000000000..660ce991c
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/framework/foundation/abandonment.py
@@ -0,0 +1,22 @@
+# Copyright 2015 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Utilities for indicating abandonment of computation."""
+
+
+class Abandoned(Exception):
+ """Indicates that some computation is being abandoned.
+
+ Abandoning a computation is different than returning a value or raising
+ an exception indicating some operational or programming defect.
+ """
diff --git a/venv/Lib/site-packages/grpc/framework/foundation/callable_util.py b/venv/Lib/site-packages/grpc/framework/foundation/callable_util.py
new file mode 100644
index 000000000..24daf3406
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/framework/foundation/callable_util.py
@@ -0,0 +1,96 @@
+# Copyright 2015 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Utilities for working with callables."""
+
+import abc
+import collections
+import enum
+import functools
+import logging
+
+import six
+
+_LOGGER = logging.getLogger(__name__)
+
+
+class Outcome(six.with_metaclass(abc.ABCMeta)):
+ """A sum type describing the outcome of some call.
+
+ Attributes:
+ kind: One of Kind.RETURNED or Kind.RAISED respectively indicating that the
+ call returned a value or raised an exception.
+ return_value: The value returned by the call. Must be present if kind is
+ Kind.RETURNED.
+ exception: The exception raised by the call. Must be present if kind is
+ Kind.RAISED.
+ """
+
+ @enum.unique
+ class Kind(enum.Enum):
+ """Identifies the general kind of the outcome of some call."""
+
+ RETURNED = object()
+ RAISED = object()
+
+
+class _EasyOutcome(
+ collections.namedtuple('_EasyOutcome',
+ ['kind', 'return_value', 'exception']), Outcome):
+ """A trivial implementation of Outcome."""
+
+
+def _call_logging_exceptions(behavior, message, *args, **kwargs):
+ try:
+ return _EasyOutcome(Outcome.Kind.RETURNED, behavior(*args, **kwargs),
+ None)
+ except Exception as e: # pylint: disable=broad-except
+ _LOGGER.exception(message)
+ return _EasyOutcome(Outcome.Kind.RAISED, None, e)
+
+
+def with_exceptions_logged(behavior, message):
+ """Wraps a callable in a try-except that logs any exceptions it raises.
+
+ Args:
+ behavior: Any callable.
+ message: A string to log if the behavior raises an exception.
+
+ Returns:
+ A callable that when executed invokes the given behavior. The returned
+ callable takes the same arguments as the given behavior but returns a
+ future.Outcome describing whether the given behavior returned a value or
+ raised an exception.
+ """
+
+ @functools.wraps(behavior)
+ def wrapped_behavior(*args, **kwargs):
+ return _call_logging_exceptions(behavior, message, *args, **kwargs)
+
+ return wrapped_behavior
+
+
+def call_logging_exceptions(behavior, message, *args, **kwargs):
+ """Calls a behavior in a try-except that logs any exceptions it raises.
+
+ Args:
+ behavior: Any callable.
+ message: A string to log if the behavior raises an exception.
+ *args: Positional arguments to pass to the given behavior.
+ **kwargs: Keyword arguments to pass to the given behavior.
+
+ Returns:
+ An Outcome describing whether the given behavior returned a value or raised
+ an exception.
+ """
+ return _call_logging_exceptions(behavior, message, *args, **kwargs)
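
A small self-contained demonstration of the two entry points above:

    from grpc.framework.foundation import callable_util

    def _divide(a, b):
        return a / b

    ok = callable_util.call_logging_exceptions(_divide, 'division failed!', 4, 2)
    assert ok.kind is callable_util.Outcome.Kind.RETURNED and ok.return_value == 2

    bad = callable_util.call_logging_exceptions(_divide, 'division failed!', 1, 0)
    assert bad.kind is callable_util.Outcome.Kind.RAISED  # logged, not re-raised
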
diff --git a/venv/Lib/site-packages/grpc/framework/foundation/future.py b/venv/Lib/site-packages/grpc/framework/foundation/future.py
new file mode 100644
index 000000000..d11679cc3
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/framework/foundation/future.py
@@ -0,0 +1,221 @@
+# Copyright 2015 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""A Future interface.
+
+Python doesn't have a Future interface in its standard library. In the absence
+of such a standard, three separate, incompatible implementations
+(concurrent.futures.Future, ndb.Future, and asyncio.Future) have appeared. This
+interface attempts to be as compatible as possible with
+concurrent.futures.Future. From ndb.Future it adopts a traceback-object accessor
+method.
+
+Unlike the concrete and implemented Future classes listed above, the Future
+class defined in this module is an entirely abstract interface that anyone may
+implement and use.
+
+The one known incompatibility between this interface and the interface of
+concurrent.futures.Future is that this interface defines its own CancelledError
+and TimeoutError exceptions rather than raising the implementation-private
+concurrent.futures._base.CancelledError and the
+built-in-but-only-in-3.3-and-later TimeoutError.
+"""
+
+import abc
+
+import six
+
+
+class TimeoutError(Exception):
+ """Indicates that a particular call timed out."""
+
+
+class CancelledError(Exception):
+ """Indicates that the computation underlying a Future was cancelled."""
+
+
+class Future(six.with_metaclass(abc.ABCMeta)):
+ """A representation of a computation in another control flow.
+
+ Computations represented by a Future may be yet to be begun, may be ongoing,
+ or may have already completed.
+ """
+
+ # NOTE(nathaniel): This isn't the return type that I would want to have if it
+ # were up to me. Were this interface being written from scratch, the return
+ # type of this method would probably be a sum type like:
+ #
+ # NOT_COMMENCED
+ # COMMENCED_AND_NOT_COMPLETED
+ # PARTIAL_RESULT
+ # COMPLETED
+ # UNCANCELLABLE
+ # NOT_IMMEDIATELY_DETERMINABLE
+ @abc.abstractmethod
+ def cancel(self):
+ """Attempts to cancel the computation.
+
+ This method does not block.
+
+ Returns:
+ True if the computation has not yet begun, will not be allowed to take
+ place, and determination of both was possible without blocking. False
+ under all other circumstances including but not limited to the
+ computation's already having begun, the computation's already having
+ finished, and the computation's having been scheduled for execution on a
+ remote system for which a determination of whether or not it commenced
+ before being cancelled cannot be made without blocking.
+ """
+ raise NotImplementedError()
+
+ # NOTE(nathaniel): Here too this isn't the return type that I'd want this
+ # method to have if it were up to me. I think I'd go with another sum type
+ # like:
+ #
+ # NOT_CANCELLED (this object's cancel method hasn't been called)
+ # NOT_COMMENCED
+ # COMMENCED_AND_NOT_COMPLETED
+ # PARTIAL_RESULT
+ # COMPLETED
+ # UNCANCELLABLE
+ # NOT_IMMEDIATELY_DETERMINABLE
+ #
+ # Notice how giving the cancel method the right semantics obviates most
+ # reasons for this method to exist.
+ @abc.abstractmethod
+ def cancelled(self):
+ """Describes whether the computation was cancelled.
+
+ This method does not block.
+
+ Returns:
+ True if the computation was cancelled any time before its result became
+ immediately available. False under all other circumstances including but
+ not limited to this object's cancel method not having been called and
+ the computation's result having become immediately available.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def running(self):
+ """Describes whether the computation is taking place.
+
+ This method does not block.
+
+ Returns:
+ True if the computation is scheduled to take place in the future or is
+ taking place now, or False if the computation took place in the past or
+ was cancelled.
+ """
+ raise NotImplementedError()
+
+ # NOTE(nathaniel): These aren't quite the semantics I'd like here either. I
+ # would rather this only returned True in cases in which the underlying
+ # computation completed successfully. A computation's having been cancelled
+ # conflicts with considering that computation "done".
+ @abc.abstractmethod
+ def done(self):
+ """Describes whether the computation has taken place.
+
+ This method does not block.
+
+ Returns:
+ True if the computation is known to have either completed or have been
+ unscheduled or interrupted. False if the computation may possibly be
+ executing or scheduled to execute later.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def result(self, timeout=None):
+ """Accesses the outcome of the computation or raises its exception.
+
+ This method may return immediately or may block.
+
+ Args:
+ timeout: The length of time in seconds to wait for the computation to
+ finish or be cancelled, or None if this method should block until the
+ computation has finished or is cancelled no matter how long that takes.
+
+ Returns:
+ The return value of the computation.
+
+ Raises:
+ TimeoutError: If a timeout value is passed and the computation does not
+ terminate within the allotted time.
+ CancelledError: If the computation was cancelled.
+ Exception: If the computation raised an exception, this call will raise
+ the same exception.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def exception(self, timeout=None):
+ """Return the exception raised by the computation.
+
+ This method may return immediately or may block.
+
+ Args:
+ timeout: The length of time in seconds to wait for the computation to
+ terminate or be cancelled, or None if this method should block until
+ the computation is terminated or is cancelled no matter how long that
+ takes.
+
+ Returns:
+ The exception raised by the computation, or None if the computation did
+ not raise an exception.
+
+ Raises:
+ TimeoutError: If a timeout value is passed and the computation does not
+ terminate within the allotted time.
+ CancelledError: If the computation was cancelled.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def traceback(self, timeout=None):
+ """Access the traceback of the exception raised by the computation.
+
+ This method may return immediately or may block.
+
+ Args:
+ timeout: The length of time in seconds to wait for the computation to
+ terminate or be cancelled, or None if this method should block until
+ the computation is terminated or is cancelled no matter how long that
+ takes.
+
+ Returns:
+ The traceback of the exception raised by the computation, or None if the
+ computation did not raise an exception.
+
+ Raises:
+ TimeoutError: If a timeout value is passed and the computation does not
+ terminate within the allotted time.
+ CancelledError: If the computation was cancelled.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def add_done_callback(self, fn):
+ """Adds a function to be called at completion of the computation.
+
+ The callback will be passed this Future object describing the outcome of
+ the computation.
+
+ If the computation has already completed, the callback will be called
+ immediately.
+
+ Args:
+ fn: A callable taking this Future object as its single parameter.
+ """
+ raise NotImplementedError()
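
A sketch of consuming any implementation of this interface (for example the
value returned by grpc.beta.utilities.channel_ready_future); wait_or_cancel is
an illustrative helper, not library API:

    from grpc.framework.foundation import future

    def wait_or_cancel(some_future, timeout):
        # Blocks like concurrent.futures.Future.result, but maps timeouts to
        # this module's TimeoutError and withdraws interest on expiry.
        try:
            return some_future.result(timeout=timeout)
        except future.TimeoutError:
            some_future.cancel()
            raise
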
diff --git a/venv/Lib/site-packages/grpc/framework/foundation/logging_pool.py b/venv/Lib/site-packages/grpc/framework/foundation/logging_pool.py
new file mode 100644
index 000000000..421999fb1
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/framework/foundation/logging_pool.py
@@ -0,0 +1,72 @@
+# Copyright 2015 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""A thread pool that logs exceptions raised by tasks executed within it."""
+
+import logging
+
+from concurrent import futures
+
+_LOGGER = logging.getLogger(__name__)
+
+
+def _wrap(behavior):
+ """Wraps an arbitrary callable behavior in exception-logging."""
+
+ def _wrapping(*args, **kwargs):
+ try:
+ return behavior(*args, **kwargs)
+ except Exception:
+ _LOGGER.exception(
+ 'Unexpected exception from %s executed in logging pool!',
+ behavior)
+ raise
+
+ return _wrapping
+
+
+class _LoggingPool(object):
+ """An exception-logging futures.ThreadPoolExecutor-compatible thread pool."""
+
+ def __init__(self, backing_pool):
+ self._backing_pool = backing_pool
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self._backing_pool.shutdown(wait=True)
+
+ def submit(self, fn, *args, **kwargs):
+ return self._backing_pool.submit(_wrap(fn), *args, **kwargs)
+
+ def map(self, func, *iterables, **kwargs):
+ return self._backing_pool.map(_wrap(func),
+ *iterables,
+ timeout=kwargs.get('timeout', None))
+
+ def shutdown(self, wait=True):
+ self._backing_pool.shutdown(wait=wait)
+
+
+def pool(max_workers):
+ """Creates a thread pool that logs exceptions raised by the tasks within it.
+
+ Args:
+ max_workers: The maximum number of worker threads to allow the pool.
+
+ Returns:
+ A futures.ThreadPoolExecutor-compatible thread pool that logs exceptions
+ raised by the tasks executed within it.
+ """
+ return _LoggingPool(futures.ThreadPoolExecutor(max_workers))
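
A minimal demonstration: the pool behaves like futures.ThreadPoolExecutor but
logs task exceptions before they propagate through the returned future:

    from grpc.framework.foundation import logging_pool

    with logging_pool.pool(max_workers=4) as executor:
        square = executor.submit(lambda x: x * x, 7)
        assert square.result() == 49

        failing = executor.submit(lambda: 1 / 0)  # logged by the pool...
        assert failing.exception() is not None    # ...and still visible here
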
diff --git a/venv/Lib/site-packages/grpc/framework/foundation/stream.py b/venv/Lib/site-packages/grpc/framework/foundation/stream.py
new file mode 100644
index 000000000..fd47977b8
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/framework/foundation/stream.py
@@ -0,0 +1,45 @@
+# Copyright 2015 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Interfaces related to streams of values or objects."""
+
+import abc
+
+import six
+
+
+class Consumer(six.with_metaclass(abc.ABCMeta)):
+ """Interface for consumers of finite streams of values or objects."""
+
+ @abc.abstractmethod
+ def consume(self, value):
+ """Accepts a value.
+
+ Args:
+ value: Any value accepted by this Consumer.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def terminate(self):
+ """Indicates to this Consumer that no more values will be supplied."""
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def consume_and_terminate(self, value):
+ """Supplies a value and signals that no more values will be supplied.
+
+ Args:
+ value: Any value accepted by this Consumer.
+ """
+ raise NotImplementedError()
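
A minimal concrete Consumer, just to show the contract (a real consumer would
forward values somewhere useful):

    from grpc.framework.foundation import stream

    class ListConsumer(stream.Consumer):
        # Collects every consumed value and records termination.

        def __init__(self):
            self.values = []
            self.terminated = False

        def consume(self, value):
            self.values.append(value)

        def terminate(self):
            self.terminated = True

        def consume_and_terminate(self, value):
            self.consume(value)
            self.terminate()

    consumer = ListConsumer()
    consumer.consume('a')
    consumer.consume_and_terminate('b')
    assert consumer.values == ['a', 'b'] and consumer.terminated
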
diff --git a/venv/Lib/site-packages/grpc/framework/foundation/stream_util.py b/venv/Lib/site-packages/grpc/framework/foundation/stream_util.py
new file mode 100644
index 000000000..1faaf29bd
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/framework/foundation/stream_util.py
@@ -0,0 +1,148 @@
+# Copyright 2015 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Helpful utilities related to the stream module."""
+
+import logging
+import threading
+
+from grpc.framework.foundation import stream
+
+_NO_VALUE = object()
+_LOGGER = logging.getLogger(__name__)
+
+
+class TransformingConsumer(stream.Consumer):
+ """A stream.Consumer that passes a transformation of its input to another."""
+
+ def __init__(self, transformation, downstream):
+ self._transformation = transformation
+ self._downstream = downstream
+
+ def consume(self, value):
+ self._downstream.consume(self._transformation(value))
+
+ def terminate(self):
+ self._downstream.terminate()
+
+ def consume_and_terminate(self, value):
+ self._downstream.consume_and_terminate(self._transformation(value))
+
+
+class IterableConsumer(stream.Consumer):
+ """A Consumer that when iterated over emits the values it has consumed."""
+
+ def __init__(self):
+ self._condition = threading.Condition()
+ self._values = []
+ self._active = True
+
+ def consume(self, value):
+ with self._condition:
+ if self._active:
+ self._values.append(value)
+ self._condition.notify()
+
+ def terminate(self):
+ with self._condition:
+ self._active = False
+ self._condition.notify()
+
+ def consume_and_terminate(self, value):
+ with self._condition:
+ if self._active:
+ self._values.append(value)
+ self._active = False
+ self._condition.notify()
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ return self.next()
+
+ def next(self):
+ with self._condition:
+ while self._active and not self._values:
+ self._condition.wait()
+ if self._values:
+ return self._values.pop(0)
+ else:
+ raise StopIteration()
+
+
+class ThreadSwitchingConsumer(stream.Consumer):
+ """A Consumer decorator that affords serialization and asynchrony."""
+
+ def __init__(self, sink, pool):
+ self._lock = threading.Lock()
+ self._sink = sink
+ self._pool = pool
+ # True if self._spin has been submitted to the pool to be called once and
+ # that call has not yet returned, False otherwise.
+ self._spinning = False
+ self._values = []
+ self._active = True
+
+ def _spin(self, sink, value, terminate):
+ while True:
+ try:
+ if value is _NO_VALUE:
+ sink.terminate()
+ elif terminate:
+ sink.consume_and_terminate(value)
+ else:
+ sink.consume(value)
+ except Exception as e: # pylint:disable=broad-except
+ _LOGGER.exception(e)
+
+ with self._lock:
+ if terminate:
+ self._spinning = False
+ return
+ elif self._values:
+ value = self._values.pop(0)
+ terminate = not self._values and not self._active
+ elif not self._active:
+ value = _NO_VALUE
+ terminate = True
+ else:
+ self._spinning = False
+ return
+
+ def consume(self, value):
+ with self._lock:
+ if self._active:
+ if self._spinning:
+ self._values.append(value)
+ else:
+ self._pool.submit(self._spin, self._sink, value, False)
+ self._spinning = True
+
+ def terminate(self):
+ with self._lock:
+ if self._active:
+ self._active = False
+ if not self._spinning:
+ self._pool.submit(self._spin, self._sink, _NO_VALUE, True)
+ self._spinning = True
+
+ def consume_and_terminate(self, value):
+ with self._lock:
+ if self._active:
+ self._active = False
+ if self._spinning:
+ self._values.append(value)
+ else:
+ self._pool.submit(self._spin, self._sink, value, True)
+ self._spinning = True
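
Single-threaded sketches of the two simpler consumers above
(ThreadSwitchingConsumer additionally needs a pool such as logging_pool.pool):

    from grpc.framework.foundation import stream_util

    # IterableConsumer: values consumed on one side come back out as an iterator.
    it = stream_util.IterableConsumer()
    it.consume(1)
    it.consume_and_terminate(2)
    assert list(it) == [1, 2]

    # TransformingConsumer: forwards a transformation of each value downstream.
    downstream = stream_util.IterableConsumer()
    doubling = stream_util.TransformingConsumer(lambda v: v * 2, downstream)
    doubling.consume_and_terminate(21)
    assert list(downstream) == [42]
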
diff --git a/venv/Lib/site-packages/grpc/framework/interfaces/__init__.py b/venv/Lib/site-packages/grpc/framework/interfaces/__init__.py
new file mode 100644
index 000000000..5fb4f3c3c
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/framework/interfaces/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2015 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/venv/Lib/site-packages/grpc/framework/interfaces/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/grpc/framework/interfaces/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..70be0398c
Binary files /dev/null and b/venv/Lib/site-packages/grpc/framework/interfaces/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/framework/interfaces/base/__init__.py b/venv/Lib/site-packages/grpc/framework/interfaces/base/__init__.py
new file mode 100644
index 000000000..5fb4f3c3c
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/framework/interfaces/base/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2015 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/venv/Lib/site-packages/grpc/framework/interfaces/base/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/grpc/framework/interfaces/base/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..16d205029
Binary files /dev/null and b/venv/Lib/site-packages/grpc/framework/interfaces/base/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/framework/interfaces/base/__pycache__/base.cpython-36.pyc b/venv/Lib/site-packages/grpc/framework/interfaces/base/__pycache__/base.cpython-36.pyc
new file mode 100644
index 000000000..0692bc445
Binary files /dev/null and b/venv/Lib/site-packages/grpc/framework/interfaces/base/__pycache__/base.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/framework/interfaces/base/__pycache__/utilities.cpython-36.pyc b/venv/Lib/site-packages/grpc/framework/interfaces/base/__pycache__/utilities.cpython-36.pyc
new file mode 100644
index 000000000..8b72e9f4e
Binary files /dev/null and b/venv/Lib/site-packages/grpc/framework/interfaces/base/__pycache__/utilities.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/framework/interfaces/base/base.py b/venv/Lib/site-packages/grpc/framework/interfaces/base/base.py
new file mode 100644
index 000000000..82c44f911
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/framework/interfaces/base/base.py
@@ -0,0 +1,330 @@
+# Copyright 2015 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""The base interface of RPC Framework.
+
+Implementations of this interface support the conduct of "operations":
+exchanges between two distinct ends of an arbitrary number of data payloads
+and metadata such as a name for the operation, initial and terminal metadata
+in each direction, and flow control. These operations may be used for transfers
+of data, remote procedure calls, status indication, or anything else
+applications choose.
+"""
+
+# threading is referenced from specification in this module.
+import abc
+import enum
+import threading # pylint: disable=unused-import
+
+import six
+
+# abandonment is referenced from specification in this module.
+from grpc.framework.foundation import abandonment # pylint: disable=unused-import
+
+# pylint: disable=too-many-arguments
+
+
+class NoSuchMethodError(Exception):
+ """Indicates that an unrecognized operation has been called.
+
+ Attributes:
+ code: A code value to communicate to the other side of the operation
+ along with indication of operation termination. May be None.
+ details: A details value to communicate to the other side of the
+ operation along with indication of operation termination. May be None.
+ """
+
+ def __init__(self, code, details):
+ """Constructor.
+
+ Args:
+ code: A code value to communicate to the other side of the operation
+ along with indication of operation termination. May be None.
+ details: A details value to communicate to the other side of the
+ operation along with indication of operation termination. May be None.
+ """
+ super(NoSuchMethodError, self).__init__()
+ self.code = code
+ self.details = details
+
+
+class Outcome(object):
+ """The outcome of an operation.
+
+ Attributes:
+ kind: A Kind value coarsely identifying how the operation terminated.
+ code: An application-specific code value or None if no such value was
+ provided.
+ details: An application-specific details value or None if no such value was
+ provided.
+ """
+
+ @enum.unique
+ class Kind(enum.Enum):
+ """Ways in which an operation can terminate."""
+
+ COMPLETED = 'completed'
+ CANCELLED = 'cancelled'
+ EXPIRED = 'expired'
+ LOCAL_SHUTDOWN = 'local shutdown'
+ REMOTE_SHUTDOWN = 'remote shutdown'
+ RECEPTION_FAILURE = 'reception failure'
+ TRANSMISSION_FAILURE = 'transmission failure'
+ LOCAL_FAILURE = 'local failure'
+ REMOTE_FAILURE = 'remote failure'
+
+
+class Completion(six.with_metaclass(abc.ABCMeta)):
+ """An aggregate of the values exchanged upon operation completion.
+
+ Attributes:
+    terminal_metadata: A terminal metadata value for the operation.
+ code: A code value for the operation.
+ message: A message value for the operation.
+ """
+
+
+class OperationContext(six.with_metaclass(abc.ABCMeta)):
+ """Provides operation-related information and action."""
+
+ @abc.abstractmethod
+ def outcome(self):
+ """Indicates the operation's outcome (or that the operation is ongoing).
+
+ Returns:
+ None if the operation is still active or the Outcome value for the
+ operation if it has terminated.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def add_termination_callback(self, callback):
+ """Adds a function to be called upon operation termination.
+
+ Args:
+ callback: A callable to be passed an Outcome value on operation
+ termination.
+
+ Returns:
+      None if the operation has not yet terminated (in which case the passed
+      callback will be called when it does), or an Outcome value describing
+      the operation's termination if the operation has already terminated (in
+      which case the passed callback will not be called as a result of this
+      method call).
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def time_remaining(self):
+ """Describes the length of allowed time remaining for the operation.
+
+ Returns:
+ A nonnegative float indicating the length of allowed time in seconds
+ remaining for the operation to complete before it is considered to have
+ timed out. Zero is returned if the operation has terminated.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def cancel(self):
+ """Cancels the operation if the operation has not yet terminated."""
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def fail(self, exception):
+ """Indicates that the operation has failed.
+
+ Args:
+ exception: An exception germane to the operation failure. May be None.
+ """
+ raise NotImplementedError()
+
+
+class Operator(six.with_metaclass(abc.ABCMeta)):
+ """An interface through which to participate in an operation."""
+
+ @abc.abstractmethod
+ def advance(self,
+ initial_metadata=None,
+ payload=None,
+ completion=None,
+ allowance=None):
+ """Progresses the operation.
+
+ Args:
+ initial_metadata: An initial metadata value. Only one may ever be
+            communicated in each direction for an operation, and it must be
+ communicated no later than either the first payload or the completion.
+ payload: A payload value.
+ completion: A Completion value. May only ever be non-None once in either
+ direction, and no payloads may be passed after it has been communicated.
+ allowance: A positive integer communicating the number of additional
+ payloads allowed to be passed by the remote side of the operation.
+ """
+ raise NotImplementedError()
+
+
+class ProtocolReceiver(six.with_metaclass(abc.ABCMeta)):
+ """A means of receiving protocol values during an operation."""
+
+ @abc.abstractmethod
+ def context(self, protocol_context):
+ """Accepts the protocol context object for the operation.
+
+ Args:
+ protocol_context: The protocol context object for the operation.
+ """
+ raise NotImplementedError()
+
+
+class Subscription(six.with_metaclass(abc.ABCMeta)):
+ """Describes customer code's interest in values from the other side.
+
+ Attributes:
+ kind: A Kind value describing the overall kind of this value.
+ termination_callback: A callable to be passed the Outcome associated with
+ the operation after it has terminated. Must be non-None if kind is
+ Kind.TERMINATION_ONLY. Must be None otherwise.
+ allowance: A callable behavior that accepts positive integers representing
+ the number of additional payloads allowed to be passed to the other side
+ of the operation. Must be None if kind is Kind.FULL. Must not be None
+ otherwise.
+ operator: An Operator to be passed values from the other side of the
+ operation. Must be non-None if kind is Kind.FULL. Must be None otherwise.
+ protocol_receiver: A ProtocolReceiver to be passed protocol objects as they
+ become available during the operation. Must be non-None if kind is
+ Kind.FULL.
+ """
+
+ @enum.unique
+ class Kind(enum.Enum):
+        """Kinds of subscription."""
+
+ NONE = 'none'
+ TERMINATION_ONLY = 'termination only'
+ FULL = 'full'
+
+
+class Servicer(six.with_metaclass(abc.ABCMeta)):
+ """Interface for service implementations."""
+
+ @abc.abstractmethod
+ def service(self, group, method, context, output_operator):
+ """Services an operation.
+
+ Args:
+ group: The group identifier of the operation to be serviced.
+ method: The method identifier of the operation to be serviced.
+ context: An OperationContext object affording contextual information and
+ actions.
+ output_operator: An Operator that will accept output values of the
+ operation.
+
+ Returns:
+ A Subscription via which this object may or may not accept more values of
+ the operation.
+
+ Raises:
+ NoSuchMethodError: If this Servicer does not handle operations with the
+ given group and method.
+          abandonment.Abandoned: If the operation has been aborted and there
+            is no longer any reason to service the operation.
+ """
+ raise NotImplementedError()
+
+
+class End(six.with_metaclass(abc.ABCMeta)):
+ """Common type for entry-point objects on both sides of an operation."""
+
+ @abc.abstractmethod
+ def start(self):
+ """Starts this object's service of operations."""
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def stop(self, grace):
+ """Stops this object's service of operations.
+
+ This object will refuse service of new operations as soon as this method is
+ called but operations under way at the time of the call may be given a
+ grace period during which they are allowed to finish.
+
+ Args:
+ grace: A duration of time in seconds to allow ongoing operations to
+ terminate before being forcefully terminated by the stopping of this
+ End. May be zero to terminate all ongoing operations and immediately
+ stop.
+
+ Returns:
+          A threading.Event that will be set once all operations have
+          terminated and this End has completely stopped. The returned event
+ may not be set until after the full grace period (if some ongoing
+ operation continues for the full length of the period) or it may be set
+ much sooner (if for example this End had no operations in progress at
+ the time its stop method was called).
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def operate(self,
+ group,
+ method,
+ subscription,
+ timeout,
+ initial_metadata=None,
+ payload=None,
+ completion=None,
+ protocol_options=None):
+ """Commences an operation.
+
+ Args:
+ group: The group identifier of the invoked operation.
+ method: The method identifier of the invoked operation.
+ subscription: A Subscription to which the results of the operation will be
+ passed.
+ timeout: A length of time in seconds to allow for the operation.
+ initial_metadata: An initial metadata value to be sent to the other side
+ of the operation. May be None if the initial metadata will be later
+ passed via the returned operator or if there will be no initial metadata
+ passed at all.
+ payload: An initial payload for the operation.
+ completion: A Completion value indicating the end of transmission to the
+ other side of the operation.
+ protocol_options: A value specified by the provider of a Base interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ A pair of objects affording information about the operation and action
+ continuing the operation. The first element of the returned pair is an
+ OperationContext for the operation and the second element of the
+ returned pair is an Operator to which operation values not passed in
+ this call should later be passed.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def operation_stats(self):
+ """Reports the number of terminated operations broken down by outcome.
+
+ Returns:
+ A dictionary from Outcome.Kind value to an integer identifying the number
+ of operations that terminated with that outcome kind.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def add_idle_action(self, action):
+ """Adds an action to be called when this End has no ongoing operations.
+
+ Args:
+ action: A callable that accepts no arguments.
+ """
+ raise NotImplementedError()
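+
+
+# Illustrative sketch (not part of the gRPC distribution): a minimal Servicer
+# that recognizes no methods, shown only to demonstrate the Servicer contract
+# defined above. The class name and the details string are assumptions made
+# for this example, not part of the specification.
+class _RejectingServicer(Servicer):
+    """A sketch of a Servicer that services no operations."""
+
+    def service(self, group, method, context, output_operator):
+        # Per the Servicer specification, an unrecognized group/method pair is
+        # reported by raising NoSuchMethodError; code and details may be None.
+        raise NoSuchMethodError(None, 'no method "%s/%s"' % (group, method))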
diff --git a/venv/Lib/site-packages/grpc/framework/interfaces/base/utilities.py b/venv/Lib/site-packages/grpc/framework/interfaces/base/utilities.py
new file mode 100644
index 000000000..281db62b5
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/framework/interfaces/base/utilities.py
@@ -0,0 +1,71 @@
+# Copyright 2015 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Utilities for use with the base interface of RPC Framework."""
+
+import collections
+
+from grpc.framework.interfaces.base import base
+
+
+class _Completion(base.Completion,
+ collections.namedtuple('_Completion', (
+ 'terminal_metadata',
+ 'code',
+ 'message',
+ ))):
+ """A trivial implementation of base.Completion."""
+
+
+class _Subscription(base.Subscription,
+ collections.namedtuple('_Subscription', (
+ 'kind',
+ 'termination_callback',
+ 'allowance',
+ 'operator',
+ 'protocol_receiver',
+ ))):
+ """A trivial implementation of base.Subscription."""
+
+
+_NONE_SUBSCRIPTION = _Subscription(base.Subscription.Kind.NONE, None, None,
+ None, None)
+
+
+def completion(terminal_metadata, code, message):
+ """Creates a base.Completion aggregating the given operation values.
+
+ Args:
+      terminal_metadata: A terminal metadata value for an operation.
+ code: A code value for an operation.
+ message: A message value for an operation.
+
+ Returns:
+ A base.Completion aggregating the given operation values.
+ """
+ return _Completion(terminal_metadata, code, message)
+
+
+def full_subscription(operator, protocol_receiver):
+    """Creates a "full" base.Subscription for the given operator and receiver.
+
+ Args:
+ operator: A base.Operator to be used in an operation.
+ protocol_receiver: A base.ProtocolReceiver to be used in an operation.
+
+ Returns:
+ A base.Subscription of kind base.Subscription.Kind.FULL wrapping the given
+ base.Operator and base.ProtocolReceiver.
+ """
+ return _Subscription(base.Subscription.Kind.FULL, None, None, operator,
+ protocol_receiver)
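+
+
+# Illustrative sketch (not part of the gRPC distribution): how these helpers
+# might be combined by calling code. `my_operator` and `my_receiver` are
+# assumed stand-ins for a base.Operator and a base.ProtocolReceiver, so the
+# example is left as a comment rather than executable module code.
+#
+#   terminal = completion({'trailing': 'metadata'}, 0, 'OK')
+#   subscription = full_subscription(my_operator, my_receiver)
+#   assert subscription.kind is base.Subscription.Kind.FULL
+#   assert terminal.code == 0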
diff --git a/venv/Lib/site-packages/grpc/framework/interfaces/face/__init__.py b/venv/Lib/site-packages/grpc/framework/interfaces/face/__init__.py
new file mode 100644
index 000000000..5fb4f3c3c
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/framework/interfaces/face/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2015 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/venv/Lib/site-packages/grpc/framework/interfaces/face/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/grpc/framework/interfaces/face/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..8e5d0e581
Binary files /dev/null and b/venv/Lib/site-packages/grpc/framework/interfaces/face/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/framework/interfaces/face/__pycache__/face.cpython-36.pyc b/venv/Lib/site-packages/grpc/framework/interfaces/face/__pycache__/face.cpython-36.pyc
new file mode 100644
index 000000000..9177c3321
Binary files /dev/null and b/venv/Lib/site-packages/grpc/framework/interfaces/face/__pycache__/face.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/framework/interfaces/face/__pycache__/utilities.cpython-36.pyc b/venv/Lib/site-packages/grpc/framework/interfaces/face/__pycache__/utilities.cpython-36.pyc
new file mode 100644
index 000000000..969bb3ac1
Binary files /dev/null and b/venv/Lib/site-packages/grpc/framework/interfaces/face/__pycache__/utilities.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/grpc/framework/interfaces/face/face.py b/venv/Lib/site-packages/grpc/framework/interfaces/face/face.py
new file mode 100644
index 000000000..5b47f11d0
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/framework/interfaces/face/face.py
@@ -0,0 +1,1052 @@
+# Copyright 2015 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Interfaces defining the Face layer of RPC Framework."""
+
+import abc
+import collections
+import enum
+
+import six
+
+# cardinality, style, abandonment, future, and stream are
+# referenced from specification in this module.
+from grpc.framework.common import cardinality # pylint: disable=unused-import
+from grpc.framework.common import style # pylint: disable=unused-import
+from grpc.framework.foundation import abandonment # pylint: disable=unused-import
+from grpc.framework.foundation import future # pylint: disable=unused-import
+from grpc.framework.foundation import stream # pylint: disable=unused-import
+
+# pylint: disable=too-many-arguments
+
+
+class NoSuchMethodError(Exception):
+ """Raised by customer code to indicate an unrecognized method.
+
+ Attributes:
+      group: The group identifier of the unrecognized method.
+      method: The method identifier of the unrecognized method.
+ """
+
+ def __init__(self, group, method):
+ """Constructor.
+
+ Args:
+ group: The group identifier of the unrecognized RPC name.
+ method: The method identifier of the unrecognized RPC name.
+ """
+ super(NoSuchMethodError, self).__init__()
+ self.group = group
+ self.method = method
+
+ def __repr__(self):
+ return 'face.NoSuchMethodError(%s, %s)' % (
+ self.group,
+ self.method,
+ )
+
+
+class Abortion(
+ collections.namedtuple('Abortion', (
+ 'kind',
+ 'initial_metadata',
+ 'terminal_metadata',
+ 'code',
+ 'details',
+ ))):
+ """A value describing RPC abortion.
+
+ Attributes:
+ kind: A Kind value identifying how the RPC failed.
+ initial_metadata: The initial metadata from the other side of the RPC or
+ None if no initial metadata value was received.
+ terminal_metadata: The terminal metadata from the other side of the RPC or
+ None if no terminal metadata value was received.
+ code: The code value from the other side of the RPC or None if no code value
+ was received.
+ details: The details value from the other side of the RPC or None if no
+ details value was received.
+ """
+
+ @enum.unique
+ class Kind(enum.Enum):
+ """Types of RPC abortion."""
+
+ CANCELLED = 'cancelled'
+ EXPIRED = 'expired'
+ LOCAL_SHUTDOWN = 'local shutdown'
+ REMOTE_SHUTDOWN = 'remote shutdown'
+ NETWORK_FAILURE = 'network failure'
+ LOCAL_FAILURE = 'local failure'
+ REMOTE_FAILURE = 'remote failure'
+
+
+class AbortionError(six.with_metaclass(abc.ABCMeta, Exception)):
+ """Common super type for exceptions indicating RPC abortion.
+
+    Attributes:
+      initial_metadata: The initial metadata from the other side of the RPC or
+        None if no initial metadata value was received.
+      terminal_metadata: The terminal metadata from the other side of the RPC
+        or None if no terminal metadata value was received.
+      code: The code value from the other side of the RPC or None if no code
+        value was received.
+      details: The details value from the other side of the RPC or None if no
+        details value was received.
+ """
+
+ def __init__(self, initial_metadata, terminal_metadata, code, details):
+ super(AbortionError, self).__init__()
+ self.initial_metadata = initial_metadata
+ self.terminal_metadata = terminal_metadata
+ self.code = code
+ self.details = details
+
+ def __str__(self):
+ return '%s(code=%s, details="%s")' % (self.__class__.__name__,
+ self.code, self.details)
+
+
+class CancellationError(AbortionError):
+ """Indicates that an RPC has been cancelled."""
+
+
+class ExpirationError(AbortionError):
+ """Indicates that an RPC has expired ("timed out")."""
+
+
+class LocalShutdownError(AbortionError):
+ """Indicates that an RPC has terminated due to local shutdown of RPCs."""
+
+
+class RemoteShutdownError(AbortionError):
+ """Indicates that an RPC has terminated due to remote shutdown of RPCs."""
+
+
+class NetworkError(AbortionError):
+ """Indicates that some error occurred on the network."""
+
+
+class LocalError(AbortionError):
+ """Indicates that an RPC has terminated due to a local defect."""
+
+
+class RemoteError(AbortionError):
+ """Indicates that an RPC has terminated due to a remote defect."""
+
+
+class RpcContext(six.with_metaclass(abc.ABCMeta)):
+ """Provides RPC-related information and action."""
+
+ @abc.abstractmethod
+ def is_active(self):
+ """Describes whether the RPC is active or has terminated."""
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def time_remaining(self):
+ """Describes the length of allowed time remaining for the RPC.
+
+ Returns:
+ A nonnegative float indicating the length of allowed time in seconds
+ remaining for the RPC to complete before it is considered to have timed
+ out.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def add_abortion_callback(self, abortion_callback):
+ """Registers a callback to be called if the RPC is aborted.
+
+ Args:
+ abortion_callback: A callable to be called and passed an Abortion value
+ in the event of RPC abortion.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def cancel(self):
+ """Cancels the RPC.
+
+ Idempotent and has no effect if the RPC has already terminated.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def protocol_context(self):
+ """Accesses a custom object specified by an implementation provider.
+
+ Returns:
+ A value specified by the provider of a Face interface implementation
+ affording custom state and behavior.
+ """
+ raise NotImplementedError()
+
+
+class Call(six.with_metaclass(abc.ABCMeta, RpcContext)):
+ """Invocation-side utility object for an RPC."""
+
+ @abc.abstractmethod
+ def initial_metadata(self):
+ """Accesses the initial metadata from the service-side of the RPC.
+
+ This method blocks until the value is available or is known not to have been
+ emitted from the service-side of the RPC.
+
+ Returns:
+ The initial metadata object emitted by the service-side of the RPC, or
+ None if there was no such value.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def terminal_metadata(self):
+ """Accesses the terminal metadata from the service-side of the RPC.
+
+ This method blocks until the value is available or is known not to have been
+ emitted from the service-side of the RPC.
+
+ Returns:
+ The terminal metadata object emitted by the service-side of the RPC, or
+ None if there was no such value.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def code(self):
+ """Accesses the code emitted by the service-side of the RPC.
+
+ This method blocks until the value is available or is known not to have been
+ emitted from the service-side of the RPC.
+
+ Returns:
+ The code object emitted by the service-side of the RPC, or None if there
+ was no such value.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def details(self):
+ """Accesses the details value emitted by the service-side of the RPC.
+
+ This method blocks until the value is available or is known not to have been
+ emitted from the service-side of the RPC.
+
+ Returns:
+ The details value emitted by the service-side of the RPC, or None if there
+ was no such value.
+ """
+ raise NotImplementedError()
+
+
+class ServicerContext(six.with_metaclass(abc.ABCMeta, RpcContext)):
+ """A context object passed to method implementations."""
+
+ @abc.abstractmethod
+ def invocation_metadata(self):
+ """Accesses the metadata from the invocation-side of the RPC.
+
+ This method blocks until the value is available or is known not to have been
+ emitted from the invocation-side of the RPC.
+
+ Returns:
+ The metadata object emitted by the invocation-side of the RPC, or None if
+ there was no such value.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def initial_metadata(self, initial_metadata):
+ """Accepts the service-side initial metadata value of the RPC.
+
+ This method need not be called by method implementations if they have no
+ service-side initial metadata to transmit.
+
+ Args:
+ initial_metadata: The service-side initial metadata value of the RPC to
+ be transmitted to the invocation side of the RPC.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def terminal_metadata(self, terminal_metadata):
+ """Accepts the service-side terminal metadata value of the RPC.
+
+ This method need not be called by method implementations if they have no
+ service-side terminal metadata to transmit.
+
+ Args:
+ terminal_metadata: The service-side terminal metadata value of the RPC to
+ be transmitted to the invocation side of the RPC.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def code(self, code):
+ """Accepts the service-side code of the RPC.
+
+ This method need not be called by method implementations if they have no
+ code to transmit.
+
+ Args:
+ code: The code of the RPC to be transmitted to the invocation side of the
+ RPC.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def details(self, details):
+ """Accepts the service-side details of the RPC.
+
+ This method need not be called by method implementations if they have no
+ service-side details to transmit.
+
+ Args:
+ details: The service-side details value of the RPC to be transmitted to
+ the invocation side of the RPC.
+ """
+ raise NotImplementedError()
+
+
+class ResponseReceiver(six.with_metaclass(abc.ABCMeta)):
+ """Invocation-side object used to accept the output of an RPC."""
+
+ @abc.abstractmethod
+ def initial_metadata(self, initial_metadata):
+ """Receives the initial metadata from the service-side of the RPC.
+
+ Args:
+ initial_metadata: The initial metadata object emitted from the
+ service-side of the RPC.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def response(self, response):
+ """Receives a response from the service-side of the RPC.
+
+ Args:
+ response: A response object emitted from the service-side of the RPC.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def complete(self, terminal_metadata, code, details):
+ """Receives the completion values emitted from the service-side of the RPC.
+
+ Args:
+ terminal_metadata: The terminal metadata object emitted from the
+ service-side of the RPC.
+ code: The code object emitted from the service-side of the RPC.
+ details: The details object emitted from the service-side of the RPC.
+ """
+ raise NotImplementedError()
+
+
+class UnaryUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
+ """Affords invoking a unary-unary RPC in any call style."""
+
+ @abc.abstractmethod
+ def __call__(self,
+ request,
+ timeout,
+ metadata=None,
+ with_call=False,
+ protocol_options=None):
+ """Synchronously invokes the underlying RPC.
+
+ Args:
+ request: The request value for the RPC.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of
+ the RPC.
+          with_call: Whether or not to also return a Call for the RPC in addition
+ to the response.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ The response value for the RPC, and a Call for the RPC if with_call was
+ set to True at invocation.
+
+ Raises:
+ AbortionError: Indicating that the RPC was aborted.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def future(self, request, timeout, metadata=None, protocol_options=None):
+ """Asynchronously invokes the underlying RPC.
+
+ Args:
+ request: The request value for the RPC.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of
+ the RPC.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ An object that is both a Call for the RPC and a future.Future. In the
+          event of RPC completion, the returned Future's result value will be the
+ response value of the RPC. In the event of RPC abortion, the returned
+ Future's exception value will be an AbortionError.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def event(self,
+ request,
+ receiver,
+ abortion_callback,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ """Asynchronously invokes the underlying RPC.
+
+ Args:
+ request: The request value for the RPC.
+ receiver: A ResponseReceiver to be passed the response data of the RPC.
+ abortion_callback: A callback to be called and passed an Abortion value
+ in the event of RPC abortion.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of
+ the RPC.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ A Call for the RPC.
+ """
+ raise NotImplementedError()
+
+
+class UnaryStreamMultiCallable(six.with_metaclass(abc.ABCMeta)):
+ """Affords invoking a unary-stream RPC in any call style."""
+
+ @abc.abstractmethod
+ def __call__(self, request, timeout, metadata=None, protocol_options=None):
+ """Invokes the underlying RPC.
+
+ Args:
+ request: The request value for the RPC.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of
+ the RPC.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ An object that is both a Call for the RPC and an iterator of response
+ values. Drawing response values from the returned iterator may raise
+ AbortionError indicating abortion of the RPC.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def event(self,
+ request,
+ receiver,
+ abortion_callback,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ """Asynchronously invokes the underlying RPC.
+
+ Args:
+ request: The request value for the RPC.
+ receiver: A ResponseReceiver to be passed the response data of the RPC.
+ abortion_callback: A callback to be called and passed an Abortion value
+ in the event of RPC abortion.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of
+ the RPC.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ A Call object for the RPC.
+ """
+ raise NotImplementedError()
+
+
+class StreamUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
+ """Affords invoking a stream-unary RPC in any call style."""
+
+ @abc.abstractmethod
+ def __call__(self,
+ request_iterator,
+ timeout,
+ metadata=None,
+ with_call=False,
+ protocol_options=None):
+ """Synchronously invokes the underlying RPC.
+
+ Args:
+ request_iterator: An iterator that yields request values for the RPC.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of
+ the RPC.
+          with_call: Whether or not to also return a Call for the RPC in addition
+ to the response.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ The response value for the RPC, and a Call for the RPC if with_call was
+ set to True at invocation.
+
+ Raises:
+ AbortionError: Indicating that the RPC was aborted.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def future(self,
+ request_iterator,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ """Asynchronously invokes the underlying RPC.
+
+ Args:
+ request_iterator: An iterator that yields request values for the RPC.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of
+ the RPC.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ An object that is both a Call for the RPC and a future.Future. In the
+          event of RPC completion, the returned Future's result value will be the
+ response value of the RPC. In the event of RPC abortion, the returned
+ Future's exception value will be an AbortionError.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def event(self,
+ receiver,
+ abortion_callback,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ """Asynchronously invokes the underlying RPC.
+
+ Args:
+ receiver: A ResponseReceiver to be passed the response data of the RPC.
+ abortion_callback: A callback to be called and passed an Abortion value
+ in the event of RPC abortion.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of
+ the RPC.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ A single object that is both a Call object for the RPC and a
+ stream.Consumer to which the request values of the RPC should be passed.
+ """
+ raise NotImplementedError()
+
+
+class StreamStreamMultiCallable(six.with_metaclass(abc.ABCMeta)):
+ """Affords invoking a stream-stream RPC in any call style."""
+
+ @abc.abstractmethod
+ def __call__(self,
+ request_iterator,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ """Invokes the underlying RPC.
+
+ Args:
+ request_iterator: An iterator that yields request values for the RPC.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of
+ the RPC.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ An object that is both a Call for the RPC and an iterator of response
+ values. Drawing response values from the returned iterator may raise
+ AbortionError indicating abortion of the RPC.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def event(self,
+ receiver,
+ abortion_callback,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ """Asynchronously invokes the underlying RPC.
+
+ Args:
+ receiver: A ResponseReceiver to be passed the response data of the RPC.
+ abortion_callback: A callback to be called and passed an Abortion value
+ in the event of RPC abortion.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of
+ the RPC.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ A single object that is both a Call object for the RPC and a
+ stream.Consumer to which the request values of the RPC should be passed.
+ """
+ raise NotImplementedError()
+
+
+class MethodImplementation(six.with_metaclass(abc.ABCMeta)):
+ """A sum type that describes a method implementation.
+
+ Attributes:
+ cardinality: A cardinality.Cardinality value.
+ style: A style.Service value.
+ unary_unary_inline: The implementation of the method as a callable value
+ that takes a request value and a ServicerContext object and returns a
+ response value. Only non-None if cardinality is
+ cardinality.Cardinality.UNARY_UNARY and style is style.Service.INLINE.
+ unary_stream_inline: The implementation of the method as a callable value
+ that takes a request value and a ServicerContext object and returns an
+ iterator of response values. Only non-None if cardinality is
+ cardinality.Cardinality.UNARY_STREAM and style is style.Service.INLINE.
+ stream_unary_inline: The implementation of the method as a callable value
+ that takes an iterator of request values and a ServicerContext object and
+ returns a response value. Only non-None if cardinality is
+ cardinality.Cardinality.STREAM_UNARY and style is style.Service.INLINE.
+ stream_stream_inline: The implementation of the method as a callable value
+ that takes an iterator of request values and a ServicerContext object and
+ returns an iterator of response values. Only non-None if cardinality is
+ cardinality.Cardinality.STREAM_STREAM and style is style.Service.INLINE.
+ unary_unary_event: The implementation of the method as a callable value that
+ takes a request value, a response callback to which to pass the response
+ value of the RPC, and a ServicerContext. Only non-None if cardinality is
+ cardinality.Cardinality.UNARY_UNARY and style is style.Service.EVENT.
+ unary_stream_event: The implementation of the method as a callable value
+ that takes a request value, a stream.Consumer to which to pass the
+ response values of the RPC, and a ServicerContext. Only non-None if
+ cardinality is cardinality.Cardinality.UNARY_STREAM and style is
+ style.Service.EVENT.
+ stream_unary_event: The implementation of the method as a callable value
+ that takes a response callback to which to pass the response value of the
+ RPC and a ServicerContext and returns a stream.Consumer to which the
+ request values of the RPC should be passed. Only non-None if cardinality
+ is cardinality.Cardinality.STREAM_UNARY and style is style.Service.EVENT.
+ stream_stream_event: The implementation of the method as a callable value
+ that takes a stream.Consumer to which to pass the response values of the
+ RPC and a ServicerContext and returns a stream.Consumer to which the
+ request values of the RPC should be passed. Only non-None if cardinality
+ is cardinality.Cardinality.STREAM_STREAM and style is
+ style.Service.EVENT.
+ """
+
+
+class MultiMethodImplementation(six.with_metaclass(abc.ABCMeta)):
+ """A general type able to service many methods."""
+
+ @abc.abstractmethod
+ def service(self, group, method, response_consumer, context):
+ """Services an RPC.
+
+ Args:
+ group: The group identifier of the RPC.
+ method: The method identifier of the RPC.
+ response_consumer: A stream.Consumer to be called to accept the response
+ values of the RPC.
+ context: a ServicerContext object.
+
+ Returns:
+ A stream.Consumer with which to accept the request values of the RPC. The
+ consumer returned from this method may or may not be invoked to
+ completion: in the case of RPC abortion, RPC Framework will simply stop
+ passing values to this object. Implementations must not assume that this
+ object will be called to completion of the request stream or even called
+ at all.
+
+ Raises:
+ abandonment.Abandoned: May or may not be raised when the RPC has been
+ aborted.
+          NoSuchMethodError: If this MultiMethodImplementation does not
+            recognize the given group and method for the RPC and is not able
+            to service the RPC.
+ """
+ raise NotImplementedError()
+
+
+class GenericStub(six.with_metaclass(abc.ABCMeta)):
+ """Affords RPC invocation via generic methods."""
+
+ @abc.abstractmethod
+ def blocking_unary_unary(self,
+ group,
+ method,
+ request,
+ timeout,
+ metadata=None,
+ with_call=False,
+ protocol_options=None):
+ """Invokes a unary-request-unary-response method.
+
+ This method blocks until either returning the response value of the RPC
+ (in the event of RPC completion) or raising an exception (in the event of
+ RPC abortion).
+
+ Args:
+ group: The group identifier of the RPC.
+ method: The method identifier of the RPC.
+ request: The request value for the RPC.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of the RPC.
+          with_call: Whether or not to also return a Call for the RPC in addition
+ to the response.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ The response value for the RPC, and a Call for the RPC if with_call was
+ set to True at invocation.
+
+ Raises:
+ AbortionError: Indicating that the RPC was aborted.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def future_unary_unary(self,
+ group,
+ method,
+ request,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ """Invokes a unary-request-unary-response method.
+
+ Args:
+ group: The group identifier of the RPC.
+ method: The method identifier of the RPC.
+ request: The request value for the RPC.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of the RPC.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ An object that is both a Call for the RPC and a future.Future. In the
+          event of RPC completion, the returned Future's result value will be the
+ response value of the RPC. In the event of RPC abortion, the returned
+ Future's exception value will be an AbortionError.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def inline_unary_stream(self,
+ group,
+ method,
+ request,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ """Invokes a unary-request-stream-response method.
+
+ Args:
+ group: The group identifier of the RPC.
+ method: The method identifier of the RPC.
+ request: The request value for the RPC.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of the RPC.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ An object that is both a Call for the RPC and an iterator of response
+ values. Drawing response values from the returned iterator may raise
+ AbortionError indicating abortion of the RPC.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def blocking_stream_unary(self,
+ group,
+ method,
+ request_iterator,
+ timeout,
+ metadata=None,
+ with_call=False,
+ protocol_options=None):
+ """Invokes a stream-request-unary-response method.
+
+ This method blocks until either returning the response value of the RPC
+ (in the event of RPC completion) or raising an exception (in the event of
+ RPC abortion).
+
+ Args:
+ group: The group identifier of the RPC.
+ method: The method identifier of the RPC.
+ request_iterator: An iterator that yields request values for the RPC.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of the RPC.
+          with_call: Whether or not to also return a Call for the RPC in addition
+ to the response.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ The response value for the RPC, and a Call for the RPC if with_call was
+ set to True at invocation.
+
+ Raises:
+ AbortionError: Indicating that the RPC was aborted.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def future_stream_unary(self,
+ group,
+ method,
+ request_iterator,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ """Invokes a stream-request-unary-response method.
+
+ Args:
+ group: The group identifier of the RPC.
+ method: The method identifier of the RPC.
+ request_iterator: An iterator that yields request values for the RPC.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of the RPC.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ An object that is both a Call for the RPC and a future.Future. In the
+          event of RPC completion, the returned Future's result value will be the
+ response value of the RPC. In the event of RPC abortion, the returned
+ Future's exception value will be an AbortionError.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def inline_stream_stream(self,
+ group,
+ method,
+ request_iterator,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ """Invokes a stream-request-stream-response method.
+
+ Args:
+ group: The group identifier of the RPC.
+ method: The method identifier of the RPC.
+ request_iterator: An iterator that yields request values for the RPC.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of the RPC.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ An object that is both a Call for the RPC and an iterator of response
+ values. Drawing response values from the returned iterator may raise
+ AbortionError indicating abortion of the RPC.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def event_unary_unary(self,
+ group,
+ method,
+ request,
+ receiver,
+ abortion_callback,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ """Event-driven invocation of a unary-request-unary-response method.
+
+ Args:
+ group: The group identifier of the RPC.
+ method: The method identifier of the RPC.
+ request: The request value for the RPC.
+ receiver: A ResponseReceiver to be passed the response data of the RPC.
+ abortion_callback: A callback to be called and passed an Abortion value
+ in the event of RPC abortion.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of the RPC.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ A Call for the RPC.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def event_unary_stream(self,
+ group,
+ method,
+ request,
+ receiver,
+ abortion_callback,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ """Event-driven invocation of a unary-request-stream-response method.
+
+ Args:
+ group: The group identifier of the RPC.
+ method: The method identifier of the RPC.
+ request: The request value for the RPC.
+ receiver: A ResponseReceiver to be passed the response data of the RPC.
+ abortion_callback: A callback to be called and passed an Abortion value
+ in the event of RPC abortion.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of the RPC.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ A Call for the RPC.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def event_stream_unary(self,
+ group,
+ method,
+ receiver,
+ abortion_callback,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+        """Event-driven invocation of a stream-request-unary-response method.
+
+ Args:
+ group: The group identifier of the RPC.
+ method: The method identifier of the RPC.
+ receiver: A ResponseReceiver to be passed the response data of the RPC.
+ abortion_callback: A callback to be called and passed an Abortion value
+ in the event of RPC abortion.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of the RPC.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ A pair of a Call object for the RPC and a stream.Consumer to which the
+ request values of the RPC should be passed.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def event_stream_stream(self,
+ group,
+ method,
+ receiver,
+ abortion_callback,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+        """Event-driven invocation of a stream-request-stream-response method.
+
+ Args:
+ group: The group identifier of the RPC.
+ method: The method identifier of the RPC.
+ receiver: A ResponseReceiver to be passed the response data of the RPC.
+ abortion_callback: A callback to be called and passed an Abortion value
+ in the event of RPC abortion.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of the RPC.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ A pair of a Call object for the RPC and a stream.Consumer to which the
+ request values of the RPC should be passed.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def unary_unary(self, group, method):
+ """Creates a UnaryUnaryMultiCallable for a unary-unary method.
+
+ Args:
+ group: The group identifier of the RPC.
+ method: The method identifier of the RPC.
+
+ Returns:
+ A UnaryUnaryMultiCallable value for the named unary-unary method.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def unary_stream(self, group, method):
+ """Creates a UnaryStreamMultiCallable for a unary-stream method.
+
+ Args:
+ group: The group identifier of the RPC.
+ method: The method identifier of the RPC.
+
+ Returns:
+          A UnaryStreamMultiCallable value for the named unary-stream method.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def stream_unary(self, group, method):
+ """Creates a StreamUnaryMultiCallable for a stream-unary method.
+
+ Args:
+ group: The group identifier of the RPC.
+ method: The method identifier of the RPC.
+
+ Returns:
+ A StreamUnaryMultiCallable value for the named stream-unary method.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def stream_stream(self, group, method):
+ """Creates a StreamStreamMultiCallable for a stream-stream method.
+
+ Args:
+ group: The group identifier of the RPC.
+ method: The method identifier of the RPC.
+
+ Returns:
+ A StreamStreamMultiCallable value for the named stream-stream method.
+ """
+ raise NotImplementedError()
+
+
+class DynamicStub(six.with_metaclass(abc.ABCMeta)):
+ """Affords RPC invocation via attributes corresponding to afforded methods.
+
+ Instances of this type may be scoped to a single group so that attribute
+ access is unambiguous.
+
+ Instances of this type respond to attribute access as follows: if the
+ requested attribute is the name of a unary-unary method, the value of the
+ attribute will be a UnaryUnaryMultiCallable with which to invoke an RPC; if
+ the requested attribute is the name of a unary-stream method, the value of the
+ attribute will be a UnaryStreamMultiCallable with which to invoke an RPC; if
+ the requested attribute is the name of a stream-unary method, the value of the
+ attribute will be a StreamUnaryMultiCallable with which to invoke an RPC; and
+ if the requested attribute is the name of a stream-stream method, the value of
+ the attribute will be a StreamStreamMultiCallable with which to invoke an RPC.
+ """
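+
+
+# Illustrative sketch (not part of the gRPC distribution): the inline
+# unary-unary service style described above is just a callable that takes a
+# request value and a ServicerContext. The echo semantics and the metadata
+# value below are assumptions made for this example.
+#
+#   def echo_unary_unary(request, servicer_context):
+#       servicer_context.initial_metadata({'handled-by': 'echo'})
+#       return request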
diff --git a/venv/Lib/site-packages/grpc/framework/interfaces/face/utilities.py b/venv/Lib/site-packages/grpc/framework/interfaces/face/utilities.py
new file mode 100644
index 000000000..f27bd6761
--- /dev/null
+++ b/venv/Lib/site-packages/grpc/framework/interfaces/face/utilities.py
@@ -0,0 +1,168 @@
+# Copyright 2015 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Utilities for RPC Framework's Face interface."""
+
+import collections
+
+# stream is referenced from specification in this module.
+from grpc.framework.common import cardinality
+from grpc.framework.common import style
+from grpc.framework.foundation import stream # pylint: disable=unused-import
+from grpc.framework.interfaces.face import face
+
+
+class _MethodImplementation(face.MethodImplementation,
+ collections.namedtuple('_MethodImplementation', [
+ 'cardinality',
+ 'style',
+ 'unary_unary_inline',
+ 'unary_stream_inline',
+ 'stream_unary_inline',
+ 'stream_stream_inline',
+ 'unary_unary_event',
+ 'unary_stream_event',
+ 'stream_unary_event',
+ 'stream_stream_event',
+ ])):
+    """A trivial implementation of face.MethodImplementation."""
+
+
+def unary_unary_inline(behavior):
+    """Creates a face.MethodImplementation for the given behavior.
+
+    Args:
+      behavior: The implementation of a unary-unary RPC method as a callable
+        value that takes a request value and a face.ServicerContext object
+        and returns a response value.
+
+    Returns:
+      A face.MethodImplementation derived from the given behavior.
+    """
+ return _MethodImplementation(cardinality.Cardinality.UNARY_UNARY,
+ style.Service.INLINE, behavior, None, None,
+ None, None, None, None, None)
+
+
+def unary_stream_inline(behavior):
+    """Creates a face.MethodImplementation for the given behavior.
+
+    Args:
+      behavior: The implementation of a unary-stream RPC method as a callable
+        value that takes a request value and a face.ServicerContext object
+        and returns an iterator of response values.
+
+    Returns:
+      A face.MethodImplementation derived from the given behavior.
+    """
+ return _MethodImplementation(cardinality.Cardinality.UNARY_STREAM,
+ style.Service.INLINE, None, behavior, None,
+ None, None, None, None, None)
+
+
+def stream_unary_inline(behavior):
+    """Creates a face.MethodImplementation for the given behavior.
+
+    Args:
+      behavior: The implementation of a stream-unary RPC method as a callable
+        value that takes an iterator of request values and a
+        face.ServicerContext object and returns a response value.
+
+    Returns:
+      A face.MethodImplementation derived from the given behavior.
+    """
+ return _MethodImplementation(cardinality.Cardinality.STREAM_UNARY,
+ style.Service.INLINE, None, None, behavior,
+ None, None, None, None, None)
+
+
+def stream_stream_inline(behavior):
+    """Creates a face.MethodImplementation for the given behavior.
+
+    Args:
+      behavior: The implementation of a stream-stream RPC method as a callable
+        value that takes an iterator of request values and a
+        face.ServicerContext object and returns an iterator of response
+        values.
+
+    Returns:
+      A face.MethodImplementation derived from the given behavior.
+    """
+ return _MethodImplementation(cardinality.Cardinality.STREAM_STREAM,
+ style.Service.INLINE, None, None, None,
+ behavior, None, None, None, None)
+
+
+def unary_unary_event(behavior):
+    """Creates a face.MethodImplementation for the given behavior.
+
+    Args:
+      behavior: The implementation of a unary-unary RPC method as a callable
+        value that takes a request value, a response callback to which to
+        pass the response value of the RPC, and a face.ServicerContext.
+
+    Returns:
+      A face.MethodImplementation derived from the given behavior.
+    """
+ return _MethodImplementation(cardinality.Cardinality.UNARY_UNARY,
+ style.Service.EVENT, None, None, None, None,
+ behavior, None, None, None)
+
+
+def unary_stream_event(behavior):
+    """Creates a face.MethodImplementation for the given behavior.
+
+    Args:
+      behavior: The implementation of a unary-stream RPC method as a callable
+        value that takes a request value, a stream.Consumer to which to pass
+        the response values of the RPC, and a face.ServicerContext.
+
+    Returns:
+      A face.MethodImplementation derived from the given behavior.
+    """
+ return _MethodImplementation(cardinality.Cardinality.UNARY_STREAM,
+ style.Service.EVENT, None, None, None, None,
+ None, behavior, None, None)
+
+
+def stream_unary_event(behavior):
+    """Creates a face.MethodImplementation for the given behavior.
+
+    Args:
+      behavior: The implementation of a stream-unary RPC method as a callable
+        value that takes a response callback to which to pass the response
+        value of the RPC and a face.ServicerContext and returns a
+        stream.Consumer to which the request values of the RPC should be
+        passed.
+
+    Returns:
+      A face.MethodImplementation derived from the given behavior.
+    """
+ return _MethodImplementation(cardinality.Cardinality.STREAM_UNARY,
+ style.Service.EVENT, None, None, None, None,
+ None, None, behavior, None)
+
+
+def stream_stream_event(behavior):
+    """Creates a face.MethodImplementation for the given behavior.
+
+    Args:
+      behavior: The implementation of a stream-stream RPC method as a callable
+        value that takes a stream.Consumer to which to pass the response
+        values of the RPC and a face.ServicerContext and returns a
+        stream.Consumer to which the request values of the RPC should be
+        passed.
+
+    Returns:
+      A face.MethodImplementation derived from the given behavior.
+    """
+ return _MethodImplementation(cardinality.Cardinality.STREAM_STREAM,
+ style.Service.EVENT, None, None, None, None,
+ None, None, None, behavior)
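+
+
+# Illustrative sketch (not part of the gRPC distribution): wrapping a behavior
+# with one of the factories above. The `_double` behavior is an assumed
+# stand-in for a real unary-unary method implementation.
+def _example():
+    def _double(request, servicer_context):  # The context is unused here.
+        return request * 2
+
+    implementation = unary_unary_inline(_double)
+    assert implementation.cardinality is cardinality.Cardinality.UNARY_UNARY
+    assert implementation.style is style.Service.INLINE
+    assert implementation.unary_unary_inline is _double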
diff --git a/venv/Lib/site-packages/grpcio-1.32.0.dist-info/DESCRIPTION.rst b/venv/Lib/site-packages/grpcio-1.32.0.dist-info/DESCRIPTION.rst
new file mode 100644
index 000000000..4a5d7abb8
--- /dev/null
+++ b/venv/Lib/site-packages/grpcio-1.32.0.dist-info/DESCRIPTION.rst
@@ -0,0 +1,117 @@
+gRPC Python
+===========
+
+|compat_check_pypi|
+
+Package for gRPC Python.
+
+.. |compat_check_pypi| image:: https://python-compatibility-tools.appspot.com/one_badge_image?package=grpcio
+ :target: https://python-compatibility-tools.appspot.com/one_badge_target?package=grpcio
+
+Supported Python Versions
+-------------------------
+Python >= 3.5
+
+Installation
+------------
+
+gRPC Python is available for Linux, macOS, and Windows.
+
+Installing From PyPI
+~~~~~~~~~~~~~~~~~~~~
+
+If you are installing locally...
+
+::
+
+ $ pip install grpcio
+
+Or system-wide (on Ubuntu)...
+
+::
+
+ $ sudo pip install grpcio
+
+If you're on Windows make sure that you installed the :code:`pip.exe` component
+when you installed Python (if not go back and install it!) then invoke:
+
+::
+
+ $ pip.exe install grpcio
+
+Windows users may need to invoke :code:`pip.exe` from a command line run as
+administrator.
+
+n.b. On Windows and on Mac OS X one *must* have a recent release of :code:`pip`
+to retrieve the proper wheel from PyPI. Be sure to upgrade to the latest
+version!
+
+Installing From Source
+~~~~~~~~~~~~~~~~~~~~~~
+
+Building from source requires that you have the Python headers (usually a
+package named :code:`python-dev`).
+
+::
+
+ $ export REPO_ROOT=grpc # REPO_ROOT can be any directory of your choice
+ $ git clone -b RELEASE_TAG_HERE https://github.com/grpc/grpc $REPO_ROOT
+ $ cd $REPO_ROOT
+ $ git submodule update --init
+
+ # For the next two commands do `sudo pip install` if you get permission-denied errors
+  $ pip install -r requirements.txt
+ $ GRPC_PYTHON_BUILD_WITH_CYTHON=1 pip install .
+
+You cannot currently install gRPC Python from source on Windows. Things might work
+out for you in MSYS2 (follow the Linux instructions), but it isn't officially
+supported at the moment.
+
+Troubleshooting
+~~~~~~~~~~~~~~~
+
+Help, I ...
+
+* **... see a** :code:`pkg_resources.VersionConflict` **when I try to install
+ grpc**
+
+ This is likely because :code:`pip` doesn't own the offending dependency,
+ which in turn is likely because your operating system's package manager owns
+ it. You'll need to force the installation of the dependency:
+
+ :code:`pip install --ignore-installed $OFFENDING_DEPENDENCY`
+
+ For example, if you get an error like the following:
+
+ ::
+
+ Traceback (most recent call last):
+        File "<string>", line 17, in <module>
+ ...
+ File "/usr/lib/python2.7/dist-packages/pkg_resources.py", line 509, in find
+ raise VersionConflict(dist, req)
+ pkg_resources.VersionConflict: (six 1.8.0 (/usr/lib/python2.7/dist-packages), Requirement.parse('six>=1.10'))
+
+ You can fix it by doing:
+
+ ::
+
+ sudo pip install --ignore-installed six
+
+* **... see the following error on some platforms**
+
+ ::
+
+ /tmp/pip-build-U8pSsr/cython/Cython/Plex/Scanners.c:4:20: fatal error: Python.h: No such file or directory
+ #include "Python.h"
+ ^
+ compilation terminated.
+
+  You can fix it by installing the `python-dev` package, e.g.
+
+ ::
+
+ sudo apt-get install python-dev
+
+
+
diff --git a/venv/Lib/site-packages/grpcio-1.32.0.dist-info/INSTALLER b/venv/Lib/site-packages/grpcio-1.32.0.dist-info/INSTALLER
new file mode 100644
index 000000000..a1b589e38
--- /dev/null
+++ b/venv/Lib/site-packages/grpcio-1.32.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/Lib/site-packages/grpcio-1.32.0.dist-info/METADATA b/venv/Lib/site-packages/grpcio-1.32.0.dist-info/METADATA
new file mode 100644
index 000000000..bf6d02713
--- /dev/null
+++ b/venv/Lib/site-packages/grpcio-1.32.0.dist-info/METADATA
@@ -0,0 +1,143 @@
+Metadata-Version: 2.0
+Name: grpcio
+Version: 1.32.0
+Summary: HTTP/2-based RPC framework
+Home-page: https://grpc.io
+Author: The gRPC Authors
+Author-email: grpc-io@googlegroups.com
+License: Apache License 2.0
+Description-Content-Type: UNKNOWN
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: License :: OSI Approved :: Apache Software License
+Requires-Dist: six (>=1.5.2)
+Requires-Dist: futures (>=2.2.0); python_version < "3.2"
+Requires-Dist: enum34 (>=1.0.4); python_version < "3.4"
+Provides-Extra: protobuf
+Requires-Dist: grpcio-tools (>=1.32.0); extra == 'protobuf'
+
+gRPC Python
+===========
+
+|compat_check_pypi|
+
+Package for gRPC Python.
+
+.. |compat_check_pypi| image:: https://python-compatibility-tools.appspot.com/one_badge_image?package=grpcio
+ :target: https://python-compatibility-tools.appspot.com/one_badge_target?package=grpcio
+
+Supported Python Versions
+-------------------------
+Python >= 3.5
+
+Installation
+------------
+
+gRPC Python is available for Linux, macOS, and Windows.
+
+Installing From PyPI
+~~~~~~~~~~~~~~~~~~~~
+
+If you are installing locally...
+
+::
+
+ $ pip install grpcio
+
+Or system-wide (on Ubuntu)...
+
+::
+
+ $ sudo pip install grpcio
+
+If you're on Windows, make sure that you installed the :code:`pip.exe` component
+when you installed Python (if not, go back and install it!), then invoke:
+
+::
+
+ $ pip.exe install grpcio
+
+Windows users may need to invoke :code:`pip.exe` from a command line run as
+administrator.
+
+n.b. On Windows and on Mac OS X one *must* have a recent release of :code:`pip`
+to retrieve the proper wheel from PyPI. Be sure to upgrade to the latest
+version!
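+
+For example, the following generic command (not specific to gRPC) upgrades
+:code:`pip` in place:
+
+::
+
+  $ python -m pip install --upgrade pip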
+
+Installing From Source
+~~~~~~~~~~~~~~~~~~~~~~
+
+Building from source requires that you have the Python headers (usually a
+package named :code:`python-dev`).
+
+::
+
+ $ export REPO_ROOT=grpc # REPO_ROOT can be any directory of your choice
+ $ git clone -b RELEASE_TAG_HERE https://github.com/grpc/grpc $REPO_ROOT
+ $ cd $REPO_ROOT
+ $ git submodule update --init
+
+ # For the next two commands do `sudo pip install` if you get permission-denied errors
+ $ pip install -rrequirements.txt
+ $ GRPC_PYTHON_BUILD_WITH_CYTHON=1 pip install .
+
+You cannot currently install gRPC Python from source on Windows. Things might work
+out for you in MSYS2 (follow the Linux instructions), but it isn't officially
+supported at the moment.
+
+Troubleshooting
+~~~~~~~~~~~~~~~
+
+Help, I ...
+
+* **... see a** :code:`pkg_resources.VersionConflict` **when I try to install
+ grpc**
+
+ This is likely because :code:`pip` doesn't own the offending dependency,
+ which in turn is likely because your operating system's package manager owns
+ it. You'll need to force the installation of the dependency:
+
+ :code:`pip install --ignore-installed $OFFENDING_DEPENDENCY`
+
+ For example, if you get an error like the following:
+
+ ::
+
+ Traceback (most recent call last):
+      File "<string>", line 17, in <module>
+ ...
+ File "/usr/lib/python2.7/dist-packages/pkg_resources.py", line 509, in find
+ raise VersionConflict(dist, req)
+ pkg_resources.VersionConflict: (six 1.8.0 (/usr/lib/python2.7/dist-packages), Requirement.parse('six>=1.10'))
+
+ You can fix it by doing:
+
+ ::
+
+ sudo pip install --ignore-installed six
+
+* **... see the following error on some platforms**
+
+ ::
+
+ /tmp/pip-build-U8pSsr/cython/Cython/Plex/Scanners.c:4:20: fatal error: Python.h: No such file or directory
+ #include "Python.h"
+ ^
+ compilation terminated.
+
+  You can fix it by installing the `python-dev` package, e.g.:
+
+ ::
+
+ sudo apt-get install python-dev
+
+
+
diff --git a/venv/Lib/site-packages/grpcio-1.32.0.dist-info/RECORD b/venv/Lib/site-packages/grpcio-1.32.0.dist-info/RECORD
new file mode 100644
index 000000000..d42461950
--- /dev/null
+++ b/venv/Lib/site-packages/grpcio-1.32.0.dist-info/RECORD
@@ -0,0 +1,117 @@
+grpc/__init__.py,sha256=C_OkIKy3gUNWrOg1lA17wZdJHf5BVljgmsgiHHcRzN0,82325
+grpc/__pycache__/__init__.cpython-36.pyc,,
+grpc/__pycache__/_auth.cpython-36.pyc,,
+grpc/__pycache__/_channel.cpython-36.pyc,,
+grpc/__pycache__/_common.cpython-36.pyc,,
+grpc/__pycache__/_compression.cpython-36.pyc,,
+grpc/__pycache__/_grpcio_metadata.cpython-36.pyc,,
+grpc/__pycache__/_interceptor.cpython-36.pyc,,
+grpc/__pycache__/_plugin_wrapping.cpython-36.pyc,,
+grpc/__pycache__/_runtime_protos.cpython-36.pyc,,
+grpc/__pycache__/_server.cpython-36.pyc,,
+grpc/__pycache__/_simple_stubs.cpython-36.pyc,,
+grpc/__pycache__/_utilities.cpython-36.pyc,,
+grpc/_auth.py,sha256=6S0D1ZiKMgisKca0p2VElxTp7JHHf9pffz00qbVUaLU,2226
+grpc/_channel.py,sha256=Qwdk-gjk41o456ReGOBsmlVCFWMYG9GPXTKPAFdJ6PI,59644
+grpc/_common.py,sha256=piEec2i5lLBfDsmjwLN2RM-f8i4ZoxkAc8AJ1-PAUxg,6422
+grpc/_compression.py,sha256=_24RjhRL9BjGV8BjBWH9eOnX8FEqPxtjR-gqBgVETeQ,1750
+grpc/_cython/__init__.py,sha256=v-bMmhfnkYP7kR6Mw9wPLG_cCagk-ZKiUZi6pyap2GU,590
+grpc/_cython/__pycache__/__init__.cpython-36.pyc,,
+grpc/_cython/_credentials/roots.pem,sha256=KtN_WlmTMzhyA3SU6D3Ysspj6TueNbhqddCoDLljVrE,288872
+grpc/_cython/_cygrpc/__init__.py,sha256=v-bMmhfnkYP7kR6Mw9wPLG_cCagk-ZKiUZi6pyap2GU,590
+grpc/_cython/_cygrpc/__pycache__/__init__.cpython-36.pyc,,
+grpc/_cython/cygrpc.cp36-win32.pyd,sha256=9eom_DDZ9TG9fZ3geJQ6439ehhKu2x5tJaAWRi0ETr0,4609024
+grpc/_grpcio_metadata.py,sha256=YFBmgWVEA0WVUklyHzJDKiRecac4RDmVcgdE6hZ5SJE,26
+grpc/_interceptor.py,sha256=r_O1z2TDIEICD7oeSDuk9qPsRCZmgTqhI8OzoZ1pWi0,20929
+grpc/_plugin_wrapping.py,sha256=e-dNS_Cx0CagXwFHOTlE_k0DfbjAZGOvvUc4q8YzakI,3528
+grpc/_runtime_protos.py,sha256=vUZ0rD2c4xOOazOHw24WG8Kq5q0Wis4DO1LWCM0cFRQ,6322
+grpc/_server.py,sha256=4H8tUQNWVOJGOUCqxwXvuFQwZ2E3RPoQpSzUvNvmX1Q,38453
+grpc/_simple_stubs.py,sha256=bUeyF1IciLLifBJA_Y8DyGRrUQdq4YRkPn9z69sBCN4,24513
+grpc/_utilities.py,sha256=xYAUJ2plE3YCS-Z-hATU44XiS2_8HdRH8Gc_s_KEFgU,5350
+grpc/aio/__init__.py,sha256=dLH4Bfct6NdmSCBDXWUmiC9qcwMdq01GzXUmlwQnqVw,2905
+grpc/aio/__pycache__/__init__.cpython-36.pyc,,
+grpc/aio/__pycache__/_base_call.cpython-36.pyc,,
+grpc/aio/__pycache__/_base_channel.cpython-36.pyc,,
+grpc/aio/__pycache__/_base_server.cpython-36.pyc,,
+grpc/aio/__pycache__/_call.cpython-36.pyc,,
+grpc/aio/__pycache__/_channel.cpython-36.pyc,,
+grpc/aio/__pycache__/_interceptor.cpython-36.pyc,,
+grpc/aio/__pycache__/_metadata.cpython-36.pyc,,
+grpc/aio/__pycache__/_server.cpython-36.pyc,,
+grpc/aio/__pycache__/_typing.cpython-36.pyc,,
+grpc/aio/__pycache__/_utils.cpython-36.pyc,,
+grpc/aio/_base_call.py,sha256=o3gHG0Q7-ONj2CRcK9xngvezuMnSBwUhQD84-oDLUGU,7484
+grpc/aio/_base_channel.py,sha256=ufk6dq9sk2wAuLgXy2JoGT_S7oXb5Xx3cOrjgYC5R-o,13860
+grpc/aio/_base_server.py,sha256=llFtu0Qzq6sHFKgGyIi-Y_S_A6QkBgG5R9W19YUzubI,10285
+grpc/aio/_call.py,sha256=ftPsijjPb8DjW5c969z5JU-Fm6HSp6zYJMos5x7oVYk,24446
+grpc/aio/_channel.py,sha256=YuTGqtYapIQ0JUlPz0Uj2WSjyB6dRX2VdPFkd7ZazEQ,20462
+grpc/aio/_interceptor.py,sha256=ZfVMQYynDrjoGoSkOqoF5ICWu9_Rjfu2lqjkKmz4ehs,40828
+grpc/aio/_metadata.py,sha256=zsh8f5A_spq7oZiPtvqWyB8hIOP20ebqtoV1ugnY2sw,4666
+grpc/aio/_server.py,sha256=MNIHMb7Gn6uzrZIhXYxpM3SCli7h0oqtKVNSDQfVLoc,8892
+grpc/aio/_typing.py,sha256=HXMJY5pWzhfP5jkTZc7TyNYUlol6hplMhsSnbuIIVSU,1318
+grpc/aio/_utils.py,sha256=Fw8FpKtucE2a2VRNF3JyAMzh0BtgNQZKpE5jWlHlwQw,843
+grpc/beta/__init__.py,sha256=v-bMmhfnkYP7kR6Mw9wPLG_cCagk-ZKiUZi6pyap2GU,590
+grpc/beta/__pycache__/__init__.cpython-36.pyc,,
+grpc/beta/__pycache__/_client_adaptations.cpython-36.pyc,,
+grpc/beta/__pycache__/_metadata.cpython-36.pyc,,
+grpc/beta/__pycache__/_server_adaptations.cpython-36.pyc,,
+grpc/beta/__pycache__/implementations.cpython-36.pyc,,
+grpc/beta/__pycache__/interfaces.cpython-36.pyc,,
+grpc/beta/__pycache__/utilities.cpython-36.pyc,,
+grpc/beta/_client_adaptations.py,sha256=EMkqT0wLwNTqNmmQjha8cmuni76or7SlMKff3SNRpR8,28218
+grpc/beta/_metadata.py,sha256=tPZe4P32VbdFsgMZoGsKqEOT8iODjnwcFeDiH-lFZDs,1658
+grpc/beta/_server_adaptations.py,sha256=ytUc5NfbIJirH4vENQMUiKUQJys9vg-8bFh-Gjax7gw,14202
+grpc/beta/implementations.py,sha256=is0FAkCuyhLiSz5BTZ4OVb86Zhz3n-PVGerdVitPXCY,12126
+grpc/beta/interfaces.py,sha256=fg2M4qgJozMPCI0hRuvCukWgU6vg-wp2BUV4wPol7Yg,6121
+grpc/beta/utilities.py,sha256=7ErgL2XBwlkz-X07H3NLKcUahaXV3jR6WRPfDZRXZnw,5082
+grpc/experimental/__init__.py,sha256=EYjUeVaH5_OAjgKVbo6kWgnC9753a7lKlChdh2QCVdI,4105
+grpc/experimental/__pycache__/__init__.cpython-36.pyc,,
+grpc/experimental/__pycache__/gevent.cpython-36.pyc,,
+grpc/experimental/__pycache__/session_cache.cpython-36.pyc,,
+grpc/experimental/aio/__init__.py,sha256=QoEtaa5C408IeaaSMKvooYNJdWCqyX6X9UYF-maJcIY,676
+grpc/experimental/aio/__pycache__/__init__.cpython-36.pyc,,
+grpc/experimental/gevent.py,sha256=ZmFL0iK7irhC9JtTC2JJP23-IRG3_ZCohRQBhhVWuyM,1000
+grpc/experimental/session_cache.py,sha256=OdASXKtZYY8vP1Yo6GeRi4dfEaVuvyiOK5CknnROExE,1578
+grpc/framework/__init__.py,sha256=v-bMmhfnkYP7kR6Mw9wPLG_cCagk-ZKiUZi6pyap2GU,590
+grpc/framework/__pycache__/__init__.cpython-36.pyc,,
+grpc/framework/common/__init__.py,sha256=v-bMmhfnkYP7kR6Mw9wPLG_cCagk-ZKiUZi6pyap2GU,590
+grpc/framework/common/__pycache__/__init__.cpython-36.pyc,,
+grpc/framework/common/__pycache__/cardinality.cpython-36.pyc,,
+grpc/framework/common/__pycache__/style.cpython-36.pyc,,
+grpc/framework/common/cardinality.py,sha256=v0Gfp8V2mrlgJEq6TFdEYvKdtcG5RhPzKyzqEYSKnww,1014
+grpc/framework/common/style.py,sha256=LsshsJwnoHJsXt6NSfrW3lBBKIpHcAa6crKQQXjkcjM,848
+grpc/framework/foundation/__init__.py,sha256=v-bMmhfnkYP7kR6Mw9wPLG_cCagk-ZKiUZi6pyap2GU,590
+grpc/framework/foundation/__pycache__/__init__.cpython-36.pyc,,
+grpc/framework/foundation/__pycache__/abandonment.cpython-36.pyc,,
+grpc/framework/foundation/__pycache__/callable_util.cpython-36.pyc,,
+grpc/framework/foundation/__pycache__/future.cpython-36.pyc,,
+grpc/framework/foundation/__pycache__/logging_pool.cpython-36.pyc,,
+grpc/framework/foundation/__pycache__/stream.cpython-36.pyc,,
+grpc/framework/foundation/__pycache__/stream_util.cpython-36.pyc,,
+grpc/framework/foundation/abandonment.py,sha256=p1Vg5oK33yGOeX93JgYvdsBXdTtM6lzCIZSXyWcaRQU,894
+grpc/framework/foundation/callable_util.py,sha256=x4qisZhPzIwkAx-HERn8YbOxxM216nvIPg9dAoeu8cY,3247
+grpc/framework/foundation/future.py,sha256=bjR2K231LEpr7GS9-76nFJNAPaSbWHNwr4UopwY880U,8312
+grpc/framework/foundation/logging_pool.py,sha256=uoELGCVmc_GhMxR50BZlNsRMGFGk_ex4VkuVm2hyUk8,2349
+grpc/framework/foundation/stream.py,sha256=cSi1fpCR9xWXvuoi6IIChankcoFDCacuJ7x3brUJ4zg,1434
+grpc/framework/foundation/stream_util.py,sha256=xbo7u8uXLBsQ_vf_NKHsls8mFOmmsaLQni51yk_xLpw,4920
+grpc/framework/interfaces/__init__.py,sha256=v-bMmhfnkYP7kR6Mw9wPLG_cCagk-ZKiUZi6pyap2GU,590
+grpc/framework/interfaces/__pycache__/__init__.cpython-36.pyc,,
+grpc/framework/interfaces/base/__init__.py,sha256=v-bMmhfnkYP7kR6Mw9wPLG_cCagk-ZKiUZi6pyap2GU,590
+grpc/framework/interfaces/base/__pycache__/__init__.cpython-36.pyc,,
+grpc/framework/interfaces/base/__pycache__/base.cpython-36.pyc,,
+grpc/framework/interfaces/base/__pycache__/utilities.cpython-36.pyc,,
+grpc/framework/interfaces/base/base.py,sha256=HpVRrT2L8_NH5wKq4z3YdZ8LQDrdvMLGVK1NHfz4DvQ,12523
+grpc/framework/interfaces/base/utilities.py,sha256=EnchGOBWBOMS5j2zpuSZxn0XXi9gmjTVZ8cOTxCG9IM,2526
+grpc/framework/interfaces/face/__init__.py,sha256=v-bMmhfnkYP7kR6Mw9wPLG_cCagk-ZKiUZi6pyap2GU,590
+grpc/framework/interfaces/face/__pycache__/__init__.cpython-36.pyc,,
+grpc/framework/interfaces/face/__pycache__/face.cpython-36.pyc,,
+grpc/framework/interfaces/face/__pycache__/utilities.cpython-36.pyc,,
+grpc/framework/interfaces/face/face.py,sha256=ROSx6G7dF7APvRbZqFbrneBkKWp_ZbhGj42C_0_ms14,40756
+grpc/framework/interfaces/face/utilities.py,sha256=tF6O4ghE7JlbnZ3vvNyLpTuQ3yzrlMjrAaGZvRQba_s,6879
+grpcio-1.32.0.dist-info/DESCRIPTION.rst,sha256=sJ9akK0BDNfSrrm8h6KFTUw7B1IDtEJ3uO4heYq2B0M,2981
+grpcio-1.32.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+grpcio-1.32.0.dist-info/METADATA,sha256=2DskeBI44mqSPZ2nGw2CWMMQyDmCz4iScs_EUNl8cFA,3975
+grpcio-1.32.0.dist-info/RECORD,,
+grpcio-1.32.0.dist-info/WHEEL,sha256=xiHTm3JxoVljPSD6nSGhq3B4VY9iUqMNXwYQ259n1PI,102
+grpcio-1.32.0.dist-info/metadata.json,sha256=dKrpOEptUV-XOsZF03uLk2ihIOpFHDgtttmO1iJ_9pI,1183
+grpcio-1.32.0.dist-info/top_level.txt,sha256=eEd2Jq_aVQFp38bWW8Pfwjz_5iibqeOFT-2zXlPAq_8,5
diff --git a/venv/Lib/site-packages/grpcio-1.32.0.dist-info/WHEEL b/venv/Lib/site-packages/grpcio-1.32.0.dist-info/WHEEL
new file mode 100644
index 000000000..7872c3318
--- /dev/null
+++ b/venv/Lib/site-packages/grpcio-1.32.0.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.29.0)
+Root-Is-Purelib: false
+Tag: cp36-cp36m-win32
+
diff --git a/venv/Lib/site-packages/grpcio-1.32.0.dist-info/metadata.json b/venv/Lib/site-packages/grpcio-1.32.0.dist-info/metadata.json
new file mode 100644
index 000000000..8279a6626
--- /dev/null
+++ b/venv/Lib/site-packages/grpcio-1.32.0.dist-info/metadata.json
@@ -0,0 +1 @@
+{"classifiers": ["Development Status :: 5 - Production/Stable", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "License :: OSI Approved :: Apache Software License"], "description_content_type": "UNKNOWN", "extensions": {"python.details": {"contacts": [{"email": "grpc-io@googlegroups.com", "name": "The gRPC Authors", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://grpc.io"}}}, "extras": ["protobuf"], "generator": "bdist_wheel (0.29.0)", "license": "Apache License 2.0", "metadata_version": "2.0", "name": "grpcio", "run_requires": [{"extra": "protobuf", "requires": ["grpcio-tools (>=1.32.0)"]}, {"requires": ["six (>=1.5.2)"]}, {"environment": "python_version < \"3.2\"", "requires": ["futures (>=2.2.0)"]}, {"environment": "python_version < \"3.4\"", "requires": ["enum34 (>=1.0.4)"]}], "summary": "HTTP/2-based RPC framework", "version": "1.32.0"}
\ No newline at end of file
diff --git a/venv/Lib/site-packages/grpcio-1.32.0.dist-info/top_level.txt b/venv/Lib/site-packages/grpcio-1.32.0.dist-info/top_level.txt
new file mode 100644
index 000000000..3b2fe54cb
--- /dev/null
+++ b/venv/Lib/site-packages/grpcio-1.32.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+grpc
diff --git a/venv/Lib/site-packages/idna-2.10.dist-info/INSTALLER b/venv/Lib/site-packages/idna-2.10.dist-info/INSTALLER
new file mode 100644
index 000000000..a1b589e38
--- /dev/null
+++ b/venv/Lib/site-packages/idna-2.10.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/Lib/site-packages/idna-2.10.dist-info/LICENSE.rst b/venv/Lib/site-packages/idna-2.10.dist-info/LICENSE.rst
new file mode 100644
index 000000000..63664b82e
--- /dev/null
+++ b/venv/Lib/site-packages/idna-2.10.dist-info/LICENSE.rst
@@ -0,0 +1,34 @@
+License
+-------
+
+License: bsd-3-clause
+
+Copyright (c) 2013-2020, Kim Davies. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+#. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+#. Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided with
+ the distribution.
+
+#. Neither the name of the copyright holder nor the names of the
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+#. THIS SOFTWARE IS PROVIDED BY THE CONTRIBUTORS "AS IS" AND ANY
+ EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR
+ CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
+ USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
+ DAMAGE.
diff --git a/venv/Lib/site-packages/idna-2.10.dist-info/METADATA b/venv/Lib/site-packages/idna-2.10.dist-info/METADATA
new file mode 100644
index 000000000..f73c0ffef
--- /dev/null
+++ b/venv/Lib/site-packages/idna-2.10.dist-info/METADATA
@@ -0,0 +1,243 @@
+Metadata-Version: 2.1
+Name: idna
+Version: 2.10
+Summary: Internationalized Domain Names in Applications (IDNA)
+Home-page: https://github.com/kjd/idna
+Author: Kim Davies
+Author-email: kim@cynosure.com.au
+License: BSD-like
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: System Administrators
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Internet :: Name Service (DNS)
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Utilities
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
+
+Internationalized Domain Names in Applications (IDNA)
+=====================================================
+
+Support for the Internationalised Domain Names in Applications
+(IDNA) protocol as specified in `RFC 5891 <https://tools.ietf.org/html/rfc5891>`_.
+This is the latest version of the protocol and is sometimes referred to as
+“IDNA 2008”.
+
+This library also provides support for Unicode Technical Standard 46,
+`Unicode IDNA Compatibility Processing <https://unicode.org/reports/tr46/>`_.
+
+This acts as a suitable replacement for the “encodings.idna” module that
+comes with the Python standard library, which only supports the
+old, deprecated IDNA specification (`RFC 3490 <https://tools.ietf.org/html/rfc3490>`_).
+
+Basic functions are simply executed:
+
+.. code-block:: pycon
+
+ # Python 3
+ >>> import idna
+ >>> idna.encode('ドメイン.テスト')
+ b'xn--eckwd4c7c.xn--zckzah'
+ >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah'))
+ ドメイン.テスト
+
+ # Python 2
+ >>> import idna
+ >>> idna.encode(u'ドメイン.テスト')
+ 'xn--eckwd4c7c.xn--zckzah'
+ >>> print idna.decode('xn--eckwd4c7c.xn--zckzah')
+ ドメイン.テスト
+
+Packages
+--------
+
+The latest tagged release version is published in the PyPI repository:
+
+.. image:: https://badge.fury.io/py/idna.svg
+ :target: http://badge.fury.io/py/idna
+
+
+Installation
+------------
+
+To install this library, you can use pip:
+
+.. code-block:: bash
+
+ $ pip install idna
+
+Alternatively, you can install the package using the bundled setup script:
+
+.. code-block:: bash
+
+ $ python setup.py install
+
+This library works with Python 2.7 and Python 3.4 or later.
+
+
+Usage
+-----
+
+For typical usage, the ``encode`` and ``decode`` functions will take a domain
+name argument and perform a conversion to A-labels or U-labels respectively.
+
+.. code-block:: pycon
+
+ # Python 3
+ >>> import idna
+ >>> idna.encode('ドメイン.テスト')
+ b'xn--eckwd4c7c.xn--zckzah'
+ >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah'))
+ ドメイン.テスト
+
+You may use the codec's encoding and decoding methods via the
+``idna.codec`` module:
+
+.. code-block:: pycon
+
+ # Python 2
+ >>> import idna.codec
+ >>> print u'домена.испытание'.encode('idna')
+ xn--80ahd1agd.xn--80akhbyknj4f
+ >>> print 'xn--80ahd1agd.xn--80akhbyknj4f'.decode('idna')
+ домена.испытание
+
+Conversions can be applied on a per-label basis using the ``ulabel`` or ``alabel``
+functions if necessary:
+
+.. code-block:: pycon
+
+ # Python 2
+ >>> idna.alabel(u'测试')
+ 'xn--0zwm56d'
+
+Compatibility Mapping (UTS #46)
++++++++++++++++++++++++++++++++
+
+As described in `RFC 5895 <https://tools.ietf.org/html/rfc5895>`_, the IDNA
+specification no longer normalizes input from different potential ways a user
+may input a domain name. This functionality, known as a “mapping”, is now
+considered by the specification to be a local user-interface issue distinct
+from IDNA conversion functionality.
+
+This library provides one such mapping that was developed by the Unicode
+Consortium. Known as `Unicode IDNA Compatibility Processing <https://unicode.org/reports/tr46/>`_,
+it provides both a regular mapping for typical applications and a
+transitional mapping to help migrate from older IDNA 2003 applications.
+
+For example, “Königsgäßchen” is not a permissible label as *LATIN CAPITAL
+LETTER K* is not allowed (nor are capital letters in general). UTS 46 will
+convert this into lower case prior to applying the IDNA conversion.
+
+.. code-block:: pycon
+
+ # Python 3
+ >>> import idna
+ >>> idna.encode(u'Königsgäßchen')
+ ...
+ idna.core.InvalidCodepoint: Codepoint U+004B at position 1 of 'Königsgäßchen' not allowed
+ >>> idna.encode('Königsgäßchen', uts46=True)
+ b'xn--knigsgchen-b4a3dun'
+ >>> print(idna.decode('xn--knigsgchen-b4a3dun'))
+ königsgäßchen
+
+Transitional processing provides conversions to help transition from the older
+2003 standard to the current standard. For example, in the original IDNA
+specification, the *LATIN SMALL LETTER SHARP S* (ß) was converted into two
+*LATIN SMALL LETTER S* (ss), whereas in the current IDNA specification this
+conversion is not performed.
+
+.. code-block:: pycon
+
+ # Python 2
+ >>> idna.encode(u'Königsgäßchen', uts46=True, transitional=True)
+ 'xn--knigsgsschen-lcb0w'
+
+Implementors should use transitional processing with caution, only in rare
+cases where conversion from legacy labels to current labels must be performed
+(i.e. IDNA implementations that pre-date 2008). For typical applications
+that just need to convert labels, transitional processing is unlikely to be
+beneficial and could produce unexpected incompatible results.
+
+``encodings.idna`` Compatibility
+++++++++++++++++++++++++++++++++
+
+Function calls from the Python built-in ``encodings.idna`` module are
+mapped to their IDNA 2008 equivalents using the ``idna.compat`` module.
+Simply substitute the ``import`` clause in your code to refer to the
+new module name.
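+
+For instance (a short sketch using the ``idna.compat`` entry points shipped in
+this package; ``ToASCII`` returns bytes and ``ToUnicode`` returns text):
+
+.. code-block:: pycon
+
+    # Python 3
+    >>> import idna.compat
+    >>> idna.compat.ToASCII('ドメイン.テスト')
+    b'xn--eckwd4c7c.xn--zckzah'
+    >>> idna.compat.ToUnicode('xn--eckwd4c7c.xn--zckzah')
+    'ドメイン.テスト'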
+
+Exceptions
+----------
+
+All errors raised during conversion according to the specification derive
+from the ``idna.IDNAError`` base class.
+
+More specific exceptions that may be generated are ``idna.IDNABidiError``,
+raised when the error reflects an illegal combination of left-to-right and
+right-to-left characters in a label; ``idna.InvalidCodepoint``, raised when a
+specific codepoint is an illegal character in an IDN label (i.e. INVALID);
+and ``idna.InvalidCodepointContext``, raised when the codepoint is illegal
+based on its positional context (i.e. it is CONTEXTO or CONTEXTJ but the
+contextual requirements are not satisfied).
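+
+A minimal sketch (Python 3; the sample labels are chosen here purely for
+illustration) showing that a single ``except idna.IDNAError`` clause also
+catches the more specific exception types:
+
+.. code-block:: pycon
+
+    >>> import idna
+    >>> for label in (u'Königsgäßchen', u'ab--cd', u'a' * 64):
+    ...     try:
+    ...         idna.encode(label)
+    ...     except idna.IDNAError as exc:
+    ...         print(type(exc).__name__)
+    InvalidCodepoint
+    IDNAError
+    IDNAError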
+
+Building and Diagnostics
+------------------------
+
+The IDNA and UTS 46 functionality relies upon pre-calculated lookup tables for
+performance. These tables are derived from computing against eligibility criteria
+in the respective standards. These tables are computed using the command-line
+script ``tools/idna-data``.
+
+This tool will fetch relevant tables from the Unicode Consortium and perform the
+required calculations to identify eligibility. It has three main modes:
+
+* ``idna-data make-libdata``. Generates ``idnadata.py`` and ``uts46data.py``,
+  the pre-calculated lookup tables used for IDNA and UTS 46 conversions. Implementors
+ who wish to track this library against a different Unicode version may use this tool
+ to manually generate a different version of the ``idnadata.py`` and ``uts46data.py``
+ files.
+
+* ``idna-data make-table``. Generate a table of the IDNA disposition
+ (e.g. PVALID, CONTEXTJ, CONTEXTO) in the format found in Appendix B.1 of RFC
+  5892 and the pre-computed tables published by IANA.
+
+* ``idna-data U+0061``. Prints debugging output on the various properties
+  associated with an individual Unicode codepoint (in this case, U+0061) that are
+  used to assess the IDNA and UTS 46 status of a codepoint. This is helpful in debugging
+ or analysis.
+
+The tool accepts a number of arguments, described using ``idna-data -h``. Most notably,
+the ``--version`` argument allows the specification of the version of Unicode to use
+in computing the table data. For example, ``idna-data --version 9.0.0 make-libdata``
+will generate library data against Unicode 9.0.0.
+
+Note that this script requires Python 3, but all generated library data will work
+in Python 2.7.
+
+
+Testing
+-------
+
+The library has a test suite based on each rule of the IDNA specification, as
+well as tests that are provided as part of the Unicode Technical Standard 46,
+`Unicode IDNA Compatibility Processing <https://unicode.org/reports/tr46/>`_.
+
+The tests are run automatically on each commit at Travis CI:
+
+.. image:: https://travis-ci.org/kjd/idna.svg?branch=master
+ :target: https://travis-ci.org/kjd/idna
+
+
diff --git a/venv/Lib/site-packages/idna-2.10.dist-info/RECORD b/venv/Lib/site-packages/idna-2.10.dist-info/RECORD
new file mode 100644
index 000000000..6e99ad781
--- /dev/null
+++ b/venv/Lib/site-packages/idna-2.10.dist-info/RECORD
@@ -0,0 +1,22 @@
+idna-2.10.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+idna-2.10.dist-info/LICENSE.rst,sha256=QSAUQg0kc9ugYRfD1Nng7sqm3eDKMM2VH07CvjlCbzI,1565
+idna-2.10.dist-info/METADATA,sha256=ZWCaQDBjdmSvx5EU7Cv6ORC-9NUQ6nXh1eXx38ySe40,9104
+idna-2.10.dist-info/RECORD,,
+idna-2.10.dist-info/WHEEL,sha256=8zNYZbwQSXoB9IfXOjPfeNwvAsALAjffgk27FqvCWbo,110
+idna-2.10.dist-info/top_level.txt,sha256=jSag9sEDqvSPftxOQy-ABfGV_RSy7oFh4zZJpODV8k0,5
+idna/__init__.py,sha256=9Nt7xpyet3DmOrPUGooDdAwmHZZu1qUAy2EaJ93kGiQ,58
+idna/__pycache__/__init__.cpython-36.pyc,,
+idna/__pycache__/codec.cpython-36.pyc,,
+idna/__pycache__/compat.cpython-36.pyc,,
+idna/__pycache__/core.cpython-36.pyc,,
+idna/__pycache__/idnadata.cpython-36.pyc,,
+idna/__pycache__/intranges.cpython-36.pyc,,
+idna/__pycache__/package_data.cpython-36.pyc,,
+idna/__pycache__/uts46data.cpython-36.pyc,,
+idna/codec.py,sha256=lvYb7yu7PhAqFaAIAdWcwgaWI2UmgseUua-1c0AsG0A,3299
+idna/compat.py,sha256=R-h29D-6mrnJzbXxymrWUW7iZUvy-26TQwZ0ij57i4U,232
+idna/core.py,sha256=jCoaLb3bA2tS_DDx9PpGuNTEZZN2jAzB369aP-IHYRE,11951
+idna/idnadata.py,sha256=gmzFwZWjdms3kKZ_M_vwz7-LP_SCgYfSeE03B21Qpsk,42350
+idna/intranges.py,sha256=TY1lpxZIQWEP6tNqjZkFA5hgoMWOj1OBmnUG8ihT87E,1749
+idna/package_data.py,sha256=bxBjpLnE06_1jSYKEy5svOMu1zM3OMztXVUb1tPlcp0,22
+idna/uts46data.py,sha256=lMdw2zdjkH1JUWXPPEfFUSYT3Fyj60bBmfLvvy5m7ko,202084
diff --git a/venv/Lib/site-packages/idna-2.10.dist-info/WHEEL b/venv/Lib/site-packages/idna-2.10.dist-info/WHEEL
new file mode 100644
index 000000000..8b701e93c
--- /dev/null
+++ b/venv/Lib/site-packages/idna-2.10.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.33.6)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/venv/Lib/site-packages/idna-2.10.dist-info/top_level.txt b/venv/Lib/site-packages/idna-2.10.dist-info/top_level.txt
new file mode 100644
index 000000000..c40472e6f
--- /dev/null
+++ b/venv/Lib/site-packages/idna-2.10.dist-info/top_level.txt
@@ -0,0 +1 @@
+idna
diff --git a/venv/Lib/site-packages/idna/__init__.py b/venv/Lib/site-packages/idna/__init__.py
new file mode 100644
index 000000000..847bf9354
--- /dev/null
+++ b/venv/Lib/site-packages/idna/__init__.py
@@ -0,0 +1,2 @@
+from .package_data import __version__
+from .core import *
diff --git a/venv/Lib/site-packages/idna/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/idna/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..9e0213127
Binary files /dev/null and b/venv/Lib/site-packages/idna/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/idna/__pycache__/codec.cpython-36.pyc b/venv/Lib/site-packages/idna/__pycache__/codec.cpython-36.pyc
new file mode 100644
index 000000000..26e735ff4
Binary files /dev/null and b/venv/Lib/site-packages/idna/__pycache__/codec.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/idna/__pycache__/compat.cpython-36.pyc b/venv/Lib/site-packages/idna/__pycache__/compat.cpython-36.pyc
new file mode 100644
index 000000000..0cb6a43b0
Binary files /dev/null and b/venv/Lib/site-packages/idna/__pycache__/compat.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/idna/__pycache__/core.cpython-36.pyc b/venv/Lib/site-packages/idna/__pycache__/core.cpython-36.pyc
new file mode 100644
index 000000000..8f4ceba50
Binary files /dev/null and b/venv/Lib/site-packages/idna/__pycache__/core.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/idna/__pycache__/idnadata.cpython-36.pyc b/venv/Lib/site-packages/idna/__pycache__/idnadata.cpython-36.pyc
new file mode 100644
index 000000000..b9d14ca27
Binary files /dev/null and b/venv/Lib/site-packages/idna/__pycache__/idnadata.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/idna/__pycache__/intranges.cpython-36.pyc b/venv/Lib/site-packages/idna/__pycache__/intranges.cpython-36.pyc
new file mode 100644
index 000000000..9d92cd53f
Binary files /dev/null and b/venv/Lib/site-packages/idna/__pycache__/intranges.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/idna/__pycache__/package_data.cpython-36.pyc b/venv/Lib/site-packages/idna/__pycache__/package_data.cpython-36.pyc
new file mode 100644
index 000000000..65fde485a
Binary files /dev/null and b/venv/Lib/site-packages/idna/__pycache__/package_data.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/idna/__pycache__/uts46data.cpython-36.pyc b/venv/Lib/site-packages/idna/__pycache__/uts46data.cpython-36.pyc
new file mode 100644
index 000000000..bccac029f
Binary files /dev/null and b/venv/Lib/site-packages/idna/__pycache__/uts46data.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/idna/codec.py b/venv/Lib/site-packages/idna/codec.py
new file mode 100644
index 000000000..98c65ead1
--- /dev/null
+++ b/venv/Lib/site-packages/idna/codec.py
@@ -0,0 +1,118 @@
+from .core import encode, decode, alabel, ulabel, IDNAError
+import codecs
+import re
+import sys
+
+# Python 2/3 shim, mirroring the one in idna.core: 'unicode' is referenced
+# by the decoder below but does not exist on Python 3.
+if sys.version_info[0] >= 3:
+    unicode = str
+
+_unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]')
+
+class Codec(codecs.Codec):
+
+ def encode(self, data, errors='strict'):
+
+ if errors != 'strict':
+ raise IDNAError("Unsupported error handling \"{0}\"".format(errors))
+
+ if not data:
+            return b"", 0  # empty input encodes to empty bytes
+
+ return encode(data), len(data)
+
+ def decode(self, data, errors='strict'):
+
+ if errors != 'strict':
+ raise IDNAError("Unsupported error handling \"{0}\"".format(errors))
+
+ if not data:
+ return u"", 0
+
+ return decode(data), len(data)
+
+class IncrementalEncoder(codecs.BufferedIncrementalEncoder):
+ def _buffer_encode(self, data, errors, final):
+ if errors != 'strict':
+ raise IDNAError("Unsupported error handling \"{0}\"".format(errors))
+
+ if not data:
+            return (b"", 0)
+
+ labels = _unicode_dots_re.split(data)
+ trailing_dot = u''
+ if labels:
+ if not labels[-1]:
+ trailing_dot = '.'
+ del labels[-1]
+ elif not final:
+ # Keep potentially unfinished label until the next call
+ del labels[-1]
+ if labels:
+ trailing_dot = '.'
+
+ result = []
+ size = 0
+ for label in labels:
+ result.append(alabel(label))
+ if size:
+ size += 1
+ size += len(label)
+
+        # Join with U+002E; alabel() returns bytes, so join and append as bytes
+        result = b".".join(result) + trailing_dot.encode("ascii")
+ size += len(trailing_dot)
+ return (result, size)
+
+class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
+ def _buffer_decode(self, data, errors, final):
+ if errors != 'strict':
+ raise IDNAError("Unsupported error handling \"{0}\"".format(errors))
+
+ if not data:
+ return (u"", 0)
+
+ # IDNA allows decoding to operate on Unicode strings, too.
+ if isinstance(data, unicode):
+ labels = _unicode_dots_re.split(data)
+ else:
+            # Must be an ASCII string; decode it to text before splitting
+            data = data.decode("ascii")
+            labels = data.split(".")
+
+ trailing_dot = u''
+ if labels:
+ if not labels[-1]:
+ trailing_dot = u'.'
+ del labels[-1]
+ elif not final:
+ # Keep potentially unfinished label until the next call
+ del labels[-1]
+ if labels:
+ trailing_dot = u'.'
+
+ result = []
+ size = 0
+ for label in labels:
+ result.append(ulabel(label))
+ if size:
+ size += 1
+ size += len(label)
+
+ result = u".".join(result) + trailing_dot
+ size += len(trailing_dot)
+ return (result, size)
+
+
+class StreamWriter(Codec, codecs.StreamWriter):
+ pass
+
+class StreamReader(Codec, codecs.StreamReader):
+ pass
+
+def getregentry():
+ return codecs.CodecInfo(
+ name='idna',
+ encode=Codec().encode,
+ decode=Codec().decode,
+ incrementalencoder=IncrementalEncoder,
+ incrementaldecoder=IncrementalDecoder,
+ streamwriter=StreamWriter,
+ streamreader=StreamReader,
+ )
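+
+# A usage sketch (not part of the upstream module): the CodecInfo returned by
+# getregentry() can be registered under a custom, hypothetical name, e.g.
+#
+#     codecs.register(lambda name: getregentry() if name == 'idna2008' else None)
+#     u'ドメイン.テスト'.encode('idna2008')  # -> b'xn--eckwd4c7c.xn--zckzah'
+#
+# Registering under a distinct name avoids shadowing Python's built-in
+# "idna" (IDNA 2003) codec.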
diff --git a/venv/Lib/site-packages/idna/compat.py b/venv/Lib/site-packages/idna/compat.py
new file mode 100644
index 000000000..4d47f336d
--- /dev/null
+++ b/venv/Lib/site-packages/idna/compat.py
@@ -0,0 +1,12 @@
+from .core import *
+from .codec import *
+
+def ToASCII(label):
+ return encode(label)
+
+def ToUnicode(label):
+ return decode(label)
+
+def nameprep(s):
+ raise NotImplementedError("IDNA 2008 does not utilise nameprep protocol")
+
diff --git a/venv/Lib/site-packages/idna/core.py b/venv/Lib/site-packages/idna/core.py
new file mode 100644
index 000000000..41ec5c711
--- /dev/null
+++ b/venv/Lib/site-packages/idna/core.py
@@ -0,0 +1,400 @@
+from . import idnadata
+import bisect
+import unicodedata
+import re
+import sys
+from .intranges import intranges_contain
+
+_virama_combining_class = 9
+_alabel_prefix = b'xn--'
+_unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]')
+
+if sys.version_info[0] >= 3:
+ unicode = str
+ unichr = chr
+
+class IDNAError(UnicodeError):
+ """ Base exception for all IDNA-encoding related problems """
+ pass
+
+
+class IDNABidiError(IDNAError):
+ """ Exception when bidirectional requirements are not satisfied """
+ pass
+
+
+class InvalidCodepoint(IDNAError):
+ """ Exception when a disallowed or unallocated codepoint is used """
+ pass
+
+
+class InvalidCodepointContext(IDNAError):
+ """ Exception when the codepoint is not valid in the context it is used """
+ pass
+
+
+def _combining_class(cp):
+ v = unicodedata.combining(unichr(cp))
+ if v == 0:
+ if not unicodedata.name(unichr(cp)):
+ raise ValueError("Unknown character in unicodedata")
+ return v
+
+def _is_script(cp, script):
+ return intranges_contain(ord(cp), idnadata.scripts[script])
+
+def _punycode(s):
+ return s.encode('punycode')
+
+def _unot(s):
+ return 'U+{0:04X}'.format(s)
+
+
+def valid_label_length(label):
+
+ if len(label) > 63:
+ return False
+ return True
+
+
+def valid_string_length(label, trailing_dot):
+
+ if len(label) > (254 if trailing_dot else 253):
+ return False
+ return True
+
+
+def check_bidi(label, check_ltr=False):
+
+ # Bidi rules should only be applied if string contains RTL characters
+ bidi_label = False
+ for (idx, cp) in enumerate(label, 1):
+ direction = unicodedata.bidirectional(cp)
+ if direction == '':
+ # String likely comes from a newer version of Unicode
+ raise IDNABidiError('Unknown directionality in label {0} at position {1}'.format(repr(label), idx))
+ if direction in ['R', 'AL', 'AN']:
+ bidi_label = True
+ if not bidi_label and not check_ltr:
+ return True
+
+ # Bidi rule 1
+ direction = unicodedata.bidirectional(label[0])
+ if direction in ['R', 'AL']:
+ rtl = True
+ elif direction == 'L':
+ rtl = False
+ else:
+ raise IDNABidiError('First codepoint in label {0} must be directionality L, R or AL'.format(repr(label)))
+
+ valid_ending = False
+ number_type = False
+ for (idx, cp) in enumerate(label, 1):
+ direction = unicodedata.bidirectional(cp)
+
+ if rtl:
+ # Bidi rule 2
+ if not direction in ['R', 'AL', 'AN', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']:
+ raise IDNABidiError('Invalid direction for codepoint at position {0} in a right-to-left label'.format(idx))
+ # Bidi rule 3
+ if direction in ['R', 'AL', 'EN', 'AN']:
+ valid_ending = True
+ elif direction != 'NSM':
+ valid_ending = False
+ # Bidi rule 4
+ if direction in ['AN', 'EN']:
+ if not number_type:
+ number_type = direction
+ else:
+ if number_type != direction:
+ raise IDNABidiError('Can not mix numeral types in a right-to-left label')
+ else:
+ # Bidi rule 5
+ if not direction in ['L', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']:
+ raise IDNABidiError('Invalid direction for codepoint at position {0} in a left-to-right label'.format(idx))
+ # Bidi rule 6
+ if direction in ['L', 'EN']:
+ valid_ending = True
+ elif direction != 'NSM':
+ valid_ending = False
+
+ if not valid_ending:
+ raise IDNABidiError('Label ends with illegal codepoint directionality')
+
+ return True
+
+
+def check_initial_combiner(label):
+
+ if unicodedata.category(label[0])[0] == 'M':
+ raise IDNAError('Label begins with an illegal combining character')
+ return True
+
+
+def check_hyphen_ok(label):
+
+ if label[2:4] == '--':
+ raise IDNAError('Label has disallowed hyphens in 3rd and 4th position')
+ if label[0] == '-' or label[-1] == '-':
+ raise IDNAError('Label must not start or end with a hyphen')
+ return True
+
+
+def check_nfc(label):
+
+ if unicodedata.normalize('NFC', label) != label:
+ raise IDNAError('Label must be in Normalization Form C')
+
+
+def valid_contextj(label, pos):
+
+ cp_value = ord(label[pos])
+
+ if cp_value == 0x200c:
+
+ if pos > 0:
+ if _combining_class(ord(label[pos - 1])) == _virama_combining_class:
+ return True
+
+ ok = False
+ for i in range(pos-1, -1, -1):
+ joining_type = idnadata.joining_types.get(ord(label[i]))
+ if joining_type == ord('T'):
+ continue
+ if joining_type in [ord('L'), ord('D')]:
+ ok = True
+ break
+
+ if not ok:
+ return False
+
+ ok = False
+ for i in range(pos+1, len(label)):
+ joining_type = idnadata.joining_types.get(ord(label[i]))
+ if joining_type == ord('T'):
+ continue
+ if joining_type in [ord('R'), ord('D')]:
+ ok = True
+ break
+ return ok
+
+ if cp_value == 0x200d:
+
+ if pos > 0:
+ if _combining_class(ord(label[pos - 1])) == _virama_combining_class:
+ return True
+ return False
+
+ else:
+
+ return False
+
+
+def valid_contexto(label, pos, exception=False):
+
+ cp_value = ord(label[pos])
+
+ if cp_value == 0x00b7:
+ if 0 < pos < len(label)-1:
+ if ord(label[pos - 1]) == 0x006c and ord(label[pos + 1]) == 0x006c:
+ return True
+ return False
+
+ elif cp_value == 0x0375:
+ if pos < len(label)-1 and len(label) > 1:
+ return _is_script(label[pos + 1], 'Greek')
+ return False
+
+ elif cp_value == 0x05f3 or cp_value == 0x05f4:
+ if pos > 0:
+ return _is_script(label[pos - 1], 'Hebrew')
+ return False
+
+ elif cp_value == 0x30fb:
+ for cp in label:
+ if cp == u'\u30fb':
+ continue
+ if _is_script(cp, 'Hiragana') or _is_script(cp, 'Katakana') or _is_script(cp, 'Han'):
+ return True
+ return False
+
+ elif 0x660 <= cp_value <= 0x669:
+ for cp in label:
+ if 0x6f0 <= ord(cp) <= 0x06f9:
+ return False
+ return True
+
+ elif 0x6f0 <= cp_value <= 0x6f9:
+ for cp in label:
+ if 0x660 <= ord(cp) <= 0x0669:
+ return False
+ return True
+
+
+def check_label(label):
+
+ if isinstance(label, (bytes, bytearray)):
+ label = label.decode('utf-8')
+ if len(label) == 0:
+ raise IDNAError('Empty Label')
+
+ check_nfc(label)
+ check_hyphen_ok(label)
+ check_initial_combiner(label)
+
+ for (pos, cp) in enumerate(label):
+ cp_value = ord(cp)
+ if intranges_contain(cp_value, idnadata.codepoint_classes['PVALID']):
+ continue
+ elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTJ']):
+ try:
+ if not valid_contextj(label, pos):
+ raise InvalidCodepointContext('Joiner {0} not allowed at position {1} in {2}'.format(
+ _unot(cp_value), pos+1, repr(label)))
+ except ValueError:
+ raise IDNAError('Unknown codepoint adjacent to joiner {0} at position {1} in {2}'.format(
+ _unot(cp_value), pos+1, repr(label)))
+ elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTO']):
+ if not valid_contexto(label, pos):
+ raise InvalidCodepointContext('Codepoint {0} not allowed at position {1} in {2}'.format(_unot(cp_value), pos+1, repr(label)))
+ else:
+ raise InvalidCodepoint('Codepoint {0} at position {1} of {2} not allowed'.format(_unot(cp_value), pos+1, repr(label)))
+
+ check_bidi(label)
+
+
+def alabel(label):
+
+ try:
+ label = label.encode('ascii')
+ ulabel(label)
+ if not valid_label_length(label):
+ raise IDNAError('Label too long')
+ return label
+ except UnicodeEncodeError:
+ pass
+
+ if not label:
+ raise IDNAError('No Input')
+
+ label = unicode(label)
+ check_label(label)
+ label = _punycode(label)
+ label = _alabel_prefix + label
+
+ if not valid_label_length(label):
+ raise IDNAError('Label too long')
+
+ return label
+
+
+def ulabel(label):
+
+ if not isinstance(label, (bytes, bytearray)):
+ try:
+ label = label.encode('ascii')
+ except UnicodeEncodeError:
+ check_label(label)
+ return label
+
+ label = label.lower()
+ if label.startswith(_alabel_prefix):
+ label = label[len(_alabel_prefix):]
+ if not label:
+ raise IDNAError('Malformed A-label, no Punycode eligible content found')
+ if label.decode('ascii')[-1] == '-':
+ raise IDNAError('A-label must not end with a hyphen')
+ else:
+ check_label(label)
+ return label.decode('ascii')
+
+ label = label.decode('punycode')
+ check_label(label)
+ return label
+
+
+def uts46_remap(domain, std3_rules=True, transitional=False):
+ """Re-map the characters in the string according to UTS46 processing."""
+ from .uts46data import uts46data
+ output = u""
+ try:
+ for pos, char in enumerate(domain):
+ code_point = ord(char)
+ uts46row = uts46data[code_point if code_point < 256 else
+ bisect.bisect_left(uts46data, (code_point, "Z")) - 1]
+ status = uts46row[1]
+ replacement = uts46row[2] if len(uts46row) == 3 else None
+ if (status == "V" or
+ (status == "D" and not transitional) or
+ (status == "3" and not std3_rules and replacement is None)):
+ output += char
+ elif replacement is not None and (status == "M" or
+ (status == "3" and not std3_rules) or
+ (status == "D" and transitional)):
+ output += replacement
+ elif status != "I":
+ raise IndexError()
+ return unicodedata.normalize("NFC", output)
+ except IndexError:
+ raise InvalidCodepoint(
+ "Codepoint {0} not allowed at position {1} in {2}".format(
+ _unot(code_point), pos + 1, repr(domain)))
+
+
+def encode(s, strict=False, uts46=False, std3_rules=False, transitional=False):
+
+ if isinstance(s, (bytes, bytearray)):
+ s = s.decode("ascii")
+ if uts46:
+ s = uts46_remap(s, std3_rules, transitional)
+ trailing_dot = False
+ result = []
+ if strict:
+ labels = s.split('.')
+ else:
+ labels = _unicode_dots_re.split(s)
+ if not labels or labels == ['']:
+ raise IDNAError('Empty domain')
+ if labels[-1] == '':
+ del labels[-1]
+ trailing_dot = True
+ for label in labels:
+ s = alabel(label)
+ if s:
+ result.append(s)
+ else:
+ raise IDNAError('Empty label')
+ if trailing_dot:
+ result.append(b'')
+ s = b'.'.join(result)
+ if not valid_string_length(s, trailing_dot):
+ raise IDNAError('Domain too long')
+ return s
+
+
+def decode(s, strict=False, uts46=False, std3_rules=False):
+
+ if isinstance(s, (bytes, bytearray)):
+ s = s.decode("ascii")
+ if uts46:
+ s = uts46_remap(s, std3_rules, False)
+ trailing_dot = False
+ result = []
+ if not strict:
+ labels = _unicode_dots_re.split(s)
+ else:
+ labels = s.split(u'.')
+ if not labels or labels == ['']:
+ raise IDNAError('Empty domain')
+ if not labels[-1]:
+ del labels[-1]
+ trailing_dot = True
+ for label in labels:
+ s = ulabel(label)
+ if s:
+ result.append(s)
+ else:
+ raise IDNAError('Empty label')
+ if trailing_dot:
+ result.append(u'')
+ return u'.'.join(result)
diff --git a/venv/Lib/site-packages/idna/idnadata.py b/venv/Lib/site-packages/idna/idnadata.py
new file mode 100644
index 000000000..a284e4c84
--- /dev/null
+++ b/venv/Lib/site-packages/idna/idnadata.py
@@ -0,0 +1,2050 @@
+# This file is automatically generated by tools/idna-data
+
+__version__ = "13.0.0"
+scripts = {
+ 'Greek': (
+ 0x37000000374,
+ 0x37500000378,
+ 0x37a0000037e,
+ 0x37f00000380,
+ 0x38400000385,
+ 0x38600000387,
+ 0x3880000038b,
+ 0x38c0000038d,
+ 0x38e000003a2,
+ 0x3a3000003e2,
+ 0x3f000000400,
+ 0x1d2600001d2b,
+ 0x1d5d00001d62,
+ 0x1d6600001d6b,
+ 0x1dbf00001dc0,
+ 0x1f0000001f16,
+ 0x1f1800001f1e,
+ 0x1f2000001f46,
+ 0x1f4800001f4e,
+ 0x1f5000001f58,
+ 0x1f5900001f5a,
+ 0x1f5b00001f5c,
+ 0x1f5d00001f5e,
+ 0x1f5f00001f7e,
+ 0x1f8000001fb5,
+ 0x1fb600001fc5,
+ 0x1fc600001fd4,
+ 0x1fd600001fdc,
+ 0x1fdd00001ff0,
+ 0x1ff200001ff5,
+ 0x1ff600001fff,
+ 0x212600002127,
+ 0xab650000ab66,
+ 0x101400001018f,
+ 0x101a0000101a1,
+ 0x1d2000001d246,
+ ),
+ 'Han': (
+ 0x2e8000002e9a,
+ 0x2e9b00002ef4,
+ 0x2f0000002fd6,
+ 0x300500003006,
+ 0x300700003008,
+ 0x30210000302a,
+ 0x30380000303c,
+ 0x340000004dc0,
+ 0x4e0000009ffd,
+ 0xf9000000fa6e,
+ 0xfa700000fada,
+ 0x16ff000016ff2,
+ 0x200000002a6de,
+ 0x2a7000002b735,
+ 0x2b7400002b81e,
+ 0x2b8200002cea2,
+ 0x2ceb00002ebe1,
+ 0x2f8000002fa1e,
+ 0x300000003134b,
+ ),
+ 'Hebrew': (
+ 0x591000005c8,
+ 0x5d0000005eb,
+ 0x5ef000005f5,
+ 0xfb1d0000fb37,
+ 0xfb380000fb3d,
+ 0xfb3e0000fb3f,
+ 0xfb400000fb42,
+ 0xfb430000fb45,
+ 0xfb460000fb50,
+ ),
+ 'Hiragana': (
+ 0x304100003097,
+ 0x309d000030a0,
+ 0x1b0010001b11f,
+ 0x1b1500001b153,
+ 0x1f2000001f201,
+ ),
+ 'Katakana': (
+ 0x30a1000030fb,
+ 0x30fd00003100,
+ 0x31f000003200,
+ 0x32d0000032ff,
+ 0x330000003358,
+ 0xff660000ff70,
+ 0xff710000ff9e,
+ 0x1b0000001b001,
+ 0x1b1640001b168,
+ ),
+}
+joining_types = {
+ 0x600: 85,
+ 0x601: 85,
+ 0x602: 85,
+ 0x603: 85,
+ 0x604: 85,
+ 0x605: 85,
+ 0x608: 85,
+ 0x60b: 85,
+ 0x620: 68,
+ 0x621: 85,
+ 0x622: 82,
+ 0x623: 82,
+ 0x624: 82,
+ 0x625: 82,
+ 0x626: 68,
+ 0x627: 82,
+ 0x628: 68,
+ 0x629: 82,
+ 0x62a: 68,
+ 0x62b: 68,
+ 0x62c: 68,
+ 0x62d: 68,
+ 0x62e: 68,
+ 0x62f: 82,
+ 0x630: 82,
+ 0x631: 82,
+ 0x632: 82,
+ 0x633: 68,
+ 0x634: 68,
+ 0x635: 68,
+ 0x636: 68,
+ 0x637: 68,
+ 0x638: 68,
+ 0x639: 68,
+ 0x63a: 68,
+ 0x63b: 68,
+ 0x63c: 68,
+ 0x63d: 68,
+ 0x63e: 68,
+ 0x63f: 68,
+ 0x640: 67,
+ 0x641: 68,
+ 0x642: 68,
+ 0x643: 68,
+ 0x644: 68,
+ 0x645: 68,
+ 0x646: 68,
+ 0x647: 68,
+ 0x648: 82,
+ 0x649: 68,
+ 0x64a: 68,
+ 0x66e: 68,
+ 0x66f: 68,
+ 0x671: 82,
+ 0x672: 82,
+ 0x673: 82,
+ 0x674: 85,
+ 0x675: 82,
+ 0x676: 82,
+ 0x677: 82,
+ 0x678: 68,
+ 0x679: 68,
+ 0x67a: 68,
+ 0x67b: 68,
+ 0x67c: 68,
+ 0x67d: 68,
+ 0x67e: 68,
+ 0x67f: 68,
+ 0x680: 68,
+ 0x681: 68,
+ 0x682: 68,
+ 0x683: 68,
+ 0x684: 68,
+ 0x685: 68,
+ 0x686: 68,
+ 0x687: 68,
+ 0x688: 82,
+ 0x689: 82,
+ 0x68a: 82,
+ 0x68b: 82,
+ 0x68c: 82,
+ 0x68d: 82,
+ 0x68e: 82,
+ 0x68f: 82,
+ 0x690: 82,
+ 0x691: 82,
+ 0x692: 82,
+ 0x693: 82,
+ 0x694: 82,
+ 0x695: 82,
+ 0x696: 82,
+ 0x697: 82,
+ 0x698: 82,
+ 0x699: 82,
+ 0x69a: 68,
+ 0x69b: 68,
+ 0x69c: 68,
+ 0x69d: 68,
+ 0x69e: 68,
+ 0x69f: 68,
+ 0x6a0: 68,
+ 0x6a1: 68,
+ 0x6a2: 68,
+ 0x6a3: 68,
+ 0x6a4: 68,
+ 0x6a5: 68,
+ 0x6a6: 68,
+ 0x6a7: 68,
+ 0x6a8: 68,
+ 0x6a9: 68,
+ 0x6aa: 68,
+ 0x6ab: 68,
+ 0x6ac: 68,
+ 0x6ad: 68,
+ 0x6ae: 68,
+ 0x6af: 68,
+ 0x6b0: 68,
+ 0x6b1: 68,
+ 0x6b2: 68,
+ 0x6b3: 68,
+ 0x6b4: 68,
+ 0x6b5: 68,
+ 0x6b6: 68,
+ 0x6b7: 68,
+ 0x6b8: 68,
+ 0x6b9: 68,
+ 0x6ba: 68,
+ 0x6bb: 68,
+ 0x6bc: 68,
+ 0x6bd: 68,
+ 0x6be: 68,
+ 0x6bf: 68,
+ 0x6c0: 82,
+ 0x6c1: 68,
+ 0x6c2: 68,
+ 0x6c3: 82,
+ 0x6c4: 82,
+ 0x6c5: 82,
+ 0x6c6: 82,
+ 0x6c7: 82,
+ 0x6c8: 82,
+ 0x6c9: 82,
+ 0x6ca: 82,
+ 0x6cb: 82,
+ 0x6cc: 68,
+ 0x6cd: 82,
+ 0x6ce: 68,
+ 0x6cf: 82,
+ 0x6d0: 68,
+ 0x6d1: 68,
+ 0x6d2: 82,
+ 0x6d3: 82,
+ 0x6d5: 82,
+ 0x6dd: 85,
+ 0x6ee: 82,
+ 0x6ef: 82,
+ 0x6fa: 68,
+ 0x6fb: 68,
+ 0x6fc: 68,
+ 0x6ff: 68,
+ 0x70f: 84,
+ 0x710: 82,
+ 0x712: 68,
+ 0x713: 68,
+ 0x714: 68,
+ 0x715: 82,
+ 0x716: 82,
+ 0x717: 82,
+ 0x718: 82,
+ 0x719: 82,
+ 0x71a: 68,
+ 0x71b: 68,
+ 0x71c: 68,
+ 0x71d: 68,
+ 0x71e: 82,
+ 0x71f: 68,
+ 0x720: 68,
+ 0x721: 68,
+ 0x722: 68,
+ 0x723: 68,
+ 0x724: 68,
+ 0x725: 68,
+ 0x726: 68,
+ 0x727: 68,
+ 0x728: 82,
+ 0x729: 68,
+ 0x72a: 82,
+ 0x72b: 68,
+ 0x72c: 82,
+ 0x72d: 68,
+ 0x72e: 68,
+ 0x72f: 82,
+ 0x74d: 82,
+ 0x74e: 68,
+ 0x74f: 68,
+ 0x750: 68,
+ 0x751: 68,
+ 0x752: 68,
+ 0x753: 68,
+ 0x754: 68,
+ 0x755: 68,
+ 0x756: 68,
+ 0x757: 68,
+ 0x758: 68,
+ 0x759: 82,
+ 0x75a: 82,
+ 0x75b: 82,
+ 0x75c: 68,
+ 0x75d: 68,
+ 0x75e: 68,
+ 0x75f: 68,
+ 0x760: 68,
+ 0x761: 68,
+ 0x762: 68,
+ 0x763: 68,
+ 0x764: 68,
+ 0x765: 68,
+ 0x766: 68,
+ 0x767: 68,
+ 0x768: 68,
+ 0x769: 68,
+ 0x76a: 68,
+ 0x76b: 82,
+ 0x76c: 82,
+ 0x76d: 68,
+ 0x76e: 68,
+ 0x76f: 68,
+ 0x770: 68,
+ 0x771: 82,
+ 0x772: 68,
+ 0x773: 82,
+ 0x774: 82,
+ 0x775: 68,
+ 0x776: 68,
+ 0x777: 68,
+ 0x778: 82,
+ 0x779: 82,
+ 0x77a: 68,
+ 0x77b: 68,
+ 0x77c: 68,
+ 0x77d: 68,
+ 0x77e: 68,
+ 0x77f: 68,
+ 0x7ca: 68,
+ 0x7cb: 68,
+ 0x7cc: 68,
+ 0x7cd: 68,
+ 0x7ce: 68,
+ 0x7cf: 68,
+ 0x7d0: 68,
+ 0x7d1: 68,
+ 0x7d2: 68,
+ 0x7d3: 68,
+ 0x7d4: 68,
+ 0x7d5: 68,
+ 0x7d6: 68,
+ 0x7d7: 68,
+ 0x7d8: 68,
+ 0x7d9: 68,
+ 0x7da: 68,
+ 0x7db: 68,
+ 0x7dc: 68,
+ 0x7dd: 68,
+ 0x7de: 68,
+ 0x7df: 68,
+ 0x7e0: 68,
+ 0x7e1: 68,
+ 0x7e2: 68,
+ 0x7e3: 68,
+ 0x7e4: 68,
+ 0x7e5: 68,
+ 0x7e6: 68,
+ 0x7e7: 68,
+ 0x7e8: 68,
+ 0x7e9: 68,
+ 0x7ea: 68,
+ 0x7fa: 67,
+ 0x840: 82,
+ 0x841: 68,
+ 0x842: 68,
+ 0x843: 68,
+ 0x844: 68,
+ 0x845: 68,
+ 0x846: 82,
+ 0x847: 82,
+ 0x848: 68,
+ 0x849: 82,
+ 0x84a: 68,
+ 0x84b: 68,
+ 0x84c: 68,
+ 0x84d: 68,
+ 0x84e: 68,
+ 0x84f: 68,
+ 0x850: 68,
+ 0x851: 68,
+ 0x852: 68,
+ 0x853: 68,
+ 0x854: 82,
+ 0x855: 68,
+ 0x856: 82,
+ 0x857: 82,
+ 0x858: 82,
+ 0x860: 68,
+ 0x861: 85,
+ 0x862: 68,
+ 0x863: 68,
+ 0x864: 68,
+ 0x865: 68,
+ 0x866: 85,
+ 0x867: 82,
+ 0x868: 68,
+ 0x869: 82,
+ 0x86a: 82,
+ 0x8a0: 68,
+ 0x8a1: 68,
+ 0x8a2: 68,
+ 0x8a3: 68,
+ 0x8a4: 68,
+ 0x8a5: 68,
+ 0x8a6: 68,
+ 0x8a7: 68,
+ 0x8a8: 68,
+ 0x8a9: 68,
+ 0x8aa: 82,
+ 0x8ab: 82,
+ 0x8ac: 82,
+ 0x8ad: 85,
+ 0x8ae: 82,
+ 0x8af: 68,
+ 0x8b0: 68,
+ 0x8b1: 82,
+ 0x8b2: 82,
+ 0x8b3: 68,
+ 0x8b4: 68,
+ 0x8b6: 68,
+ 0x8b7: 68,
+ 0x8b8: 68,
+ 0x8b9: 82,
+ 0x8ba: 68,
+ 0x8bb: 68,
+ 0x8bc: 68,
+ 0x8bd: 68,
+ 0x8be: 68,
+ 0x8bf: 68,
+ 0x8c0: 68,
+ 0x8c1: 68,
+ 0x8c2: 68,
+ 0x8c3: 68,
+ 0x8c4: 68,
+ 0x8c5: 68,
+ 0x8c6: 68,
+ 0x8c7: 68,
+ 0x8e2: 85,
+ 0x1806: 85,
+ 0x1807: 68,
+ 0x180a: 67,
+ 0x180e: 85,
+ 0x1820: 68,
+ 0x1821: 68,
+ 0x1822: 68,
+ 0x1823: 68,
+ 0x1824: 68,
+ 0x1825: 68,
+ 0x1826: 68,
+ 0x1827: 68,
+ 0x1828: 68,
+ 0x1829: 68,
+ 0x182a: 68,
+ 0x182b: 68,
+ 0x182c: 68,
+ 0x182d: 68,
+ 0x182e: 68,
+ 0x182f: 68,
+ 0x1830: 68,
+ 0x1831: 68,
+ 0x1832: 68,
+ 0x1833: 68,
+ 0x1834: 68,
+ 0x1835: 68,
+ 0x1836: 68,
+ 0x1837: 68,
+ 0x1838: 68,
+ 0x1839: 68,
+ 0x183a: 68,
+ 0x183b: 68,
+ 0x183c: 68,
+ 0x183d: 68,
+ 0x183e: 68,
+ 0x183f: 68,
+ 0x1840: 68,
+ 0x1841: 68,
+ 0x1842: 68,
+ 0x1843: 68,
+ 0x1844: 68,
+ 0x1845: 68,
+ 0x1846: 68,
+ 0x1847: 68,
+ 0x1848: 68,
+ 0x1849: 68,
+ 0x184a: 68,
+ 0x184b: 68,
+ 0x184c: 68,
+ 0x184d: 68,
+ 0x184e: 68,
+ 0x184f: 68,
+ 0x1850: 68,
+ 0x1851: 68,
+ 0x1852: 68,
+ 0x1853: 68,
+ 0x1854: 68,
+ 0x1855: 68,
+ 0x1856: 68,
+ 0x1857: 68,
+ 0x1858: 68,
+ 0x1859: 68,
+ 0x185a: 68,
+ 0x185b: 68,
+ 0x185c: 68,
+ 0x185d: 68,
+ 0x185e: 68,
+ 0x185f: 68,
+ 0x1860: 68,
+ 0x1861: 68,
+ 0x1862: 68,
+ 0x1863: 68,
+ 0x1864: 68,
+ 0x1865: 68,
+ 0x1866: 68,
+ 0x1867: 68,
+ 0x1868: 68,
+ 0x1869: 68,
+ 0x186a: 68,
+ 0x186b: 68,
+ 0x186c: 68,
+ 0x186d: 68,
+ 0x186e: 68,
+ 0x186f: 68,
+ 0x1870: 68,
+ 0x1871: 68,
+ 0x1872: 68,
+ 0x1873: 68,
+ 0x1874: 68,
+ 0x1875: 68,
+ 0x1876: 68,
+ 0x1877: 68,
+ 0x1878: 68,
+ 0x1880: 85,
+ 0x1881: 85,
+ 0x1882: 85,
+ 0x1883: 85,
+ 0x1884: 85,
+ 0x1885: 84,
+ 0x1886: 84,
+ 0x1887: 68,
+ 0x1888: 68,
+ 0x1889: 68,
+ 0x188a: 68,
+ 0x188b: 68,
+ 0x188c: 68,
+ 0x188d: 68,
+ 0x188e: 68,
+ 0x188f: 68,
+ 0x1890: 68,
+ 0x1891: 68,
+ 0x1892: 68,
+ 0x1893: 68,
+ 0x1894: 68,
+ 0x1895: 68,
+ 0x1896: 68,
+ 0x1897: 68,
+ 0x1898: 68,
+ 0x1899: 68,
+ 0x189a: 68,
+ 0x189b: 68,
+ 0x189c: 68,
+ 0x189d: 68,
+ 0x189e: 68,
+ 0x189f: 68,
+ 0x18a0: 68,
+ 0x18a1: 68,
+ 0x18a2: 68,
+ 0x18a3: 68,
+ 0x18a4: 68,
+ 0x18a5: 68,
+ 0x18a6: 68,
+ 0x18a7: 68,
+ 0x18a8: 68,
+ 0x18aa: 68,
+ 0x200c: 85,
+ 0x200d: 67,
+ 0x202f: 85,
+ 0x2066: 85,
+ 0x2067: 85,
+ 0x2068: 85,
+ 0x2069: 85,
+ 0xa840: 68,
+ 0xa841: 68,
+ 0xa842: 68,
+ 0xa843: 68,
+ 0xa844: 68,
+ 0xa845: 68,
+ 0xa846: 68,
+ 0xa847: 68,
+ 0xa848: 68,
+ 0xa849: 68,
+ 0xa84a: 68,
+ 0xa84b: 68,
+ 0xa84c: 68,
+ 0xa84d: 68,
+ 0xa84e: 68,
+ 0xa84f: 68,
+ 0xa850: 68,
+ 0xa851: 68,
+ 0xa852: 68,
+ 0xa853: 68,
+ 0xa854: 68,
+ 0xa855: 68,
+ 0xa856: 68,
+ 0xa857: 68,
+ 0xa858: 68,
+ 0xa859: 68,
+ 0xa85a: 68,
+ 0xa85b: 68,
+ 0xa85c: 68,
+ 0xa85d: 68,
+ 0xa85e: 68,
+ 0xa85f: 68,
+ 0xa860: 68,
+ 0xa861: 68,
+ 0xa862: 68,
+ 0xa863: 68,
+ 0xa864: 68,
+ 0xa865: 68,
+ 0xa866: 68,
+ 0xa867: 68,
+ 0xa868: 68,
+ 0xa869: 68,
+ 0xa86a: 68,
+ 0xa86b: 68,
+ 0xa86c: 68,
+ 0xa86d: 68,
+ 0xa86e: 68,
+ 0xa86f: 68,
+ 0xa870: 68,
+ 0xa871: 68,
+ 0xa872: 76,
+ 0xa873: 85,
+ 0x10ac0: 68,
+ 0x10ac1: 68,
+ 0x10ac2: 68,
+ 0x10ac3: 68,
+ 0x10ac4: 68,
+ 0x10ac5: 82,
+ 0x10ac6: 85,
+ 0x10ac7: 82,
+ 0x10ac8: 85,
+ 0x10ac9: 82,
+ 0x10aca: 82,
+ 0x10acb: 85,
+ 0x10acc: 85,
+ 0x10acd: 76,
+ 0x10ace: 82,
+ 0x10acf: 82,
+ 0x10ad0: 82,
+ 0x10ad1: 82,
+ 0x10ad2: 82,
+ 0x10ad3: 68,
+ 0x10ad4: 68,
+ 0x10ad5: 68,
+ 0x10ad6: 68,
+ 0x10ad7: 76,
+ 0x10ad8: 68,
+ 0x10ad9: 68,
+ 0x10ada: 68,
+ 0x10adb: 68,
+ 0x10adc: 68,
+ 0x10add: 82,
+ 0x10ade: 68,
+ 0x10adf: 68,
+ 0x10ae0: 68,
+ 0x10ae1: 82,
+ 0x10ae2: 85,
+ 0x10ae3: 85,
+ 0x10ae4: 82,
+ 0x10aeb: 68,
+ 0x10aec: 68,
+ 0x10aed: 68,
+ 0x10aee: 68,
+ 0x10aef: 82,
+ 0x10b80: 68,
+ 0x10b81: 82,
+ 0x10b82: 68,
+ 0x10b83: 82,
+ 0x10b84: 82,
+ 0x10b85: 82,
+ 0x10b86: 68,
+ 0x10b87: 68,
+ 0x10b88: 68,
+ 0x10b89: 82,
+ 0x10b8a: 68,
+ 0x10b8b: 68,
+ 0x10b8c: 82,
+ 0x10b8d: 68,
+ 0x10b8e: 82,
+ 0x10b8f: 82,
+ 0x10b90: 68,
+ 0x10b91: 82,
+ 0x10ba9: 82,
+ 0x10baa: 82,
+ 0x10bab: 82,
+ 0x10bac: 82,
+ 0x10bad: 68,
+ 0x10bae: 68,
+ 0x10baf: 85,
+ 0x10d00: 76,
+ 0x10d01: 68,
+ 0x10d02: 68,
+ 0x10d03: 68,
+ 0x10d04: 68,
+ 0x10d05: 68,
+ 0x10d06: 68,
+ 0x10d07: 68,
+ 0x10d08: 68,
+ 0x10d09: 68,
+ 0x10d0a: 68,
+ 0x10d0b: 68,
+ 0x10d0c: 68,
+ 0x10d0d: 68,
+ 0x10d0e: 68,
+ 0x10d0f: 68,
+ 0x10d10: 68,
+ 0x10d11: 68,
+ 0x10d12: 68,
+ 0x10d13: 68,
+ 0x10d14: 68,
+ 0x10d15: 68,
+ 0x10d16: 68,
+ 0x10d17: 68,
+ 0x10d18: 68,
+ 0x10d19: 68,
+ 0x10d1a: 68,
+ 0x10d1b: 68,
+ 0x10d1c: 68,
+ 0x10d1d: 68,
+ 0x10d1e: 68,
+ 0x10d1f: 68,
+ 0x10d20: 68,
+ 0x10d21: 68,
+ 0x10d22: 82,
+ 0x10d23: 68,
+ 0x10f30: 68,
+ 0x10f31: 68,
+ 0x10f32: 68,
+ 0x10f33: 82,
+ 0x10f34: 68,
+ 0x10f35: 68,
+ 0x10f36: 68,
+ 0x10f37: 68,
+ 0x10f38: 68,
+ 0x10f39: 68,
+ 0x10f3a: 68,
+ 0x10f3b: 68,
+ 0x10f3c: 68,
+ 0x10f3d: 68,
+ 0x10f3e: 68,
+ 0x10f3f: 68,
+ 0x10f40: 68,
+ 0x10f41: 68,
+ 0x10f42: 68,
+ 0x10f43: 68,
+ 0x10f44: 68,
+ 0x10f45: 85,
+ 0x10f51: 68,
+ 0x10f52: 68,
+ 0x10f53: 68,
+ 0x10f54: 82,
+ 0x10fb0: 68,
+ 0x10fb1: 85,
+ 0x10fb2: 68,
+ 0x10fb3: 68,
+ 0x10fb4: 82,
+ 0x10fb5: 82,
+ 0x10fb6: 82,
+ 0x10fb7: 85,
+ 0x10fb8: 68,
+ 0x10fb9: 82,
+ 0x10fba: 82,
+ 0x10fbb: 68,
+ 0x10fbc: 68,
+ 0x10fbd: 82,
+ 0x10fbe: 68,
+ 0x10fbf: 68,
+ 0x10fc0: 85,
+ 0x10fc1: 68,
+ 0x10fc2: 82,
+ 0x10fc3: 82,
+ 0x10fc4: 68,
+ 0x10fc5: 85,
+ 0x10fc6: 85,
+ 0x10fc7: 85,
+ 0x10fc8: 85,
+ 0x10fc9: 82,
+ 0x10fca: 68,
+ 0x10fcb: 76,
+ 0x110bd: 85,
+ 0x110cd: 85,
+ 0x1e900: 68,
+ 0x1e901: 68,
+ 0x1e902: 68,
+ 0x1e903: 68,
+ 0x1e904: 68,
+ 0x1e905: 68,
+ 0x1e906: 68,
+ 0x1e907: 68,
+ 0x1e908: 68,
+ 0x1e909: 68,
+ 0x1e90a: 68,
+ 0x1e90b: 68,
+ 0x1e90c: 68,
+ 0x1e90d: 68,
+ 0x1e90e: 68,
+ 0x1e90f: 68,
+ 0x1e910: 68,
+ 0x1e911: 68,
+ 0x1e912: 68,
+ 0x1e913: 68,
+ 0x1e914: 68,
+ 0x1e915: 68,
+ 0x1e916: 68,
+ 0x1e917: 68,
+ 0x1e918: 68,
+ 0x1e919: 68,
+ 0x1e91a: 68,
+ 0x1e91b: 68,
+ 0x1e91c: 68,
+ 0x1e91d: 68,
+ 0x1e91e: 68,
+ 0x1e91f: 68,
+ 0x1e920: 68,
+ 0x1e921: 68,
+ 0x1e922: 68,
+ 0x1e923: 68,
+ 0x1e924: 68,
+ 0x1e925: 68,
+ 0x1e926: 68,
+ 0x1e927: 68,
+ 0x1e928: 68,
+ 0x1e929: 68,
+ 0x1e92a: 68,
+ 0x1e92b: 68,
+ 0x1e92c: 68,
+ 0x1e92d: 68,
+ 0x1e92e: 68,
+ 0x1e92f: 68,
+ 0x1e930: 68,
+ 0x1e931: 68,
+ 0x1e932: 68,
+ 0x1e933: 68,
+ 0x1e934: 68,
+ 0x1e935: 68,
+ 0x1e936: 68,
+ 0x1e937: 68,
+ 0x1e938: 68,
+ 0x1e939: 68,
+ 0x1e93a: 68,
+ 0x1e93b: 68,
+ 0x1e93c: 68,
+ 0x1e93d: 68,
+ 0x1e93e: 68,
+ 0x1e93f: 68,
+ 0x1e940: 68,
+ 0x1e941: 68,
+ 0x1e942: 68,
+ 0x1e943: 68,
+ 0x1e94b: 84,
+}
+codepoint_classes = {
+ 'PVALID': (
+ 0x2d0000002e,
+ 0x300000003a,
+ 0x610000007b,
+ 0xdf000000f7,
+ 0xf800000100,
+ 0x10100000102,
+ 0x10300000104,
+ 0x10500000106,
+ 0x10700000108,
+ 0x1090000010a,
+ 0x10b0000010c,
+ 0x10d0000010e,
+ 0x10f00000110,
+ 0x11100000112,
+ 0x11300000114,
+ 0x11500000116,
+ 0x11700000118,
+ 0x1190000011a,
+ 0x11b0000011c,
+ 0x11d0000011e,
+ 0x11f00000120,
+ 0x12100000122,
+ 0x12300000124,
+ 0x12500000126,
+ 0x12700000128,
+ 0x1290000012a,
+ 0x12b0000012c,
+ 0x12d0000012e,
+ 0x12f00000130,
+ 0x13100000132,
+ 0x13500000136,
+ 0x13700000139,
+ 0x13a0000013b,
+ 0x13c0000013d,
+ 0x13e0000013f,
+ 0x14200000143,
+ 0x14400000145,
+ 0x14600000147,
+ 0x14800000149,
+ 0x14b0000014c,
+ 0x14d0000014e,
+ 0x14f00000150,
+ 0x15100000152,
+ 0x15300000154,
+ 0x15500000156,
+ 0x15700000158,
+ 0x1590000015a,
+ 0x15b0000015c,
+ 0x15d0000015e,
+ 0x15f00000160,
+ 0x16100000162,
+ 0x16300000164,
+ 0x16500000166,
+ 0x16700000168,
+ 0x1690000016a,
+ 0x16b0000016c,
+ 0x16d0000016e,
+ 0x16f00000170,
+ 0x17100000172,
+ 0x17300000174,
+ 0x17500000176,
+ 0x17700000178,
+ 0x17a0000017b,
+ 0x17c0000017d,
+ 0x17e0000017f,
+ 0x18000000181,
+ 0x18300000184,
+ 0x18500000186,
+ 0x18800000189,
+ 0x18c0000018e,
+ 0x19200000193,
+ 0x19500000196,
+ 0x1990000019c,
+ 0x19e0000019f,
+ 0x1a1000001a2,
+ 0x1a3000001a4,
+ 0x1a5000001a6,
+ 0x1a8000001a9,
+ 0x1aa000001ac,
+ 0x1ad000001ae,
+ 0x1b0000001b1,
+ 0x1b4000001b5,
+ 0x1b6000001b7,
+ 0x1b9000001bc,
+ 0x1bd000001c4,
+ 0x1ce000001cf,
+ 0x1d0000001d1,
+ 0x1d2000001d3,
+ 0x1d4000001d5,
+ 0x1d6000001d7,
+ 0x1d8000001d9,
+ 0x1da000001db,
+ 0x1dc000001de,
+ 0x1df000001e0,
+ 0x1e1000001e2,
+ 0x1e3000001e4,
+ 0x1e5000001e6,
+ 0x1e7000001e8,
+ 0x1e9000001ea,
+ 0x1eb000001ec,
+ 0x1ed000001ee,
+ 0x1ef000001f1,
+ 0x1f5000001f6,
+ 0x1f9000001fa,
+ 0x1fb000001fc,
+ 0x1fd000001fe,
+ 0x1ff00000200,
+ 0x20100000202,
+ 0x20300000204,
+ 0x20500000206,
+ 0x20700000208,
+ 0x2090000020a,
+ 0x20b0000020c,
+ 0x20d0000020e,
+ 0x20f00000210,
+ 0x21100000212,
+ 0x21300000214,
+ 0x21500000216,
+ 0x21700000218,
+ 0x2190000021a,
+ 0x21b0000021c,
+ 0x21d0000021e,
+ 0x21f00000220,
+ 0x22100000222,
+ 0x22300000224,
+ 0x22500000226,
+ 0x22700000228,
+ 0x2290000022a,
+ 0x22b0000022c,
+ 0x22d0000022e,
+ 0x22f00000230,
+ 0x23100000232,
+ 0x2330000023a,
+ 0x23c0000023d,
+ 0x23f00000241,
+ 0x24200000243,
+ 0x24700000248,
+ 0x2490000024a,
+ 0x24b0000024c,
+ 0x24d0000024e,
+ 0x24f000002b0,
+ 0x2b9000002c2,
+ 0x2c6000002d2,
+ 0x2ec000002ed,
+ 0x2ee000002ef,
+ 0x30000000340,
+ 0x34200000343,
+ 0x3460000034f,
+ 0x35000000370,
+ 0x37100000372,
+ 0x37300000374,
+ 0x37700000378,
+ 0x37b0000037e,
+ 0x39000000391,
+ 0x3ac000003cf,
+ 0x3d7000003d8,
+ 0x3d9000003da,
+ 0x3db000003dc,
+ 0x3dd000003de,
+ 0x3df000003e0,
+ 0x3e1000003e2,
+ 0x3e3000003e4,
+ 0x3e5000003e6,
+ 0x3e7000003e8,
+ 0x3e9000003ea,
+ 0x3eb000003ec,
+ 0x3ed000003ee,
+ 0x3ef000003f0,
+ 0x3f3000003f4,
+ 0x3f8000003f9,
+ 0x3fb000003fd,
+ 0x43000000460,
+ 0x46100000462,
+ 0x46300000464,
+ 0x46500000466,
+ 0x46700000468,
+ 0x4690000046a,
+ 0x46b0000046c,
+ 0x46d0000046e,
+ 0x46f00000470,
+ 0x47100000472,
+ 0x47300000474,
+ 0x47500000476,
+ 0x47700000478,
+ 0x4790000047a,
+ 0x47b0000047c,
+ 0x47d0000047e,
+ 0x47f00000480,
+ 0x48100000482,
+ 0x48300000488,
+ 0x48b0000048c,
+ 0x48d0000048e,
+ 0x48f00000490,
+ 0x49100000492,
+ 0x49300000494,
+ 0x49500000496,
+ 0x49700000498,
+ 0x4990000049a,
+ 0x49b0000049c,
+ 0x49d0000049e,
+ 0x49f000004a0,
+ 0x4a1000004a2,
+ 0x4a3000004a4,
+ 0x4a5000004a6,
+ 0x4a7000004a8,
+ 0x4a9000004aa,
+ 0x4ab000004ac,
+ 0x4ad000004ae,
+ 0x4af000004b0,
+ 0x4b1000004b2,
+ 0x4b3000004b4,
+ 0x4b5000004b6,
+ 0x4b7000004b8,
+ 0x4b9000004ba,
+ 0x4bb000004bc,
+ 0x4bd000004be,
+ 0x4bf000004c0,
+ 0x4c2000004c3,
+ 0x4c4000004c5,
+ 0x4c6000004c7,
+ 0x4c8000004c9,
+ 0x4ca000004cb,
+ 0x4cc000004cd,
+ 0x4ce000004d0,
+ 0x4d1000004d2,
+ 0x4d3000004d4,
+ 0x4d5000004d6,
+ 0x4d7000004d8,
+ 0x4d9000004da,
+ 0x4db000004dc,
+ 0x4dd000004de,
+ 0x4df000004e0,
+ 0x4e1000004e2,
+ 0x4e3000004e4,
+ 0x4e5000004e6,
+ 0x4e7000004e8,
+ 0x4e9000004ea,
+ 0x4eb000004ec,
+ 0x4ed000004ee,
+ 0x4ef000004f0,
+ 0x4f1000004f2,
+ 0x4f3000004f4,
+ 0x4f5000004f6,
+ 0x4f7000004f8,
+ 0x4f9000004fa,
+ 0x4fb000004fc,
+ 0x4fd000004fe,
+ 0x4ff00000500,
+ 0x50100000502,
+ 0x50300000504,
+ 0x50500000506,
+ 0x50700000508,
+ 0x5090000050a,
+ 0x50b0000050c,
+ 0x50d0000050e,
+ 0x50f00000510,
+ 0x51100000512,
+ 0x51300000514,
+ 0x51500000516,
+ 0x51700000518,
+ 0x5190000051a,
+ 0x51b0000051c,
+ 0x51d0000051e,
+ 0x51f00000520,
+ 0x52100000522,
+ 0x52300000524,
+ 0x52500000526,
+ 0x52700000528,
+ 0x5290000052a,
+ 0x52b0000052c,
+ 0x52d0000052e,
+ 0x52f00000530,
+ 0x5590000055a,
+ 0x56000000587,
+ 0x58800000589,
+ 0x591000005be,
+ 0x5bf000005c0,
+ 0x5c1000005c3,
+ 0x5c4000005c6,
+ 0x5c7000005c8,
+ 0x5d0000005eb,
+ 0x5ef000005f3,
+ 0x6100000061b,
+ 0x62000000640,
+ 0x64100000660,
+ 0x66e00000675,
+ 0x679000006d4,
+ 0x6d5000006dd,
+ 0x6df000006e9,
+ 0x6ea000006f0,
+ 0x6fa00000700,
+ 0x7100000074b,
+ 0x74d000007b2,
+ 0x7c0000007f6,
+ 0x7fd000007fe,
+ 0x8000000082e,
+ 0x8400000085c,
+ 0x8600000086b,
+ 0x8a0000008b5,
+ 0x8b6000008c8,
+ 0x8d3000008e2,
+ 0x8e300000958,
+ 0x96000000964,
+ 0x96600000970,
+ 0x97100000984,
+ 0x9850000098d,
+ 0x98f00000991,
+ 0x993000009a9,
+ 0x9aa000009b1,
+ 0x9b2000009b3,
+ 0x9b6000009ba,
+ 0x9bc000009c5,
+ 0x9c7000009c9,
+ 0x9cb000009cf,
+ 0x9d7000009d8,
+ 0x9e0000009e4,
+ 0x9e6000009f2,
+ 0x9fc000009fd,
+ 0x9fe000009ff,
+ 0xa0100000a04,
+ 0xa0500000a0b,
+ 0xa0f00000a11,
+ 0xa1300000a29,
+ 0xa2a00000a31,
+ 0xa3200000a33,
+ 0xa3500000a36,
+ 0xa3800000a3a,
+ 0xa3c00000a3d,
+ 0xa3e00000a43,
+ 0xa4700000a49,
+ 0xa4b00000a4e,
+ 0xa5100000a52,
+ 0xa5c00000a5d,
+ 0xa6600000a76,
+ 0xa8100000a84,
+ 0xa8500000a8e,
+ 0xa8f00000a92,
+ 0xa9300000aa9,
+ 0xaaa00000ab1,
+ 0xab200000ab4,
+ 0xab500000aba,
+ 0xabc00000ac6,
+ 0xac700000aca,
+ 0xacb00000ace,
+ 0xad000000ad1,
+ 0xae000000ae4,
+ 0xae600000af0,
+ 0xaf900000b00,
+ 0xb0100000b04,
+ 0xb0500000b0d,
+ 0xb0f00000b11,
+ 0xb1300000b29,
+ 0xb2a00000b31,
+ 0xb3200000b34,
+ 0xb3500000b3a,
+ 0xb3c00000b45,
+ 0xb4700000b49,
+ 0xb4b00000b4e,
+ 0xb5500000b58,
+ 0xb5f00000b64,
+ 0xb6600000b70,
+ 0xb7100000b72,
+ 0xb8200000b84,
+ 0xb8500000b8b,
+ 0xb8e00000b91,
+ 0xb9200000b96,
+ 0xb9900000b9b,
+ 0xb9c00000b9d,
+ 0xb9e00000ba0,
+ 0xba300000ba5,
+ 0xba800000bab,
+ 0xbae00000bba,
+ 0xbbe00000bc3,
+ 0xbc600000bc9,
+ 0xbca00000bce,
+ 0xbd000000bd1,
+ 0xbd700000bd8,
+ 0xbe600000bf0,
+ 0xc0000000c0d,
+ 0xc0e00000c11,
+ 0xc1200000c29,
+ 0xc2a00000c3a,
+ 0xc3d00000c45,
+ 0xc4600000c49,
+ 0xc4a00000c4e,
+ 0xc5500000c57,
+ 0xc5800000c5b,
+ 0xc6000000c64,
+ 0xc6600000c70,
+ 0xc8000000c84,
+ 0xc8500000c8d,
+ 0xc8e00000c91,
+ 0xc9200000ca9,
+ 0xcaa00000cb4,
+ 0xcb500000cba,
+ 0xcbc00000cc5,
+ 0xcc600000cc9,
+ 0xcca00000cce,
+ 0xcd500000cd7,
+ 0xcde00000cdf,
+ 0xce000000ce4,
+ 0xce600000cf0,
+ 0xcf100000cf3,
+ 0xd0000000d0d,
+ 0xd0e00000d11,
+ 0xd1200000d45,
+ 0xd4600000d49,
+ 0xd4a00000d4f,
+ 0xd5400000d58,
+ 0xd5f00000d64,
+ 0xd6600000d70,
+ 0xd7a00000d80,
+ 0xd8100000d84,
+ 0xd8500000d97,
+ 0xd9a00000db2,
+ 0xdb300000dbc,
+ 0xdbd00000dbe,
+ 0xdc000000dc7,
+ 0xdca00000dcb,
+ 0xdcf00000dd5,
+ 0xdd600000dd7,
+ 0xdd800000de0,
+ 0xde600000df0,
+ 0xdf200000df4,
+ 0xe0100000e33,
+ 0xe3400000e3b,
+ 0xe4000000e4f,
+ 0xe5000000e5a,
+ 0xe8100000e83,
+ 0xe8400000e85,
+ 0xe8600000e8b,
+ 0xe8c00000ea4,
+ 0xea500000ea6,
+ 0xea700000eb3,
+ 0xeb400000ebe,
+ 0xec000000ec5,
+ 0xec600000ec7,
+ 0xec800000ece,
+ 0xed000000eda,
+ 0xede00000ee0,
+ 0xf0000000f01,
+ 0xf0b00000f0c,
+ 0xf1800000f1a,
+ 0xf2000000f2a,
+ 0xf3500000f36,
+ 0xf3700000f38,
+ 0xf3900000f3a,
+ 0xf3e00000f43,
+ 0xf4400000f48,
+ 0xf4900000f4d,
+ 0xf4e00000f52,
+ 0xf5300000f57,
+ 0xf5800000f5c,
+ 0xf5d00000f69,
+ 0xf6a00000f6d,
+ 0xf7100000f73,
+ 0xf7400000f75,
+ 0xf7a00000f81,
+ 0xf8200000f85,
+ 0xf8600000f93,
+ 0xf9400000f98,
+ 0xf9900000f9d,
+ 0xf9e00000fa2,
+ 0xfa300000fa7,
+ 0xfa800000fac,
+ 0xfad00000fb9,
+ 0xfba00000fbd,
+ 0xfc600000fc7,
+ 0x10000000104a,
+ 0x10500000109e,
+ 0x10d0000010fb,
+ 0x10fd00001100,
+ 0x120000001249,
+ 0x124a0000124e,
+ 0x125000001257,
+ 0x125800001259,
+ 0x125a0000125e,
+ 0x126000001289,
+ 0x128a0000128e,
+ 0x1290000012b1,
+ 0x12b2000012b6,
+ 0x12b8000012bf,
+ 0x12c0000012c1,
+ 0x12c2000012c6,
+ 0x12c8000012d7,
+ 0x12d800001311,
+ 0x131200001316,
+ 0x13180000135b,
+ 0x135d00001360,
+ 0x138000001390,
+ 0x13a0000013f6,
+ 0x14010000166d,
+ 0x166f00001680,
+ 0x16810000169b,
+ 0x16a0000016eb,
+ 0x16f1000016f9,
+ 0x17000000170d,
+ 0x170e00001715,
+ 0x172000001735,
+ 0x174000001754,
+ 0x17600000176d,
+ 0x176e00001771,
+ 0x177200001774,
+ 0x1780000017b4,
+ 0x17b6000017d4,
+ 0x17d7000017d8,
+ 0x17dc000017de,
+ 0x17e0000017ea,
+ 0x18100000181a,
+ 0x182000001879,
+ 0x1880000018ab,
+ 0x18b0000018f6,
+ 0x19000000191f,
+ 0x19200000192c,
+ 0x19300000193c,
+ 0x19460000196e,
+ 0x197000001975,
+ 0x1980000019ac,
+ 0x19b0000019ca,
+ 0x19d0000019da,
+ 0x1a0000001a1c,
+ 0x1a2000001a5f,
+ 0x1a6000001a7d,
+ 0x1a7f00001a8a,
+ 0x1a9000001a9a,
+ 0x1aa700001aa8,
+ 0x1ab000001abe,
+ 0x1abf00001ac1,
+ 0x1b0000001b4c,
+ 0x1b5000001b5a,
+ 0x1b6b00001b74,
+ 0x1b8000001bf4,
+ 0x1c0000001c38,
+ 0x1c4000001c4a,
+ 0x1c4d00001c7e,
+ 0x1cd000001cd3,
+ 0x1cd400001cfb,
+ 0x1d0000001d2c,
+ 0x1d2f00001d30,
+ 0x1d3b00001d3c,
+ 0x1d4e00001d4f,
+ 0x1d6b00001d78,
+ 0x1d7900001d9b,
+ 0x1dc000001dfa,
+ 0x1dfb00001e00,
+ 0x1e0100001e02,
+ 0x1e0300001e04,
+ 0x1e0500001e06,
+ 0x1e0700001e08,
+ 0x1e0900001e0a,
+ 0x1e0b00001e0c,
+ 0x1e0d00001e0e,
+ 0x1e0f00001e10,
+ 0x1e1100001e12,
+ 0x1e1300001e14,
+ 0x1e1500001e16,
+ 0x1e1700001e18,
+ 0x1e1900001e1a,
+ 0x1e1b00001e1c,
+ 0x1e1d00001e1e,
+ 0x1e1f00001e20,
+ 0x1e2100001e22,
+ 0x1e2300001e24,
+ 0x1e2500001e26,
+ 0x1e2700001e28,
+ 0x1e2900001e2a,
+ 0x1e2b00001e2c,
+ 0x1e2d00001e2e,
+ 0x1e2f00001e30,
+ 0x1e3100001e32,
+ 0x1e3300001e34,
+ 0x1e3500001e36,
+ 0x1e3700001e38,
+ 0x1e3900001e3a,
+ 0x1e3b00001e3c,
+ 0x1e3d00001e3e,
+ 0x1e3f00001e40,
+ 0x1e4100001e42,
+ 0x1e4300001e44,
+ 0x1e4500001e46,
+ 0x1e4700001e48,
+ 0x1e4900001e4a,
+ 0x1e4b00001e4c,
+ 0x1e4d00001e4e,
+ 0x1e4f00001e50,
+ 0x1e5100001e52,
+ 0x1e5300001e54,
+ 0x1e5500001e56,
+ 0x1e5700001e58,
+ 0x1e5900001e5a,
+ 0x1e5b00001e5c,
+ 0x1e5d00001e5e,
+ 0x1e5f00001e60,
+ 0x1e6100001e62,
+ 0x1e6300001e64,
+ 0x1e6500001e66,
+ 0x1e6700001e68,
+ 0x1e6900001e6a,
+ 0x1e6b00001e6c,
+ 0x1e6d00001e6e,
+ 0x1e6f00001e70,
+ 0x1e7100001e72,
+ 0x1e7300001e74,
+ 0x1e7500001e76,
+ 0x1e7700001e78,
+ 0x1e7900001e7a,
+ 0x1e7b00001e7c,
+ 0x1e7d00001e7e,
+ 0x1e7f00001e80,
+ 0x1e8100001e82,
+ 0x1e8300001e84,
+ 0x1e8500001e86,
+ 0x1e8700001e88,
+ 0x1e8900001e8a,
+ 0x1e8b00001e8c,
+ 0x1e8d00001e8e,
+ 0x1e8f00001e90,
+ 0x1e9100001e92,
+ 0x1e9300001e94,
+ 0x1e9500001e9a,
+ 0x1e9c00001e9e,
+ 0x1e9f00001ea0,
+ 0x1ea100001ea2,
+ 0x1ea300001ea4,
+ 0x1ea500001ea6,
+ 0x1ea700001ea8,
+ 0x1ea900001eaa,
+ 0x1eab00001eac,
+ 0x1ead00001eae,
+ 0x1eaf00001eb0,
+ 0x1eb100001eb2,
+ 0x1eb300001eb4,
+ 0x1eb500001eb6,
+ 0x1eb700001eb8,
+ 0x1eb900001eba,
+ 0x1ebb00001ebc,
+ 0x1ebd00001ebe,
+ 0x1ebf00001ec0,
+ 0x1ec100001ec2,
+ 0x1ec300001ec4,
+ 0x1ec500001ec6,
+ 0x1ec700001ec8,
+ 0x1ec900001eca,
+ 0x1ecb00001ecc,
+ 0x1ecd00001ece,
+ 0x1ecf00001ed0,
+ 0x1ed100001ed2,
+ 0x1ed300001ed4,
+ 0x1ed500001ed6,
+ 0x1ed700001ed8,
+ 0x1ed900001eda,
+ 0x1edb00001edc,
+ 0x1edd00001ede,
+ 0x1edf00001ee0,
+ 0x1ee100001ee2,
+ 0x1ee300001ee4,
+ 0x1ee500001ee6,
+ 0x1ee700001ee8,
+ 0x1ee900001eea,
+ 0x1eeb00001eec,
+ 0x1eed00001eee,
+ 0x1eef00001ef0,
+ 0x1ef100001ef2,
+ 0x1ef300001ef4,
+ 0x1ef500001ef6,
+ 0x1ef700001ef8,
+ 0x1ef900001efa,
+ 0x1efb00001efc,
+ 0x1efd00001efe,
+ 0x1eff00001f08,
+ 0x1f1000001f16,
+ 0x1f2000001f28,
+ 0x1f3000001f38,
+ 0x1f4000001f46,
+ 0x1f5000001f58,
+ 0x1f6000001f68,
+ 0x1f7000001f71,
+ 0x1f7200001f73,
+ 0x1f7400001f75,
+ 0x1f7600001f77,
+ 0x1f7800001f79,
+ 0x1f7a00001f7b,
+ 0x1f7c00001f7d,
+ 0x1fb000001fb2,
+ 0x1fb600001fb7,
+ 0x1fc600001fc7,
+ 0x1fd000001fd3,
+ 0x1fd600001fd8,
+ 0x1fe000001fe3,
+ 0x1fe400001fe8,
+ 0x1ff600001ff7,
+ 0x214e0000214f,
+ 0x218400002185,
+ 0x2c3000002c5f,
+ 0x2c6100002c62,
+ 0x2c6500002c67,
+ 0x2c6800002c69,
+ 0x2c6a00002c6b,
+ 0x2c6c00002c6d,
+ 0x2c7100002c72,
+ 0x2c7300002c75,
+ 0x2c7600002c7c,
+ 0x2c8100002c82,
+ 0x2c8300002c84,
+ 0x2c8500002c86,
+ 0x2c8700002c88,
+ 0x2c8900002c8a,
+ 0x2c8b00002c8c,
+ 0x2c8d00002c8e,
+ 0x2c8f00002c90,
+ 0x2c9100002c92,
+ 0x2c9300002c94,
+ 0x2c9500002c96,
+ 0x2c9700002c98,
+ 0x2c9900002c9a,
+ 0x2c9b00002c9c,
+ 0x2c9d00002c9e,
+ 0x2c9f00002ca0,
+ 0x2ca100002ca2,
+ 0x2ca300002ca4,
+ 0x2ca500002ca6,
+ 0x2ca700002ca8,
+ 0x2ca900002caa,
+ 0x2cab00002cac,
+ 0x2cad00002cae,
+ 0x2caf00002cb0,
+ 0x2cb100002cb2,
+ 0x2cb300002cb4,
+ 0x2cb500002cb6,
+ 0x2cb700002cb8,
+ 0x2cb900002cba,
+ 0x2cbb00002cbc,
+ 0x2cbd00002cbe,
+ 0x2cbf00002cc0,
+ 0x2cc100002cc2,
+ 0x2cc300002cc4,
+ 0x2cc500002cc6,
+ 0x2cc700002cc8,
+ 0x2cc900002cca,
+ 0x2ccb00002ccc,
+ 0x2ccd00002cce,
+ 0x2ccf00002cd0,
+ 0x2cd100002cd2,
+ 0x2cd300002cd4,
+ 0x2cd500002cd6,
+ 0x2cd700002cd8,
+ 0x2cd900002cda,
+ 0x2cdb00002cdc,
+ 0x2cdd00002cde,
+ 0x2cdf00002ce0,
+ 0x2ce100002ce2,
+ 0x2ce300002ce5,
+ 0x2cec00002ced,
+ 0x2cee00002cf2,
+ 0x2cf300002cf4,
+ 0x2d0000002d26,
+ 0x2d2700002d28,
+ 0x2d2d00002d2e,
+ 0x2d3000002d68,
+ 0x2d7f00002d97,
+ 0x2da000002da7,
+ 0x2da800002daf,
+ 0x2db000002db7,
+ 0x2db800002dbf,
+ 0x2dc000002dc7,
+ 0x2dc800002dcf,
+ 0x2dd000002dd7,
+ 0x2dd800002ddf,
+ 0x2de000002e00,
+ 0x2e2f00002e30,
+ 0x300500003008,
+ 0x302a0000302e,
+ 0x303c0000303d,
+ 0x304100003097,
+ 0x30990000309b,
+ 0x309d0000309f,
+ 0x30a1000030fb,
+ 0x30fc000030ff,
+ 0x310500003130,
+ 0x31a0000031c0,
+ 0x31f000003200,
+ 0x340000004dc0,
+ 0x4e0000009ffd,
+ 0xa0000000a48d,
+ 0xa4d00000a4fe,
+ 0xa5000000a60d,
+ 0xa6100000a62c,
+ 0xa6410000a642,
+ 0xa6430000a644,
+ 0xa6450000a646,
+ 0xa6470000a648,
+ 0xa6490000a64a,
+ 0xa64b0000a64c,
+ 0xa64d0000a64e,
+ 0xa64f0000a650,
+ 0xa6510000a652,
+ 0xa6530000a654,
+ 0xa6550000a656,
+ 0xa6570000a658,
+ 0xa6590000a65a,
+ 0xa65b0000a65c,
+ 0xa65d0000a65e,
+ 0xa65f0000a660,
+ 0xa6610000a662,
+ 0xa6630000a664,
+ 0xa6650000a666,
+ 0xa6670000a668,
+ 0xa6690000a66a,
+ 0xa66b0000a66c,
+ 0xa66d0000a670,
+ 0xa6740000a67e,
+ 0xa67f0000a680,
+ 0xa6810000a682,
+ 0xa6830000a684,
+ 0xa6850000a686,
+ 0xa6870000a688,
+ 0xa6890000a68a,
+ 0xa68b0000a68c,
+ 0xa68d0000a68e,
+ 0xa68f0000a690,
+ 0xa6910000a692,
+ 0xa6930000a694,
+ 0xa6950000a696,
+ 0xa6970000a698,
+ 0xa6990000a69a,
+ 0xa69b0000a69c,
+ 0xa69e0000a6e6,
+ 0xa6f00000a6f2,
+ 0xa7170000a720,
+ 0xa7230000a724,
+ 0xa7250000a726,
+ 0xa7270000a728,
+ 0xa7290000a72a,
+ 0xa72b0000a72c,
+ 0xa72d0000a72e,
+ 0xa72f0000a732,
+ 0xa7330000a734,
+ 0xa7350000a736,
+ 0xa7370000a738,
+ 0xa7390000a73a,
+ 0xa73b0000a73c,
+ 0xa73d0000a73e,
+ 0xa73f0000a740,
+ 0xa7410000a742,
+ 0xa7430000a744,
+ 0xa7450000a746,
+ 0xa7470000a748,
+ 0xa7490000a74a,
+ 0xa74b0000a74c,
+ 0xa74d0000a74e,
+ 0xa74f0000a750,
+ 0xa7510000a752,
+ 0xa7530000a754,
+ 0xa7550000a756,
+ 0xa7570000a758,
+ 0xa7590000a75a,
+ 0xa75b0000a75c,
+ 0xa75d0000a75e,
+ 0xa75f0000a760,
+ 0xa7610000a762,
+ 0xa7630000a764,
+ 0xa7650000a766,
+ 0xa7670000a768,
+ 0xa7690000a76a,
+ 0xa76b0000a76c,
+ 0xa76d0000a76e,
+ 0xa76f0000a770,
+ 0xa7710000a779,
+ 0xa77a0000a77b,
+ 0xa77c0000a77d,
+ 0xa77f0000a780,
+ 0xa7810000a782,
+ 0xa7830000a784,
+ 0xa7850000a786,
+ 0xa7870000a789,
+ 0xa78c0000a78d,
+ 0xa78e0000a790,
+ 0xa7910000a792,
+ 0xa7930000a796,
+ 0xa7970000a798,
+ 0xa7990000a79a,
+ 0xa79b0000a79c,
+ 0xa79d0000a79e,
+ 0xa79f0000a7a0,
+ 0xa7a10000a7a2,
+ 0xa7a30000a7a4,
+ 0xa7a50000a7a6,
+ 0xa7a70000a7a8,
+ 0xa7a90000a7aa,
+ 0xa7af0000a7b0,
+ 0xa7b50000a7b6,
+ 0xa7b70000a7b8,
+ 0xa7b90000a7ba,
+ 0xa7bb0000a7bc,
+ 0xa7bd0000a7be,
+ 0xa7bf0000a7c0,
+ 0xa7c30000a7c4,
+ 0xa7c80000a7c9,
+ 0xa7ca0000a7cb,
+ 0xa7f60000a7f8,
+ 0xa7fa0000a828,
+ 0xa82c0000a82d,
+ 0xa8400000a874,
+ 0xa8800000a8c6,
+ 0xa8d00000a8da,
+ 0xa8e00000a8f8,
+ 0xa8fb0000a8fc,
+ 0xa8fd0000a92e,
+ 0xa9300000a954,
+ 0xa9800000a9c1,
+ 0xa9cf0000a9da,
+ 0xa9e00000a9ff,
+ 0xaa000000aa37,
+ 0xaa400000aa4e,
+ 0xaa500000aa5a,
+ 0xaa600000aa77,
+ 0xaa7a0000aac3,
+ 0xaadb0000aade,
+ 0xaae00000aaf0,
+ 0xaaf20000aaf7,
+ 0xab010000ab07,
+ 0xab090000ab0f,
+ 0xab110000ab17,
+ 0xab200000ab27,
+ 0xab280000ab2f,
+ 0xab300000ab5b,
+ 0xab600000ab6a,
+ 0xabc00000abeb,
+ 0xabec0000abee,
+ 0xabf00000abfa,
+ 0xac000000d7a4,
+ 0xfa0e0000fa10,
+ 0xfa110000fa12,
+ 0xfa130000fa15,
+ 0xfa1f0000fa20,
+ 0xfa210000fa22,
+ 0xfa230000fa25,
+ 0xfa270000fa2a,
+ 0xfb1e0000fb1f,
+ 0xfe200000fe30,
+ 0xfe730000fe74,
+ 0x100000001000c,
+ 0x1000d00010027,
+ 0x100280001003b,
+ 0x1003c0001003e,
+ 0x1003f0001004e,
+ 0x100500001005e,
+ 0x10080000100fb,
+ 0x101fd000101fe,
+ 0x102800001029d,
+ 0x102a0000102d1,
+ 0x102e0000102e1,
+ 0x1030000010320,
+ 0x1032d00010341,
+ 0x103420001034a,
+ 0x103500001037b,
+ 0x103800001039e,
+ 0x103a0000103c4,
+ 0x103c8000103d0,
+ 0x104280001049e,
+ 0x104a0000104aa,
+ 0x104d8000104fc,
+ 0x1050000010528,
+ 0x1053000010564,
+ 0x1060000010737,
+ 0x1074000010756,
+ 0x1076000010768,
+ 0x1080000010806,
+ 0x1080800010809,
+ 0x1080a00010836,
+ 0x1083700010839,
+ 0x1083c0001083d,
+ 0x1083f00010856,
+ 0x1086000010877,
+ 0x108800001089f,
+ 0x108e0000108f3,
+ 0x108f4000108f6,
+ 0x1090000010916,
+ 0x109200001093a,
+ 0x10980000109b8,
+ 0x109be000109c0,
+ 0x10a0000010a04,
+ 0x10a0500010a07,
+ 0x10a0c00010a14,
+ 0x10a1500010a18,
+ 0x10a1900010a36,
+ 0x10a3800010a3b,
+ 0x10a3f00010a40,
+ 0x10a6000010a7d,
+ 0x10a8000010a9d,
+ 0x10ac000010ac8,
+ 0x10ac900010ae7,
+ 0x10b0000010b36,
+ 0x10b4000010b56,
+ 0x10b6000010b73,
+ 0x10b8000010b92,
+ 0x10c0000010c49,
+ 0x10cc000010cf3,
+ 0x10d0000010d28,
+ 0x10d3000010d3a,
+ 0x10e8000010eaa,
+ 0x10eab00010ead,
+ 0x10eb000010eb2,
+ 0x10f0000010f1d,
+ 0x10f2700010f28,
+ 0x10f3000010f51,
+ 0x10fb000010fc5,
+ 0x10fe000010ff7,
+ 0x1100000011047,
+ 0x1106600011070,
+ 0x1107f000110bb,
+ 0x110d0000110e9,
+ 0x110f0000110fa,
+ 0x1110000011135,
+ 0x1113600011140,
+ 0x1114400011148,
+ 0x1115000011174,
+ 0x1117600011177,
+ 0x11180000111c5,
+ 0x111c9000111cd,
+ 0x111ce000111db,
+ 0x111dc000111dd,
+ 0x1120000011212,
+ 0x1121300011238,
+ 0x1123e0001123f,
+ 0x1128000011287,
+ 0x1128800011289,
+ 0x1128a0001128e,
+ 0x1128f0001129e,
+ 0x1129f000112a9,
+ 0x112b0000112eb,
+ 0x112f0000112fa,
+ 0x1130000011304,
+ 0x113050001130d,
+ 0x1130f00011311,
+ 0x1131300011329,
+ 0x1132a00011331,
+ 0x1133200011334,
+ 0x113350001133a,
+ 0x1133b00011345,
+ 0x1134700011349,
+ 0x1134b0001134e,
+ 0x1135000011351,
+ 0x1135700011358,
+ 0x1135d00011364,
+ 0x113660001136d,
+ 0x1137000011375,
+ 0x114000001144b,
+ 0x114500001145a,
+ 0x1145e00011462,
+ 0x11480000114c6,
+ 0x114c7000114c8,
+ 0x114d0000114da,
+ 0x11580000115b6,
+ 0x115b8000115c1,
+ 0x115d8000115de,
+ 0x1160000011641,
+ 0x1164400011645,
+ 0x116500001165a,
+ 0x11680000116b9,
+ 0x116c0000116ca,
+ 0x117000001171b,
+ 0x1171d0001172c,
+ 0x117300001173a,
+ 0x118000001183b,
+ 0x118c0000118ea,
+ 0x118ff00011907,
+ 0x119090001190a,
+ 0x1190c00011914,
+ 0x1191500011917,
+ 0x1191800011936,
+ 0x1193700011939,
+ 0x1193b00011944,
+ 0x119500001195a,
+ 0x119a0000119a8,
+ 0x119aa000119d8,
+ 0x119da000119e2,
+ 0x119e3000119e5,
+ 0x11a0000011a3f,
+ 0x11a4700011a48,
+ 0x11a5000011a9a,
+ 0x11a9d00011a9e,
+ 0x11ac000011af9,
+ 0x11c0000011c09,
+ 0x11c0a00011c37,
+ 0x11c3800011c41,
+ 0x11c5000011c5a,
+ 0x11c7200011c90,
+ 0x11c9200011ca8,
+ 0x11ca900011cb7,
+ 0x11d0000011d07,
+ 0x11d0800011d0a,
+ 0x11d0b00011d37,
+ 0x11d3a00011d3b,
+ 0x11d3c00011d3e,
+ 0x11d3f00011d48,
+ 0x11d5000011d5a,
+ 0x11d6000011d66,
+ 0x11d6700011d69,
+ 0x11d6a00011d8f,
+ 0x11d9000011d92,
+ 0x11d9300011d99,
+ 0x11da000011daa,
+ 0x11ee000011ef7,
+ 0x11fb000011fb1,
+ 0x120000001239a,
+ 0x1248000012544,
+ 0x130000001342f,
+ 0x1440000014647,
+ 0x1680000016a39,
+ 0x16a4000016a5f,
+ 0x16a6000016a6a,
+ 0x16ad000016aee,
+ 0x16af000016af5,
+ 0x16b0000016b37,
+ 0x16b4000016b44,
+ 0x16b5000016b5a,
+ 0x16b6300016b78,
+ 0x16b7d00016b90,
+ 0x16e6000016e80,
+ 0x16f0000016f4b,
+ 0x16f4f00016f88,
+ 0x16f8f00016fa0,
+ 0x16fe000016fe2,
+ 0x16fe300016fe5,
+ 0x16ff000016ff2,
+ 0x17000000187f8,
+ 0x1880000018cd6,
+ 0x18d0000018d09,
+ 0x1b0000001b11f,
+ 0x1b1500001b153,
+ 0x1b1640001b168,
+ 0x1b1700001b2fc,
+ 0x1bc000001bc6b,
+ 0x1bc700001bc7d,
+ 0x1bc800001bc89,
+ 0x1bc900001bc9a,
+ 0x1bc9d0001bc9f,
+ 0x1da000001da37,
+ 0x1da3b0001da6d,
+ 0x1da750001da76,
+ 0x1da840001da85,
+ 0x1da9b0001daa0,
+ 0x1daa10001dab0,
+ 0x1e0000001e007,
+ 0x1e0080001e019,
+ 0x1e01b0001e022,
+ 0x1e0230001e025,
+ 0x1e0260001e02b,
+ 0x1e1000001e12d,
+ 0x1e1300001e13e,
+ 0x1e1400001e14a,
+ 0x1e14e0001e14f,
+ 0x1e2c00001e2fa,
+ 0x1e8000001e8c5,
+ 0x1e8d00001e8d7,
+ 0x1e9220001e94c,
+ 0x1e9500001e95a,
+ 0x1fbf00001fbfa,
+ 0x200000002a6de,
+ 0x2a7000002b735,
+ 0x2b7400002b81e,
+ 0x2b8200002cea2,
+ 0x2ceb00002ebe1,
+ 0x300000003134b,
+ ),
+ 'CONTEXTJ': (
+ 0x200c0000200e,
+ ),
+ 'CONTEXTO': (
+ 0xb7000000b8,
+ 0x37500000376,
+ 0x5f3000005f5,
+ 0x6600000066a,
+ 0x6f0000006fa,
+ 0x30fb000030fc,
+ ),
+}
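
Each entry in codepoint_classes appears to pack a half-open codepoint range into a single integer as (start << 32) | end, the same encoding implemented by intranges.py in the next file. A minimal sketch to unpack and sanity-check a few entries from above:

def unpack(r):
    # High 32 bits: inclusive start; low 32 bits: exclusive end.
    return r >> 32, r & 0xFFFFFFFF

assert unpack(0x2d0000002e) == (0x2D, 0x2E)        # PVALID: '-' alone
assert unpack(0x610000007b) == (0x61, 0x7B)        # PVALID: 'a'..'z'
assert unpack(0x200c0000200e) == (0x200C, 0x200E)  # CONTEXTJ: ZWNJ and ZWJ
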
diff --git a/venv/Lib/site-packages/idna/intranges.py b/venv/Lib/site-packages/idna/intranges.py
new file mode 100644
index 000000000..fa8a73566
--- /dev/null
+++ b/venv/Lib/site-packages/idna/intranges.py
@@ -0,0 +1,53 @@
+"""
+Given a list of integers, made up of (hopefully) a small number of long runs
+of consecutive integers, compute a representation of the form
+((start1, end1), (start2, end2) ...). Then answer the question "was x present
+in the original list?" in time O(log(# runs)).
+"""
+
+import bisect
+
+def intranges_from_list(list_):
+ """Represent a list of integers as a sequence of ranges:
+ ((start_0, end_0), (start_1, end_1), ...), such that the original
+ integers are exactly those x such that start_i <= x < end_i for some i.
+
+ Ranges are encoded as single integers (start << 32 | end), not as tuples.
+ """
+
+ sorted_list = sorted(list_)
+ ranges = []
+ last_write = -1
+ for i in range(len(sorted_list)):
+ if i+1 < len(sorted_list):
+ if sorted_list[i] == sorted_list[i+1]-1:
+ continue
+ current_range = sorted_list[last_write+1:i+1]
+ ranges.append(_encode_range(current_range[0], current_range[-1] + 1))
+ last_write = i
+
+ return tuple(ranges)
+
+def _encode_range(start, end):
+ return (start << 32) | end
+
+def _decode_range(r):
+ return (r >> 32), (r & ((1 << 32) - 1))
+
+
+def intranges_contain(int_, ranges):
+ """Determine if `int_` falls into one of the ranges in `ranges`."""
+ tuple_ = _encode_range(int_, 0)
+ pos = bisect.bisect_left(ranges, tuple_)
+ # we could be immediately ahead of a tuple (start, end)
+ # with start <= int_ < end
+ if pos > 0:
+ left, right = _decode_range(ranges[pos-1])
+ if left <= int_ < right:
+ return True
+ # or we could be immediately behind a tuple (int_, end)
+ if pos < len(ranges):
+ left, _ = _decode_range(ranges[pos])
+ if left == int_:
+ return True
+ return False
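
A short usage sketch for the module above, assuming the package is importable from this vendored location; the assertions just restate the docstring's contract:

from idna.intranges import intranges_from_list, intranges_contain

# Three runs: 1..3, 10..12, and the singleton 100, each stored as one
# integer (start << 32) | end with a half-open end.
ranges = intranges_from_list([1, 2, 3, 10, 11, 12, 100])

assert intranges_contain(2, ranges)
assert intranges_contain(100, ranges)
assert not intranges_contain(4, ranges)   # the end of a run is exclusive
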
diff --git a/venv/Lib/site-packages/idna/package_data.py b/venv/Lib/site-packages/idna/package_data.py
new file mode 100644
index 000000000..ce1c521d2
--- /dev/null
+++ b/venv/Lib/site-packages/idna/package_data.py
@@ -0,0 +1,2 @@
+__version__ = '2.10'
+
diff --git a/venv/Lib/site-packages/idna/uts46data.py b/venv/Lib/site-packages/idna/uts46data.py
new file mode 100644
index 000000000..3766dd49f
--- /dev/null
+++ b/venv/Lib/site-packages/idna/uts46data.py
@@ -0,0 +1,8357 @@
+# This file is automatically generated by tools/idna-data
+# vim: set fileencoding=utf-8 :
+
+"""IDNA Mapping Table from UTS46."""
+
+
+__version__ = "13.0.0"
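
The generated rows that follow are (codepoint, status) or (codepoint, status, mapping) tuples. The status letters are not documented in this file; the legend below is an assumption based on UTS #46 and the idna sources:

STATUS_LEGEND = {
    'V': 'valid',
    'M': 'mapped (the third element is the replacement string)',
    'D': 'deviation (mapped only under transitional processing)',
    'I': 'ignored (dropped from the string)',
    'X': 'disallowed',
    '3': 'disallowed_STD3_valid, or disallowed_STD3_mapped when a mapping is present',
}
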
+def _seg_0():
+ return [
+ (0x0, '3'),
+ (0x1, '3'),
+ (0x2, '3'),
+ (0x3, '3'),
+ (0x4, '3'),
+ (0x5, '3'),
+ (0x6, '3'),
+ (0x7, '3'),
+ (0x8, '3'),
+ (0x9, '3'),
+ (0xA, '3'),
+ (0xB, '3'),
+ (0xC, '3'),
+ (0xD, '3'),
+ (0xE, '3'),
+ (0xF, '3'),
+ (0x10, '3'),
+ (0x11, '3'),
+ (0x12, '3'),
+ (0x13, '3'),
+ (0x14, '3'),
+ (0x15, '3'),
+ (0x16, '3'),
+ (0x17, '3'),
+ (0x18, '3'),
+ (0x19, '3'),
+ (0x1A, '3'),
+ (0x1B, '3'),
+ (0x1C, '3'),
+ (0x1D, '3'),
+ (0x1E, '3'),
+ (0x1F, '3'),
+ (0x20, '3'),
+ (0x21, '3'),
+ (0x22, '3'),
+ (0x23, '3'),
+ (0x24, '3'),
+ (0x25, '3'),
+ (0x26, '3'),
+ (0x27, '3'),
+ (0x28, '3'),
+ (0x29, '3'),
+ (0x2A, '3'),
+ (0x2B, '3'),
+ (0x2C, '3'),
+ (0x2D, 'V'),
+ (0x2E, 'V'),
+ (0x2F, '3'),
+ (0x30, 'V'),
+ (0x31, 'V'),
+ (0x32, 'V'),
+ (0x33, 'V'),
+ (0x34, 'V'),
+ (0x35, 'V'),
+ (0x36, 'V'),
+ (0x37, 'V'),
+ (0x38, 'V'),
+ (0x39, 'V'),
+ (0x3A, '3'),
+ (0x3B, '3'),
+ (0x3C, '3'),
+ (0x3D, '3'),
+ (0x3E, '3'),
+ (0x3F, '3'),
+ (0x40, '3'),
+ (0x41, 'M', u'a'),
+ (0x42, 'M', u'b'),
+ (0x43, 'M', u'c'),
+ (0x44, 'M', u'd'),
+ (0x45, 'M', u'e'),
+ (0x46, 'M', u'f'),
+ (0x47, 'M', u'g'),
+ (0x48, 'M', u'h'),
+ (0x49, 'M', u'i'),
+ (0x4A, 'M', u'j'),
+ (0x4B, 'M', u'k'),
+ (0x4C, 'M', u'l'),
+ (0x4D, 'M', u'm'),
+ (0x4E, 'M', u'n'),
+ (0x4F, 'M', u'o'),
+ (0x50, 'M', u'p'),
+ (0x51, 'M', u'q'),
+ (0x52, 'M', u'r'),
+ (0x53, 'M', u's'),
+ (0x54, 'M', u't'),
+ (0x55, 'M', u'u'),
+ (0x56, 'M', u'v'),
+ (0x57, 'M', u'w'),
+ (0x58, 'M', u'x'),
+ (0x59, 'M', u'y'),
+ (0x5A, 'M', u'z'),
+ (0x5B, '3'),
+ (0x5C, '3'),
+ (0x5D, '3'),
+ (0x5E, '3'),
+ (0x5F, '3'),
+ (0x60, '3'),
+ (0x61, 'V'),
+ (0x62, 'V'),
+ (0x63, 'V'),
+ ]
+
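
A row applies to every codepoint from its own up to (but not including) the next row's, so the table only stores the points where the status changes. A toy lookup over just _seg_0; the idna sources appear to concatenate every _seg_N() into one sorted tuple and search it the same way:

import bisect

rows = _seg_0()
keys = [row[0] for row in rows]

def lookup(cp):
    # The governing row is the last one whose codepoint is <= cp.
    return rows[bisect.bisect_right(keys, cp) - 1]

assert lookup(ord('A'))[1:] == ('M', u'a')   # uppercase maps to lowercase
assert lookup(ord('a'))[1] == 'V'            # already valid
assert lookup(0x2F)[1] == '3'                # '/' is disallowed under STD3
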
+def _seg_1():
+ return [
+ (0x64, 'V'),
+ (0x65, 'V'),
+ (0x66, 'V'),
+ (0x67, 'V'),
+ (0x68, 'V'),
+ (0x69, 'V'),
+ (0x6A, 'V'),
+ (0x6B, 'V'),
+ (0x6C, 'V'),
+ (0x6D, 'V'),
+ (0x6E, 'V'),
+ (0x6F, 'V'),
+ (0x70, 'V'),
+ (0x71, 'V'),
+ (0x72, 'V'),
+ (0x73, 'V'),
+ (0x74, 'V'),
+ (0x75, 'V'),
+ (0x76, 'V'),
+ (0x77, 'V'),
+ (0x78, 'V'),
+ (0x79, 'V'),
+ (0x7A, 'V'),
+ (0x7B, '3'),
+ (0x7C, '3'),
+ (0x7D, '3'),
+ (0x7E, '3'),
+ (0x7F, '3'),
+ (0x80, 'X'),
+ (0x81, 'X'),
+ (0x82, 'X'),
+ (0x83, 'X'),
+ (0x84, 'X'),
+ (0x85, 'X'),
+ (0x86, 'X'),
+ (0x87, 'X'),
+ (0x88, 'X'),
+ (0x89, 'X'),
+ (0x8A, 'X'),
+ (0x8B, 'X'),
+ (0x8C, 'X'),
+ (0x8D, 'X'),
+ (0x8E, 'X'),
+ (0x8F, 'X'),
+ (0x90, 'X'),
+ (0x91, 'X'),
+ (0x92, 'X'),
+ (0x93, 'X'),
+ (0x94, 'X'),
+ (0x95, 'X'),
+ (0x96, 'X'),
+ (0x97, 'X'),
+ (0x98, 'X'),
+ (0x99, 'X'),
+ (0x9A, 'X'),
+ (0x9B, 'X'),
+ (0x9C, 'X'),
+ (0x9D, 'X'),
+ (0x9E, 'X'),
+ (0x9F, 'X'),
+ (0xA0, '3', u' '),
+ (0xA1, 'V'),
+ (0xA2, 'V'),
+ (0xA3, 'V'),
+ (0xA4, 'V'),
+ (0xA5, 'V'),
+ (0xA6, 'V'),
+ (0xA7, 'V'),
+ (0xA8, '3', u' ̈'),
+ (0xA9, 'V'),
+ (0xAA, 'M', u'a'),
+ (0xAB, 'V'),
+ (0xAC, 'V'),
+ (0xAD, 'I'),
+ (0xAE, 'V'),
+ (0xAF, '3', u' ̄'),
+ (0xB0, 'V'),
+ (0xB1, 'V'),
+ (0xB2, 'M', u'2'),
+ (0xB3, 'M', u'3'),
+ (0xB4, '3', u' ́'),
+ (0xB5, 'M', u'μ'),
+ (0xB6, 'V'),
+ (0xB7, 'V'),
+ (0xB8, '3', u' ̧'),
+ (0xB9, 'M', u'1'),
+ (0xBA, 'M', u'o'),
+ (0xBB, 'V'),
+ (0xBC, 'M', u'1⁄4'),
+ (0xBD, 'M', u'1⁄2'),
+ (0xBE, 'M', u'3⁄4'),
+ (0xBF, 'V'),
+ (0xC0, 'M', u'à'),
+ (0xC1, 'M', u'á'),
+ (0xC2, 'M', u'â'),
+ (0xC3, 'M', u'ã'),
+ (0xC4, 'M', u'ä'),
+ (0xC5, 'M', u'å'),
+ (0xC6, 'M', u'æ'),
+ (0xC7, 'M', u'ç'),
+ ]
+
+def _seg_2():
+ return [
+ (0xC8, 'M', u'è'),
+ (0xC9, 'M', u'é'),
+ (0xCA, 'M', u'ê'),
+ (0xCB, 'M', u'ë'),
+ (0xCC, 'M', u'ì'),
+ (0xCD, 'M', u'í'),
+ (0xCE, 'M', u'î'),
+ (0xCF, 'M', u'ï'),
+ (0xD0, 'M', u'ð'),
+ (0xD1, 'M', u'ñ'),
+ (0xD2, 'M', u'ò'),
+ (0xD3, 'M', u'ó'),
+ (0xD4, 'M', u'ô'),
+ (0xD5, 'M', u'õ'),
+ (0xD6, 'M', u'ö'),
+ (0xD7, 'V'),
+ (0xD8, 'M', u'ø'),
+ (0xD9, 'M', u'ù'),
+ (0xDA, 'M', u'ú'),
+ (0xDB, 'M', u'û'),
+ (0xDC, 'M', u'ü'),
+ (0xDD, 'M', u'ý'),
+ (0xDE, 'M', u'þ'),
+ (0xDF, 'D', u'ss'),
+ (0xE0, 'V'),
+ (0xE1, 'V'),
+ (0xE2, 'V'),
+ (0xE3, 'V'),
+ (0xE4, 'V'),
+ (0xE5, 'V'),
+ (0xE6, 'V'),
+ (0xE7, 'V'),
+ (0xE8, 'V'),
+ (0xE9, 'V'),
+ (0xEA, 'V'),
+ (0xEB, 'V'),
+ (0xEC, 'V'),
+ (0xED, 'V'),
+ (0xEE, 'V'),
+ (0xEF, 'V'),
+ (0xF0, 'V'),
+ (0xF1, 'V'),
+ (0xF2, 'V'),
+ (0xF3, 'V'),
+ (0xF4, 'V'),
+ (0xF5, 'V'),
+ (0xF6, 'V'),
+ (0xF7, 'V'),
+ (0xF8, 'V'),
+ (0xF9, 'V'),
+ (0xFA, 'V'),
+ (0xFB, 'V'),
+ (0xFC, 'V'),
+ (0xFD, 'V'),
+ (0xFE, 'V'),
+ (0xFF, 'V'),
+ (0x100, 'M', u'ā'),
+ (0x101, 'V'),
+ (0x102, 'M', u'ă'),
+ (0x103, 'V'),
+ (0x104, 'M', u'ą'),
+ (0x105, 'V'),
+ (0x106, 'M', u'ć'),
+ (0x107, 'V'),
+ (0x108, 'M', u'ĉ'),
+ (0x109, 'V'),
+ (0x10A, 'M', u'ċ'),
+ (0x10B, 'V'),
+ (0x10C, 'M', u'č'),
+ (0x10D, 'V'),
+ (0x10E, 'M', u'ď'),
+ (0x10F, 'V'),
+ (0x110, 'M', u'đ'),
+ (0x111, 'V'),
+ (0x112, 'M', u'ē'),
+ (0x113, 'V'),
+ (0x114, 'M', u'ĕ'),
+ (0x115, 'V'),
+ (0x116, 'M', u'ė'),
+ (0x117, 'V'),
+ (0x118, 'M', u'ę'),
+ (0x119, 'V'),
+ (0x11A, 'M', u'ě'),
+ (0x11B, 'V'),
+ (0x11C, 'M', u'ĝ'),
+ (0x11D, 'V'),
+ (0x11E, 'M', u'ğ'),
+ (0x11F, 'V'),
+ (0x120, 'M', u'ġ'),
+ (0x121, 'V'),
+ (0x122, 'M', u'ģ'),
+ (0x123, 'V'),
+ (0x124, 'M', u'ĥ'),
+ (0x125, 'V'),
+ (0x126, 'M', u'ħ'),
+ (0x127, 'V'),
+ (0x128, 'M', u'ĩ'),
+ (0x129, 'V'),
+ (0x12A, 'M', u'ī'),
+ (0x12B, 'V'),
+ ]
+
+def _seg_3():
+ return [
+ (0x12C, 'M', u'ĭ'),
+ (0x12D, 'V'),
+ (0x12E, 'M', u'į'),
+ (0x12F, 'V'),
+ (0x130, 'M', u'i̇'),
+ (0x131, 'V'),
+ (0x132, 'M', u'ij'),
+ (0x134, 'M', u'ĵ'),
+ (0x135, 'V'),
+ (0x136, 'M', u'ķ'),
+ (0x137, 'V'),
+ (0x139, 'M', u'ĺ'),
+ (0x13A, 'V'),
+ (0x13B, 'M', u'ļ'),
+ (0x13C, 'V'),
+ (0x13D, 'M', u'ľ'),
+ (0x13E, 'V'),
+ (0x13F, 'M', u'l·'),
+ (0x141, 'M', u'ł'),
+ (0x142, 'V'),
+ (0x143, 'M', u'ń'),
+ (0x144, 'V'),
+ (0x145, 'M', u'ņ'),
+ (0x146, 'V'),
+ (0x147, 'M', u'ň'),
+ (0x148, 'V'),
+ (0x149, 'M', u'ʼn'),
+ (0x14A, 'M', u'ŋ'),
+ (0x14B, 'V'),
+ (0x14C, 'M', u'ō'),
+ (0x14D, 'V'),
+ (0x14E, 'M', u'ŏ'),
+ (0x14F, 'V'),
+ (0x150, 'M', u'ő'),
+ (0x151, 'V'),
+ (0x152, 'M', u'œ'),
+ (0x153, 'V'),
+ (0x154, 'M', u'ŕ'),
+ (0x155, 'V'),
+ (0x156, 'M', u'ŗ'),
+ (0x157, 'V'),
+ (0x158, 'M', u'ř'),
+ (0x159, 'V'),
+ (0x15A, 'M', u'ś'),
+ (0x15B, 'V'),
+ (0x15C, 'M', u'ŝ'),
+ (0x15D, 'V'),
+ (0x15E, 'M', u'ş'),
+ (0x15F, 'V'),
+ (0x160, 'M', u'š'),
+ (0x161, 'V'),
+ (0x162, 'M', u'ţ'),
+ (0x163, 'V'),
+ (0x164, 'M', u'ť'),
+ (0x165, 'V'),
+ (0x166, 'M', u'ŧ'),
+ (0x167, 'V'),
+ (0x168, 'M', u'ũ'),
+ (0x169, 'V'),
+ (0x16A, 'M', u'ū'),
+ (0x16B, 'V'),
+ (0x16C, 'M', u'ŭ'),
+ (0x16D, 'V'),
+ (0x16E, 'M', u'ů'),
+ (0x16F, 'V'),
+ (0x170, 'M', u'ű'),
+ (0x171, 'V'),
+ (0x172, 'M', u'ų'),
+ (0x173, 'V'),
+ (0x174, 'M', u'ŵ'),
+ (0x175, 'V'),
+ (0x176, 'M', u'ŷ'),
+ (0x177, 'V'),
+ (0x178, 'M', u'ÿ'),
+ (0x179, 'M', u'ź'),
+ (0x17A, 'V'),
+ (0x17B, 'M', u'ż'),
+ (0x17C, 'V'),
+ (0x17D, 'M', u'ž'),
+ (0x17E, 'V'),
+ (0x17F, 'M', u's'),
+ (0x180, 'V'),
+ (0x181, 'M', u'ɓ'),
+ (0x182, 'M', u'ƃ'),
+ (0x183, 'V'),
+ (0x184, 'M', u'ƅ'),
+ (0x185, 'V'),
+ (0x186, 'M', u'ɔ'),
+ (0x187, 'M', u'ƈ'),
+ (0x188, 'V'),
+ (0x189, 'M', u'ɖ'),
+ (0x18A, 'M', u'ɗ'),
+ (0x18B, 'M', u'ƌ'),
+ (0x18C, 'V'),
+ (0x18E, 'M', u'ǝ'),
+ (0x18F, 'M', u'ə'),
+ (0x190, 'M', u'ɛ'),
+ (0x191, 'M', u'ƒ'),
+ (0x192, 'V'),
+ (0x193, 'M', u'ɠ'),
+ ]
+
+def _seg_4():
+ return [
+ (0x194, 'M', u'ɣ'),
+ (0x195, 'V'),
+ (0x196, 'M', u'ɩ'),
+ (0x197, 'M', u'ɨ'),
+ (0x198, 'M', u'ƙ'),
+ (0x199, 'V'),
+ (0x19C, 'M', u'ɯ'),
+ (0x19D, 'M', u'ɲ'),
+ (0x19E, 'V'),
+ (0x19F, 'M', u'ɵ'),
+ (0x1A0, 'M', u'ơ'),
+ (0x1A1, 'V'),
+ (0x1A2, 'M', u'ƣ'),
+ (0x1A3, 'V'),
+ (0x1A4, 'M', u'ƥ'),
+ (0x1A5, 'V'),
+ (0x1A6, 'M', u'ʀ'),
+ (0x1A7, 'M', u'ƨ'),
+ (0x1A8, 'V'),
+ (0x1A9, 'M', u'ʃ'),
+ (0x1AA, 'V'),
+ (0x1AC, 'M', u'ƭ'),
+ (0x1AD, 'V'),
+ (0x1AE, 'M', u'ʈ'),
+ (0x1AF, 'M', u'ư'),
+ (0x1B0, 'V'),
+ (0x1B1, 'M', u'ʊ'),
+ (0x1B2, 'M', u'ʋ'),
+ (0x1B3, 'M', u'ƴ'),
+ (0x1B4, 'V'),
+ (0x1B5, 'M', u'ƶ'),
+ (0x1B6, 'V'),
+ (0x1B7, 'M', u'ʒ'),
+ (0x1B8, 'M', u'ƹ'),
+ (0x1B9, 'V'),
+ (0x1BC, 'M', u'ƽ'),
+ (0x1BD, 'V'),
+ (0x1C4, 'M', u'dž'),
+ (0x1C7, 'M', u'lj'),
+ (0x1CA, 'M', u'nj'),
+ (0x1CD, 'M', u'ǎ'),
+ (0x1CE, 'V'),
+ (0x1CF, 'M', u'ǐ'),
+ (0x1D0, 'V'),
+ (0x1D1, 'M', u'ǒ'),
+ (0x1D2, 'V'),
+ (0x1D3, 'M', u'ǔ'),
+ (0x1D4, 'V'),
+ (0x1D5, 'M', u'ǖ'),
+ (0x1D6, 'V'),
+ (0x1D7, 'M', u'ǘ'),
+ (0x1D8, 'V'),
+ (0x1D9, 'M', u'ǚ'),
+ (0x1DA, 'V'),
+ (0x1DB, 'M', u'ǜ'),
+ (0x1DC, 'V'),
+ (0x1DE, 'M', u'ǟ'),
+ (0x1DF, 'V'),
+ (0x1E0, 'M', u'ǡ'),
+ (0x1E1, 'V'),
+ (0x1E2, 'M', u'ǣ'),
+ (0x1E3, 'V'),
+ (0x1E4, 'M', u'ǥ'),
+ (0x1E5, 'V'),
+ (0x1E6, 'M', u'ǧ'),
+ (0x1E7, 'V'),
+ (0x1E8, 'M', u'ǩ'),
+ (0x1E9, 'V'),
+ (0x1EA, 'M', u'ǫ'),
+ (0x1EB, 'V'),
+ (0x1EC, 'M', u'ǭ'),
+ (0x1ED, 'V'),
+ (0x1EE, 'M', u'ǯ'),
+ (0x1EF, 'V'),
+ (0x1F1, 'M', u'dz'),
+ (0x1F4, 'M', u'ǵ'),
+ (0x1F5, 'V'),
+ (0x1F6, 'M', u'ƕ'),
+ (0x1F7, 'M', u'ƿ'),
+ (0x1F8, 'M', u'ǹ'),
+ (0x1F9, 'V'),
+ (0x1FA, 'M', u'ǻ'),
+ (0x1FB, 'V'),
+ (0x1FC, 'M', u'ǽ'),
+ (0x1FD, 'V'),
+ (0x1FE, 'M', u'ǿ'),
+ (0x1FF, 'V'),
+ (0x200, 'M', u'ȁ'),
+ (0x201, 'V'),
+ (0x202, 'M', u'ȃ'),
+ (0x203, 'V'),
+ (0x204, 'M', u'ȅ'),
+ (0x205, 'V'),
+ (0x206, 'M', u'ȇ'),
+ (0x207, 'V'),
+ (0x208, 'M', u'ȉ'),
+ (0x209, 'V'),
+ (0x20A, 'M', u'ȋ'),
+ (0x20B, 'V'),
+ (0x20C, 'M', u'ȍ'),
+ ]
+
+def _seg_5():
+ return [
+ (0x20D, 'V'),
+ (0x20E, 'M', u'ȏ'),
+ (0x20F, 'V'),
+ (0x210, 'M', u'ȑ'),
+ (0x211, 'V'),
+ (0x212, 'M', u'ȓ'),
+ (0x213, 'V'),
+ (0x214, 'M', u'ȕ'),
+ (0x215, 'V'),
+ (0x216, 'M', u'ȗ'),
+ (0x217, 'V'),
+ (0x218, 'M', u'ș'),
+ (0x219, 'V'),
+ (0x21A, 'M', u'ț'),
+ (0x21B, 'V'),
+ (0x21C, 'M', u'ȝ'),
+ (0x21D, 'V'),
+ (0x21E, 'M', u'ȟ'),
+ (0x21F, 'V'),
+ (0x220, 'M', u'ƞ'),
+ (0x221, 'V'),
+ (0x222, 'M', u'ȣ'),
+ (0x223, 'V'),
+ (0x224, 'M', u'ȥ'),
+ (0x225, 'V'),
+ (0x226, 'M', u'ȧ'),
+ (0x227, 'V'),
+ (0x228, 'M', u'ȩ'),
+ (0x229, 'V'),
+ (0x22A, 'M', u'ȫ'),
+ (0x22B, 'V'),
+ (0x22C, 'M', u'ȭ'),
+ (0x22D, 'V'),
+ (0x22E, 'M', u'ȯ'),
+ (0x22F, 'V'),
+ (0x230, 'M', u'ȱ'),
+ (0x231, 'V'),
+ (0x232, 'M', u'ȳ'),
+ (0x233, 'V'),
+ (0x23A, 'M', u'ⱥ'),
+ (0x23B, 'M', u'ȼ'),
+ (0x23C, 'V'),
+ (0x23D, 'M', u'ƚ'),
+ (0x23E, 'M', u'ⱦ'),
+ (0x23F, 'V'),
+ (0x241, 'M', u'ɂ'),
+ (0x242, 'V'),
+ (0x243, 'M', u'ƀ'),
+ (0x244, 'M', u'ʉ'),
+ (0x245, 'M', u'ʌ'),
+ (0x246, 'M', u'ɇ'),
+ (0x247, 'V'),
+ (0x248, 'M', u'ɉ'),
+ (0x249, 'V'),
+ (0x24A, 'M', u'ɋ'),
+ (0x24B, 'V'),
+ (0x24C, 'M', u'ɍ'),
+ (0x24D, 'V'),
+ (0x24E, 'M', u'ɏ'),
+ (0x24F, 'V'),
+ (0x2B0, 'M', u'h'),
+ (0x2B1, 'M', u'ɦ'),
+ (0x2B2, 'M', u'j'),
+ (0x2B3, 'M', u'r'),
+ (0x2B4, 'M', u'ɹ'),
+ (0x2B5, 'M', u'ɻ'),
+ (0x2B6, 'M', u'ʁ'),
+ (0x2B7, 'M', u'w'),
+ (0x2B8, 'M', u'y'),
+ (0x2B9, 'V'),
+ (0x2D8, '3', u' ̆'),
+ (0x2D9, '3', u' ̇'),
+ (0x2DA, '3', u' ̊'),
+ (0x2DB, '3', u' ̨'),
+ (0x2DC, '3', u' ̃'),
+ (0x2DD, '3', u' ̋'),
+ (0x2DE, 'V'),
+ (0x2E0, 'M', u'ɣ'),
+ (0x2E1, 'M', u'l'),
+ (0x2E2, 'M', u's'),
+ (0x2E3, 'M', u'x'),
+ (0x2E4, 'M', u'ʕ'),
+ (0x2E5, 'V'),
+ (0x340, 'M', u'̀'),
+ (0x341, 'M', u'́'),
+ (0x342, 'V'),
+ (0x343, 'M', u'̓'),
+ (0x344, 'M', u'̈́'),
+ (0x345, 'M', u'ι'),
+ (0x346, 'V'),
+ (0x34F, 'I'),
+ (0x350, 'V'),
+ (0x370, 'M', u'ͱ'),
+ (0x371, 'V'),
+ (0x372, 'M', u'ͳ'),
+ (0x373, 'V'),
+ (0x374, 'M', u'ʹ'),
+ (0x375, 'V'),
+ (0x376, 'M', u'ͷ'),
+ (0x377, 'V'),
+ ]
+
+def _seg_6():
+ return [
+ (0x378, 'X'),
+ (0x37A, '3', u' ι'),
+ (0x37B, 'V'),
+ (0x37E, '3', u';'),
+ (0x37F, 'M', u'ϳ'),
+ (0x380, 'X'),
+ (0x384, '3', u' ́'),
+ (0x385, '3', u' ̈́'),
+ (0x386, 'M', u'ά'),
+ (0x387, 'M', u'·'),
+ (0x388, 'M', u'έ'),
+ (0x389, 'M', u'ή'),
+ (0x38A, 'M', u'ί'),
+ (0x38B, 'X'),
+ (0x38C, 'M', u'ό'),
+ (0x38D, 'X'),
+ (0x38E, 'M', u'ύ'),
+ (0x38F, 'M', u'ώ'),
+ (0x390, 'V'),
+ (0x391, 'M', u'α'),
+ (0x392, 'M', u'β'),
+ (0x393, 'M', u'γ'),
+ (0x394, 'M', u'δ'),
+ (0x395, 'M', u'ε'),
+ (0x396, 'M', u'ζ'),
+ (0x397, 'M', u'η'),
+ (0x398, 'M', u'θ'),
+ (0x399, 'M', u'ι'),
+ (0x39A, 'M', u'κ'),
+ (0x39B, 'M', u'λ'),
+ (0x39C, 'M', u'μ'),
+ (0x39D, 'M', u'ν'),
+ (0x39E, 'M', u'ξ'),
+ (0x39F, 'M', u'ο'),
+ (0x3A0, 'M', u'π'),
+ (0x3A1, 'M', u'ρ'),
+ (0x3A2, 'X'),
+ (0x3A3, 'M', u'σ'),
+ (0x3A4, 'M', u'τ'),
+ (0x3A5, 'M', u'υ'),
+ (0x3A6, 'M', u'φ'),
+ (0x3A7, 'M', u'χ'),
+ (0x3A8, 'M', u'ψ'),
+ (0x3A9, 'M', u'ω'),
+ (0x3AA, 'M', u'ϊ'),
+ (0x3AB, 'M', u'ϋ'),
+ (0x3AC, 'V'),
+ (0x3C2, 'D', u'σ'),
+ (0x3C3, 'V'),
+ (0x3CF, 'M', u'ϗ'),
+ (0x3D0, 'M', u'β'),
+ (0x3D1, 'M', u'θ'),
+ (0x3D2, 'M', u'υ'),
+ (0x3D3, 'M', u'ύ'),
+ (0x3D4, 'M', u'ϋ'),
+ (0x3D5, 'M', u'φ'),
+ (0x3D6, 'M', u'π'),
+ (0x3D7, 'V'),
+ (0x3D8, 'M', u'ϙ'),
+ (0x3D9, 'V'),
+ (0x3DA, 'M', u'ϛ'),
+ (0x3DB, 'V'),
+ (0x3DC, 'M', u'ϝ'),
+ (0x3DD, 'V'),
+ (0x3DE, 'M', u'ϟ'),
+ (0x3DF, 'V'),
+ (0x3E0, 'M', u'ϡ'),
+ (0x3E1, 'V'),
+ (0x3E2, 'M', u'ϣ'),
+ (0x3E3, 'V'),
+ (0x3E4, 'M', u'ϥ'),
+ (0x3E5, 'V'),
+ (0x3E6, 'M', u'ϧ'),
+ (0x3E7, 'V'),
+ (0x3E8, 'M', u'ϩ'),
+ (0x3E9, 'V'),
+ (0x3EA, 'M', u'ϫ'),
+ (0x3EB, 'V'),
+ (0x3EC, 'M', u'ϭ'),
+ (0x3ED, 'V'),
+ (0x3EE, 'M', u'ϯ'),
+ (0x3EF, 'V'),
+ (0x3F0, 'M', u'κ'),
+ (0x3F1, 'M', u'ρ'),
+ (0x3F2, 'M', u'σ'),
+ (0x3F3, 'V'),
+ (0x3F4, 'M', u'θ'),
+ (0x3F5, 'M', u'ε'),
+ (0x3F6, 'V'),
+ (0x3F7, 'M', u'ϸ'),
+ (0x3F8, 'V'),
+ (0x3F9, 'M', u'σ'),
+ (0x3FA, 'M', u'ϻ'),
+ (0x3FB, 'V'),
+ (0x3FD, 'M', u'ͻ'),
+ (0x3FE, 'M', u'ͼ'),
+ (0x3FF, 'M', u'ͽ'),
+ (0x400, 'M', u'ѐ'),
+ (0x401, 'M', u'ё'),
+ (0x402, 'M', u'ђ'),
+ ]
+
+def _seg_7():
+ return [
+ (0x403, 'M', u'ѓ'),
+ (0x404, 'M', u'є'),
+ (0x405, 'M', u'ѕ'),
+ (0x406, 'M', u'і'),
+ (0x407, 'M', u'ї'),
+ (0x408, 'M', u'ј'),
+ (0x409, 'M', u'љ'),
+ (0x40A, 'M', u'њ'),
+ (0x40B, 'M', u'ћ'),
+ (0x40C, 'M', u'ќ'),
+ (0x40D, 'M', u'ѝ'),
+ (0x40E, 'M', u'ў'),
+ (0x40F, 'M', u'џ'),
+ (0x410, 'M', u'а'),
+ (0x411, 'M', u'б'),
+ (0x412, 'M', u'в'),
+ (0x413, 'M', u'г'),
+ (0x414, 'M', u'д'),
+ (0x415, 'M', u'е'),
+ (0x416, 'M', u'ж'),
+ (0x417, 'M', u'з'),
+ (0x418, 'M', u'и'),
+ (0x419, 'M', u'й'),
+ (0x41A, 'M', u'к'),
+ (0x41B, 'M', u'л'),
+ (0x41C, 'M', u'м'),
+ (0x41D, 'M', u'н'),
+ (0x41E, 'M', u'о'),
+ (0x41F, 'M', u'п'),
+ (0x420, 'M', u'р'),
+ (0x421, 'M', u'с'),
+ (0x422, 'M', u'т'),
+ (0x423, 'M', u'у'),
+ (0x424, 'M', u'ф'),
+ (0x425, 'M', u'х'),
+ (0x426, 'M', u'ц'),
+ (0x427, 'M', u'ч'),
+ (0x428, 'M', u'ш'),
+ (0x429, 'M', u'щ'),
+ (0x42A, 'M', u'ъ'),
+ (0x42B, 'M', u'ы'),
+ (0x42C, 'M', u'ь'),
+ (0x42D, 'M', u'э'),
+ (0x42E, 'M', u'ю'),
+ (0x42F, 'M', u'я'),
+ (0x430, 'V'),
+ (0x460, 'M', u'ѡ'),
+ (0x461, 'V'),
+ (0x462, 'M', u'ѣ'),
+ (0x463, 'V'),
+ (0x464, 'M', u'ѥ'),
+ (0x465, 'V'),
+ (0x466, 'M', u'ѧ'),
+ (0x467, 'V'),
+ (0x468, 'M', u'ѩ'),
+ (0x469, 'V'),
+ (0x46A, 'M', u'ѫ'),
+ (0x46B, 'V'),
+ (0x46C, 'M', u'ѭ'),
+ (0x46D, 'V'),
+ (0x46E, 'M', u'ѯ'),
+ (0x46F, 'V'),
+ (0x470, 'M', u'ѱ'),
+ (0x471, 'V'),
+ (0x472, 'M', u'ѳ'),
+ (0x473, 'V'),
+ (0x474, 'M', u'ѵ'),
+ (0x475, 'V'),
+ (0x476, 'M', u'ѷ'),
+ (0x477, 'V'),
+ (0x478, 'M', u'ѹ'),
+ (0x479, 'V'),
+ (0x47A, 'M', u'ѻ'),
+ (0x47B, 'V'),
+ (0x47C, 'M', u'ѽ'),
+ (0x47D, 'V'),
+ (0x47E, 'M', u'ѿ'),
+ (0x47F, 'V'),
+ (0x480, 'M', u'ҁ'),
+ (0x481, 'V'),
+ (0x48A, 'M', u'ҋ'),
+ (0x48B, 'V'),
+ (0x48C, 'M', u'ҍ'),
+ (0x48D, 'V'),
+ (0x48E, 'M', u'ҏ'),
+ (0x48F, 'V'),
+ (0x490, 'M', u'ґ'),
+ (0x491, 'V'),
+ (0x492, 'M', u'ғ'),
+ (0x493, 'V'),
+ (0x494, 'M', u'ҕ'),
+ (0x495, 'V'),
+ (0x496, 'M', u'җ'),
+ (0x497, 'V'),
+ (0x498, 'M', u'ҙ'),
+ (0x499, 'V'),
+ (0x49A, 'M', u'қ'),
+ (0x49B, 'V'),
+ (0x49C, 'M', u'ҝ'),
+ (0x49D, 'V'),
+ ]
+
+def _seg_8():
+ return [
+ (0x49E, 'M', u'ҟ'),
+ (0x49F, 'V'),
+ (0x4A0, 'M', u'ҡ'),
+ (0x4A1, 'V'),
+ (0x4A2, 'M', u'ң'),
+ (0x4A3, 'V'),
+ (0x4A4, 'M', u'ҥ'),
+ (0x4A5, 'V'),
+ (0x4A6, 'M', u'ҧ'),
+ (0x4A7, 'V'),
+ (0x4A8, 'M', u'ҩ'),
+ (0x4A9, 'V'),
+ (0x4AA, 'M', u'ҫ'),
+ (0x4AB, 'V'),
+ (0x4AC, 'M', u'ҭ'),
+ (0x4AD, 'V'),
+ (0x4AE, 'M', u'ү'),
+ (0x4AF, 'V'),
+ (0x4B0, 'M', u'ұ'),
+ (0x4B1, 'V'),
+ (0x4B2, 'M', u'ҳ'),
+ (0x4B3, 'V'),
+ (0x4B4, 'M', u'ҵ'),
+ (0x4B5, 'V'),
+ (0x4B6, 'M', u'ҷ'),
+ (0x4B7, 'V'),
+ (0x4B8, 'M', u'ҹ'),
+ (0x4B9, 'V'),
+ (0x4BA, 'M', u'һ'),
+ (0x4BB, 'V'),
+ (0x4BC, 'M', u'ҽ'),
+ (0x4BD, 'V'),
+ (0x4BE, 'M', u'ҿ'),
+ (0x4BF, 'V'),
+ (0x4C0, 'X'),
+ (0x4C1, 'M', u'ӂ'),
+ (0x4C2, 'V'),
+ (0x4C3, 'M', u'ӄ'),
+ (0x4C4, 'V'),
+ (0x4C5, 'M', u'ӆ'),
+ (0x4C6, 'V'),
+ (0x4C7, 'M', u'ӈ'),
+ (0x4C8, 'V'),
+ (0x4C9, 'M', u'ӊ'),
+ (0x4CA, 'V'),
+ (0x4CB, 'M', u'ӌ'),
+ (0x4CC, 'V'),
+ (0x4CD, 'M', u'ӎ'),
+ (0x4CE, 'V'),
+ (0x4D0, 'M', u'ӑ'),
+ (0x4D1, 'V'),
+ (0x4D2, 'M', u'ӓ'),
+ (0x4D3, 'V'),
+ (0x4D4, 'M', u'ӕ'),
+ (0x4D5, 'V'),
+ (0x4D6, 'M', u'ӗ'),
+ (0x4D7, 'V'),
+ (0x4D8, 'M', u'ә'),
+ (0x4D9, 'V'),
+ (0x4DA, 'M', u'ӛ'),
+ (0x4DB, 'V'),
+ (0x4DC, 'M', u'ӝ'),
+ (0x4DD, 'V'),
+ (0x4DE, 'M', u'ӟ'),
+ (0x4DF, 'V'),
+ (0x4E0, 'M', u'ӡ'),
+ (0x4E1, 'V'),
+ (0x4E2, 'M', u'ӣ'),
+ (0x4E3, 'V'),
+ (0x4E4, 'M', u'ӥ'),
+ (0x4E5, 'V'),
+ (0x4E6, 'M', u'ӧ'),
+ (0x4E7, 'V'),
+ (0x4E8, 'M', u'ө'),
+ (0x4E9, 'V'),
+ (0x4EA, 'M', u'ӫ'),
+ (0x4EB, 'V'),
+ (0x4EC, 'M', u'ӭ'),
+ (0x4ED, 'V'),
+ (0x4EE, 'M', u'ӯ'),
+ (0x4EF, 'V'),
+ (0x4F0, 'M', u'ӱ'),
+ (0x4F1, 'V'),
+ (0x4F2, 'M', u'ӳ'),
+ (0x4F3, 'V'),
+ (0x4F4, 'M', u'ӵ'),
+ (0x4F5, 'V'),
+ (0x4F6, 'M', u'ӷ'),
+ (0x4F7, 'V'),
+ (0x4F8, 'M', u'ӹ'),
+ (0x4F9, 'V'),
+ (0x4FA, 'M', u'ӻ'),
+ (0x4FB, 'V'),
+ (0x4FC, 'M', u'ӽ'),
+ (0x4FD, 'V'),
+ (0x4FE, 'M', u'ӿ'),
+ (0x4FF, 'V'),
+ (0x500, 'M', u'ԁ'),
+ (0x501, 'V'),
+ (0x502, 'M', u'ԃ'),
+ ]
+
+def _seg_9():
+ return [
+ (0x503, 'V'),
+ (0x504, 'M', u'ԅ'),
+ (0x505, 'V'),
+ (0x506, 'M', u'ԇ'),
+ (0x507, 'V'),
+ (0x508, 'M', u'ԉ'),
+ (0x509, 'V'),
+ (0x50A, 'M', u'ԋ'),
+ (0x50B, 'V'),
+ (0x50C, 'M', u'ԍ'),
+ (0x50D, 'V'),
+ (0x50E, 'M', u'ԏ'),
+ (0x50F, 'V'),
+ (0x510, 'M', u'ԑ'),
+ (0x511, 'V'),
+ (0x512, 'M', u'ԓ'),
+ (0x513, 'V'),
+ (0x514, 'M', u'ԕ'),
+ (0x515, 'V'),
+ (0x516, 'M', u'ԗ'),
+ (0x517, 'V'),
+ (0x518, 'M', u'ԙ'),
+ (0x519, 'V'),
+ (0x51A, 'M', u'ԛ'),
+ (0x51B, 'V'),
+ (0x51C, 'M', u'ԝ'),
+ (0x51D, 'V'),
+ (0x51E, 'M', u'ԟ'),
+ (0x51F, 'V'),
+ (0x520, 'M', u'ԡ'),
+ (0x521, 'V'),
+ (0x522, 'M', u'ԣ'),
+ (0x523, 'V'),
+ (0x524, 'M', u'ԥ'),
+ (0x525, 'V'),
+ (0x526, 'M', u'ԧ'),
+ (0x527, 'V'),
+ (0x528, 'M', u'ԩ'),
+ (0x529, 'V'),
+ (0x52A, 'M', u'ԫ'),
+ (0x52B, 'V'),
+ (0x52C, 'M', u'ԭ'),
+ (0x52D, 'V'),
+ (0x52E, 'M', u'ԯ'),
+ (0x52F, 'V'),
+ (0x530, 'X'),
+ (0x531, 'M', u'ա'),
+ (0x532, 'M', u'բ'),
+ (0x533, 'M', u'գ'),
+ (0x534, 'M', u'դ'),
+ (0x535, 'M', u'ե'),
+ (0x536, 'M', u'զ'),
+ (0x537, 'M', u'է'),
+ (0x538, 'M', u'ը'),
+ (0x539, 'M', u'թ'),
+ (0x53A, 'M', u'ժ'),
+ (0x53B, 'M', u'ի'),
+ (0x53C, 'M', u'լ'),
+ (0x53D, 'M', u'խ'),
+ (0x53E, 'M', u'ծ'),
+ (0x53F, 'M', u'կ'),
+ (0x540, 'M', u'հ'),
+ (0x541, 'M', u'ձ'),
+ (0x542, 'M', u'ղ'),
+ (0x543, 'M', u'ճ'),
+ (0x544, 'M', u'մ'),
+ (0x545, 'M', u'յ'),
+ (0x546, 'M', u'ն'),
+ (0x547, 'M', u'շ'),
+ (0x548, 'M', u'ո'),
+ (0x549, 'M', u'չ'),
+ (0x54A, 'M', u'պ'),
+ (0x54B, 'M', u'ջ'),
+ (0x54C, 'M', u'ռ'),
+ (0x54D, 'M', u'ս'),
+ (0x54E, 'M', u'վ'),
+ (0x54F, 'M', u'տ'),
+ (0x550, 'M', u'ր'),
+ (0x551, 'M', u'ց'),
+ (0x552, 'M', u'ւ'),
+ (0x553, 'M', u'փ'),
+ (0x554, 'M', u'ք'),
+ (0x555, 'M', u'օ'),
+ (0x556, 'M', u'ֆ'),
+ (0x557, 'X'),
+ (0x559, 'V'),
+ (0x587, 'M', u'եւ'),
+ (0x588, 'V'),
+ (0x58B, 'X'),
+ (0x58D, 'V'),
+ (0x590, 'X'),
+ (0x591, 'V'),
+ (0x5C8, 'X'),
+ (0x5D0, 'V'),
+ (0x5EB, 'X'),
+ (0x5EF, 'V'),
+ (0x5F5, 'X'),
+ (0x606, 'V'),
+ (0x61C, 'X'),
+ (0x61E, 'V'),
+ ]
+
+def _seg_10():
+ return [
+ (0x675, 'M', u'اٴ'),
+ (0x676, 'M', u'وٴ'),
+ (0x677, 'M', u'ۇٴ'),
+ (0x678, 'M', u'يٴ'),
+ (0x679, 'V'),
+ (0x6DD, 'X'),
+ (0x6DE, 'V'),
+ (0x70E, 'X'),
+ (0x710, 'V'),
+ (0x74B, 'X'),
+ (0x74D, 'V'),
+ (0x7B2, 'X'),
+ (0x7C0, 'V'),
+ (0x7FB, 'X'),
+ (0x7FD, 'V'),
+ (0x82E, 'X'),
+ (0x830, 'V'),
+ (0x83F, 'X'),
+ (0x840, 'V'),
+ (0x85C, 'X'),
+ (0x85E, 'V'),
+ (0x85F, 'X'),
+ (0x860, 'V'),
+ (0x86B, 'X'),
+ (0x8A0, 'V'),
+ (0x8B5, 'X'),
+ (0x8B6, 'V'),
+ (0x8C8, 'X'),
+ (0x8D3, 'V'),
+ (0x8E2, 'X'),
+ (0x8E3, 'V'),
+ (0x958, 'M', u'क़'),
+ (0x959, 'M', u'ख़'),
+ (0x95A, 'M', u'ग़'),
+ (0x95B, 'M', u'ज़'),
+ (0x95C, 'M', u'ड़'),
+ (0x95D, 'M', u'ढ़'),
+ (0x95E, 'M', u'फ़'),
+ (0x95F, 'M', u'य़'),
+ (0x960, 'V'),
+ (0x984, 'X'),
+ (0x985, 'V'),
+ (0x98D, 'X'),
+ (0x98F, 'V'),
+ (0x991, 'X'),
+ (0x993, 'V'),
+ (0x9A9, 'X'),
+ (0x9AA, 'V'),
+ (0x9B1, 'X'),
+ (0x9B2, 'V'),
+ (0x9B3, 'X'),
+ (0x9B6, 'V'),
+ (0x9BA, 'X'),
+ (0x9BC, 'V'),
+ (0x9C5, 'X'),
+ (0x9C7, 'V'),
+ (0x9C9, 'X'),
+ (0x9CB, 'V'),
+ (0x9CF, 'X'),
+ (0x9D7, 'V'),
+ (0x9D8, 'X'),
+ (0x9DC, 'M', u'ড়'),
+ (0x9DD, 'M', u'ঢ়'),
+ (0x9DE, 'X'),
+ (0x9DF, 'M', u'য়'),
+ (0x9E0, 'V'),
+ (0x9E4, 'X'),
+ (0x9E6, 'V'),
+ (0x9FF, 'X'),
+ (0xA01, 'V'),
+ (0xA04, 'X'),
+ (0xA05, 'V'),
+ (0xA0B, 'X'),
+ (0xA0F, 'V'),
+ (0xA11, 'X'),
+ (0xA13, 'V'),
+ (0xA29, 'X'),
+ (0xA2A, 'V'),
+ (0xA31, 'X'),
+ (0xA32, 'V'),
+ (0xA33, 'M', u'ਲ਼'),
+ (0xA34, 'X'),
+ (0xA35, 'V'),
+ (0xA36, 'M', u'ਸ਼'),
+ (0xA37, 'X'),
+ (0xA38, 'V'),
+ (0xA3A, 'X'),
+ (0xA3C, 'V'),
+ (0xA3D, 'X'),
+ (0xA3E, 'V'),
+ (0xA43, 'X'),
+ (0xA47, 'V'),
+ (0xA49, 'X'),
+ (0xA4B, 'V'),
+ (0xA4E, 'X'),
+ (0xA51, 'V'),
+ (0xA52, 'X'),
+ (0xA59, 'M', u'ਖ਼'),
+ (0xA5A, 'M', u'ਗ਼'),
+ (0xA5B, 'M', u'ਜ਼'),
+ ]
+
+def _seg_11():
+ return [
+ (0xA5C, 'V'),
+ (0xA5D, 'X'),
+ (0xA5E, 'M', u'ਫ਼'),
+ (0xA5F, 'X'),
+ (0xA66, 'V'),
+ (0xA77, 'X'),
+ (0xA81, 'V'),
+ (0xA84, 'X'),
+ (0xA85, 'V'),
+ (0xA8E, 'X'),
+ (0xA8F, 'V'),
+ (0xA92, 'X'),
+ (0xA93, 'V'),
+ (0xAA9, 'X'),
+ (0xAAA, 'V'),
+ (0xAB1, 'X'),
+ (0xAB2, 'V'),
+ (0xAB4, 'X'),
+ (0xAB5, 'V'),
+ (0xABA, 'X'),
+ (0xABC, 'V'),
+ (0xAC6, 'X'),
+ (0xAC7, 'V'),
+ (0xACA, 'X'),
+ (0xACB, 'V'),
+ (0xACE, 'X'),
+ (0xAD0, 'V'),
+ (0xAD1, 'X'),
+ (0xAE0, 'V'),
+ (0xAE4, 'X'),
+ (0xAE6, 'V'),
+ (0xAF2, 'X'),
+ (0xAF9, 'V'),
+ (0xB00, 'X'),
+ (0xB01, 'V'),
+ (0xB04, 'X'),
+ (0xB05, 'V'),
+ (0xB0D, 'X'),
+ (0xB0F, 'V'),
+ (0xB11, 'X'),
+ (0xB13, 'V'),
+ (0xB29, 'X'),
+ (0xB2A, 'V'),
+ (0xB31, 'X'),
+ (0xB32, 'V'),
+ (0xB34, 'X'),
+ (0xB35, 'V'),
+ (0xB3A, 'X'),
+ (0xB3C, 'V'),
+ (0xB45, 'X'),
+ (0xB47, 'V'),
+ (0xB49, 'X'),
+ (0xB4B, 'V'),
+ (0xB4E, 'X'),
+ (0xB55, 'V'),
+ (0xB58, 'X'),
+ (0xB5C, 'M', u'ଡ଼'),
+ (0xB5D, 'M', u'ଢ଼'),
+ (0xB5E, 'X'),
+ (0xB5F, 'V'),
+ (0xB64, 'X'),
+ (0xB66, 'V'),
+ (0xB78, 'X'),
+ (0xB82, 'V'),
+ (0xB84, 'X'),
+ (0xB85, 'V'),
+ (0xB8B, 'X'),
+ (0xB8E, 'V'),
+ (0xB91, 'X'),
+ (0xB92, 'V'),
+ (0xB96, 'X'),
+ (0xB99, 'V'),
+ (0xB9B, 'X'),
+ (0xB9C, 'V'),
+ (0xB9D, 'X'),
+ (0xB9E, 'V'),
+ (0xBA0, 'X'),
+ (0xBA3, 'V'),
+ (0xBA5, 'X'),
+ (0xBA8, 'V'),
+ (0xBAB, 'X'),
+ (0xBAE, 'V'),
+ (0xBBA, 'X'),
+ (0xBBE, 'V'),
+ (0xBC3, 'X'),
+ (0xBC6, 'V'),
+ (0xBC9, 'X'),
+ (0xBCA, 'V'),
+ (0xBCE, 'X'),
+ (0xBD0, 'V'),
+ (0xBD1, 'X'),
+ (0xBD7, 'V'),
+ (0xBD8, 'X'),
+ (0xBE6, 'V'),
+ (0xBFB, 'X'),
+ (0xC00, 'V'),
+ (0xC0D, 'X'),
+ (0xC0E, 'V'),
+ (0xC11, 'X'),
+ (0xC12, 'V'),
+ ]
+
+def _seg_12():
+ return [
+ (0xC29, 'X'),
+ (0xC2A, 'V'),
+ (0xC3A, 'X'),
+ (0xC3D, 'V'),
+ (0xC45, 'X'),
+ (0xC46, 'V'),
+ (0xC49, 'X'),
+ (0xC4A, 'V'),
+ (0xC4E, 'X'),
+ (0xC55, 'V'),
+ (0xC57, 'X'),
+ (0xC58, 'V'),
+ (0xC5B, 'X'),
+ (0xC60, 'V'),
+ (0xC64, 'X'),
+ (0xC66, 'V'),
+ (0xC70, 'X'),
+ (0xC77, 'V'),
+ (0xC8D, 'X'),
+ (0xC8E, 'V'),
+ (0xC91, 'X'),
+ (0xC92, 'V'),
+ (0xCA9, 'X'),
+ (0xCAA, 'V'),
+ (0xCB4, 'X'),
+ (0xCB5, 'V'),
+ (0xCBA, 'X'),
+ (0xCBC, 'V'),
+ (0xCC5, 'X'),
+ (0xCC6, 'V'),
+ (0xCC9, 'X'),
+ (0xCCA, 'V'),
+ (0xCCE, 'X'),
+ (0xCD5, 'V'),
+ (0xCD7, 'X'),
+ (0xCDE, 'V'),
+ (0xCDF, 'X'),
+ (0xCE0, 'V'),
+ (0xCE4, 'X'),
+ (0xCE6, 'V'),
+ (0xCF0, 'X'),
+ (0xCF1, 'V'),
+ (0xCF3, 'X'),
+ (0xD00, 'V'),
+ (0xD0D, 'X'),
+ (0xD0E, 'V'),
+ (0xD11, 'X'),
+ (0xD12, 'V'),
+ (0xD45, 'X'),
+ (0xD46, 'V'),
+ (0xD49, 'X'),
+ (0xD4A, 'V'),
+ (0xD50, 'X'),
+ (0xD54, 'V'),
+ (0xD64, 'X'),
+ (0xD66, 'V'),
+ (0xD80, 'X'),
+ (0xD81, 'V'),
+ (0xD84, 'X'),
+ (0xD85, 'V'),
+ (0xD97, 'X'),
+ (0xD9A, 'V'),
+ (0xDB2, 'X'),
+ (0xDB3, 'V'),
+ (0xDBC, 'X'),
+ (0xDBD, 'V'),
+ (0xDBE, 'X'),
+ (0xDC0, 'V'),
+ (0xDC7, 'X'),
+ (0xDCA, 'V'),
+ (0xDCB, 'X'),
+ (0xDCF, 'V'),
+ (0xDD5, 'X'),
+ (0xDD6, 'V'),
+ (0xDD7, 'X'),
+ (0xDD8, 'V'),
+ (0xDE0, 'X'),
+ (0xDE6, 'V'),
+ (0xDF0, 'X'),
+ (0xDF2, 'V'),
+ (0xDF5, 'X'),
+ (0xE01, 'V'),
+ (0xE33, 'M', u'ํา'),
+ (0xE34, 'V'),
+ (0xE3B, 'X'),
+ (0xE3F, 'V'),
+ (0xE5C, 'X'),
+ (0xE81, 'V'),
+ (0xE83, 'X'),
+ (0xE84, 'V'),
+ (0xE85, 'X'),
+ (0xE86, 'V'),
+ (0xE8B, 'X'),
+ (0xE8C, 'V'),
+ (0xEA4, 'X'),
+ (0xEA5, 'V'),
+ (0xEA6, 'X'),
+ (0xEA7, 'V'),
+ (0xEB3, 'M', u'ໍາ'),
+ (0xEB4, 'V'),
+ ]
+
+def _seg_13():
+ return [
+ (0xEBE, 'X'),
+ (0xEC0, 'V'),
+ (0xEC5, 'X'),
+ (0xEC6, 'V'),
+ (0xEC7, 'X'),
+ (0xEC8, 'V'),
+ (0xECE, 'X'),
+ (0xED0, 'V'),
+ (0xEDA, 'X'),
+ (0xEDC, 'M', u'ຫນ'),
+ (0xEDD, 'M', u'ຫມ'),
+ (0xEDE, 'V'),
+ (0xEE0, 'X'),
+ (0xF00, 'V'),
+ (0xF0C, 'M', u'་'),
+ (0xF0D, 'V'),
+ (0xF43, 'M', u'གྷ'),
+ (0xF44, 'V'),
+ (0xF48, 'X'),
+ (0xF49, 'V'),
+ (0xF4D, 'M', u'ཌྷ'),
+ (0xF4E, 'V'),
+ (0xF52, 'M', u'དྷ'),
+ (0xF53, 'V'),
+ (0xF57, 'M', u'བྷ'),
+ (0xF58, 'V'),
+ (0xF5C, 'M', u'ཛྷ'),
+ (0xF5D, 'V'),
+ (0xF69, 'M', u'ཀྵ'),
+ (0xF6A, 'V'),
+ (0xF6D, 'X'),
+ (0xF71, 'V'),
+ (0xF73, 'M', u'ཱི'),
+ (0xF74, 'V'),
+ (0xF75, 'M', u'ཱུ'),
+ (0xF76, 'M', u'ྲྀ'),
+ (0xF77, 'M', u'ྲཱྀ'),
+ (0xF78, 'M', u'ླྀ'),
+ (0xF79, 'M', u'ླཱྀ'),
+ (0xF7A, 'V'),
+ (0xF81, 'M', u'ཱྀ'),
+ (0xF82, 'V'),
+ (0xF93, 'M', u'ྒྷ'),
+ (0xF94, 'V'),
+ (0xF98, 'X'),
+ (0xF99, 'V'),
+ (0xF9D, 'M', u'ྜྷ'),
+ (0xF9E, 'V'),
+ (0xFA2, 'M', u'ྡྷ'),
+ (0xFA3, 'V'),
+ (0xFA7, 'M', u'ྦྷ'),
+ (0xFA8, 'V'),
+ (0xFAC, 'M', u'ྫྷ'),
+ (0xFAD, 'V'),
+ (0xFB9, 'M', u'ྐྵ'),
+ (0xFBA, 'V'),
+ (0xFBD, 'X'),
+ (0xFBE, 'V'),
+ (0xFCD, 'X'),
+ (0xFCE, 'V'),
+ (0xFDB, 'X'),
+ (0x1000, 'V'),
+ (0x10A0, 'X'),
+ (0x10C7, 'M', u'ⴧ'),
+ (0x10C8, 'X'),
+ (0x10CD, 'M', u'ⴭ'),
+ (0x10CE, 'X'),
+ (0x10D0, 'V'),
+ (0x10FC, 'M', u'ნ'),
+ (0x10FD, 'V'),
+ (0x115F, 'X'),
+ (0x1161, 'V'),
+ (0x1249, 'X'),
+ (0x124A, 'V'),
+ (0x124E, 'X'),
+ (0x1250, 'V'),
+ (0x1257, 'X'),
+ (0x1258, 'V'),
+ (0x1259, 'X'),
+ (0x125A, 'V'),
+ (0x125E, 'X'),
+ (0x1260, 'V'),
+ (0x1289, 'X'),
+ (0x128A, 'V'),
+ (0x128E, 'X'),
+ (0x1290, 'V'),
+ (0x12B1, 'X'),
+ (0x12B2, 'V'),
+ (0x12B6, 'X'),
+ (0x12B8, 'V'),
+ (0x12BF, 'X'),
+ (0x12C0, 'V'),
+ (0x12C1, 'X'),
+ (0x12C2, 'V'),
+ (0x12C6, 'X'),
+ (0x12C8, 'V'),
+ (0x12D7, 'X'),
+ (0x12D8, 'V'),
+ (0x1311, 'X'),
+ (0x1312, 'V'),
+ ]
+
+def _seg_14():
+ return [
+ (0x1316, 'X'),
+ (0x1318, 'V'),
+ (0x135B, 'X'),
+ (0x135D, 'V'),
+ (0x137D, 'X'),
+ (0x1380, 'V'),
+ (0x139A, 'X'),
+ (0x13A0, 'V'),
+ (0x13F6, 'X'),
+ (0x13F8, 'M', u'Ᏸ'),
+ (0x13F9, 'M', u'Ᏹ'),
+ (0x13FA, 'M', u'Ᏺ'),
+ (0x13FB, 'M', u'Ᏻ'),
+ (0x13FC, 'M', u'Ᏼ'),
+ (0x13FD, 'M', u'Ᏽ'),
+ (0x13FE, 'X'),
+ (0x1400, 'V'),
+ (0x1680, 'X'),
+ (0x1681, 'V'),
+ (0x169D, 'X'),
+ (0x16A0, 'V'),
+ (0x16F9, 'X'),
+ (0x1700, 'V'),
+ (0x170D, 'X'),
+ (0x170E, 'V'),
+ (0x1715, 'X'),
+ (0x1720, 'V'),
+ (0x1737, 'X'),
+ (0x1740, 'V'),
+ (0x1754, 'X'),
+ (0x1760, 'V'),
+ (0x176D, 'X'),
+ (0x176E, 'V'),
+ (0x1771, 'X'),
+ (0x1772, 'V'),
+ (0x1774, 'X'),
+ (0x1780, 'V'),
+ (0x17B4, 'X'),
+ (0x17B6, 'V'),
+ (0x17DE, 'X'),
+ (0x17E0, 'V'),
+ (0x17EA, 'X'),
+ (0x17F0, 'V'),
+ (0x17FA, 'X'),
+ (0x1800, 'V'),
+ (0x1806, 'X'),
+ (0x1807, 'V'),
+ (0x180B, 'I'),
+ (0x180E, 'X'),
+ (0x1810, 'V'),
+ (0x181A, 'X'),
+ (0x1820, 'V'),
+ (0x1879, 'X'),
+ (0x1880, 'V'),
+ (0x18AB, 'X'),
+ (0x18B0, 'V'),
+ (0x18F6, 'X'),
+ (0x1900, 'V'),
+ (0x191F, 'X'),
+ (0x1920, 'V'),
+ (0x192C, 'X'),
+ (0x1930, 'V'),
+ (0x193C, 'X'),
+ (0x1940, 'V'),
+ (0x1941, 'X'),
+ (0x1944, 'V'),
+ (0x196E, 'X'),
+ (0x1970, 'V'),
+ (0x1975, 'X'),
+ (0x1980, 'V'),
+ (0x19AC, 'X'),
+ (0x19B0, 'V'),
+ (0x19CA, 'X'),
+ (0x19D0, 'V'),
+ (0x19DB, 'X'),
+ (0x19DE, 'V'),
+ (0x1A1C, 'X'),
+ (0x1A1E, 'V'),
+ (0x1A5F, 'X'),
+ (0x1A60, 'V'),
+ (0x1A7D, 'X'),
+ (0x1A7F, 'V'),
+ (0x1A8A, 'X'),
+ (0x1A90, 'V'),
+ (0x1A9A, 'X'),
+ (0x1AA0, 'V'),
+ (0x1AAE, 'X'),
+ (0x1AB0, 'V'),
+ (0x1AC1, 'X'),
+ (0x1B00, 'V'),
+ (0x1B4C, 'X'),
+ (0x1B50, 'V'),
+ (0x1B7D, 'X'),
+ (0x1B80, 'V'),
+ (0x1BF4, 'X'),
+ (0x1BFC, 'V'),
+ (0x1C38, 'X'),
+ (0x1C3B, 'V'),
+ (0x1C4A, 'X'),
+ (0x1C4D, 'V'),
+ ]
+
+def _seg_15():
+ return [
+ (0x1C80, 'M', u'в'),
+ (0x1C81, 'M', u'д'),
+ (0x1C82, 'M', u'о'),
+ (0x1C83, 'M', u'с'),
+ (0x1C84, 'M', u'т'),
+ (0x1C86, 'M', u'ъ'),
+ (0x1C87, 'M', u'ѣ'),
+ (0x1C88, 'M', u'ꙋ'),
+ (0x1C89, 'X'),
+ (0x1C90, 'M', u'ა'),
+ (0x1C91, 'M', u'ბ'),
+ (0x1C92, 'M', u'გ'),
+ (0x1C93, 'M', u'დ'),
+ (0x1C94, 'M', u'ე'),
+ (0x1C95, 'M', u'ვ'),
+ (0x1C96, 'M', u'ზ'),
+ (0x1C97, 'M', u'თ'),
+ (0x1C98, 'M', u'ი'),
+ (0x1C99, 'M', u'კ'),
+ (0x1C9A, 'M', u'ლ'),
+ (0x1C9B, 'M', u'მ'),
+ (0x1C9C, 'M', u'ნ'),
+ (0x1C9D, 'M', u'ო'),
+ (0x1C9E, 'M', u'პ'),
+ (0x1C9F, 'M', u'ჟ'),
+ (0x1CA0, 'M', u'რ'),
+ (0x1CA1, 'M', u'ს'),
+ (0x1CA2, 'M', u'ტ'),
+ (0x1CA3, 'M', u'უ'),
+ (0x1CA4, 'M', u'ფ'),
+ (0x1CA5, 'M', u'ქ'),
+ (0x1CA6, 'M', u'ღ'),
+ (0x1CA7, 'M', u'ყ'),
+ (0x1CA8, 'M', u'შ'),
+ (0x1CA9, 'M', u'ჩ'),
+ (0x1CAA, 'M', u'ც'),
+ (0x1CAB, 'M', u'ძ'),
+ (0x1CAC, 'M', u'წ'),
+ (0x1CAD, 'M', u'ჭ'),
+ (0x1CAE, 'M', u'ხ'),
+ (0x1CAF, 'M', u'ჯ'),
+ (0x1CB0, 'M', u'ჰ'),
+ (0x1CB1, 'M', u'ჱ'),
+ (0x1CB2, 'M', u'ჲ'),
+ (0x1CB3, 'M', u'ჳ'),
+ (0x1CB4, 'M', u'ჴ'),
+ (0x1CB5, 'M', u'ჵ'),
+ (0x1CB6, 'M', u'ჶ'),
+ (0x1CB7, 'M', u'ჷ'),
+ (0x1CB8, 'M', u'ჸ'),
+ (0x1CB9, 'M', u'ჹ'),
+ (0x1CBA, 'M', u'ჺ'),
+ (0x1CBB, 'X'),
+ (0x1CBD, 'M', u'ჽ'),
+ (0x1CBE, 'M', u'ჾ'),
+ (0x1CBF, 'M', u'ჿ'),
+ (0x1CC0, 'V'),
+ (0x1CC8, 'X'),
+ (0x1CD0, 'V'),
+ (0x1CFB, 'X'),
+ (0x1D00, 'V'),
+ (0x1D2C, 'M', u'a'),
+ (0x1D2D, 'M', u'æ'),
+ (0x1D2E, 'M', u'b'),
+ (0x1D2F, 'V'),
+ (0x1D30, 'M', u'd'),
+ (0x1D31, 'M', u'e'),
+ (0x1D32, 'M', u'ǝ'),
+ (0x1D33, 'M', u'g'),
+ (0x1D34, 'M', u'h'),
+ (0x1D35, 'M', u'i'),
+ (0x1D36, 'M', u'j'),
+ (0x1D37, 'M', u'k'),
+ (0x1D38, 'M', u'l'),
+ (0x1D39, 'M', u'm'),
+ (0x1D3A, 'M', u'n'),
+ (0x1D3B, 'V'),
+ (0x1D3C, 'M', u'o'),
+ (0x1D3D, 'M', u'ȣ'),
+ (0x1D3E, 'M', u'p'),
+ (0x1D3F, 'M', u'r'),
+ (0x1D40, 'M', u't'),
+ (0x1D41, 'M', u'u'),
+ (0x1D42, 'M', u'w'),
+ (0x1D43, 'M', u'a'),
+ (0x1D44, 'M', u'ɐ'),
+ (0x1D45, 'M', u'ɑ'),
+ (0x1D46, 'M', u'ᴂ'),
+ (0x1D47, 'M', u'b'),
+ (0x1D48, 'M', u'd'),
+ (0x1D49, 'M', u'e'),
+ (0x1D4A, 'M', u'ə'),
+ (0x1D4B, 'M', u'ɛ'),
+ (0x1D4C, 'M', u'ɜ'),
+ (0x1D4D, 'M', u'g'),
+ (0x1D4E, 'V'),
+ (0x1D4F, 'M', u'k'),
+ (0x1D50, 'M', u'm'),
+ (0x1D51, 'M', u'ŋ'),
+ (0x1D52, 'M', u'o'),
+ ]
+
+def _seg_16():
+ return [
+ (0x1D53, 'M', u'ɔ'),
+ (0x1D54, 'M', u'ᴖ'),
+ (0x1D55, 'M', u'ᴗ'),
+ (0x1D56, 'M', u'p'),
+ (0x1D57, 'M', u't'),
+ (0x1D58, 'M', u'u'),
+ (0x1D59, 'M', u'ᴝ'),
+ (0x1D5A, 'M', u'ɯ'),
+ (0x1D5B, 'M', u'v'),
+ (0x1D5C, 'M', u'ᴥ'),
+ (0x1D5D, 'M', u'β'),
+ (0x1D5E, 'M', u'γ'),
+ (0x1D5F, 'M', u'δ'),
+ (0x1D60, 'M', u'φ'),
+ (0x1D61, 'M', u'χ'),
+ (0x1D62, 'M', u'i'),
+ (0x1D63, 'M', u'r'),
+ (0x1D64, 'M', u'u'),
+ (0x1D65, 'M', u'v'),
+ (0x1D66, 'M', u'β'),
+ (0x1D67, 'M', u'γ'),
+ (0x1D68, 'M', u'ρ'),
+ (0x1D69, 'M', u'φ'),
+ (0x1D6A, 'M', u'χ'),
+ (0x1D6B, 'V'),
+ (0x1D78, 'M', u'н'),
+ (0x1D79, 'V'),
+ (0x1D9B, 'M', u'ɒ'),
+ (0x1D9C, 'M', u'c'),
+ (0x1D9D, 'M', u'ɕ'),
+ (0x1D9E, 'M', u'ð'),
+ (0x1D9F, 'M', u'ɜ'),
+ (0x1DA0, 'M', u'f'),
+ (0x1DA1, 'M', u'ɟ'),
+ (0x1DA2, 'M', u'ɡ'),
+ (0x1DA3, 'M', u'ɥ'),
+ (0x1DA4, 'M', u'ɨ'),
+ (0x1DA5, 'M', u'ɩ'),
+ (0x1DA6, 'M', u'ɪ'),
+ (0x1DA7, 'M', u'ᵻ'),
+ (0x1DA8, 'M', u'ʝ'),
+ (0x1DA9, 'M', u'ɭ'),
+ (0x1DAA, 'M', u'ᶅ'),
+ (0x1DAB, 'M', u'ʟ'),
+ (0x1DAC, 'M', u'ɱ'),
+ (0x1DAD, 'M', u'ɰ'),
+ (0x1DAE, 'M', u'ɲ'),
+ (0x1DAF, 'M', u'ɳ'),
+ (0x1DB0, 'M', u'ɴ'),
+ (0x1DB1, 'M', u'ɵ'),
+ (0x1DB2, 'M', u'ɸ'),
+ (0x1DB3, 'M', u'ʂ'),
+ (0x1DB4, 'M', u'ʃ'),
+ (0x1DB5, 'M', u'ƫ'),
+ (0x1DB6, 'M', u'ʉ'),
+ (0x1DB7, 'M', u'ʊ'),
+ (0x1DB8, 'M', u'ᴜ'),
+ (0x1DB9, 'M', u'ʋ'),
+ (0x1DBA, 'M', u'ʌ'),
+ (0x1DBB, 'M', u'z'),
+ (0x1DBC, 'M', u'ʐ'),
+ (0x1DBD, 'M', u'ʑ'),
+ (0x1DBE, 'M', u'ʒ'),
+ (0x1DBF, 'M', u'θ'),
+ (0x1DC0, 'V'),
+ (0x1DFA, 'X'),
+ (0x1DFB, 'V'),
+ (0x1E00, 'M', u'ḁ'),
+ (0x1E01, 'V'),
+ (0x1E02, 'M', u'ḃ'),
+ (0x1E03, 'V'),
+ (0x1E04, 'M', u'ḅ'),
+ (0x1E05, 'V'),
+ (0x1E06, 'M', u'ḇ'),
+ (0x1E07, 'V'),
+ (0x1E08, 'M', u'ḉ'),
+ (0x1E09, 'V'),
+ (0x1E0A, 'M', u'ḋ'),
+ (0x1E0B, 'V'),
+ (0x1E0C, 'M', u'ḍ'),
+ (0x1E0D, 'V'),
+ (0x1E0E, 'M', u'ḏ'),
+ (0x1E0F, 'V'),
+ (0x1E10, 'M', u'ḑ'),
+ (0x1E11, 'V'),
+ (0x1E12, 'M', u'ḓ'),
+ (0x1E13, 'V'),
+ (0x1E14, 'M', u'ḕ'),
+ (0x1E15, 'V'),
+ (0x1E16, 'M', u'ḗ'),
+ (0x1E17, 'V'),
+ (0x1E18, 'M', u'ḙ'),
+ (0x1E19, 'V'),
+ (0x1E1A, 'M', u'ḛ'),
+ (0x1E1B, 'V'),
+ (0x1E1C, 'M', u'ḝ'),
+ (0x1E1D, 'V'),
+ (0x1E1E, 'M', u'ḟ'),
+ (0x1E1F, 'V'),
+ (0x1E20, 'M', u'ḡ'),
+ ]
+
+def _seg_17():
+ return [
+ (0x1E21, 'V'),
+ (0x1E22, 'M', u'ḣ'),
+ (0x1E23, 'V'),
+ (0x1E24, 'M', u'ḥ'),
+ (0x1E25, 'V'),
+ (0x1E26, 'M', u'ḧ'),
+ (0x1E27, 'V'),
+ (0x1E28, 'M', u'ḩ'),
+ (0x1E29, 'V'),
+ (0x1E2A, 'M', u'ḫ'),
+ (0x1E2B, 'V'),
+ (0x1E2C, 'M', u'ḭ'),
+ (0x1E2D, 'V'),
+ (0x1E2E, 'M', u'ḯ'),
+ (0x1E2F, 'V'),
+ (0x1E30, 'M', u'ḱ'),
+ (0x1E31, 'V'),
+ (0x1E32, 'M', u'ḳ'),
+ (0x1E33, 'V'),
+ (0x1E34, 'M', u'ḵ'),
+ (0x1E35, 'V'),
+ (0x1E36, 'M', u'ḷ'),
+ (0x1E37, 'V'),
+ (0x1E38, 'M', u'ḹ'),
+ (0x1E39, 'V'),
+ (0x1E3A, 'M', u'ḻ'),
+ (0x1E3B, 'V'),
+ (0x1E3C, 'M', u'ḽ'),
+ (0x1E3D, 'V'),
+ (0x1E3E, 'M', u'ḿ'),
+ (0x1E3F, 'V'),
+ (0x1E40, 'M', u'ṁ'),
+ (0x1E41, 'V'),
+ (0x1E42, 'M', u'ṃ'),
+ (0x1E43, 'V'),
+ (0x1E44, 'M', u'ṅ'),
+ (0x1E45, 'V'),
+ (0x1E46, 'M', u'ṇ'),
+ (0x1E47, 'V'),
+ (0x1E48, 'M', u'ṉ'),
+ (0x1E49, 'V'),
+ (0x1E4A, 'M', u'ṋ'),
+ (0x1E4B, 'V'),
+ (0x1E4C, 'M', u'ṍ'),
+ (0x1E4D, 'V'),
+ (0x1E4E, 'M', u'ṏ'),
+ (0x1E4F, 'V'),
+ (0x1E50, 'M', u'ṑ'),
+ (0x1E51, 'V'),
+ (0x1E52, 'M', u'ṓ'),
+ (0x1E53, 'V'),
+ (0x1E54, 'M', u'ṕ'),
+ (0x1E55, 'V'),
+ (0x1E56, 'M', u'ṗ'),
+ (0x1E57, 'V'),
+ (0x1E58, 'M', u'ṙ'),
+ (0x1E59, 'V'),
+ (0x1E5A, 'M', u'ṛ'),
+ (0x1E5B, 'V'),
+ (0x1E5C, 'M', u'ṝ'),
+ (0x1E5D, 'V'),
+ (0x1E5E, 'M', u'ṟ'),
+ (0x1E5F, 'V'),
+ (0x1E60, 'M', u'ṡ'),
+ (0x1E61, 'V'),
+ (0x1E62, 'M', u'ṣ'),
+ (0x1E63, 'V'),
+ (0x1E64, 'M', u'ṥ'),
+ (0x1E65, 'V'),
+ (0x1E66, 'M', u'ṧ'),
+ (0x1E67, 'V'),
+ (0x1E68, 'M', u'ṩ'),
+ (0x1E69, 'V'),
+ (0x1E6A, 'M', u'ṫ'),
+ (0x1E6B, 'V'),
+ (0x1E6C, 'M', u'ṭ'),
+ (0x1E6D, 'V'),
+ (0x1E6E, 'M', u'ṯ'),
+ (0x1E6F, 'V'),
+ (0x1E70, 'M', u'ṱ'),
+ (0x1E71, 'V'),
+ (0x1E72, 'M', u'ṳ'),
+ (0x1E73, 'V'),
+ (0x1E74, 'M', u'ṵ'),
+ (0x1E75, 'V'),
+ (0x1E76, 'M', u'ṷ'),
+ (0x1E77, 'V'),
+ (0x1E78, 'M', u'ṹ'),
+ (0x1E79, 'V'),
+ (0x1E7A, 'M', u'ṻ'),
+ (0x1E7B, 'V'),
+ (0x1E7C, 'M', u'ṽ'),
+ (0x1E7D, 'V'),
+ (0x1E7E, 'M', u'ṿ'),
+ (0x1E7F, 'V'),
+ (0x1E80, 'M', u'ẁ'),
+ (0x1E81, 'V'),
+ (0x1E82, 'M', u'ẃ'),
+ (0x1E83, 'V'),
+ (0x1E84, 'M', u'ẅ'),
+ ]
+
+def _seg_18():
+ return [
+ (0x1E85, 'V'),
+ (0x1E86, 'M', u'ẇ'),
+ (0x1E87, 'V'),
+ (0x1E88, 'M', u'ẉ'),
+ (0x1E89, 'V'),
+ (0x1E8A, 'M', u'ẋ'),
+ (0x1E8B, 'V'),
+ (0x1E8C, 'M', u'ẍ'),
+ (0x1E8D, 'V'),
+ (0x1E8E, 'M', u'ẏ'),
+ (0x1E8F, 'V'),
+ (0x1E90, 'M', u'ẑ'),
+ (0x1E91, 'V'),
+ (0x1E92, 'M', u'ẓ'),
+ (0x1E93, 'V'),
+ (0x1E94, 'M', u'ẕ'),
+ (0x1E95, 'V'),
+ (0x1E9A, 'M', u'aʾ'),
+ (0x1E9B, 'M', u'ṡ'),
+ (0x1E9C, 'V'),
+ (0x1E9E, 'M', u'ss'),
+ (0x1E9F, 'V'),
+ (0x1EA0, 'M', u'ạ'),
+ (0x1EA1, 'V'),
+ (0x1EA2, 'M', u'ả'),
+ (0x1EA3, 'V'),
+ (0x1EA4, 'M', u'ấ'),
+ (0x1EA5, 'V'),
+ (0x1EA6, 'M', u'ầ'),
+ (0x1EA7, 'V'),
+ (0x1EA8, 'M', u'ẩ'),
+ (0x1EA9, 'V'),
+ (0x1EAA, 'M', u'ẫ'),
+ (0x1EAB, 'V'),
+ (0x1EAC, 'M', u'ậ'),
+ (0x1EAD, 'V'),
+ (0x1EAE, 'M', u'ắ'),
+ (0x1EAF, 'V'),
+ (0x1EB0, 'M', u'ằ'),
+ (0x1EB1, 'V'),
+ (0x1EB2, 'M', u'ẳ'),
+ (0x1EB3, 'V'),
+ (0x1EB4, 'M', u'ẵ'),
+ (0x1EB5, 'V'),
+ (0x1EB6, 'M', u'ặ'),
+ (0x1EB7, 'V'),
+ (0x1EB8, 'M', u'ẹ'),
+ (0x1EB9, 'V'),
+ (0x1EBA, 'M', u'ẻ'),
+ (0x1EBB, 'V'),
+ (0x1EBC, 'M', u'ẽ'),
+ (0x1EBD, 'V'),
+ (0x1EBE, 'M', u'ế'),
+ (0x1EBF, 'V'),
+ (0x1EC0, 'M', u'ề'),
+ (0x1EC1, 'V'),
+ (0x1EC2, 'M', u'ể'),
+ (0x1EC3, 'V'),
+ (0x1EC4, 'M', u'ễ'),
+ (0x1EC5, 'V'),
+ (0x1EC6, 'M', u'ệ'),
+ (0x1EC7, 'V'),
+ (0x1EC8, 'M', u'ỉ'),
+ (0x1EC9, 'V'),
+ (0x1ECA, 'M', u'ị'),
+ (0x1ECB, 'V'),
+ (0x1ECC, 'M', u'ọ'),
+ (0x1ECD, 'V'),
+ (0x1ECE, 'M', u'ỏ'),
+ (0x1ECF, 'V'),
+ (0x1ED0, 'M', u'ố'),
+ (0x1ED1, 'V'),
+ (0x1ED2, 'M', u'ồ'),
+ (0x1ED3, 'V'),
+ (0x1ED4, 'M', u'ổ'),
+ (0x1ED5, 'V'),
+ (0x1ED6, 'M', u'ỗ'),
+ (0x1ED7, 'V'),
+ (0x1ED8, 'M', u'ộ'),
+ (0x1ED9, 'V'),
+ (0x1EDA, 'M', u'ớ'),
+ (0x1EDB, 'V'),
+ (0x1EDC, 'M', u'ờ'),
+ (0x1EDD, 'V'),
+ (0x1EDE, 'M', u'ở'),
+ (0x1EDF, 'V'),
+ (0x1EE0, 'M', u'ỡ'),
+ (0x1EE1, 'V'),
+ (0x1EE2, 'M', u'ợ'),
+ (0x1EE3, 'V'),
+ (0x1EE4, 'M', u'ụ'),
+ (0x1EE5, 'V'),
+ (0x1EE6, 'M', u'ủ'),
+ (0x1EE7, 'V'),
+ (0x1EE8, 'M', u'ứ'),
+ (0x1EE9, 'V'),
+ (0x1EEA, 'M', u'ừ'),
+ (0x1EEB, 'V'),
+ (0x1EEC, 'M', u'ử'),
+ (0x1EED, 'V'),
+ ]
+
+def _seg_19():
+ return [
+ (0x1EEE, 'M', u'ữ'),
+ (0x1EEF, 'V'),
+ (0x1EF0, 'M', u'ự'),
+ (0x1EF1, 'V'),
+ (0x1EF2, 'M', u'ỳ'),
+ (0x1EF3, 'V'),
+ (0x1EF4, 'M', u'ỵ'),
+ (0x1EF5, 'V'),
+ (0x1EF6, 'M', u'ỷ'),
+ (0x1EF7, 'V'),
+ (0x1EF8, 'M', u'ỹ'),
+ (0x1EF9, 'V'),
+ (0x1EFA, 'M', u'ỻ'),
+ (0x1EFB, 'V'),
+ (0x1EFC, 'M', u'ỽ'),
+ (0x1EFD, 'V'),
+ (0x1EFE, 'M', u'ỿ'),
+ (0x1EFF, 'V'),
+ (0x1F08, 'M', u'ἀ'),
+ (0x1F09, 'M', u'ἁ'),
+ (0x1F0A, 'M', u'ἂ'),
+ (0x1F0B, 'M', u'ἃ'),
+ (0x1F0C, 'M', u'ἄ'),
+ (0x1F0D, 'M', u'ἅ'),
+ (0x1F0E, 'M', u'ἆ'),
+ (0x1F0F, 'M', u'ἇ'),
+ (0x1F10, 'V'),
+ (0x1F16, 'X'),
+ (0x1F18, 'M', u'ἐ'),
+ (0x1F19, 'M', u'ἑ'),
+ (0x1F1A, 'M', u'ἒ'),
+ (0x1F1B, 'M', u'ἓ'),
+ (0x1F1C, 'M', u'ἔ'),
+ (0x1F1D, 'M', u'ἕ'),
+ (0x1F1E, 'X'),
+ (0x1F20, 'V'),
+ (0x1F28, 'M', u'ἠ'),
+ (0x1F29, 'M', u'ἡ'),
+ (0x1F2A, 'M', u'ἢ'),
+ (0x1F2B, 'M', u'ἣ'),
+ (0x1F2C, 'M', u'ἤ'),
+ (0x1F2D, 'M', u'ἥ'),
+ (0x1F2E, 'M', u'ἦ'),
+ (0x1F2F, 'M', u'ἧ'),
+ (0x1F30, 'V'),
+ (0x1F38, 'M', u'ἰ'),
+ (0x1F39, 'M', u'ἱ'),
+ (0x1F3A, 'M', u'ἲ'),
+ (0x1F3B, 'M', u'ἳ'),
+ (0x1F3C, 'M', u'ἴ'),
+ (0x1F3D, 'M', u'ἵ'),
+ (0x1F3E, 'M', u'ἶ'),
+ (0x1F3F, 'M', u'ἷ'),
+ (0x1F40, 'V'),
+ (0x1F46, 'X'),
+ (0x1F48, 'M', u'ὀ'),
+ (0x1F49, 'M', u'ὁ'),
+ (0x1F4A, 'M', u'ὂ'),
+ (0x1F4B, 'M', u'ὃ'),
+ (0x1F4C, 'M', u'ὄ'),
+ (0x1F4D, 'M', u'ὅ'),
+ (0x1F4E, 'X'),
+ (0x1F50, 'V'),
+ (0x1F58, 'X'),
+ (0x1F59, 'M', u'ὑ'),
+ (0x1F5A, 'X'),
+ (0x1F5B, 'M', u'ὓ'),
+ (0x1F5C, 'X'),
+ (0x1F5D, 'M', u'ὕ'),
+ (0x1F5E, 'X'),
+ (0x1F5F, 'M', u'ὗ'),
+ (0x1F60, 'V'),
+ (0x1F68, 'M', u'ὠ'),
+ (0x1F69, 'M', u'ὡ'),
+ (0x1F6A, 'M', u'ὢ'),
+ (0x1F6B, 'M', u'ὣ'),
+ (0x1F6C, 'M', u'ὤ'),
+ (0x1F6D, 'M', u'ὥ'),
+ (0x1F6E, 'M', u'ὦ'),
+ (0x1F6F, 'M', u'ὧ'),
+ (0x1F70, 'V'),
+ (0x1F71, 'M', u'ά'),
+ (0x1F72, 'V'),
+ (0x1F73, 'M', u'έ'),
+ (0x1F74, 'V'),
+ (0x1F75, 'M', u'ή'),
+ (0x1F76, 'V'),
+ (0x1F77, 'M', u'ί'),
+ (0x1F78, 'V'),
+ (0x1F79, 'M', u'ό'),
+ (0x1F7A, 'V'),
+ (0x1F7B, 'M', u'ύ'),
+ (0x1F7C, 'V'),
+ (0x1F7D, 'M', u'ώ'),
+ (0x1F7E, 'X'),
+ (0x1F80, 'M', u'ἀι'),
+ (0x1F81, 'M', u'ἁι'),
+ (0x1F82, 'M', u'ἂι'),
+ (0x1F83, 'M', u'ἃι'),
+ (0x1F84, 'M', u'ἄι'),
+ ]
+
+def _seg_20():
+ return [
+ (0x1F85, 'M', u'ἅι'),
+ (0x1F86, 'M', u'ἆι'),
+ (0x1F87, 'M', u'ἇι'),
+ (0x1F88, 'M', u'ἀι'),
+ (0x1F89, 'M', u'ἁι'),
+ (0x1F8A, 'M', u'ἂι'),
+ (0x1F8B, 'M', u'ἃι'),
+ (0x1F8C, 'M', u'ἄι'),
+ (0x1F8D, 'M', u'ἅι'),
+ (0x1F8E, 'M', u'ἆι'),
+ (0x1F8F, 'M', u'ἇι'),
+ (0x1F90, 'M', u'ἠι'),
+ (0x1F91, 'M', u'ἡι'),
+ (0x1F92, 'M', u'ἢι'),
+ (0x1F93, 'M', u'ἣι'),
+ (0x1F94, 'M', u'ἤι'),
+ (0x1F95, 'M', u'ἥι'),
+ (0x1F96, 'M', u'ἦι'),
+ (0x1F97, 'M', u'ἧι'),
+ (0x1F98, 'M', u'ἠι'),
+ (0x1F99, 'M', u'ἡι'),
+ (0x1F9A, 'M', u'ἢι'),
+ (0x1F9B, 'M', u'ἣι'),
+ (0x1F9C, 'M', u'ἤι'),
+ (0x1F9D, 'M', u'ἥι'),
+ (0x1F9E, 'M', u'ἦι'),
+ (0x1F9F, 'M', u'ἧι'),
+ (0x1FA0, 'M', u'ὠι'),
+ (0x1FA1, 'M', u'ὡι'),
+ (0x1FA2, 'M', u'ὢι'),
+ (0x1FA3, 'M', u'ὣι'),
+ (0x1FA4, 'M', u'ὤι'),
+ (0x1FA5, 'M', u'ὥι'),
+ (0x1FA6, 'M', u'ὦι'),
+ (0x1FA7, 'M', u'ὧι'),
+ (0x1FA8, 'M', u'ὠι'),
+ (0x1FA9, 'M', u'ὡι'),
+ (0x1FAA, 'M', u'ὢι'),
+ (0x1FAB, 'M', u'ὣι'),
+ (0x1FAC, 'M', u'ὤι'),
+ (0x1FAD, 'M', u'ὥι'),
+ (0x1FAE, 'M', u'ὦι'),
+ (0x1FAF, 'M', u'ὧι'),
+ (0x1FB0, 'V'),
+ (0x1FB2, 'M', u'ὰι'),
+ (0x1FB3, 'M', u'αι'),
+ (0x1FB4, 'M', u'άι'),
+ (0x1FB5, 'X'),
+ (0x1FB6, 'V'),
+ (0x1FB7, 'M', u'ᾶι'),
+ (0x1FB8, 'M', u'ᾰ'),
+ (0x1FB9, 'M', u'ᾱ'),
+ (0x1FBA, 'M', u'ὰ'),
+ (0x1FBB, 'M', u'ά'),
+ (0x1FBC, 'M', u'αι'),
+ (0x1FBD, '3', u' ̓'),
+ (0x1FBE, 'M', u'ι'),
+ (0x1FBF, '3', u' ̓'),
+ (0x1FC0, '3', u' ͂'),
+ (0x1FC1, '3', u' ̈͂'),
+ (0x1FC2, 'M', u'ὴι'),
+ (0x1FC3, 'M', u'ηι'),
+ (0x1FC4, 'M', u'ήι'),
+ (0x1FC5, 'X'),
+ (0x1FC6, 'V'),
+ (0x1FC7, 'M', u'ῆι'),
+ (0x1FC8, 'M', u'ὲ'),
+ (0x1FC9, 'M', u'έ'),
+ (0x1FCA, 'M', u'ὴ'),
+ (0x1FCB, 'M', u'ή'),
+ (0x1FCC, 'M', u'ηι'),
+ (0x1FCD, '3', u' ̓̀'),
+ (0x1FCE, '3', u' ̓́'),
+ (0x1FCF, '3', u' ̓͂'),
+ (0x1FD0, 'V'),
+ (0x1FD3, 'M', u'ΐ'),
+ (0x1FD4, 'X'),
+ (0x1FD6, 'V'),
+ (0x1FD8, 'M', u'ῐ'),
+ (0x1FD9, 'M', u'ῑ'),
+ (0x1FDA, 'M', u'ὶ'),
+ (0x1FDB, 'M', u'ί'),
+ (0x1FDC, 'X'),
+ (0x1FDD, '3', u' ̔̀'),
+ (0x1FDE, '3', u' ̔́'),
+ (0x1FDF, '3', u' ̔͂'),
+ (0x1FE0, 'V'),
+ (0x1FE3, 'M', u'ΰ'),
+ (0x1FE4, 'V'),
+ (0x1FE8, 'M', u'ῠ'),
+ (0x1FE9, 'M', u'ῡ'),
+ (0x1FEA, 'M', u'ὺ'),
+ (0x1FEB, 'M', u'ύ'),
+ (0x1FEC, 'M', u'ῥ'),
+ (0x1FED, '3', u' ̈̀'),
+ (0x1FEE, '3', u' ̈́'),
+ (0x1FEF, '3', u'`'),
+ (0x1FF0, 'X'),
+ (0x1FF2, 'M', u'ὼι'),
+ (0x1FF3, 'M', u'ωι'),
+ ]
+
+def _seg_21():
+ return [
+ (0x1FF4, 'M', u'ώι'),
+ (0x1FF5, 'X'),
+ (0x1FF6, 'V'),
+ (0x1FF7, 'M', u'ῶι'),
+ (0x1FF8, 'M', u'ὸ'),
+ (0x1FF9, 'M', u'ό'),
+ (0x1FFA, 'M', u'ὼ'),
+ (0x1FFB, 'M', u'ώ'),
+ (0x1FFC, 'M', u'ωι'),
+ (0x1FFD, '3', u' ́'),
+ (0x1FFE, '3', u' ̔'),
+ (0x1FFF, 'X'),
+ (0x2000, '3', u' '),
+ (0x200B, 'I'),
+ (0x200C, 'D', u''),
+ (0x200E, 'X'),
+ (0x2010, 'V'),
+ (0x2011, 'M', u'‐'),
+ (0x2012, 'V'),
+ (0x2017, '3', u' ̳'),
+ (0x2018, 'V'),
+ (0x2024, 'X'),
+ (0x2027, 'V'),
+ (0x2028, 'X'),
+ (0x202F, '3', u' '),
+ (0x2030, 'V'),
+ (0x2033, 'M', u'′′'),
+ (0x2034, 'M', u'′′′'),
+ (0x2035, 'V'),
+ (0x2036, 'M', u'‵‵'),
+ (0x2037, 'M', u'‵‵‵'),
+ (0x2038, 'V'),
+ (0x203C, '3', u'!!'),
+ (0x203D, 'V'),
+ (0x203E, '3', u' ̅'),
+ (0x203F, 'V'),
+ (0x2047, '3', u'??'),
+ (0x2048, '3', u'?!'),
+ (0x2049, '3', u'!?'),
+ (0x204A, 'V'),
+ (0x2057, 'M', u'′′′′'),
+ (0x2058, 'V'),
+ (0x205F, '3', u' '),
+ (0x2060, 'I'),
+ (0x2061, 'X'),
+ (0x2064, 'I'),
+ (0x2065, 'X'),
+ (0x2070, 'M', u'0'),
+ (0x2071, 'M', u'i'),
+ (0x2072, 'X'),
+ (0x2074, 'M', u'4'),
+ (0x2075, 'M', u'5'),
+ (0x2076, 'M', u'6'),
+ (0x2077, 'M', u'7'),
+ (0x2078, 'M', u'8'),
+ (0x2079, 'M', u'9'),
+ (0x207A, '3', u'+'),
+ (0x207B, 'M', u'−'),
+ (0x207C, '3', u'='),
+ (0x207D, '3', u'('),
+ (0x207E, '3', u')'),
+ (0x207F, 'M', u'n'),
+ (0x2080, 'M', u'0'),
+ (0x2081, 'M', u'1'),
+ (0x2082, 'M', u'2'),
+ (0x2083, 'M', u'3'),
+ (0x2084, 'M', u'4'),
+ (0x2085, 'M', u'5'),
+ (0x2086, 'M', u'6'),
+ (0x2087, 'M', u'7'),
+ (0x2088, 'M', u'8'),
+ (0x2089, 'M', u'9'),
+ (0x208A, '3', u'+'),
+ (0x208B, 'M', u'−'),
+ (0x208C, '3', u'='),
+ (0x208D, '3', u'('),
+ (0x208E, '3', u')'),
+ (0x208F, 'X'),
+ (0x2090, 'M', u'a'),
+ (0x2091, 'M', u'e'),
+ (0x2092, 'M', u'o'),
+ (0x2093, 'M', u'x'),
+ (0x2094, 'M', u'ə'),
+ (0x2095, 'M', u'h'),
+ (0x2096, 'M', u'k'),
+ (0x2097, 'M', u'l'),
+ (0x2098, 'M', u'm'),
+ (0x2099, 'M', u'n'),
+ (0x209A, 'M', u'p'),
+ (0x209B, 'M', u's'),
+ (0x209C, 'M', u't'),
+ (0x209D, 'X'),
+ (0x20A0, 'V'),
+ (0x20A8, 'M', u'rs'),
+ (0x20A9, 'V'),
+ (0x20C0, 'X'),
+ (0x20D0, 'V'),
+ (0x20F1, 'X'),
+ (0x2100, '3', u'a/c'),
+ (0x2101, '3', u'a/s'),
+ ]
+
+def _seg_22():
+ return [
+ (0x2102, 'M', u'c'),
+ (0x2103, 'M', u'°c'),
+ (0x2104, 'V'),
+ (0x2105, '3', u'c/o'),
+ (0x2106, '3', u'c/u'),
+ (0x2107, 'M', u'ɛ'),
+ (0x2108, 'V'),
+ (0x2109, 'M', u'°f'),
+ (0x210A, 'M', u'g'),
+ (0x210B, 'M', u'h'),
+ (0x210F, 'M', u'ħ'),
+ (0x2110, 'M', u'i'),
+ (0x2112, 'M', u'l'),
+ (0x2114, 'V'),
+ (0x2115, 'M', u'n'),
+ (0x2116, 'M', u'no'),
+ (0x2117, 'V'),
+ (0x2119, 'M', u'p'),
+ (0x211A, 'M', u'q'),
+ (0x211B, 'M', u'r'),
+ (0x211E, 'V'),
+ (0x2120, 'M', u'sm'),
+ (0x2121, 'M', u'tel'),
+ (0x2122, 'M', u'tm'),
+ (0x2123, 'V'),
+ (0x2124, 'M', u'z'),
+ (0x2125, 'V'),
+ (0x2126, 'M', u'ω'),
+ (0x2127, 'V'),
+ (0x2128, 'M', u'z'),
+ (0x2129, 'V'),
+ (0x212A, 'M', u'k'),
+ (0x212B, 'M', u'å'),
+ (0x212C, 'M', u'b'),
+ (0x212D, 'M', u'c'),
+ (0x212E, 'V'),
+ (0x212F, 'M', u'e'),
+ (0x2131, 'M', u'f'),
+ (0x2132, 'X'),
+ (0x2133, 'M', u'm'),
+ (0x2134, 'M', u'o'),
+ (0x2135, 'M', u'א'),
+ (0x2136, 'M', u'ב'),
+ (0x2137, 'M', u'ג'),
+ (0x2138, 'M', u'ד'),
+ (0x2139, 'M', u'i'),
+ (0x213A, 'V'),
+ (0x213B, 'M', u'fax'),
+ (0x213C, 'M', u'π'),
+ (0x213D, 'M', u'γ'),
+ (0x213F, 'M', u'π'),
+ (0x2140, 'M', u'∑'),
+ (0x2141, 'V'),
+ (0x2145, 'M', u'd'),
+ (0x2147, 'M', u'e'),
+ (0x2148, 'M', u'i'),
+ (0x2149, 'M', u'j'),
+ (0x214A, 'V'),
+ (0x2150, 'M', u'1⁄7'),
+ (0x2151, 'M', u'1⁄9'),
+ (0x2152, 'M', u'1⁄10'),
+ (0x2153, 'M', u'1⁄3'),
+ (0x2154, 'M', u'2⁄3'),
+ (0x2155, 'M', u'1⁄5'),
+ (0x2156, 'M', u'2⁄5'),
+ (0x2157, 'M', u'3⁄5'),
+ (0x2158, 'M', u'4⁄5'),
+ (0x2159, 'M', u'1⁄6'),
+ (0x215A, 'M', u'5⁄6'),
+ (0x215B, 'M', u'1⁄8'),
+ (0x215C, 'M', u'3⁄8'),
+ (0x215D, 'M', u'5⁄8'),
+ (0x215E, 'M', u'7⁄8'),
+ (0x215F, 'M', u'1⁄'),
+ (0x2160, 'M', u'i'),
+ (0x2161, 'M', u'ii'),
+ (0x2162, 'M', u'iii'),
+ (0x2163, 'M', u'iv'),
+ (0x2164, 'M', u'v'),
+ (0x2165, 'M', u'vi'),
+ (0x2166, 'M', u'vii'),
+ (0x2167, 'M', u'viii'),
+ (0x2168, 'M', u'ix'),
+ (0x2169, 'M', u'x'),
+ (0x216A, 'M', u'xi'),
+ (0x216B, 'M', u'xii'),
+ (0x216C, 'M', u'l'),
+ (0x216D, 'M', u'c'),
+ (0x216E, 'M', u'd'),
+ (0x216F, 'M', u'm'),
+ (0x2170, 'M', u'i'),
+ (0x2171, 'M', u'ii'),
+ (0x2172, 'M', u'iii'),
+ (0x2173, 'M', u'iv'),
+ (0x2174, 'M', u'v'),
+ (0x2175, 'M', u'vi'),
+ (0x2176, 'M', u'vii'),
+ (0x2177, 'M', u'viii'),
+ (0x2178, 'M', u'ix'),
+ (0x2179, 'M', u'x'),
+ ]
+
+def _seg_23():
+ return [
+ (0x217A, 'M', u'xi'),
+ (0x217B, 'M', u'xii'),
+ (0x217C, 'M', u'l'),
+ (0x217D, 'M', u'c'),
+ (0x217E, 'M', u'd'),
+ (0x217F, 'M', u'm'),
+ (0x2180, 'V'),
+ (0x2183, 'X'),
+ (0x2184, 'V'),
+ (0x2189, 'M', u'0⁄3'),
+ (0x218A, 'V'),
+ (0x218C, 'X'),
+ (0x2190, 'V'),
+ (0x222C, 'M', u'∫∫'),
+ (0x222D, 'M', u'∫∫∫'),
+ (0x222E, 'V'),
+ (0x222F, 'M', u'∮∮'),
+ (0x2230, 'M', u'∮∮∮'),
+ (0x2231, 'V'),
+ (0x2260, '3'),
+ (0x2261, 'V'),
+ (0x226E, '3'),
+ (0x2270, 'V'),
+ (0x2329, 'M', u'〈'),
+ (0x232A, 'M', u'〉'),
+ (0x232B, 'V'),
+ (0x2427, 'X'),
+ (0x2440, 'V'),
+ (0x244B, 'X'),
+ (0x2460, 'M', u'1'),
+ (0x2461, 'M', u'2'),
+ (0x2462, 'M', u'3'),
+ (0x2463, 'M', u'4'),
+ (0x2464, 'M', u'5'),
+ (0x2465, 'M', u'6'),
+ (0x2466, 'M', u'7'),
+ (0x2467, 'M', u'8'),
+ (0x2468, 'M', u'9'),
+ (0x2469, 'M', u'10'),
+ (0x246A, 'M', u'11'),
+ (0x246B, 'M', u'12'),
+ (0x246C, 'M', u'13'),
+ (0x246D, 'M', u'14'),
+ (0x246E, 'M', u'15'),
+ (0x246F, 'M', u'16'),
+ (0x2470, 'M', u'17'),
+ (0x2471, 'M', u'18'),
+ (0x2472, 'M', u'19'),
+ (0x2473, 'M', u'20'),
+ (0x2474, '3', u'(1)'),
+ (0x2475, '3', u'(2)'),
+ (0x2476, '3', u'(3)'),
+ (0x2477, '3', u'(4)'),
+ (0x2478, '3', u'(5)'),
+ (0x2479, '3', u'(6)'),
+ (0x247A, '3', u'(7)'),
+ (0x247B, '3', u'(8)'),
+ (0x247C, '3', u'(9)'),
+ (0x247D, '3', u'(10)'),
+ (0x247E, '3', u'(11)'),
+ (0x247F, '3', u'(12)'),
+ (0x2480, '3', u'(13)'),
+ (0x2481, '3', u'(14)'),
+ (0x2482, '3', u'(15)'),
+ (0x2483, '3', u'(16)'),
+ (0x2484, '3', u'(17)'),
+ (0x2485, '3', u'(18)'),
+ (0x2486, '3', u'(19)'),
+ (0x2487, '3', u'(20)'),
+ (0x2488, 'X'),
+ (0x249C, '3', u'(a)'),
+ (0x249D, '3', u'(b)'),
+ (0x249E, '3', u'(c)'),
+ (0x249F, '3', u'(d)'),
+ (0x24A0, '3', u'(e)'),
+ (0x24A1, '3', u'(f)'),
+ (0x24A2, '3', u'(g)'),
+ (0x24A3, '3', u'(h)'),
+ (0x24A4, '3', u'(i)'),
+ (0x24A5, '3', u'(j)'),
+ (0x24A6, '3', u'(k)'),
+ (0x24A7, '3', u'(l)'),
+ (0x24A8, '3', u'(m)'),
+ (0x24A9, '3', u'(n)'),
+ (0x24AA, '3', u'(o)'),
+ (0x24AB, '3', u'(p)'),
+ (0x24AC, '3', u'(q)'),
+ (0x24AD, '3', u'(r)'),
+ (0x24AE, '3', u'(s)'),
+ (0x24AF, '3', u'(t)'),
+ (0x24B0, '3', u'(u)'),
+ (0x24B1, '3', u'(v)'),
+ (0x24B2, '3', u'(w)'),
+ (0x24B3, '3', u'(x)'),
+ (0x24B4, '3', u'(y)'),
+ (0x24B5, '3', u'(z)'),
+ (0x24B6, 'M', u'a'),
+ (0x24B7, 'M', u'b'),
+ (0x24B8, 'M', u'c'),
+ (0x24B9, 'M', u'd'),
+ ]
+
+def _seg_24():
+ return [
+ (0x24BA, 'M', u'e'),
+ (0x24BB, 'M', u'f'),
+ (0x24BC, 'M', u'g'),
+ (0x24BD, 'M', u'h'),
+ (0x24BE, 'M', u'i'),
+ (0x24BF, 'M', u'j'),
+ (0x24C0, 'M', u'k'),
+ (0x24C1, 'M', u'l'),
+ (0x24C2, 'M', u'm'),
+ (0x24C3, 'M', u'n'),
+ (0x24C4, 'M', u'o'),
+ (0x24C5, 'M', u'p'),
+ (0x24C6, 'M', u'q'),
+ (0x24C7, 'M', u'r'),
+ (0x24C8, 'M', u's'),
+ (0x24C9, 'M', u't'),
+ (0x24CA, 'M', u'u'),
+ (0x24CB, 'M', u'v'),
+ (0x24CC, 'M', u'w'),
+ (0x24CD, 'M', u'x'),
+ (0x24CE, 'M', u'y'),
+ (0x24CF, 'M', u'z'),
+ (0x24D0, 'M', u'a'),
+ (0x24D1, 'M', u'b'),
+ (0x24D2, 'M', u'c'),
+ (0x24D3, 'M', u'd'),
+ (0x24D4, 'M', u'e'),
+ (0x24D5, 'M', u'f'),
+ (0x24D6, 'M', u'g'),
+ (0x24D7, 'M', u'h'),
+ (0x24D8, 'M', u'i'),
+ (0x24D9, 'M', u'j'),
+ (0x24DA, 'M', u'k'),
+ (0x24DB, 'M', u'l'),
+ (0x24DC, 'M', u'm'),
+ (0x24DD, 'M', u'n'),
+ (0x24DE, 'M', u'o'),
+ (0x24DF, 'M', u'p'),
+ (0x24E0, 'M', u'q'),
+ (0x24E1, 'M', u'r'),
+ (0x24E2, 'M', u's'),
+ (0x24E3, 'M', u't'),
+ (0x24E4, 'M', u'u'),
+ (0x24E5, 'M', u'v'),
+ (0x24E6, 'M', u'w'),
+ (0x24E7, 'M', u'x'),
+ (0x24E8, 'M', u'y'),
+ (0x24E9, 'M', u'z'),
+ (0x24EA, 'M', u'0'),
+ (0x24EB, 'V'),
+ (0x2A0C, 'M', u'∫∫∫∫'),
+ (0x2A0D, 'V'),
+ (0x2A74, '3', u'::='),
+ (0x2A75, '3', u'=='),
+ (0x2A76, '3', u'==='),
+ (0x2A77, 'V'),
+ (0x2ADC, 'M', u'⫝̸'),
+ (0x2ADD, 'V'),
+ (0x2B74, 'X'),
+ (0x2B76, 'V'),
+ (0x2B96, 'X'),
+ (0x2B97, 'V'),
+ (0x2C00, 'M', u'ⰰ'),
+ (0x2C01, 'M', u'ⰱ'),
+ (0x2C02, 'M', u'ⰲ'),
+ (0x2C03, 'M', u'ⰳ'),
+ (0x2C04, 'M', u'ⰴ'),
+ (0x2C05, 'M', u'ⰵ'),
+ (0x2C06, 'M', u'ⰶ'),
+ (0x2C07, 'M', u'ⰷ'),
+ (0x2C08, 'M', u'ⰸ'),
+ (0x2C09, 'M', u'ⰹ'),
+ (0x2C0A, 'M', u'ⰺ'),
+ (0x2C0B, 'M', u'ⰻ'),
+ (0x2C0C, 'M', u'ⰼ'),
+ (0x2C0D, 'M', u'ⰽ'),
+ (0x2C0E, 'M', u'ⰾ'),
+ (0x2C0F, 'M', u'ⰿ'),
+ (0x2C10, 'M', u'ⱀ'),
+ (0x2C11, 'M', u'ⱁ'),
+ (0x2C12, 'M', u'ⱂ'),
+ (0x2C13, 'M', u'ⱃ'),
+ (0x2C14, 'M', u'ⱄ'),
+ (0x2C15, 'M', u'ⱅ'),
+ (0x2C16, 'M', u'ⱆ'),
+ (0x2C17, 'M', u'ⱇ'),
+ (0x2C18, 'M', u'ⱈ'),
+ (0x2C19, 'M', u'ⱉ'),
+ (0x2C1A, 'M', u'ⱊ'),
+ (0x2C1B, 'M', u'ⱋ'),
+ (0x2C1C, 'M', u'ⱌ'),
+ (0x2C1D, 'M', u'ⱍ'),
+ (0x2C1E, 'M', u'ⱎ'),
+ (0x2C1F, 'M', u'ⱏ'),
+ (0x2C20, 'M', u'ⱐ'),
+ (0x2C21, 'M', u'ⱑ'),
+ (0x2C22, 'M', u'ⱒ'),
+ (0x2C23, 'M', u'ⱓ'),
+ (0x2C24, 'M', u'ⱔ'),
+ (0x2C25, 'M', u'ⱕ'),
+ ]
+
+def _seg_25():
+ return [
+ (0x2C26, 'M', u'ⱖ'),
+ (0x2C27, 'M', u'ⱗ'),
+ (0x2C28, 'M', u'ⱘ'),
+ (0x2C29, 'M', u'ⱙ'),
+ (0x2C2A, 'M', u'ⱚ'),
+ (0x2C2B, 'M', u'ⱛ'),
+ (0x2C2C, 'M', u'ⱜ'),
+ (0x2C2D, 'M', u'ⱝ'),
+ (0x2C2E, 'M', u'ⱞ'),
+ (0x2C2F, 'X'),
+ (0x2C30, 'V'),
+ (0x2C5F, 'X'),
+ (0x2C60, 'M', u'ⱡ'),
+ (0x2C61, 'V'),
+ (0x2C62, 'M', u'ɫ'),
+ (0x2C63, 'M', u'ᵽ'),
+ (0x2C64, 'M', u'ɽ'),
+ (0x2C65, 'V'),
+ (0x2C67, 'M', u'ⱨ'),
+ (0x2C68, 'V'),
+ (0x2C69, 'M', u'ⱪ'),
+ (0x2C6A, 'V'),
+ (0x2C6B, 'M', u'ⱬ'),
+ (0x2C6C, 'V'),
+ (0x2C6D, 'M', u'ɑ'),
+ (0x2C6E, 'M', u'ɱ'),
+ (0x2C6F, 'M', u'ɐ'),
+ (0x2C70, 'M', u'ɒ'),
+ (0x2C71, 'V'),
+ (0x2C72, 'M', u'ⱳ'),
+ (0x2C73, 'V'),
+ (0x2C75, 'M', u'ⱶ'),
+ (0x2C76, 'V'),
+ (0x2C7C, 'M', u'j'),
+ (0x2C7D, 'M', u'v'),
+ (0x2C7E, 'M', u'ȿ'),
+ (0x2C7F, 'M', u'ɀ'),
+ (0x2C80, 'M', u'ⲁ'),
+ (0x2C81, 'V'),
+ (0x2C82, 'M', u'ⲃ'),
+ (0x2C83, 'V'),
+ (0x2C84, 'M', u'ⲅ'),
+ (0x2C85, 'V'),
+ (0x2C86, 'M', u'ⲇ'),
+ (0x2C87, 'V'),
+ (0x2C88, 'M', u'ⲉ'),
+ (0x2C89, 'V'),
+ (0x2C8A, 'M', u'ⲋ'),
+ (0x2C8B, 'V'),
+ (0x2C8C, 'M', u'ⲍ'),
+ (0x2C8D, 'V'),
+ (0x2C8E, 'M', u'ⲏ'),
+ (0x2C8F, 'V'),
+ (0x2C90, 'M', u'ⲑ'),
+ (0x2C91, 'V'),
+ (0x2C92, 'M', u'ⲓ'),
+ (0x2C93, 'V'),
+ (0x2C94, 'M', u'ⲕ'),
+ (0x2C95, 'V'),
+ (0x2C96, 'M', u'ⲗ'),
+ (0x2C97, 'V'),
+ (0x2C98, 'M', u'ⲙ'),
+ (0x2C99, 'V'),
+ (0x2C9A, 'M', u'ⲛ'),
+ (0x2C9B, 'V'),
+ (0x2C9C, 'M', u'ⲝ'),
+ (0x2C9D, 'V'),
+ (0x2C9E, 'M', u'ⲟ'),
+ (0x2C9F, 'V'),
+ (0x2CA0, 'M', u'ⲡ'),
+ (0x2CA1, 'V'),
+ (0x2CA2, 'M', u'ⲣ'),
+ (0x2CA3, 'V'),
+ (0x2CA4, 'M', u'ⲥ'),
+ (0x2CA5, 'V'),
+ (0x2CA6, 'M', u'ⲧ'),
+ (0x2CA7, 'V'),
+ (0x2CA8, 'M', u'ⲩ'),
+ (0x2CA9, 'V'),
+ (0x2CAA, 'M', u'ⲫ'),
+ (0x2CAB, 'V'),
+ (0x2CAC, 'M', u'ⲭ'),
+ (0x2CAD, 'V'),
+ (0x2CAE, 'M', u'ⲯ'),
+ (0x2CAF, 'V'),
+ (0x2CB0, 'M', u'ⲱ'),
+ (0x2CB1, 'V'),
+ (0x2CB2, 'M', u'ⲳ'),
+ (0x2CB3, 'V'),
+ (0x2CB4, 'M', u'ⲵ'),
+ (0x2CB5, 'V'),
+ (0x2CB6, 'M', u'ⲷ'),
+ (0x2CB7, 'V'),
+ (0x2CB8, 'M', u'ⲹ'),
+ (0x2CB9, 'V'),
+ (0x2CBA, 'M', u'ⲻ'),
+ (0x2CBB, 'V'),
+ (0x2CBC, 'M', u'ⲽ'),
+ (0x2CBD, 'V'),
+ (0x2CBE, 'M', u'ⲿ'),
+ ]
+
+def _seg_26():
+ return [
+ (0x2CBF, 'V'),
+ (0x2CC0, 'M', u'ⳁ'),
+ (0x2CC1, 'V'),
+ (0x2CC2, 'M', u'ⳃ'),
+ (0x2CC3, 'V'),
+ (0x2CC4, 'M', u'ⳅ'),
+ (0x2CC5, 'V'),
+ (0x2CC6, 'M', u'ⳇ'),
+ (0x2CC7, 'V'),
+ (0x2CC8, 'M', u'ⳉ'),
+ (0x2CC9, 'V'),
+ (0x2CCA, 'M', u'ⳋ'),
+ (0x2CCB, 'V'),
+ (0x2CCC, 'M', u'ⳍ'),
+ (0x2CCD, 'V'),
+ (0x2CCE, 'M', u'ⳏ'),
+ (0x2CCF, 'V'),
+ (0x2CD0, 'M', u'ⳑ'),
+ (0x2CD1, 'V'),
+ (0x2CD2, 'M', u'ⳓ'),
+ (0x2CD3, 'V'),
+ (0x2CD4, 'M', u'ⳕ'),
+ (0x2CD5, 'V'),
+ (0x2CD6, 'M', u'ⳗ'),
+ (0x2CD7, 'V'),
+ (0x2CD8, 'M', u'ⳙ'),
+ (0x2CD9, 'V'),
+ (0x2CDA, 'M', u'ⳛ'),
+ (0x2CDB, 'V'),
+ (0x2CDC, 'M', u'ⳝ'),
+ (0x2CDD, 'V'),
+ (0x2CDE, 'M', u'ⳟ'),
+ (0x2CDF, 'V'),
+ (0x2CE0, 'M', u'ⳡ'),
+ (0x2CE1, 'V'),
+ (0x2CE2, 'M', u'ⳣ'),
+ (0x2CE3, 'V'),
+ (0x2CEB, 'M', u'ⳬ'),
+ (0x2CEC, 'V'),
+ (0x2CED, 'M', u'ⳮ'),
+ (0x2CEE, 'V'),
+ (0x2CF2, 'M', u'ⳳ'),
+ (0x2CF3, 'V'),
+ (0x2CF4, 'X'),
+ (0x2CF9, 'V'),
+ (0x2D26, 'X'),
+ (0x2D27, 'V'),
+ (0x2D28, 'X'),
+ (0x2D2D, 'V'),
+ (0x2D2E, 'X'),
+ (0x2D30, 'V'),
+ (0x2D68, 'X'),
+ (0x2D6F, 'M', u'ⵡ'),
+ (0x2D70, 'V'),
+ (0x2D71, 'X'),
+ (0x2D7F, 'V'),
+ (0x2D97, 'X'),
+ (0x2DA0, 'V'),
+ (0x2DA7, 'X'),
+ (0x2DA8, 'V'),
+ (0x2DAF, 'X'),
+ (0x2DB0, 'V'),
+ (0x2DB7, 'X'),
+ (0x2DB8, 'V'),
+ (0x2DBF, 'X'),
+ (0x2DC0, 'V'),
+ (0x2DC7, 'X'),
+ (0x2DC8, 'V'),
+ (0x2DCF, 'X'),
+ (0x2DD0, 'V'),
+ (0x2DD7, 'X'),
+ (0x2DD8, 'V'),
+ (0x2DDF, 'X'),
+ (0x2DE0, 'V'),
+ (0x2E53, 'X'),
+ (0x2E80, 'V'),
+ (0x2E9A, 'X'),
+ (0x2E9B, 'V'),
+ (0x2E9F, 'M', u'母'),
+ (0x2EA0, 'V'),
+ (0x2EF3, 'M', u'龟'),
+ (0x2EF4, 'X'),
+ (0x2F00, 'M', u'一'),
+ (0x2F01, 'M', u'丨'),
+ (0x2F02, 'M', u'丶'),
+ (0x2F03, 'M', u'丿'),
+ (0x2F04, 'M', u'乙'),
+ (0x2F05, 'M', u'亅'),
+ (0x2F06, 'M', u'二'),
+ (0x2F07, 'M', u'亠'),
+ (0x2F08, 'M', u'人'),
+ (0x2F09, 'M', u'儿'),
+ (0x2F0A, 'M', u'入'),
+ (0x2F0B, 'M', u'八'),
+ (0x2F0C, 'M', u'冂'),
+ (0x2F0D, 'M', u'冖'),
+ (0x2F0E, 'M', u'冫'),
+ (0x2F0F, 'M', u'几'),
+ (0x2F10, 'M', u'凵'),
+ (0x2F11, 'M', u'刀'),
+ ]
+
+def _seg_27():
+ return [
+ (0x2F12, 'M', u'力'),
+ (0x2F13, 'M', u'勹'),
+ (0x2F14, 'M', u'匕'),
+ (0x2F15, 'M', u'匚'),
+ (0x2F16, 'M', u'匸'),
+ (0x2F17, 'M', u'十'),
+ (0x2F18, 'M', u'卜'),
+ (0x2F19, 'M', u'卩'),
+ (0x2F1A, 'M', u'厂'),
+ (0x2F1B, 'M', u'厶'),
+ (0x2F1C, 'M', u'又'),
+ (0x2F1D, 'M', u'口'),
+ (0x2F1E, 'M', u'囗'),
+ (0x2F1F, 'M', u'土'),
+ (0x2F20, 'M', u'士'),
+ (0x2F21, 'M', u'夂'),
+ (0x2F22, 'M', u'夊'),
+ (0x2F23, 'M', u'夕'),
+ (0x2F24, 'M', u'大'),
+ (0x2F25, 'M', u'女'),
+ (0x2F26, 'M', u'子'),
+ (0x2F27, 'M', u'宀'),
+ (0x2F28, 'M', u'寸'),
+ (0x2F29, 'M', u'小'),
+ (0x2F2A, 'M', u'尢'),
+ (0x2F2B, 'M', u'尸'),
+ (0x2F2C, 'M', u'屮'),
+ (0x2F2D, 'M', u'山'),
+ (0x2F2E, 'M', u'巛'),
+ (0x2F2F, 'M', u'工'),
+ (0x2F30, 'M', u'己'),
+ (0x2F31, 'M', u'巾'),
+ (0x2F32, 'M', u'干'),
+ (0x2F33, 'M', u'幺'),
+ (0x2F34, 'M', u'广'),
+ (0x2F35, 'M', u'廴'),
+ (0x2F36, 'M', u'廾'),
+ (0x2F37, 'M', u'弋'),
+ (0x2F38, 'M', u'弓'),
+ (0x2F39, 'M', u'彐'),
+ (0x2F3A, 'M', u'彡'),
+ (0x2F3B, 'M', u'彳'),
+ (0x2F3C, 'M', u'心'),
+ (0x2F3D, 'M', u'戈'),
+ (0x2F3E, 'M', u'戶'),
+ (0x2F3F, 'M', u'手'),
+ (0x2F40, 'M', u'支'),
+ (0x2F41, 'M', u'攴'),
+ (0x2F42, 'M', u'文'),
+ (0x2F43, 'M', u'斗'),
+ (0x2F44, 'M', u'斤'),
+ (0x2F45, 'M', u'方'),
+ (0x2F46, 'M', u'无'),
+ (0x2F47, 'M', u'日'),
+ (0x2F48, 'M', u'曰'),
+ (0x2F49, 'M', u'月'),
+ (0x2F4A, 'M', u'木'),
+ (0x2F4B, 'M', u'欠'),
+ (0x2F4C, 'M', u'止'),
+ (0x2F4D, 'M', u'歹'),
+ (0x2F4E, 'M', u'殳'),
+ (0x2F4F, 'M', u'毋'),
+ (0x2F50, 'M', u'比'),
+ (0x2F51, 'M', u'毛'),
+ (0x2F52, 'M', u'氏'),
+ (0x2F53, 'M', u'气'),
+ (0x2F54, 'M', u'水'),
+ (0x2F55, 'M', u'火'),
+ (0x2F56, 'M', u'爪'),
+ (0x2F57, 'M', u'父'),
+ (0x2F58, 'M', u'爻'),
+ (0x2F59, 'M', u'爿'),
+ (0x2F5A, 'M', u'片'),
+ (0x2F5B, 'M', u'牙'),
+ (0x2F5C, 'M', u'牛'),
+ (0x2F5D, 'M', u'犬'),
+ (0x2F5E, 'M', u'玄'),
+ (0x2F5F, 'M', u'玉'),
+ (0x2F60, 'M', u'瓜'),
+ (0x2F61, 'M', u'瓦'),
+ (0x2F62, 'M', u'甘'),
+ (0x2F63, 'M', u'生'),
+ (0x2F64, 'M', u'用'),
+ (0x2F65, 'M', u'田'),
+ (0x2F66, 'M', u'疋'),
+ (0x2F67, 'M', u'疒'),
+ (0x2F68, 'M', u'癶'),
+ (0x2F69, 'M', u'白'),
+ (0x2F6A, 'M', u'皮'),
+ (0x2F6B, 'M', u'皿'),
+ (0x2F6C, 'M', u'目'),
+ (0x2F6D, 'M', u'矛'),
+ (0x2F6E, 'M', u'矢'),
+ (0x2F6F, 'M', u'石'),
+ (0x2F70, 'M', u'示'),
+ (0x2F71, 'M', u'禸'),
+ (0x2F72, 'M', u'禾'),
+ (0x2F73, 'M', u'穴'),
+ (0x2F74, 'M', u'立'),
+ (0x2F75, 'M', u'竹'),
+ ]
+
+def _seg_28():
+ return [
+ (0x2F76, 'M', u'米'),
+ (0x2F77, 'M', u'糸'),
+ (0x2F78, 'M', u'缶'),
+ (0x2F79, 'M', u'网'),
+ (0x2F7A, 'M', u'羊'),
+ (0x2F7B, 'M', u'羽'),
+ (0x2F7C, 'M', u'老'),
+ (0x2F7D, 'M', u'而'),
+ (0x2F7E, 'M', u'耒'),
+ (0x2F7F, 'M', u'耳'),
+ (0x2F80, 'M', u'聿'),
+ (0x2F81, 'M', u'肉'),
+ (0x2F82, 'M', u'臣'),
+ (0x2F83, 'M', u'自'),
+ (0x2F84, 'M', u'至'),
+ (0x2F85, 'M', u'臼'),
+ (0x2F86, 'M', u'舌'),
+ (0x2F87, 'M', u'舛'),
+ (0x2F88, 'M', u'舟'),
+ (0x2F89, 'M', u'艮'),
+ (0x2F8A, 'M', u'色'),
+ (0x2F8B, 'M', u'艸'),
+ (0x2F8C, 'M', u'虍'),
+ (0x2F8D, 'M', u'虫'),
+ (0x2F8E, 'M', u'血'),
+ (0x2F8F, 'M', u'行'),
+ (0x2F90, 'M', u'衣'),
+ (0x2F91, 'M', u'襾'),
+ (0x2F92, 'M', u'見'),
+ (0x2F93, 'M', u'角'),
+ (0x2F94, 'M', u'言'),
+ (0x2F95, 'M', u'谷'),
+ (0x2F96, 'M', u'豆'),
+ (0x2F97, 'M', u'豕'),
+ (0x2F98, 'M', u'豸'),
+ (0x2F99, 'M', u'貝'),
+ (0x2F9A, 'M', u'赤'),
+ (0x2F9B, 'M', u'走'),
+ (0x2F9C, 'M', u'足'),
+ (0x2F9D, 'M', u'身'),
+ (0x2F9E, 'M', u'車'),
+ (0x2F9F, 'M', u'辛'),
+ (0x2FA0, 'M', u'辰'),
+ (0x2FA1, 'M', u'辵'),
+ (0x2FA2, 'M', u'邑'),
+ (0x2FA3, 'M', u'酉'),
+ (0x2FA4, 'M', u'釆'),
+ (0x2FA5, 'M', u'里'),
+ (0x2FA6, 'M', u'金'),
+ (0x2FA7, 'M', u'長'),
+ (0x2FA8, 'M', u'門'),
+ (0x2FA9, 'M', u'阜'),
+ (0x2FAA, 'M', u'隶'),
+ (0x2FAB, 'M', u'隹'),
+ (0x2FAC, 'M', u'雨'),
+ (0x2FAD, 'M', u'靑'),
+ (0x2FAE, 'M', u'非'),
+ (0x2FAF, 'M', u'面'),
+ (0x2FB0, 'M', u'革'),
+ (0x2FB1, 'M', u'韋'),
+ (0x2FB2, 'M', u'韭'),
+ (0x2FB3, 'M', u'音'),
+ (0x2FB4, 'M', u'頁'),
+ (0x2FB5, 'M', u'風'),
+ (0x2FB6, 'M', u'飛'),
+ (0x2FB7, 'M', u'食'),
+ (0x2FB8, 'M', u'首'),
+ (0x2FB9, 'M', u'香'),
+ (0x2FBA, 'M', u'馬'),
+ (0x2FBB, 'M', u'骨'),
+ (0x2FBC, 'M', u'高'),
+ (0x2FBD, 'M', u'髟'),
+ (0x2FBE, 'M', u'鬥'),
+ (0x2FBF, 'M', u'鬯'),
+ (0x2FC0, 'M', u'鬲'),
+ (0x2FC1, 'M', u'鬼'),
+ (0x2FC2, 'M', u'魚'),
+ (0x2FC3, 'M', u'鳥'),
+ (0x2FC4, 'M', u'鹵'),
+ (0x2FC5, 'M', u'鹿'),
+ (0x2FC6, 'M', u'麥'),
+ (0x2FC7, 'M', u'麻'),
+ (0x2FC8, 'M', u'黃'),
+ (0x2FC9, 'M', u'黍'),
+ (0x2FCA, 'M', u'黑'),
+ (0x2FCB, 'M', u'黹'),
+ (0x2FCC, 'M', u'黽'),
+ (0x2FCD, 'M', u'鼎'),
+ (0x2FCE, 'M', u'鼓'),
+ (0x2FCF, 'M', u'鼠'),
+ (0x2FD0, 'M', u'鼻'),
+ (0x2FD1, 'M', u'齊'),
+ (0x2FD2, 'M', u'齒'),
+ (0x2FD3, 'M', u'龍'),
+ (0x2FD4, 'M', u'龜'),
+ (0x2FD5, 'M', u'龠'),
+ (0x2FD6, 'X'),
+ (0x3000, '3', u' '),
+ (0x3001, 'V'),
+ (0x3002, 'M', u'.'),
+ ]
+
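+# A minimal lookup sketch (an illustration under the assumption noted above,
+# not part of the generated data): a caller would binary-search the sorted
+# concatenation of all _seg_*() lists for the entry governing a character.
+# Every status letter sorts before 'Z', so probing with (codepoint, 'Z')
+# lands just past the entries for that codepoint.
+#
+#   import bisect
+#
+#   def lookup(char, table):
+#       # table = entries from all _seg_*() lists, concatenated in order
+#       row = table[bisect.bisect_left(table, (ord(char), 'Z')) - 1]
+#       return row  # (range start, status[, optional mapping])
+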
+def _seg_29():
+ return [
+ (0x3003, 'V'),
+ (0x3036, 'M', u'〒'),
+ (0x3037, 'V'),
+ (0x3038, 'M', u'十'),
+ (0x3039, 'M', u'卄'),
+ (0x303A, 'M', u'卅'),
+ (0x303B, 'V'),
+ (0x3040, 'X'),
+ (0x3041, 'V'),
+ (0x3097, 'X'),
+ (0x3099, 'V'),
+ (0x309B, '3', u' ゙'),
+ (0x309C, '3', u' ゚'),
+ (0x309D, 'V'),
+ (0x309F, 'M', u'より'),
+ (0x30A0, 'V'),
+ (0x30FF, 'M', u'コト'),
+ (0x3100, 'X'),
+ (0x3105, 'V'),
+ (0x3130, 'X'),
+ (0x3131, 'M', u'ᄀ'),
+ (0x3132, 'M', u'ᄁ'),
+ (0x3133, 'M', u'ᆪ'),
+ (0x3134, 'M', u'ᄂ'),
+ (0x3135, 'M', u'ᆬ'),
+ (0x3136, 'M', u'ᆭ'),
+ (0x3137, 'M', u'ᄃ'),
+ (0x3138, 'M', u'ᄄ'),
+ (0x3139, 'M', u'ᄅ'),
+ (0x313A, 'M', u'ᆰ'),
+ (0x313B, 'M', u'ᆱ'),
+ (0x313C, 'M', u'ᆲ'),
+ (0x313D, 'M', u'ᆳ'),
+ (0x313E, 'M', u'ᆴ'),
+ (0x313F, 'M', u'ᆵ'),
+ (0x3140, 'M', u'ᄚ'),
+ (0x3141, 'M', u'ᄆ'),
+ (0x3142, 'M', u'ᄇ'),
+ (0x3143, 'M', u'ᄈ'),
+ (0x3144, 'M', u'ᄡ'),
+ (0x3145, 'M', u'ᄉ'),
+ (0x3146, 'M', u'ᄊ'),
+ (0x3147, 'M', u'ᄋ'),
+ (0x3148, 'M', u'ᄌ'),
+ (0x3149, 'M', u'ᄍ'),
+ (0x314A, 'M', u'ᄎ'),
+ (0x314B, 'M', u'ᄏ'),
+ (0x314C, 'M', u'ᄐ'),
+ (0x314D, 'M', u'ᄑ'),
+ (0x314E, 'M', u'ᄒ'),
+ (0x314F, 'M', u'ᅡ'),
+ (0x3150, 'M', u'ᅢ'),
+ (0x3151, 'M', u'ᅣ'),
+ (0x3152, 'M', u'ᅤ'),
+ (0x3153, 'M', u'ᅥ'),
+ (0x3154, 'M', u'ᅦ'),
+ (0x3155, 'M', u'ᅧ'),
+ (0x3156, 'M', u'ᅨ'),
+ (0x3157, 'M', u'ᅩ'),
+ (0x3158, 'M', u'ᅪ'),
+ (0x3159, 'M', u'ᅫ'),
+ (0x315A, 'M', u'ᅬ'),
+ (0x315B, 'M', u'ᅭ'),
+ (0x315C, 'M', u'ᅮ'),
+ (0x315D, 'M', u'ᅯ'),
+ (0x315E, 'M', u'ᅰ'),
+ (0x315F, 'M', u'ᅱ'),
+ (0x3160, 'M', u'ᅲ'),
+ (0x3161, 'M', u'ᅳ'),
+ (0x3162, 'M', u'ᅴ'),
+ (0x3163, 'M', u'ᅵ'),
+ (0x3164, 'X'),
+ (0x3165, 'M', u'ᄔ'),
+ (0x3166, 'M', u'ᄕ'),
+ (0x3167, 'M', u'ᇇ'),
+ (0x3168, 'M', u'ᇈ'),
+ (0x3169, 'M', u'ᇌ'),
+ (0x316A, 'M', u'ᇎ'),
+ (0x316B, 'M', u'ᇓ'),
+ (0x316C, 'M', u'ᇗ'),
+ (0x316D, 'M', u'ᇙ'),
+ (0x316E, 'M', u'ᄜ'),
+ (0x316F, 'M', u'ᇝ'),
+ (0x3170, 'M', u'ᇟ'),
+ (0x3171, 'M', u'ᄝ'),
+ (0x3172, 'M', u'ᄞ'),
+ (0x3173, 'M', u'ᄠ'),
+ (0x3174, 'M', u'ᄢ'),
+ (0x3175, 'M', u'ᄣ'),
+ (0x3176, 'M', u'ᄧ'),
+ (0x3177, 'M', u'ᄩ'),
+ (0x3178, 'M', u'ᄫ'),
+ (0x3179, 'M', u'ᄬ'),
+ (0x317A, 'M', u'ᄭ'),
+ (0x317B, 'M', u'ᄮ'),
+ (0x317C, 'M', u'ᄯ'),
+ (0x317D, 'M', u'ᄲ'),
+ (0x317E, 'M', u'ᄶ'),
+ (0x317F, 'M', u'ᅀ'),
+ (0x3180, 'M', u'ᅇ'),
+ ]
+
+def _seg_30():
+ return [
+ (0x3181, 'M', u'ᅌ'),
+ (0x3182, 'M', u'ᇱ'),
+ (0x3183, 'M', u'ᇲ'),
+ (0x3184, 'M', u'ᅗ'),
+ (0x3185, 'M', u'ᅘ'),
+ (0x3186, 'M', u'ᅙ'),
+ (0x3187, 'M', u'ᆄ'),
+ (0x3188, 'M', u'ᆅ'),
+ (0x3189, 'M', u'ᆈ'),
+ (0x318A, 'M', u'ᆑ'),
+ (0x318B, 'M', u'ᆒ'),
+ (0x318C, 'M', u'ᆔ'),
+ (0x318D, 'M', u'ᆞ'),
+ (0x318E, 'M', u'ᆡ'),
+ (0x318F, 'X'),
+ (0x3190, 'V'),
+ (0x3192, 'M', u'一'),
+ (0x3193, 'M', u'二'),
+ (0x3194, 'M', u'三'),
+ (0x3195, 'M', u'四'),
+ (0x3196, 'M', u'上'),
+ (0x3197, 'M', u'中'),
+ (0x3198, 'M', u'下'),
+ (0x3199, 'M', u'甲'),
+ (0x319A, 'M', u'乙'),
+ (0x319B, 'M', u'丙'),
+ (0x319C, 'M', u'丁'),
+ (0x319D, 'M', u'天'),
+ (0x319E, 'M', u'地'),
+ (0x319F, 'M', u'人'),
+ (0x31A0, 'V'),
+ (0x31E4, 'X'),
+ (0x31F0, 'V'),
+ (0x3200, '3', u'(ᄀ)'),
+ (0x3201, '3', u'(ᄂ)'),
+ (0x3202, '3', u'(ᄃ)'),
+ (0x3203, '3', u'(ᄅ)'),
+ (0x3204, '3', u'(ᄆ)'),
+ (0x3205, '3', u'(ᄇ)'),
+ (0x3206, '3', u'(ᄉ)'),
+ (0x3207, '3', u'(ᄋ)'),
+ (0x3208, '3', u'(ᄌ)'),
+ (0x3209, '3', u'(ᄎ)'),
+ (0x320A, '3', u'(ᄏ)'),
+ (0x320B, '3', u'(ᄐ)'),
+ (0x320C, '3', u'(ᄑ)'),
+ (0x320D, '3', u'(ᄒ)'),
+ (0x320E, '3', u'(가)'),
+ (0x320F, '3', u'(나)'),
+ (0x3210, '3', u'(다)'),
+ (0x3211, '3', u'(라)'),
+ (0x3212, '3', u'(마)'),
+ (0x3213, '3', u'(바)'),
+ (0x3214, '3', u'(사)'),
+ (0x3215, '3', u'(아)'),
+ (0x3216, '3', u'(자)'),
+ (0x3217, '3', u'(차)'),
+ (0x3218, '3', u'(카)'),
+ (0x3219, '3', u'(타)'),
+ (0x321A, '3', u'(파)'),
+ (0x321B, '3', u'(하)'),
+ (0x321C, '3', u'(주)'),
+ (0x321D, '3', u'(오전)'),
+ (0x321E, '3', u'(오후)'),
+ (0x321F, 'X'),
+ (0x3220, '3', u'(一)'),
+ (0x3221, '3', u'(二)'),
+ (0x3222, '3', u'(三)'),
+ (0x3223, '3', u'(四)'),
+ (0x3224, '3', u'(五)'),
+ (0x3225, '3', u'(六)'),
+ (0x3226, '3', u'(七)'),
+ (0x3227, '3', u'(八)'),
+ (0x3228, '3', u'(九)'),
+ (0x3229, '3', u'(十)'),
+ (0x322A, '3', u'(月)'),
+ (0x322B, '3', u'(火)'),
+ (0x322C, '3', u'(水)'),
+ (0x322D, '3', u'(木)'),
+ (0x322E, '3', u'(金)'),
+ (0x322F, '3', u'(土)'),
+ (0x3230, '3', u'(日)'),
+ (0x3231, '3', u'(株)'),
+ (0x3232, '3', u'(有)'),
+ (0x3233, '3', u'(社)'),
+ (0x3234, '3', u'(名)'),
+ (0x3235, '3', u'(特)'),
+ (0x3236, '3', u'(財)'),
+ (0x3237, '3', u'(祝)'),
+ (0x3238, '3', u'(労)'),
+ (0x3239, '3', u'(代)'),
+ (0x323A, '3', u'(呼)'),
+ (0x323B, '3', u'(学)'),
+ (0x323C, '3', u'(監)'),
+ (0x323D, '3', u'(企)'),
+ (0x323E, '3', u'(資)'),
+ (0x323F, '3', u'(協)'),
+ (0x3240, '3', u'(祭)'),
+ (0x3241, '3', u'(休)'),
+ (0x3242, '3', u'(自)'),
+ ]
+
+def _seg_31():
+ return [
+ (0x3243, '3', u'(至)'),
+ (0x3244, 'M', u'問'),
+ (0x3245, 'M', u'幼'),
+ (0x3246, 'M', u'文'),
+ (0x3247, 'M', u'箏'),
+ (0x3248, 'V'),
+ (0x3250, 'M', u'pte'),
+ (0x3251, 'M', u'21'),
+ (0x3252, 'M', u'22'),
+ (0x3253, 'M', u'23'),
+ (0x3254, 'M', u'24'),
+ (0x3255, 'M', u'25'),
+ (0x3256, 'M', u'26'),
+ (0x3257, 'M', u'27'),
+ (0x3258, 'M', u'28'),
+ (0x3259, 'M', u'29'),
+ (0x325A, 'M', u'30'),
+ (0x325B, 'M', u'31'),
+ (0x325C, 'M', u'32'),
+ (0x325D, 'M', u'33'),
+ (0x325E, 'M', u'34'),
+ (0x325F, 'M', u'35'),
+ (0x3260, 'M', u'ᄀ'),
+ (0x3261, 'M', u'ᄂ'),
+ (0x3262, 'M', u'ᄃ'),
+ (0x3263, 'M', u'ᄅ'),
+ (0x3264, 'M', u'ᄆ'),
+ (0x3265, 'M', u'ᄇ'),
+ (0x3266, 'M', u'ᄉ'),
+ (0x3267, 'M', u'ᄋ'),
+ (0x3268, 'M', u'ᄌ'),
+ (0x3269, 'M', u'ᄎ'),
+ (0x326A, 'M', u'ᄏ'),
+ (0x326B, 'M', u'ᄐ'),
+ (0x326C, 'M', u'ᄑ'),
+ (0x326D, 'M', u'ᄒ'),
+ (0x326E, 'M', u'가'),
+ (0x326F, 'M', u'나'),
+ (0x3270, 'M', u'다'),
+ (0x3271, 'M', u'라'),
+ (0x3272, 'M', u'마'),
+ (0x3273, 'M', u'바'),
+ (0x3274, 'M', u'사'),
+ (0x3275, 'M', u'아'),
+ (0x3276, 'M', u'자'),
+ (0x3277, 'M', u'차'),
+ (0x3278, 'M', u'카'),
+ (0x3279, 'M', u'타'),
+ (0x327A, 'M', u'파'),
+ (0x327B, 'M', u'하'),
+ (0x327C, 'M', u'참고'),
+ (0x327D, 'M', u'주의'),
+ (0x327E, 'M', u'우'),
+ (0x327F, 'V'),
+ (0x3280, 'M', u'一'),
+ (0x3281, 'M', u'二'),
+ (0x3282, 'M', u'三'),
+ (0x3283, 'M', u'四'),
+ (0x3284, 'M', u'五'),
+ (0x3285, 'M', u'六'),
+ (0x3286, 'M', u'七'),
+ (0x3287, 'M', u'八'),
+ (0x3288, 'M', u'九'),
+ (0x3289, 'M', u'十'),
+ (0x328A, 'M', u'月'),
+ (0x328B, 'M', u'火'),
+ (0x328C, 'M', u'水'),
+ (0x328D, 'M', u'木'),
+ (0x328E, 'M', u'金'),
+ (0x328F, 'M', u'土'),
+ (0x3290, 'M', u'日'),
+ (0x3291, 'M', u'株'),
+ (0x3292, 'M', u'有'),
+ (0x3293, 'M', u'社'),
+ (0x3294, 'M', u'名'),
+ (0x3295, 'M', u'特'),
+ (0x3296, 'M', u'財'),
+ (0x3297, 'M', u'祝'),
+ (0x3298, 'M', u'労'),
+ (0x3299, 'M', u'秘'),
+ (0x329A, 'M', u'男'),
+ (0x329B, 'M', u'女'),
+ (0x329C, 'M', u'適'),
+ (0x329D, 'M', u'優'),
+ (0x329E, 'M', u'印'),
+ (0x329F, 'M', u'注'),
+ (0x32A0, 'M', u'項'),
+ (0x32A1, 'M', u'休'),
+ (0x32A2, 'M', u'写'),
+ (0x32A3, 'M', u'正'),
+ (0x32A4, 'M', u'上'),
+ (0x32A5, 'M', u'中'),
+ (0x32A6, 'M', u'下'),
+ (0x32A7, 'M', u'左'),
+ (0x32A8, 'M', u'右'),
+ (0x32A9, 'M', u'医'),
+ (0x32AA, 'M', u'宗'),
+ (0x32AB, 'M', u'学'),
+ (0x32AC, 'M', u'監'),
+ (0x32AD, 'M', u'企'),
+ ]
+
+def _seg_32():
+ return [
+ (0x32AE, 'M', u'資'),
+ (0x32AF, 'M', u'協'),
+ (0x32B0, 'M', u'夜'),
+ (0x32B1, 'M', u'36'),
+ (0x32B2, 'M', u'37'),
+ (0x32B3, 'M', u'38'),
+ (0x32B4, 'M', u'39'),
+ (0x32B5, 'M', u'40'),
+ (0x32B6, 'M', u'41'),
+ (0x32B7, 'M', u'42'),
+ (0x32B8, 'M', u'43'),
+ (0x32B9, 'M', u'44'),
+ (0x32BA, 'M', u'45'),
+ (0x32BB, 'M', u'46'),
+ (0x32BC, 'M', u'47'),
+ (0x32BD, 'M', u'48'),
+ (0x32BE, 'M', u'49'),
+ (0x32BF, 'M', u'50'),
+ (0x32C0, 'M', u'1月'),
+ (0x32C1, 'M', u'2月'),
+ (0x32C2, 'M', u'3月'),
+ (0x32C3, 'M', u'4月'),
+ (0x32C4, 'M', u'5月'),
+ (0x32C5, 'M', u'6月'),
+ (0x32C6, 'M', u'7月'),
+ (0x32C7, 'M', u'8月'),
+ (0x32C8, 'M', u'9月'),
+ (0x32C9, 'M', u'10月'),
+ (0x32CA, 'M', u'11月'),
+ (0x32CB, 'M', u'12月'),
+ (0x32CC, 'M', u'hg'),
+ (0x32CD, 'M', u'erg'),
+ (0x32CE, 'M', u'ev'),
+ (0x32CF, 'M', u'ltd'),
+ (0x32D0, 'M', u'ア'),
+ (0x32D1, 'M', u'イ'),
+ (0x32D2, 'M', u'ウ'),
+ (0x32D3, 'M', u'エ'),
+ (0x32D4, 'M', u'オ'),
+ (0x32D5, 'M', u'カ'),
+ (0x32D6, 'M', u'キ'),
+ (0x32D7, 'M', u'ク'),
+ (0x32D8, 'M', u'ケ'),
+ (0x32D9, 'M', u'コ'),
+ (0x32DA, 'M', u'サ'),
+ (0x32DB, 'M', u'シ'),
+ (0x32DC, 'M', u'ス'),
+ (0x32DD, 'M', u'セ'),
+ (0x32DE, 'M', u'ソ'),
+ (0x32DF, 'M', u'タ'),
+ (0x32E0, 'M', u'チ'),
+ (0x32E1, 'M', u'ツ'),
+ (0x32E2, 'M', u'テ'),
+ (0x32E3, 'M', u'ト'),
+ (0x32E4, 'M', u'ナ'),
+ (0x32E5, 'M', u'ニ'),
+ (0x32E6, 'M', u'ヌ'),
+ (0x32E7, 'M', u'ネ'),
+ (0x32E8, 'M', u'ノ'),
+ (0x32E9, 'M', u'ハ'),
+ (0x32EA, 'M', u'ヒ'),
+ (0x32EB, 'M', u'フ'),
+ (0x32EC, 'M', u'ヘ'),
+ (0x32ED, 'M', u'ホ'),
+ (0x32EE, 'M', u'マ'),
+ (0x32EF, 'M', u'ミ'),
+ (0x32F0, 'M', u'ム'),
+ (0x32F1, 'M', u'メ'),
+ (0x32F2, 'M', u'モ'),
+ (0x32F3, 'M', u'ヤ'),
+ (0x32F4, 'M', u'ユ'),
+ (0x32F5, 'M', u'ヨ'),
+ (0x32F6, 'M', u'ラ'),
+ (0x32F7, 'M', u'リ'),
+ (0x32F8, 'M', u'ル'),
+ (0x32F9, 'M', u'レ'),
+ (0x32FA, 'M', u'ロ'),
+ (0x32FB, 'M', u'ワ'),
+ (0x32FC, 'M', u'ヰ'),
+ (0x32FD, 'M', u'ヱ'),
+ (0x32FE, 'M', u'ヲ'),
+ (0x32FF, 'M', u'令和'),
+ (0x3300, 'M', u'アパート'),
+ (0x3301, 'M', u'アルファ'),
+ (0x3302, 'M', u'アンペア'),
+ (0x3303, 'M', u'アール'),
+ (0x3304, 'M', u'イニング'),
+ (0x3305, 'M', u'インチ'),
+ (0x3306, 'M', u'ウォン'),
+ (0x3307, 'M', u'エスクード'),
+ (0x3308, 'M', u'エーカー'),
+ (0x3309, 'M', u'オンス'),
+ (0x330A, 'M', u'オーム'),
+ (0x330B, 'M', u'カイリ'),
+ (0x330C, 'M', u'カラット'),
+ (0x330D, 'M', u'カロリー'),
+ (0x330E, 'M', u'ガロン'),
+ (0x330F, 'M', u'ガンマ'),
+ (0x3310, 'M', u'ギガ'),
+ (0x3311, 'M', u'ギニー'),
+ ]
+
+def _seg_33():
+ return [
+ (0x3312, 'M', u'キュリー'),
+ (0x3313, 'M', u'ギルダー'),
+ (0x3314, 'M', u'キロ'),
+ (0x3315, 'M', u'キログラム'),
+ (0x3316, 'M', u'キロメートル'),
+ (0x3317, 'M', u'キロワット'),
+ (0x3318, 'M', u'グラム'),
+ (0x3319, 'M', u'グラムトン'),
+ (0x331A, 'M', u'クルゼイロ'),
+ (0x331B, 'M', u'クローネ'),
+ (0x331C, 'M', u'ケース'),
+ (0x331D, 'M', u'コルナ'),
+ (0x331E, 'M', u'コーポ'),
+ (0x331F, 'M', u'サイクル'),
+ (0x3320, 'M', u'サンチーム'),
+ (0x3321, 'M', u'シリング'),
+ (0x3322, 'M', u'センチ'),
+ (0x3323, 'M', u'セント'),
+ (0x3324, 'M', u'ダース'),
+ (0x3325, 'M', u'デシ'),
+ (0x3326, 'M', u'ドル'),
+ (0x3327, 'M', u'トン'),
+ (0x3328, 'M', u'ナノ'),
+ (0x3329, 'M', u'ノット'),
+ (0x332A, 'M', u'ハイツ'),
+ (0x332B, 'M', u'パーセント'),
+ (0x332C, 'M', u'パーツ'),
+ (0x332D, 'M', u'バーレル'),
+ (0x332E, 'M', u'ピアストル'),
+ (0x332F, 'M', u'ピクル'),
+ (0x3330, 'M', u'ピコ'),
+ (0x3331, 'M', u'ビル'),
+ (0x3332, 'M', u'ファラッド'),
+ (0x3333, 'M', u'フィート'),
+ (0x3334, 'M', u'ブッシェル'),
+ (0x3335, 'M', u'フラン'),
+ (0x3336, 'M', u'ヘクタール'),
+ (0x3337, 'M', u'ペソ'),
+ (0x3338, 'M', u'ペニヒ'),
+ (0x3339, 'M', u'ヘルツ'),
+ (0x333A, 'M', u'ペンス'),
+ (0x333B, 'M', u'ページ'),
+ (0x333C, 'M', u'ベータ'),
+ (0x333D, 'M', u'ポイント'),
+ (0x333E, 'M', u'ボルト'),
+ (0x333F, 'M', u'ホン'),
+ (0x3340, 'M', u'ポンド'),
+ (0x3341, 'M', u'ホール'),
+ (0x3342, 'M', u'ホーン'),
+ (0x3343, 'M', u'マイクロ'),
+ (0x3344, 'M', u'マイル'),
+ (0x3345, 'M', u'マッハ'),
+ (0x3346, 'M', u'マルク'),
+ (0x3347, 'M', u'マンション'),
+ (0x3348, 'M', u'ミクロン'),
+ (0x3349, 'M', u'ミリ'),
+ (0x334A, 'M', u'ミリバール'),
+ (0x334B, 'M', u'メガ'),
+ (0x334C, 'M', u'メガトン'),
+ (0x334D, 'M', u'メートル'),
+ (0x334E, 'M', u'ヤード'),
+ (0x334F, 'M', u'ヤール'),
+ (0x3350, 'M', u'ユアン'),
+ (0x3351, 'M', u'リットル'),
+ (0x3352, 'M', u'リラ'),
+ (0x3353, 'M', u'ルピー'),
+ (0x3354, 'M', u'ルーブル'),
+ (0x3355, 'M', u'レム'),
+ (0x3356, 'M', u'レントゲン'),
+ (0x3357, 'M', u'ワット'),
+ (0x3358, 'M', u'0点'),
+ (0x3359, 'M', u'1点'),
+ (0x335A, 'M', u'2点'),
+ (0x335B, 'M', u'3点'),
+ (0x335C, 'M', u'4点'),
+ (0x335D, 'M', u'5点'),
+ (0x335E, 'M', u'6点'),
+ (0x335F, 'M', u'7点'),
+ (0x3360, 'M', u'8点'),
+ (0x3361, 'M', u'9点'),
+ (0x3362, 'M', u'10点'),
+ (0x3363, 'M', u'11点'),
+ (0x3364, 'M', u'12点'),
+ (0x3365, 'M', u'13点'),
+ (0x3366, 'M', u'14点'),
+ (0x3367, 'M', u'15点'),
+ (0x3368, 'M', u'16点'),
+ (0x3369, 'M', u'17点'),
+ (0x336A, 'M', u'18点'),
+ (0x336B, 'M', u'19点'),
+ (0x336C, 'M', u'20点'),
+ (0x336D, 'M', u'21点'),
+ (0x336E, 'M', u'22点'),
+ (0x336F, 'M', u'23点'),
+ (0x3370, 'M', u'24点'),
+ (0x3371, 'M', u'hpa'),
+ (0x3372, 'M', u'da'),
+ (0x3373, 'M', u'au'),
+ (0x3374, 'M', u'bar'),
+ (0x3375, 'M', u'ov'),
+ ]
+
+def _seg_34():
+ return [
+ (0x3376, 'M', u'pc'),
+ (0x3377, 'M', u'dm'),
+ (0x3378, 'M', u'dm2'),
+ (0x3379, 'M', u'dm3'),
+ (0x337A, 'M', u'iu'),
+ (0x337B, 'M', u'平成'),
+ (0x337C, 'M', u'昭和'),
+ (0x337D, 'M', u'大正'),
+ (0x337E, 'M', u'明治'),
+ (0x337F, 'M', u'株式会社'),
+ (0x3380, 'M', u'pa'),
+ (0x3381, 'M', u'na'),
+ (0x3382, 'M', u'μa'),
+ (0x3383, 'M', u'ma'),
+ (0x3384, 'M', u'ka'),
+ (0x3385, 'M', u'kb'),
+ (0x3386, 'M', u'mb'),
+ (0x3387, 'M', u'gb'),
+ (0x3388, 'M', u'cal'),
+ (0x3389, 'M', u'kcal'),
+ (0x338A, 'M', u'pf'),
+ (0x338B, 'M', u'nf'),
+ (0x338C, 'M', u'μf'),
+ (0x338D, 'M', u'μg'),
+ (0x338E, 'M', u'mg'),
+ (0x338F, 'M', u'kg'),
+ (0x3390, 'M', u'hz'),
+ (0x3391, 'M', u'khz'),
+ (0x3392, 'M', u'mhz'),
+ (0x3393, 'M', u'ghz'),
+ (0x3394, 'M', u'thz'),
+ (0x3395, 'M', u'μl'),
+ (0x3396, 'M', u'ml'),
+ (0x3397, 'M', u'dl'),
+ (0x3398, 'M', u'kl'),
+ (0x3399, 'M', u'fm'),
+ (0x339A, 'M', u'nm'),
+ (0x339B, 'M', u'μm'),
+ (0x339C, 'M', u'mm'),
+ (0x339D, 'M', u'cm'),
+ (0x339E, 'M', u'km'),
+ (0x339F, 'M', u'mm2'),
+ (0x33A0, 'M', u'cm2'),
+ (0x33A1, 'M', u'm2'),
+ (0x33A2, 'M', u'km2'),
+ (0x33A3, 'M', u'mm3'),
+ (0x33A4, 'M', u'cm3'),
+ (0x33A5, 'M', u'm3'),
+ (0x33A6, 'M', u'km3'),
+ (0x33A7, 'M', u'm∕s'),
+ (0x33A8, 'M', u'm∕s2'),
+ (0x33A9, 'M', u'pa'),
+ (0x33AA, 'M', u'kpa'),
+ (0x33AB, 'M', u'mpa'),
+ (0x33AC, 'M', u'gpa'),
+ (0x33AD, 'M', u'rad'),
+ (0x33AE, 'M', u'rad∕s'),
+ (0x33AF, 'M', u'rad∕s2'),
+ (0x33B0, 'M', u'ps'),
+ (0x33B1, 'M', u'ns'),
+ (0x33B2, 'M', u'μs'),
+ (0x33B3, 'M', u'ms'),
+ (0x33B4, 'M', u'pv'),
+ (0x33B5, 'M', u'nv'),
+ (0x33B6, 'M', u'μv'),
+ (0x33B7, 'M', u'mv'),
+ (0x33B8, 'M', u'kv'),
+ (0x33B9, 'M', u'mv'),
+ (0x33BA, 'M', u'pw'),
+ (0x33BB, 'M', u'nw'),
+ (0x33BC, 'M', u'μw'),
+ (0x33BD, 'M', u'mw'),
+ (0x33BE, 'M', u'kw'),
+ (0x33BF, 'M', u'mw'),
+ (0x33C0, 'M', u'kω'),
+ (0x33C1, 'M', u'mω'),
+ (0x33C2, 'X'),
+ (0x33C3, 'M', u'bq'),
+ (0x33C4, 'M', u'cc'),
+ (0x33C5, 'M', u'cd'),
+ (0x33C6, 'M', u'c∕kg'),
+ (0x33C7, 'X'),
+ (0x33C8, 'M', u'db'),
+ (0x33C9, 'M', u'gy'),
+ (0x33CA, 'M', u'ha'),
+ (0x33CB, 'M', u'hp'),
+ (0x33CC, 'M', u'in'),
+ (0x33CD, 'M', u'kk'),
+ (0x33CE, 'M', u'km'),
+ (0x33CF, 'M', u'kt'),
+ (0x33D0, 'M', u'lm'),
+ (0x33D1, 'M', u'ln'),
+ (0x33D2, 'M', u'log'),
+ (0x33D3, 'M', u'lx'),
+ (0x33D4, 'M', u'mb'),
+ (0x33D5, 'M', u'mil'),
+ (0x33D6, 'M', u'mol'),
+ (0x33D7, 'M', u'ph'),
+ (0x33D8, 'X'),
+ (0x33D9, 'M', u'ppm'),
+ ]
+
+def _seg_35():
+ return [
+ (0x33DA, 'M', u'pr'),
+ (0x33DB, 'M', u'sr'),
+ (0x33DC, 'M', u'sv'),
+ (0x33DD, 'M', u'wb'),
+ (0x33DE, 'M', u'v∕m'),
+ (0x33DF, 'M', u'a∕m'),
+ (0x33E0, 'M', u'1日'),
+ (0x33E1, 'M', u'2日'),
+ (0x33E2, 'M', u'3日'),
+ (0x33E3, 'M', u'4日'),
+ (0x33E4, 'M', u'5日'),
+ (0x33E5, 'M', u'6日'),
+ (0x33E6, 'M', u'7日'),
+ (0x33E7, 'M', u'8日'),
+ (0x33E8, 'M', u'9日'),
+ (0x33E9, 'M', u'10日'),
+ (0x33EA, 'M', u'11日'),
+ (0x33EB, 'M', u'12日'),
+ (0x33EC, 'M', u'13日'),
+ (0x33ED, 'M', u'14日'),
+ (0x33EE, 'M', u'15日'),
+ (0x33EF, 'M', u'16日'),
+ (0x33F0, 'M', u'17日'),
+ (0x33F1, 'M', u'18日'),
+ (0x33F2, 'M', u'19日'),
+ (0x33F3, 'M', u'20日'),
+ (0x33F4, 'M', u'21日'),
+ (0x33F5, 'M', u'22日'),
+ (0x33F6, 'M', u'23日'),
+ (0x33F7, 'M', u'24日'),
+ (0x33F8, 'M', u'25日'),
+ (0x33F9, 'M', u'26日'),
+ (0x33FA, 'M', u'27日'),
+ (0x33FB, 'M', u'28日'),
+ (0x33FC, 'M', u'29日'),
+ (0x33FD, 'M', u'30日'),
+ (0x33FE, 'M', u'31日'),
+ (0x33FF, 'M', u'gal'),
+ (0x3400, 'V'),
+ (0x9FFD, 'X'),
+ (0xA000, 'V'),
+ (0xA48D, 'X'),
+ (0xA490, 'V'),
+ (0xA4C7, 'X'),
+ (0xA4D0, 'V'),
+ (0xA62C, 'X'),
+ (0xA640, 'M', u'ꙁ'),
+ (0xA641, 'V'),
+ (0xA642, 'M', u'ꙃ'),
+ (0xA643, 'V'),
+ (0xA644, 'M', u'ꙅ'),
+ (0xA645, 'V'),
+ (0xA646, 'M', u'ꙇ'),
+ (0xA647, 'V'),
+ (0xA648, 'M', u'ꙉ'),
+ (0xA649, 'V'),
+ (0xA64A, 'M', u'ꙋ'),
+ (0xA64B, 'V'),
+ (0xA64C, 'M', u'ꙍ'),
+ (0xA64D, 'V'),
+ (0xA64E, 'M', u'ꙏ'),
+ (0xA64F, 'V'),
+ (0xA650, 'M', u'ꙑ'),
+ (0xA651, 'V'),
+ (0xA652, 'M', u'ꙓ'),
+ (0xA653, 'V'),
+ (0xA654, 'M', u'ꙕ'),
+ (0xA655, 'V'),
+ (0xA656, 'M', u'ꙗ'),
+ (0xA657, 'V'),
+ (0xA658, 'M', u'ꙙ'),
+ (0xA659, 'V'),
+ (0xA65A, 'M', u'ꙛ'),
+ (0xA65B, 'V'),
+ (0xA65C, 'M', u'ꙝ'),
+ (0xA65D, 'V'),
+ (0xA65E, 'M', u'ꙟ'),
+ (0xA65F, 'V'),
+ (0xA660, 'M', u'ꙡ'),
+ (0xA661, 'V'),
+ (0xA662, 'M', u'ꙣ'),
+ (0xA663, 'V'),
+ (0xA664, 'M', u'ꙥ'),
+ (0xA665, 'V'),
+ (0xA666, 'M', u'ꙧ'),
+ (0xA667, 'V'),
+ (0xA668, 'M', u'ꙩ'),
+ (0xA669, 'V'),
+ (0xA66A, 'M', u'ꙫ'),
+ (0xA66B, 'V'),
+ (0xA66C, 'M', u'ꙭ'),
+ (0xA66D, 'V'),
+ (0xA680, 'M', u'ꚁ'),
+ (0xA681, 'V'),
+ (0xA682, 'M', u'ꚃ'),
+ (0xA683, 'V'),
+ (0xA684, 'M', u'ꚅ'),
+ (0xA685, 'V'),
+ (0xA686, 'M', u'ꚇ'),
+ (0xA687, 'V'),
+ ]
+
+def _seg_36():
+ return [
+ (0xA688, 'M', u'ꚉ'),
+ (0xA689, 'V'),
+ (0xA68A, 'M', u'ꚋ'),
+ (0xA68B, 'V'),
+ (0xA68C, 'M', u'ꚍ'),
+ (0xA68D, 'V'),
+ (0xA68E, 'M', u'ꚏ'),
+ (0xA68F, 'V'),
+ (0xA690, 'M', u'ꚑ'),
+ (0xA691, 'V'),
+ (0xA692, 'M', u'ꚓ'),
+ (0xA693, 'V'),
+ (0xA694, 'M', u'ꚕ'),
+ (0xA695, 'V'),
+ (0xA696, 'M', u'ꚗ'),
+ (0xA697, 'V'),
+ (0xA698, 'M', u'ꚙ'),
+ (0xA699, 'V'),
+ (0xA69A, 'M', u'ꚛ'),
+ (0xA69B, 'V'),
+ (0xA69C, 'M', u'ъ'),
+ (0xA69D, 'M', u'ь'),
+ (0xA69E, 'V'),
+ (0xA6F8, 'X'),
+ (0xA700, 'V'),
+ (0xA722, 'M', u'ꜣ'),
+ (0xA723, 'V'),
+ (0xA724, 'M', u'ꜥ'),
+ (0xA725, 'V'),
+ (0xA726, 'M', u'ꜧ'),
+ (0xA727, 'V'),
+ (0xA728, 'M', u'ꜩ'),
+ (0xA729, 'V'),
+ (0xA72A, 'M', u'ꜫ'),
+ (0xA72B, 'V'),
+ (0xA72C, 'M', u'ꜭ'),
+ (0xA72D, 'V'),
+ (0xA72E, 'M', u'ꜯ'),
+ (0xA72F, 'V'),
+ (0xA732, 'M', u'ꜳ'),
+ (0xA733, 'V'),
+ (0xA734, 'M', u'ꜵ'),
+ (0xA735, 'V'),
+ (0xA736, 'M', u'ꜷ'),
+ (0xA737, 'V'),
+ (0xA738, 'M', u'ꜹ'),
+ (0xA739, 'V'),
+ (0xA73A, 'M', u'ꜻ'),
+ (0xA73B, 'V'),
+ (0xA73C, 'M', u'ꜽ'),
+ (0xA73D, 'V'),
+ (0xA73E, 'M', u'ꜿ'),
+ (0xA73F, 'V'),
+ (0xA740, 'M', u'ꝁ'),
+ (0xA741, 'V'),
+ (0xA742, 'M', u'ꝃ'),
+ (0xA743, 'V'),
+ (0xA744, 'M', u'ꝅ'),
+ (0xA745, 'V'),
+ (0xA746, 'M', u'ꝇ'),
+ (0xA747, 'V'),
+ (0xA748, 'M', u'ꝉ'),
+ (0xA749, 'V'),
+ (0xA74A, 'M', u'ꝋ'),
+ (0xA74B, 'V'),
+ (0xA74C, 'M', u'ꝍ'),
+ (0xA74D, 'V'),
+ (0xA74E, 'M', u'ꝏ'),
+ (0xA74F, 'V'),
+ (0xA750, 'M', u'ꝑ'),
+ (0xA751, 'V'),
+ (0xA752, 'M', u'ꝓ'),
+ (0xA753, 'V'),
+ (0xA754, 'M', u'ꝕ'),
+ (0xA755, 'V'),
+ (0xA756, 'M', u'ꝗ'),
+ (0xA757, 'V'),
+ (0xA758, 'M', u'ꝙ'),
+ (0xA759, 'V'),
+ (0xA75A, 'M', u'ꝛ'),
+ (0xA75B, 'V'),
+ (0xA75C, 'M', u'ꝝ'),
+ (0xA75D, 'V'),
+ (0xA75E, 'M', u'ꝟ'),
+ (0xA75F, 'V'),
+ (0xA760, 'M', u'ꝡ'),
+ (0xA761, 'V'),
+ (0xA762, 'M', u'ꝣ'),
+ (0xA763, 'V'),
+ (0xA764, 'M', u'ꝥ'),
+ (0xA765, 'V'),
+ (0xA766, 'M', u'ꝧ'),
+ (0xA767, 'V'),
+ (0xA768, 'M', u'ꝩ'),
+ (0xA769, 'V'),
+ (0xA76A, 'M', u'ꝫ'),
+ (0xA76B, 'V'),
+ (0xA76C, 'M', u'ꝭ'),
+ (0xA76D, 'V'),
+ (0xA76E, 'M', u'ꝯ'),
+ ]
+
+def _seg_37():
+ return [
+ (0xA76F, 'V'),
+ (0xA770, 'M', u'ꝯ'),
+ (0xA771, 'V'),
+ (0xA779, 'M', u'ꝺ'),
+ (0xA77A, 'V'),
+ (0xA77B, 'M', u'ꝼ'),
+ (0xA77C, 'V'),
+ (0xA77D, 'M', u'ᵹ'),
+ (0xA77E, 'M', u'ꝿ'),
+ (0xA77F, 'V'),
+ (0xA780, 'M', u'ꞁ'),
+ (0xA781, 'V'),
+ (0xA782, 'M', u'ꞃ'),
+ (0xA783, 'V'),
+ (0xA784, 'M', u'ꞅ'),
+ (0xA785, 'V'),
+ (0xA786, 'M', u'ꞇ'),
+ (0xA787, 'V'),
+ (0xA78B, 'M', u'ꞌ'),
+ (0xA78C, 'V'),
+ (0xA78D, 'M', u'ɥ'),
+ (0xA78E, 'V'),
+ (0xA790, 'M', u'ꞑ'),
+ (0xA791, 'V'),
+ (0xA792, 'M', u'ꞓ'),
+ (0xA793, 'V'),
+ (0xA796, 'M', u'ꞗ'),
+ (0xA797, 'V'),
+ (0xA798, 'M', u'ꞙ'),
+ (0xA799, 'V'),
+ (0xA79A, 'M', u'ꞛ'),
+ (0xA79B, 'V'),
+ (0xA79C, 'M', u'ꞝ'),
+ (0xA79D, 'V'),
+ (0xA79E, 'M', u'ꞟ'),
+ (0xA79F, 'V'),
+ (0xA7A0, 'M', u'ꞡ'),
+ (0xA7A1, 'V'),
+ (0xA7A2, 'M', u'ꞣ'),
+ (0xA7A3, 'V'),
+ (0xA7A4, 'M', u'ꞥ'),
+ (0xA7A5, 'V'),
+ (0xA7A6, 'M', u'ꞧ'),
+ (0xA7A7, 'V'),
+ (0xA7A8, 'M', u'ꞩ'),
+ (0xA7A9, 'V'),
+ (0xA7AA, 'M', u'ɦ'),
+ (0xA7AB, 'M', u'ɜ'),
+ (0xA7AC, 'M', u'ɡ'),
+ (0xA7AD, 'M', u'ɬ'),
+ (0xA7AE, 'M', u'ɪ'),
+ (0xA7AF, 'V'),
+ (0xA7B0, 'M', u'ʞ'),
+ (0xA7B1, 'M', u'ʇ'),
+ (0xA7B2, 'M', u'ʝ'),
+ (0xA7B3, 'M', u'ꭓ'),
+ (0xA7B4, 'M', u'ꞵ'),
+ (0xA7B5, 'V'),
+ (0xA7B6, 'M', u'ꞷ'),
+ (0xA7B7, 'V'),
+ (0xA7B8, 'M', u'ꞹ'),
+ (0xA7B9, 'V'),
+ (0xA7BA, 'M', u'ꞻ'),
+ (0xA7BB, 'V'),
+ (0xA7BC, 'M', u'ꞽ'),
+ (0xA7BD, 'V'),
+ (0xA7BE, 'M', u'ꞿ'),
+ (0xA7BF, 'V'),
+ (0xA7C0, 'X'),
+ (0xA7C2, 'M', u'ꟃ'),
+ (0xA7C3, 'V'),
+ (0xA7C4, 'M', u'ꞔ'),
+ (0xA7C5, 'M', u'ʂ'),
+ (0xA7C6, 'M', u'ᶎ'),
+ (0xA7C7, 'M', u'ꟈ'),
+ (0xA7C8, 'V'),
+ (0xA7C9, 'M', u'ꟊ'),
+ (0xA7CA, 'V'),
+ (0xA7CB, 'X'),
+ (0xA7F5, 'M', u'ꟶ'),
+ (0xA7F6, 'V'),
+ (0xA7F8, 'M', u'ħ'),
+ (0xA7F9, 'M', u'œ'),
+ (0xA7FA, 'V'),
+ (0xA82D, 'X'),
+ (0xA830, 'V'),
+ (0xA83A, 'X'),
+ (0xA840, 'V'),
+ (0xA878, 'X'),
+ (0xA880, 'V'),
+ (0xA8C6, 'X'),
+ (0xA8CE, 'V'),
+ (0xA8DA, 'X'),
+ (0xA8E0, 'V'),
+ (0xA954, 'X'),
+ (0xA95F, 'V'),
+ (0xA97D, 'X'),
+ (0xA980, 'V'),
+ (0xA9CE, 'X'),
+ (0xA9CF, 'V'),
+ ]
+
+def _seg_38():
+ return [
+ (0xA9DA, 'X'),
+ (0xA9DE, 'V'),
+ (0xA9FF, 'X'),
+ (0xAA00, 'V'),
+ (0xAA37, 'X'),
+ (0xAA40, 'V'),
+ (0xAA4E, 'X'),
+ (0xAA50, 'V'),
+ (0xAA5A, 'X'),
+ (0xAA5C, 'V'),
+ (0xAAC3, 'X'),
+ (0xAADB, 'V'),
+ (0xAAF7, 'X'),
+ (0xAB01, 'V'),
+ (0xAB07, 'X'),
+ (0xAB09, 'V'),
+ (0xAB0F, 'X'),
+ (0xAB11, 'V'),
+ (0xAB17, 'X'),
+ (0xAB20, 'V'),
+ (0xAB27, 'X'),
+ (0xAB28, 'V'),
+ (0xAB2F, 'X'),
+ (0xAB30, 'V'),
+ (0xAB5C, 'M', u'ꜧ'),
+ (0xAB5D, 'M', u'ꬷ'),
+ (0xAB5E, 'M', u'ɫ'),
+ (0xAB5F, 'M', u'ꭒ'),
+ (0xAB60, 'V'),
+ (0xAB69, 'M', u'ʍ'),
+ (0xAB6A, 'V'),
+ (0xAB6C, 'X'),
+ (0xAB70, 'M', u'Ꭰ'),
+ (0xAB71, 'M', u'Ꭱ'),
+ (0xAB72, 'M', u'Ꭲ'),
+ (0xAB73, 'M', u'Ꭳ'),
+ (0xAB74, 'M', u'Ꭴ'),
+ (0xAB75, 'M', u'Ꭵ'),
+ (0xAB76, 'M', u'Ꭶ'),
+ (0xAB77, 'M', u'Ꭷ'),
+ (0xAB78, 'M', u'Ꭸ'),
+ (0xAB79, 'M', u'Ꭹ'),
+ (0xAB7A, 'M', u'Ꭺ'),
+ (0xAB7B, 'M', u'Ꭻ'),
+ (0xAB7C, 'M', u'Ꭼ'),
+ (0xAB7D, 'M', u'Ꭽ'),
+ (0xAB7E, 'M', u'Ꭾ'),
+ (0xAB7F, 'M', u'Ꭿ'),
+ (0xAB80, 'M', u'Ꮀ'),
+ (0xAB81, 'M', u'Ꮁ'),
+ (0xAB82, 'M', u'Ꮂ'),
+ (0xAB83, 'M', u'Ꮃ'),
+ (0xAB84, 'M', u'Ꮄ'),
+ (0xAB85, 'M', u'Ꮅ'),
+ (0xAB86, 'M', u'Ꮆ'),
+ (0xAB87, 'M', u'Ꮇ'),
+ (0xAB88, 'M', u'Ꮈ'),
+ (0xAB89, 'M', u'Ꮉ'),
+ (0xAB8A, 'M', u'Ꮊ'),
+ (0xAB8B, 'M', u'Ꮋ'),
+ (0xAB8C, 'M', u'Ꮌ'),
+ (0xAB8D, 'M', u'Ꮍ'),
+ (0xAB8E, 'M', u'Ꮎ'),
+ (0xAB8F, 'M', u'Ꮏ'),
+ (0xAB90, 'M', u'Ꮐ'),
+ (0xAB91, 'M', u'Ꮑ'),
+ (0xAB92, 'M', u'Ꮒ'),
+ (0xAB93, 'M', u'Ꮓ'),
+ (0xAB94, 'M', u'Ꮔ'),
+ (0xAB95, 'M', u'Ꮕ'),
+ (0xAB96, 'M', u'Ꮖ'),
+ (0xAB97, 'M', u'Ꮗ'),
+ (0xAB98, 'M', u'Ꮘ'),
+ (0xAB99, 'M', u'Ꮙ'),
+ (0xAB9A, 'M', u'Ꮚ'),
+ (0xAB9B, 'M', u'Ꮛ'),
+ (0xAB9C, 'M', u'Ꮜ'),
+ (0xAB9D, 'M', u'Ꮝ'),
+ (0xAB9E, 'M', u'Ꮞ'),
+ (0xAB9F, 'M', u'Ꮟ'),
+ (0xABA0, 'M', u'Ꮠ'),
+ (0xABA1, 'M', u'Ꮡ'),
+ (0xABA2, 'M', u'Ꮢ'),
+ (0xABA3, 'M', u'Ꮣ'),
+ (0xABA4, 'M', u'Ꮤ'),
+ (0xABA5, 'M', u'Ꮥ'),
+ (0xABA6, 'M', u'Ꮦ'),
+ (0xABA7, 'M', u'Ꮧ'),
+ (0xABA8, 'M', u'Ꮨ'),
+ (0xABA9, 'M', u'Ꮩ'),
+ (0xABAA, 'M', u'Ꮪ'),
+ (0xABAB, 'M', u'Ꮫ'),
+ (0xABAC, 'M', u'Ꮬ'),
+ (0xABAD, 'M', u'Ꮭ'),
+ (0xABAE, 'M', u'Ꮮ'),
+ (0xABAF, 'M', u'Ꮯ'),
+ (0xABB0, 'M', u'Ꮰ'),
+ (0xABB1, 'M', u'Ꮱ'),
+ (0xABB2, 'M', u'Ꮲ'),
+ (0xABB3, 'M', u'Ꮳ'),
+ ]
+
+def _seg_39():
+ return [
+ (0xABB4, 'M', u'Ꮴ'),
+ (0xABB5, 'M', u'Ꮵ'),
+ (0xABB6, 'M', u'Ꮶ'),
+ (0xABB7, 'M', u'Ꮷ'),
+ (0xABB8, 'M', u'Ꮸ'),
+ (0xABB9, 'M', u'Ꮹ'),
+ (0xABBA, 'M', u'Ꮺ'),
+ (0xABBB, 'M', u'Ꮻ'),
+ (0xABBC, 'M', u'Ꮼ'),
+ (0xABBD, 'M', u'Ꮽ'),
+ (0xABBE, 'M', u'Ꮾ'),
+ (0xABBF, 'M', u'Ꮿ'),
+ (0xABC0, 'V'),
+ (0xABEE, 'X'),
+ (0xABF0, 'V'),
+ (0xABFA, 'X'),
+ (0xAC00, 'V'),
+ (0xD7A4, 'X'),
+ (0xD7B0, 'V'),
+ (0xD7C7, 'X'),
+ (0xD7CB, 'V'),
+ (0xD7FC, 'X'),
+ (0xF900, 'M', u'豈'),
+ (0xF901, 'M', u'更'),
+ (0xF902, 'M', u'車'),
+ (0xF903, 'M', u'賈'),
+ (0xF904, 'M', u'滑'),
+ (0xF905, 'M', u'串'),
+ (0xF906, 'M', u'句'),
+ (0xF907, 'M', u'龜'),
+ (0xF909, 'M', u'契'),
+ (0xF90A, 'M', u'金'),
+ (0xF90B, 'M', u'喇'),
+ (0xF90C, 'M', u'奈'),
+ (0xF90D, 'M', u'懶'),
+ (0xF90E, 'M', u'癩'),
+ (0xF90F, 'M', u'羅'),
+ (0xF910, 'M', u'蘿'),
+ (0xF911, 'M', u'螺'),
+ (0xF912, 'M', u'裸'),
+ (0xF913, 'M', u'邏'),
+ (0xF914, 'M', u'樂'),
+ (0xF915, 'M', u'洛'),
+ (0xF916, 'M', u'烙'),
+ (0xF917, 'M', u'珞'),
+ (0xF918, 'M', u'落'),
+ (0xF919, 'M', u'酪'),
+ (0xF91A, 'M', u'駱'),
+ (0xF91B, 'M', u'亂'),
+ (0xF91C, 'M', u'卵'),
+ (0xF91D, 'M', u'欄'),
+ (0xF91E, 'M', u'爛'),
+ (0xF91F, 'M', u'蘭'),
+ (0xF920, 'M', u'鸞'),
+ (0xF921, 'M', u'嵐'),
+ (0xF922, 'M', u'濫'),
+ (0xF923, 'M', u'藍'),
+ (0xF924, 'M', u'襤'),
+ (0xF925, 'M', u'拉'),
+ (0xF926, 'M', u'臘'),
+ (0xF927, 'M', u'蠟'),
+ (0xF928, 'M', u'廊'),
+ (0xF929, 'M', u'朗'),
+ (0xF92A, 'M', u'浪'),
+ (0xF92B, 'M', u'狼'),
+ (0xF92C, 'M', u'郎'),
+ (0xF92D, 'M', u'來'),
+ (0xF92E, 'M', u'冷'),
+ (0xF92F, 'M', u'勞'),
+ (0xF930, 'M', u'擄'),
+ (0xF931, 'M', u'櫓'),
+ (0xF932, 'M', u'爐'),
+ (0xF933, 'M', u'盧'),
+ (0xF934, 'M', u'老'),
+ (0xF935, 'M', u'蘆'),
+ (0xF936, 'M', u'虜'),
+ (0xF937, 'M', u'路'),
+ (0xF938, 'M', u'露'),
+ (0xF939, 'M', u'魯'),
+ (0xF93A, 'M', u'鷺'),
+ (0xF93B, 'M', u'碌'),
+ (0xF93C, 'M', u'祿'),
+ (0xF93D, 'M', u'綠'),
+ (0xF93E, 'M', u'菉'),
+ (0xF93F, 'M', u'錄'),
+ (0xF940, 'M', u'鹿'),
+ (0xF941, 'M', u'論'),
+ (0xF942, 'M', u'壟'),
+ (0xF943, 'M', u'弄'),
+ (0xF944, 'M', u'籠'),
+ (0xF945, 'M', u'聾'),
+ (0xF946, 'M', u'牢'),
+ (0xF947, 'M', u'磊'),
+ (0xF948, 'M', u'賂'),
+ (0xF949, 'M', u'雷'),
+ (0xF94A, 'M', u'壘'),
+ (0xF94B, 'M', u'屢'),
+ (0xF94C, 'M', u'樓'),
+ (0xF94D, 'M', u'淚'),
+ (0xF94E, 'M', u'漏'),
+ ]
+
+def _seg_40():
+ return [
+ (0xF94F, 'M', u'累'),
+ (0xF950, 'M', u'縷'),
+ (0xF951, 'M', u'陋'),
+ (0xF952, 'M', u'勒'),
+ (0xF953, 'M', u'肋'),
+ (0xF954, 'M', u'凜'),
+ (0xF955, 'M', u'凌'),
+ (0xF956, 'M', u'稜'),
+ (0xF957, 'M', u'綾'),
+ (0xF958, 'M', u'菱'),
+ (0xF959, 'M', u'陵'),
+ (0xF95A, 'M', u'讀'),
+ (0xF95B, 'M', u'拏'),
+ (0xF95C, 'M', u'樂'),
+ (0xF95D, 'M', u'諾'),
+ (0xF95E, 'M', u'丹'),
+ (0xF95F, 'M', u'寧'),
+ (0xF960, 'M', u'怒'),
+ (0xF961, 'M', u'率'),
+ (0xF962, 'M', u'異'),
+ (0xF963, 'M', u'北'),
+ (0xF964, 'M', u'磻'),
+ (0xF965, 'M', u'便'),
+ (0xF966, 'M', u'復'),
+ (0xF967, 'M', u'不'),
+ (0xF968, 'M', u'泌'),
+ (0xF969, 'M', u'數'),
+ (0xF96A, 'M', u'索'),
+ (0xF96B, 'M', u'參'),
+ (0xF96C, 'M', u'塞'),
+ (0xF96D, 'M', u'省'),
+ (0xF96E, 'M', u'葉'),
+ (0xF96F, 'M', u'說'),
+ (0xF970, 'M', u'殺'),
+ (0xF971, 'M', u'辰'),
+ (0xF972, 'M', u'沈'),
+ (0xF973, 'M', u'拾'),
+ (0xF974, 'M', u'若'),
+ (0xF975, 'M', u'掠'),
+ (0xF976, 'M', u'略'),
+ (0xF977, 'M', u'亮'),
+ (0xF978, 'M', u'兩'),
+ (0xF979, 'M', u'凉'),
+ (0xF97A, 'M', u'梁'),
+ (0xF97B, 'M', u'糧'),
+ (0xF97C, 'M', u'良'),
+ (0xF97D, 'M', u'諒'),
+ (0xF97E, 'M', u'量'),
+ (0xF97F, 'M', u'勵'),
+ (0xF980, 'M', u'呂'),
+ (0xF981, 'M', u'女'),
+ (0xF982, 'M', u'廬'),
+ (0xF983, 'M', u'旅'),
+ (0xF984, 'M', u'濾'),
+ (0xF985, 'M', u'礪'),
+ (0xF986, 'M', u'閭'),
+ (0xF987, 'M', u'驪'),
+ (0xF988, 'M', u'麗'),
+ (0xF989, 'M', u'黎'),
+ (0xF98A, 'M', u'力'),
+ (0xF98B, 'M', u'曆'),
+ (0xF98C, 'M', u'歷'),
+ (0xF98D, 'M', u'轢'),
+ (0xF98E, 'M', u'年'),
+ (0xF98F, 'M', u'憐'),
+ (0xF990, 'M', u'戀'),
+ (0xF991, 'M', u'撚'),
+ (0xF992, 'M', u'漣'),
+ (0xF993, 'M', u'煉'),
+ (0xF994, 'M', u'璉'),
+ (0xF995, 'M', u'秊'),
+ (0xF996, 'M', u'練'),
+ (0xF997, 'M', u'聯'),
+ (0xF998, 'M', u'輦'),
+ (0xF999, 'M', u'蓮'),
+ (0xF99A, 'M', u'連'),
+ (0xF99B, 'M', u'鍊'),
+ (0xF99C, 'M', u'列'),
+ (0xF99D, 'M', u'劣'),
+ (0xF99E, 'M', u'咽'),
+ (0xF99F, 'M', u'烈'),
+ (0xF9A0, 'M', u'裂'),
+ (0xF9A1, 'M', u'說'),
+ (0xF9A2, 'M', u'廉'),
+ (0xF9A3, 'M', u'念'),
+ (0xF9A4, 'M', u'捻'),
+ (0xF9A5, 'M', u'殮'),
+ (0xF9A6, 'M', u'簾'),
+ (0xF9A7, 'M', u'獵'),
+ (0xF9A8, 'M', u'令'),
+ (0xF9A9, 'M', u'囹'),
+ (0xF9AA, 'M', u'寧'),
+ (0xF9AB, 'M', u'嶺'),
+ (0xF9AC, 'M', u'怜'),
+ (0xF9AD, 'M', u'玲'),
+ (0xF9AE, 'M', u'瑩'),
+ (0xF9AF, 'M', u'羚'),
+ (0xF9B0, 'M', u'聆'),
+ (0xF9B1, 'M', u'鈴'),
+ (0xF9B2, 'M', u'零'),
+ ]
+
+def _seg_41():
+ return [
+ (0xF9B3, 'M', u'靈'),
+ (0xF9B4, 'M', u'領'),
+ (0xF9B5, 'M', u'例'),
+ (0xF9B6, 'M', u'禮'),
+ (0xF9B7, 'M', u'醴'),
+ (0xF9B8, 'M', u'隸'),
+ (0xF9B9, 'M', u'惡'),
+ (0xF9BA, 'M', u'了'),
+ (0xF9BB, 'M', u'僚'),
+ (0xF9BC, 'M', u'寮'),
+ (0xF9BD, 'M', u'尿'),
+ (0xF9BE, 'M', u'料'),
+ (0xF9BF, 'M', u'樂'),
+ (0xF9C0, 'M', u'燎'),
+ (0xF9C1, 'M', u'療'),
+ (0xF9C2, 'M', u'蓼'),
+ (0xF9C3, 'M', u'遼'),
+ (0xF9C4, 'M', u'龍'),
+ (0xF9C5, 'M', u'暈'),
+ (0xF9C6, 'M', u'阮'),
+ (0xF9C7, 'M', u'劉'),
+ (0xF9C8, 'M', u'杻'),
+ (0xF9C9, 'M', u'柳'),
+ (0xF9CA, 'M', u'流'),
+ (0xF9CB, 'M', u'溜'),
+ (0xF9CC, 'M', u'琉'),
+ (0xF9CD, 'M', u'留'),
+ (0xF9CE, 'M', u'硫'),
+ (0xF9CF, 'M', u'紐'),
+ (0xF9D0, 'M', u'類'),
+ (0xF9D1, 'M', u'六'),
+ (0xF9D2, 'M', u'戮'),
+ (0xF9D3, 'M', u'陸'),
+ (0xF9D4, 'M', u'倫'),
+ (0xF9D5, 'M', u'崙'),
+ (0xF9D6, 'M', u'淪'),
+ (0xF9D7, 'M', u'輪'),
+ (0xF9D8, 'M', u'律'),
+ (0xF9D9, 'M', u'慄'),
+ (0xF9DA, 'M', u'栗'),
+ (0xF9DB, 'M', u'率'),
+ (0xF9DC, 'M', u'隆'),
+ (0xF9DD, 'M', u'利'),
+ (0xF9DE, 'M', u'吏'),
+ (0xF9DF, 'M', u'履'),
+ (0xF9E0, 'M', u'易'),
+ (0xF9E1, 'M', u'李'),
+ (0xF9E2, 'M', u'梨'),
+ (0xF9E3, 'M', u'泥'),
+ (0xF9E4, 'M', u'理'),
+ (0xF9E5, 'M', u'痢'),
+ (0xF9E6, 'M', u'罹'),
+ (0xF9E7, 'M', u'裏'),
+ (0xF9E8, 'M', u'裡'),
+ (0xF9E9, 'M', u'里'),
+ (0xF9EA, 'M', u'離'),
+ (0xF9EB, 'M', u'匿'),
+ (0xF9EC, 'M', u'溺'),
+ (0xF9ED, 'M', u'吝'),
+ (0xF9EE, 'M', u'燐'),
+ (0xF9EF, 'M', u'璘'),
+ (0xF9F0, 'M', u'藺'),
+ (0xF9F1, 'M', u'隣'),
+ (0xF9F2, 'M', u'鱗'),
+ (0xF9F3, 'M', u'麟'),
+ (0xF9F4, 'M', u'林'),
+ (0xF9F5, 'M', u'淋'),
+ (0xF9F6, 'M', u'臨'),
+ (0xF9F7, 'M', u'立'),
+ (0xF9F8, 'M', u'笠'),
+ (0xF9F9, 'M', u'粒'),
+ (0xF9FA, 'M', u'狀'),
+ (0xF9FB, 'M', u'炙'),
+ (0xF9FC, 'M', u'識'),
+ (0xF9FD, 'M', u'什'),
+ (0xF9FE, 'M', u'茶'),
+ (0xF9FF, 'M', u'刺'),
+ (0xFA00, 'M', u'切'),
+ (0xFA01, 'M', u'度'),
+ (0xFA02, 'M', u'拓'),
+ (0xFA03, 'M', u'糖'),
+ (0xFA04, 'M', u'宅'),
+ (0xFA05, 'M', u'洞'),
+ (0xFA06, 'M', u'暴'),
+ (0xFA07, 'M', u'輻'),
+ (0xFA08, 'M', u'行'),
+ (0xFA09, 'M', u'降'),
+ (0xFA0A, 'M', u'見'),
+ (0xFA0B, 'M', u'廓'),
+ (0xFA0C, 'M', u'兀'),
+ (0xFA0D, 'M', u'嗀'),
+ (0xFA0E, 'V'),
+ (0xFA10, 'M', u'塚'),
+ (0xFA11, 'V'),
+ (0xFA12, 'M', u'晴'),
+ (0xFA13, 'V'),
+ (0xFA15, 'M', u'凞'),
+ (0xFA16, 'M', u'猪'),
+ (0xFA17, 'M', u'益'),
+ (0xFA18, 'M', u'礼'),
+ ]
+
+def _seg_42():
+ return [
+ (0xFA19, 'M', u'神'),
+ (0xFA1A, 'M', u'祥'),
+ (0xFA1B, 'M', u'福'),
+ (0xFA1C, 'M', u'靖'),
+ (0xFA1D, 'M', u'精'),
+ (0xFA1E, 'M', u'羽'),
+ (0xFA1F, 'V'),
+ (0xFA20, 'M', u'蘒'),
+ (0xFA21, 'V'),
+ (0xFA22, 'M', u'諸'),
+ (0xFA23, 'V'),
+ (0xFA25, 'M', u'逸'),
+ (0xFA26, 'M', u'都'),
+ (0xFA27, 'V'),
+ (0xFA2A, 'M', u'飯'),
+ (0xFA2B, 'M', u'飼'),
+ (0xFA2C, 'M', u'館'),
+ (0xFA2D, 'M', u'鶴'),
+ (0xFA2E, 'M', u'郞'),
+ (0xFA2F, 'M', u'隷'),
+ (0xFA30, 'M', u'侮'),
+ (0xFA31, 'M', u'僧'),
+ (0xFA32, 'M', u'免'),
+ (0xFA33, 'M', u'勉'),
+ (0xFA34, 'M', u'勤'),
+ (0xFA35, 'M', u'卑'),
+ (0xFA36, 'M', u'喝'),
+ (0xFA37, 'M', u'嘆'),
+ (0xFA38, 'M', u'器'),
+ (0xFA39, 'M', u'塀'),
+ (0xFA3A, 'M', u'墨'),
+ (0xFA3B, 'M', u'層'),
+ (0xFA3C, 'M', u'屮'),
+ (0xFA3D, 'M', u'悔'),
+ (0xFA3E, 'M', u'慨'),
+ (0xFA3F, 'M', u'憎'),
+ (0xFA40, 'M', u'懲'),
+ (0xFA41, 'M', u'敏'),
+ (0xFA42, 'M', u'既'),
+ (0xFA43, 'M', u'暑'),
+ (0xFA44, 'M', u'梅'),
+ (0xFA45, 'M', u'海'),
+ (0xFA46, 'M', u'渚'),
+ (0xFA47, 'M', u'漢'),
+ (0xFA48, 'M', u'煮'),
+ (0xFA49, 'M', u'爫'),
+ (0xFA4A, 'M', u'琢'),
+ (0xFA4B, 'M', u'碑'),
+ (0xFA4C, 'M', u'社'),
+ (0xFA4D, 'M', u'祉'),
+ (0xFA4E, 'M', u'祈'),
+ (0xFA4F, 'M', u'祐'),
+ (0xFA50, 'M', u'祖'),
+ (0xFA51, 'M', u'祝'),
+ (0xFA52, 'M', u'禍'),
+ (0xFA53, 'M', u'禎'),
+ (0xFA54, 'M', u'穀'),
+ (0xFA55, 'M', u'突'),
+ (0xFA56, 'M', u'節'),
+ (0xFA57, 'M', u'練'),
+ (0xFA58, 'M', u'縉'),
+ (0xFA59, 'M', u'繁'),
+ (0xFA5A, 'M', u'署'),
+ (0xFA5B, 'M', u'者'),
+ (0xFA5C, 'M', u'臭'),
+ (0xFA5D, 'M', u'艹'),
+ (0xFA5F, 'M', u'著'),
+ (0xFA60, 'M', u'褐'),
+ (0xFA61, 'M', u'視'),
+ (0xFA62, 'M', u'謁'),
+ (0xFA63, 'M', u'謹'),
+ (0xFA64, 'M', u'賓'),
+ (0xFA65, 'M', u'贈'),
+ (0xFA66, 'M', u'辶'),
+ (0xFA67, 'M', u'逸'),
+ (0xFA68, 'M', u'難'),
+ (0xFA69, 'M', u'響'),
+ (0xFA6A, 'M', u'頻'),
+ (0xFA6B, 'M', u'恵'),
+ (0xFA6C, 'M', u'𤋮'),
+ (0xFA6D, 'M', u'舘'),
+ (0xFA6E, 'X'),
+ (0xFA70, 'M', u'並'),
+ (0xFA71, 'M', u'况'),
+ (0xFA72, 'M', u'全'),
+ (0xFA73, 'M', u'侀'),
+ (0xFA74, 'M', u'充'),
+ (0xFA75, 'M', u'冀'),
+ (0xFA76, 'M', u'勇'),
+ (0xFA77, 'M', u'勺'),
+ (0xFA78, 'M', u'喝'),
+ (0xFA79, 'M', u'啕'),
+ (0xFA7A, 'M', u'喙'),
+ (0xFA7B, 'M', u'嗢'),
+ (0xFA7C, 'M', u'塚'),
+ (0xFA7D, 'M', u'墳'),
+ (0xFA7E, 'M', u'奄'),
+ (0xFA7F, 'M', u'奔'),
+ (0xFA80, 'M', u'婢'),
+ (0xFA81, 'M', u'嬨'),
+ ]
+
+def _seg_43():
+ return [
+ (0xFA82, 'M', u'廒'),
+ (0xFA83, 'M', u'廙'),
+ (0xFA84, 'M', u'彩'),
+ (0xFA85, 'M', u'徭'),
+ (0xFA86, 'M', u'惘'),
+ (0xFA87, 'M', u'慎'),
+ (0xFA88, 'M', u'愈'),
+ (0xFA89, 'M', u'憎'),
+ (0xFA8A, 'M', u'慠'),
+ (0xFA8B, 'M', u'懲'),
+ (0xFA8C, 'M', u'戴'),
+ (0xFA8D, 'M', u'揄'),
+ (0xFA8E, 'M', u'搜'),
+ (0xFA8F, 'M', u'摒'),
+ (0xFA90, 'M', u'敖'),
+ (0xFA91, 'M', u'晴'),
+ (0xFA92, 'M', u'朗'),
+ (0xFA93, 'M', u'望'),
+ (0xFA94, 'M', u'杖'),
+ (0xFA95, 'M', u'歹'),
+ (0xFA96, 'M', u'殺'),
+ (0xFA97, 'M', u'流'),
+ (0xFA98, 'M', u'滛'),
+ (0xFA99, 'M', u'滋'),
+ (0xFA9A, 'M', u'漢'),
+ (0xFA9B, 'M', u'瀞'),
+ (0xFA9C, 'M', u'煮'),
+ (0xFA9D, 'M', u'瞧'),
+ (0xFA9E, 'M', u'爵'),
+ (0xFA9F, 'M', u'犯'),
+ (0xFAA0, 'M', u'猪'),
+ (0xFAA1, 'M', u'瑱'),
+ (0xFAA2, 'M', u'甆'),
+ (0xFAA3, 'M', u'画'),
+ (0xFAA4, 'M', u'瘝'),
+ (0xFAA5, 'M', u'瘟'),
+ (0xFAA6, 'M', u'益'),
+ (0xFAA7, 'M', u'盛'),
+ (0xFAA8, 'M', u'直'),
+ (0xFAA9, 'M', u'睊'),
+ (0xFAAA, 'M', u'着'),
+ (0xFAAB, 'M', u'磌'),
+ (0xFAAC, 'M', u'窱'),
+ (0xFAAD, 'M', u'節'),
+ (0xFAAE, 'M', u'类'),
+ (0xFAAF, 'M', u'絛'),
+ (0xFAB0, 'M', u'練'),
+ (0xFAB1, 'M', u'缾'),
+ (0xFAB2, 'M', u'者'),
+ (0xFAB3, 'M', u'荒'),
+ (0xFAB4, 'M', u'華'),
+ (0xFAB5, 'M', u'蝹'),
+ (0xFAB6, 'M', u'襁'),
+ (0xFAB7, 'M', u'覆'),
+ (0xFAB8, 'M', u'視'),
+ (0xFAB9, 'M', u'調'),
+ (0xFABA, 'M', u'諸'),
+ (0xFABB, 'M', u'請'),
+ (0xFABC, 'M', u'謁'),
+ (0xFABD, 'M', u'諾'),
+ (0xFABE, 'M', u'諭'),
+ (0xFABF, 'M', u'謹'),
+ (0xFAC0, 'M', u'變'),
+ (0xFAC1, 'M', u'贈'),
+ (0xFAC2, 'M', u'輸'),
+ (0xFAC3, 'M', u'遲'),
+ (0xFAC4, 'M', u'醙'),
+ (0xFAC5, 'M', u'鉶'),
+ (0xFAC6, 'M', u'陼'),
+ (0xFAC7, 'M', u'難'),
+ (0xFAC8, 'M', u'靖'),
+ (0xFAC9, 'M', u'韛'),
+ (0xFACA, 'M', u'響'),
+ (0xFACB, 'M', u'頋'),
+ (0xFACC, 'M', u'頻'),
+ (0xFACD, 'M', u'鬒'),
+ (0xFACE, 'M', u'龜'),
+ (0xFACF, 'M', u'𢡊'),
+ (0xFAD0, 'M', u'𢡄'),
+ (0xFAD1, 'M', u'𣏕'),
+ (0xFAD2, 'M', u'㮝'),
+ (0xFAD3, 'M', u'䀘'),
+ (0xFAD4, 'M', u'䀹'),
+ (0xFAD5, 'M', u'𥉉'),
+ (0xFAD6, 'M', u'𥳐'),
+ (0xFAD7, 'M', u'𧻓'),
+ (0xFAD8, 'M', u'齃'),
+ (0xFAD9, 'M', u'龎'),
+ (0xFADA, 'X'),
+ (0xFB00, 'M', u'ff'),
+ (0xFB01, 'M', u'fi'),
+ (0xFB02, 'M', u'fl'),
+ (0xFB03, 'M', u'ffi'),
+ (0xFB04, 'M', u'ffl'),
+ (0xFB05, 'M', u'st'),
+ (0xFB07, 'X'),
+ (0xFB13, 'M', u'մն'),
+ (0xFB14, 'M', u'մե'),
+ (0xFB15, 'M', u'մի'),
+ (0xFB16, 'M', u'վն'),
+ ]
+
+def _seg_44():
+ return [
+ (0xFB17, 'M', u'մխ'),
+ (0xFB18, 'X'),
+ (0xFB1D, 'M', u'יִ'),
+ (0xFB1E, 'V'),
+ (0xFB1F, 'M', u'ײַ'),
+ (0xFB20, 'M', u'ע'),
+ (0xFB21, 'M', u'א'),
+ (0xFB22, 'M', u'ד'),
+ (0xFB23, 'M', u'ה'),
+ (0xFB24, 'M', u'כ'),
+ (0xFB25, 'M', u'ל'),
+ (0xFB26, 'M', u'ם'),
+ (0xFB27, 'M', u'ר'),
+ (0xFB28, 'M', u'ת'),
+ (0xFB29, '3', u'+'),
+ (0xFB2A, 'M', u'שׁ'),
+ (0xFB2B, 'M', u'שׂ'),
+ (0xFB2C, 'M', u'שּׁ'),
+ (0xFB2D, 'M', u'שּׂ'),
+ (0xFB2E, 'M', u'אַ'),
+ (0xFB2F, 'M', u'אָ'),
+ (0xFB30, 'M', u'אּ'),
+ (0xFB31, 'M', u'בּ'),
+ (0xFB32, 'M', u'גּ'),
+ (0xFB33, 'M', u'דּ'),
+ (0xFB34, 'M', u'הּ'),
+ (0xFB35, 'M', u'וּ'),
+ (0xFB36, 'M', u'זּ'),
+ (0xFB37, 'X'),
+ (0xFB38, 'M', u'טּ'),
+ (0xFB39, 'M', u'יּ'),
+ (0xFB3A, 'M', u'ךּ'),
+ (0xFB3B, 'M', u'כּ'),
+ (0xFB3C, 'M', u'לּ'),
+ (0xFB3D, 'X'),
+ (0xFB3E, 'M', u'מּ'),
+ (0xFB3F, 'X'),
+ (0xFB40, 'M', u'נּ'),
+ (0xFB41, 'M', u'סּ'),
+ (0xFB42, 'X'),
+ (0xFB43, 'M', u'ףּ'),
+ (0xFB44, 'M', u'פּ'),
+ (0xFB45, 'X'),
+ (0xFB46, 'M', u'צּ'),
+ (0xFB47, 'M', u'קּ'),
+ (0xFB48, 'M', u'רּ'),
+ (0xFB49, 'M', u'שּ'),
+ (0xFB4A, 'M', u'תּ'),
+ (0xFB4B, 'M', u'וֹ'),
+ (0xFB4C, 'M', u'בֿ'),
+ (0xFB4D, 'M', u'כֿ'),
+ (0xFB4E, 'M', u'פֿ'),
+ (0xFB4F, 'M', u'אל'),
+ (0xFB50, 'M', u'ٱ'),
+ (0xFB52, 'M', u'ٻ'),
+ (0xFB56, 'M', u'پ'),
+ (0xFB5A, 'M', u'ڀ'),
+ (0xFB5E, 'M', u'ٺ'),
+ (0xFB62, 'M', u'ٿ'),
+ (0xFB66, 'M', u'ٹ'),
+ (0xFB6A, 'M', u'ڤ'),
+ (0xFB6E, 'M', u'ڦ'),
+ (0xFB72, 'M', u'ڄ'),
+ (0xFB76, 'M', u'ڃ'),
+ (0xFB7A, 'M', u'چ'),
+ (0xFB7E, 'M', u'ڇ'),
+ (0xFB82, 'M', u'ڍ'),
+ (0xFB84, 'M', u'ڌ'),
+ (0xFB86, 'M', u'ڎ'),
+ (0xFB88, 'M', u'ڈ'),
+ (0xFB8A, 'M', u'ژ'),
+ (0xFB8C, 'M', u'ڑ'),
+ (0xFB8E, 'M', u'ک'),
+ (0xFB92, 'M', u'گ'),
+ (0xFB96, 'M', u'ڳ'),
+ (0xFB9A, 'M', u'ڱ'),
+ (0xFB9E, 'M', u'ں'),
+ (0xFBA0, 'M', u'ڻ'),
+ (0xFBA4, 'M', u'ۀ'),
+ (0xFBA6, 'M', u'ہ'),
+ (0xFBAA, 'M', u'ھ'),
+ (0xFBAE, 'M', u'ے'),
+ (0xFBB0, 'M', u'ۓ'),
+ (0xFBB2, 'V'),
+ (0xFBC2, 'X'),
+ (0xFBD3, 'M', u'ڭ'),
+ (0xFBD7, 'M', u'ۇ'),
+ (0xFBD9, 'M', u'ۆ'),
+ (0xFBDB, 'M', u'ۈ'),
+ (0xFBDD, 'M', u'ۇٴ'),
+ (0xFBDE, 'M', u'ۋ'),
+ (0xFBE0, 'M', u'ۅ'),
+ (0xFBE2, 'M', u'ۉ'),
+ (0xFBE4, 'M', u'ې'),
+ (0xFBE8, 'M', u'ى'),
+ (0xFBEA, 'M', u'ئا'),
+ (0xFBEC, 'M', u'ئە'),
+ (0xFBEE, 'M', u'ئو'),
+ (0xFBF0, 'M', u'ئۇ'),
+ (0xFBF2, 'M', u'ئۆ'),
+ ]
+
+def _seg_45():
+ return [
+ (0xFBF4, 'M', u'ئۈ'),
+ (0xFBF6, 'M', u'ئې'),
+ (0xFBF9, 'M', u'ئى'),
+ (0xFBFC, 'M', u'ی'),
+ (0xFC00, 'M', u'ئج'),
+ (0xFC01, 'M', u'ئح'),
+ (0xFC02, 'M', u'ئم'),
+ (0xFC03, 'M', u'ئى'),
+ (0xFC04, 'M', u'ئي'),
+ (0xFC05, 'M', u'بج'),
+ (0xFC06, 'M', u'بح'),
+ (0xFC07, 'M', u'بخ'),
+ (0xFC08, 'M', u'بم'),
+ (0xFC09, 'M', u'بى'),
+ (0xFC0A, 'M', u'بي'),
+ (0xFC0B, 'M', u'تج'),
+ (0xFC0C, 'M', u'تح'),
+ (0xFC0D, 'M', u'تخ'),
+ (0xFC0E, 'M', u'تم'),
+ (0xFC0F, 'M', u'تى'),
+ (0xFC10, 'M', u'تي'),
+ (0xFC11, 'M', u'ثج'),
+ (0xFC12, 'M', u'ثم'),
+ (0xFC13, 'M', u'ثى'),
+ (0xFC14, 'M', u'ثي'),
+ (0xFC15, 'M', u'جح'),
+ (0xFC16, 'M', u'جم'),
+ (0xFC17, 'M', u'حج'),
+ (0xFC18, 'M', u'حم'),
+ (0xFC19, 'M', u'خج'),
+ (0xFC1A, 'M', u'خح'),
+ (0xFC1B, 'M', u'خم'),
+ (0xFC1C, 'M', u'سج'),
+ (0xFC1D, 'M', u'سح'),
+ (0xFC1E, 'M', u'سخ'),
+ (0xFC1F, 'M', u'سم'),
+ (0xFC20, 'M', u'صح'),
+ (0xFC21, 'M', u'صم'),
+ (0xFC22, 'M', u'ضج'),
+ (0xFC23, 'M', u'ضح'),
+ (0xFC24, 'M', u'ضخ'),
+ (0xFC25, 'M', u'ضم'),
+ (0xFC26, 'M', u'طح'),
+ (0xFC27, 'M', u'طم'),
+ (0xFC28, 'M', u'ظم'),
+ (0xFC29, 'M', u'عج'),
+ (0xFC2A, 'M', u'عم'),
+ (0xFC2B, 'M', u'غج'),
+ (0xFC2C, 'M', u'غم'),
+ (0xFC2D, 'M', u'فج'),
+ (0xFC2E, 'M', u'فح'),
+ (0xFC2F, 'M', u'فخ'),
+ (0xFC30, 'M', u'فم'),
+ (0xFC31, 'M', u'فى'),
+ (0xFC32, 'M', u'في'),
+ (0xFC33, 'M', u'قح'),
+ (0xFC34, 'M', u'قم'),
+ (0xFC35, 'M', u'قى'),
+ (0xFC36, 'M', u'قي'),
+ (0xFC37, 'M', u'كا'),
+ (0xFC38, 'M', u'كج'),
+ (0xFC39, 'M', u'كح'),
+ (0xFC3A, 'M', u'كخ'),
+ (0xFC3B, 'M', u'كل'),
+ (0xFC3C, 'M', u'كم'),
+ (0xFC3D, 'M', u'كى'),
+ (0xFC3E, 'M', u'كي'),
+ (0xFC3F, 'M', u'لج'),
+ (0xFC40, 'M', u'لح'),
+ (0xFC41, 'M', u'لخ'),
+ (0xFC42, 'M', u'لم'),
+ (0xFC43, 'M', u'لى'),
+ (0xFC44, 'M', u'لي'),
+ (0xFC45, 'M', u'مج'),
+ (0xFC46, 'M', u'مح'),
+ (0xFC47, 'M', u'مخ'),
+ (0xFC48, 'M', u'مم'),
+ (0xFC49, 'M', u'مى'),
+ (0xFC4A, 'M', u'مي'),
+ (0xFC4B, 'M', u'نج'),
+ (0xFC4C, 'M', u'نح'),
+ (0xFC4D, 'M', u'نخ'),
+ (0xFC4E, 'M', u'نم'),
+ (0xFC4F, 'M', u'نى'),
+ (0xFC50, 'M', u'ني'),
+ (0xFC51, 'M', u'هج'),
+ (0xFC52, 'M', u'هم'),
+ (0xFC53, 'M', u'هى'),
+ (0xFC54, 'M', u'هي'),
+ (0xFC55, 'M', u'يج'),
+ (0xFC56, 'M', u'يح'),
+ (0xFC57, 'M', u'يخ'),
+ (0xFC58, 'M', u'يم'),
+ (0xFC59, 'M', u'يى'),
+ (0xFC5A, 'M', u'يي'),
+ (0xFC5B, 'M', u'ذٰ'),
+ (0xFC5C, 'M', u'رٰ'),
+ (0xFC5D, 'M', u'ىٰ'),
+ (0xFC5E, '3', u' ٌّ'),
+ (0xFC5F, '3', u' ٍّ'),
+ ]
+
+def _seg_46():
+ return [
+ (0xFC60, '3', u' َّ'),
+ (0xFC61, '3', u' ُّ'),
+ (0xFC62, '3', u' ِّ'),
+ (0xFC63, '3', u' ّٰ'),
+ (0xFC64, 'M', u'ئر'),
+ (0xFC65, 'M', u'ئز'),
+ (0xFC66, 'M', u'ئم'),
+ (0xFC67, 'M', u'ئن'),
+ (0xFC68, 'M', u'ئى'),
+ (0xFC69, 'M', u'ئي'),
+ (0xFC6A, 'M', u'بر'),
+ (0xFC6B, 'M', u'بز'),
+ (0xFC6C, 'M', u'بم'),
+ (0xFC6D, 'M', u'بن'),
+ (0xFC6E, 'M', u'بى'),
+ (0xFC6F, 'M', u'بي'),
+ (0xFC70, 'M', u'تر'),
+ (0xFC71, 'M', u'تز'),
+ (0xFC72, 'M', u'تم'),
+ (0xFC73, 'M', u'تن'),
+ (0xFC74, 'M', u'تى'),
+ (0xFC75, 'M', u'تي'),
+ (0xFC76, 'M', u'ثر'),
+ (0xFC77, 'M', u'ثز'),
+ (0xFC78, 'M', u'ثم'),
+ (0xFC79, 'M', u'ثن'),
+ (0xFC7A, 'M', u'ثى'),
+ (0xFC7B, 'M', u'ثي'),
+ (0xFC7C, 'M', u'فى'),
+ (0xFC7D, 'M', u'في'),
+ (0xFC7E, 'M', u'قى'),
+ (0xFC7F, 'M', u'قي'),
+ (0xFC80, 'M', u'كا'),
+ (0xFC81, 'M', u'كل'),
+ (0xFC82, 'M', u'كم'),
+ (0xFC83, 'M', u'كى'),
+ (0xFC84, 'M', u'كي'),
+ (0xFC85, 'M', u'لم'),
+ (0xFC86, 'M', u'لى'),
+ (0xFC87, 'M', u'لي'),
+ (0xFC88, 'M', u'ما'),
+ (0xFC89, 'M', u'مم'),
+ (0xFC8A, 'M', u'نر'),
+ (0xFC8B, 'M', u'نز'),
+ (0xFC8C, 'M', u'نم'),
+ (0xFC8D, 'M', u'نن'),
+ (0xFC8E, 'M', u'نى'),
+ (0xFC8F, 'M', u'ني'),
+ (0xFC90, 'M', u'ىٰ'),
+ (0xFC91, 'M', u'ير'),
+ (0xFC92, 'M', u'يز'),
+ (0xFC93, 'M', u'يم'),
+ (0xFC94, 'M', u'ين'),
+ (0xFC95, 'M', u'يى'),
+ (0xFC96, 'M', u'يي'),
+ (0xFC97, 'M', u'ئج'),
+ (0xFC98, 'M', u'ئح'),
+ (0xFC99, 'M', u'ئخ'),
+ (0xFC9A, 'M', u'ئم'),
+ (0xFC9B, 'M', u'ئه'),
+ (0xFC9C, 'M', u'بج'),
+ (0xFC9D, 'M', u'بح'),
+ (0xFC9E, 'M', u'بخ'),
+ (0xFC9F, 'M', u'بم'),
+ (0xFCA0, 'M', u'به'),
+ (0xFCA1, 'M', u'تج'),
+ (0xFCA2, 'M', u'تح'),
+ (0xFCA3, 'M', u'تخ'),
+ (0xFCA4, 'M', u'تم'),
+ (0xFCA5, 'M', u'ته'),
+ (0xFCA6, 'M', u'ثم'),
+ (0xFCA7, 'M', u'جح'),
+ (0xFCA8, 'M', u'جم'),
+ (0xFCA9, 'M', u'حج'),
+ (0xFCAA, 'M', u'حم'),
+ (0xFCAB, 'M', u'خج'),
+ (0xFCAC, 'M', u'خم'),
+ (0xFCAD, 'M', u'سج'),
+ (0xFCAE, 'M', u'سح'),
+ (0xFCAF, 'M', u'سخ'),
+ (0xFCB0, 'M', u'سم'),
+ (0xFCB1, 'M', u'صح'),
+ (0xFCB2, 'M', u'صخ'),
+ (0xFCB3, 'M', u'صم'),
+ (0xFCB4, 'M', u'ضج'),
+ (0xFCB5, 'M', u'ضح'),
+ (0xFCB6, 'M', u'ضخ'),
+ (0xFCB7, 'M', u'ضم'),
+ (0xFCB8, 'M', u'طح'),
+ (0xFCB9, 'M', u'ظم'),
+ (0xFCBA, 'M', u'عج'),
+ (0xFCBB, 'M', u'عم'),
+ (0xFCBC, 'M', u'غج'),
+ (0xFCBD, 'M', u'غم'),
+ (0xFCBE, 'M', u'فج'),
+ (0xFCBF, 'M', u'فح'),
+ (0xFCC0, 'M', u'فخ'),
+ (0xFCC1, 'M', u'فم'),
+ (0xFCC2, 'M', u'قح'),
+ (0xFCC3, 'M', u'قم'),
+ ]
+
+def _seg_47():
+ return [
+ (0xFCC4, 'M', u'كج'),
+ (0xFCC5, 'M', u'كح'),
+ (0xFCC6, 'M', u'كخ'),
+ (0xFCC7, 'M', u'كل'),
+ (0xFCC8, 'M', u'كم'),
+ (0xFCC9, 'M', u'لج'),
+ (0xFCCA, 'M', u'لح'),
+ (0xFCCB, 'M', u'لخ'),
+ (0xFCCC, 'M', u'لم'),
+ (0xFCCD, 'M', u'له'),
+ (0xFCCE, 'M', u'مج'),
+ (0xFCCF, 'M', u'مح'),
+ (0xFCD0, 'M', u'مخ'),
+ (0xFCD1, 'M', u'مم'),
+ (0xFCD2, 'M', u'نج'),
+ (0xFCD3, 'M', u'نح'),
+ (0xFCD4, 'M', u'نخ'),
+ (0xFCD5, 'M', u'نم'),
+ (0xFCD6, 'M', u'نه'),
+ (0xFCD7, 'M', u'هج'),
+ (0xFCD8, 'M', u'هم'),
+ (0xFCD9, 'M', u'هٰ'),
+ (0xFCDA, 'M', u'يج'),
+ (0xFCDB, 'M', u'يح'),
+ (0xFCDC, 'M', u'يخ'),
+ (0xFCDD, 'M', u'يم'),
+ (0xFCDE, 'M', u'يه'),
+ (0xFCDF, 'M', u'ئم'),
+ (0xFCE0, 'M', u'ئه'),
+ (0xFCE1, 'M', u'بم'),
+ (0xFCE2, 'M', u'به'),
+ (0xFCE3, 'M', u'تم'),
+ (0xFCE4, 'M', u'ته'),
+ (0xFCE5, 'M', u'ثم'),
+ (0xFCE6, 'M', u'ثه'),
+ (0xFCE7, 'M', u'سم'),
+ (0xFCE8, 'M', u'سه'),
+ (0xFCE9, 'M', u'شم'),
+ (0xFCEA, 'M', u'شه'),
+ (0xFCEB, 'M', u'كل'),
+ (0xFCEC, 'M', u'كم'),
+ (0xFCED, 'M', u'لم'),
+ (0xFCEE, 'M', u'نم'),
+ (0xFCEF, 'M', u'نه'),
+ (0xFCF0, 'M', u'يم'),
+ (0xFCF1, 'M', u'يه'),
+ (0xFCF2, 'M', u'ـَّ'),
+ (0xFCF3, 'M', u'ـُّ'),
+ (0xFCF4, 'M', u'ـِّ'),
+ (0xFCF5, 'M', u'طى'),
+ (0xFCF6, 'M', u'طي'),
+ (0xFCF7, 'M', u'عى'),
+ (0xFCF8, 'M', u'عي'),
+ (0xFCF9, 'M', u'غى'),
+ (0xFCFA, 'M', u'غي'),
+ (0xFCFB, 'M', u'سى'),
+ (0xFCFC, 'M', u'سي'),
+ (0xFCFD, 'M', u'شى'),
+ (0xFCFE, 'M', u'شي'),
+ (0xFCFF, 'M', u'حى'),
+ (0xFD00, 'M', u'حي'),
+ (0xFD01, 'M', u'جى'),
+ (0xFD02, 'M', u'جي'),
+ (0xFD03, 'M', u'خى'),
+ (0xFD04, 'M', u'خي'),
+ (0xFD05, 'M', u'صى'),
+ (0xFD06, 'M', u'صي'),
+ (0xFD07, 'M', u'ضى'),
+ (0xFD08, 'M', u'ضي'),
+ (0xFD09, 'M', u'شج'),
+ (0xFD0A, 'M', u'شح'),
+ (0xFD0B, 'M', u'شخ'),
+ (0xFD0C, 'M', u'شم'),
+ (0xFD0D, 'M', u'شر'),
+ (0xFD0E, 'M', u'سر'),
+ (0xFD0F, 'M', u'صر'),
+ (0xFD10, 'M', u'ضر'),
+ (0xFD11, 'M', u'طى'),
+ (0xFD12, 'M', u'طي'),
+ (0xFD13, 'M', u'عى'),
+ (0xFD14, 'M', u'عي'),
+ (0xFD15, 'M', u'غى'),
+ (0xFD16, 'M', u'غي'),
+ (0xFD17, 'M', u'سى'),
+ (0xFD18, 'M', u'سي'),
+ (0xFD19, 'M', u'شى'),
+ (0xFD1A, 'M', u'شي'),
+ (0xFD1B, 'M', u'حى'),
+ (0xFD1C, 'M', u'حي'),
+ (0xFD1D, 'M', u'جى'),
+ (0xFD1E, 'M', u'جي'),
+ (0xFD1F, 'M', u'خى'),
+ (0xFD20, 'M', u'خي'),
+ (0xFD21, 'M', u'صى'),
+ (0xFD22, 'M', u'صي'),
+ (0xFD23, 'M', u'ضى'),
+ (0xFD24, 'M', u'ضي'),
+ (0xFD25, 'M', u'شج'),
+ (0xFD26, 'M', u'شح'),
+ (0xFD27, 'M', u'شخ'),
+ ]
+
+def _seg_48():
+ return [
+ (0xFD28, 'M', u'شم'),
+ (0xFD29, 'M', u'شر'),
+ (0xFD2A, 'M', u'سر'),
+ (0xFD2B, 'M', u'صر'),
+ (0xFD2C, 'M', u'ضر'),
+ (0xFD2D, 'M', u'شج'),
+ (0xFD2E, 'M', u'شح'),
+ (0xFD2F, 'M', u'شخ'),
+ (0xFD30, 'M', u'شم'),
+ (0xFD31, 'M', u'سه'),
+ (0xFD32, 'M', u'شه'),
+ (0xFD33, 'M', u'طم'),
+ (0xFD34, 'M', u'سج'),
+ (0xFD35, 'M', u'سح'),
+ (0xFD36, 'M', u'سخ'),
+ (0xFD37, 'M', u'شج'),
+ (0xFD38, 'M', u'شح'),
+ (0xFD39, 'M', u'شخ'),
+ (0xFD3A, 'M', u'طم'),
+ (0xFD3B, 'M', u'ظم'),
+ (0xFD3C, 'M', u'اً'),
+ (0xFD3E, 'V'),
+ (0xFD40, 'X'),
+ (0xFD50, 'M', u'تجم'),
+ (0xFD51, 'M', u'تحج'),
+ (0xFD53, 'M', u'تحم'),
+ (0xFD54, 'M', u'تخم'),
+ (0xFD55, 'M', u'تمج'),
+ (0xFD56, 'M', u'تمح'),
+ (0xFD57, 'M', u'تمخ'),
+ (0xFD58, 'M', u'جمح'),
+ (0xFD5A, 'M', u'حمي'),
+ (0xFD5B, 'M', u'حمى'),
+ (0xFD5C, 'M', u'سحج'),
+ (0xFD5D, 'M', u'سجح'),
+ (0xFD5E, 'M', u'سجى'),
+ (0xFD5F, 'M', u'سمح'),
+ (0xFD61, 'M', u'سمج'),
+ (0xFD62, 'M', u'سمم'),
+ (0xFD64, 'M', u'صحح'),
+ (0xFD66, 'M', u'صمم'),
+ (0xFD67, 'M', u'شحم'),
+ (0xFD69, 'M', u'شجي'),
+ (0xFD6A, 'M', u'شمخ'),
+ (0xFD6C, 'M', u'شمم'),
+ (0xFD6E, 'M', u'ضحى'),
+ (0xFD6F, 'M', u'ضخم'),
+ (0xFD71, 'M', u'طمح'),
+ (0xFD73, 'M', u'طمم'),
+ (0xFD74, 'M', u'طمي'),
+ (0xFD75, 'M', u'عجم'),
+ (0xFD76, 'M', u'عمم'),
+ (0xFD78, 'M', u'عمى'),
+ (0xFD79, 'M', u'غمم'),
+ (0xFD7A, 'M', u'غمي'),
+ (0xFD7B, 'M', u'غمى'),
+ (0xFD7C, 'M', u'فخم'),
+ (0xFD7E, 'M', u'قمح'),
+ (0xFD7F, 'M', u'قمم'),
+ (0xFD80, 'M', u'لحم'),
+ (0xFD81, 'M', u'لحي'),
+ (0xFD82, 'M', u'لحى'),
+ (0xFD83, 'M', u'لجج'),
+ (0xFD85, 'M', u'لخم'),
+ (0xFD87, 'M', u'لمح'),
+ (0xFD89, 'M', u'محج'),
+ (0xFD8A, 'M', u'محم'),
+ (0xFD8B, 'M', u'محي'),
+ (0xFD8C, 'M', u'مجح'),
+ (0xFD8D, 'M', u'مجم'),
+ (0xFD8E, 'M', u'مخج'),
+ (0xFD8F, 'M', u'مخم'),
+ (0xFD90, 'X'),
+ (0xFD92, 'M', u'مجخ'),
+ (0xFD93, 'M', u'همج'),
+ (0xFD94, 'M', u'همم'),
+ (0xFD95, 'M', u'نحم'),
+ (0xFD96, 'M', u'نحى'),
+ (0xFD97, 'M', u'نجم'),
+ (0xFD99, 'M', u'نجى'),
+ (0xFD9A, 'M', u'نمي'),
+ (0xFD9B, 'M', u'نمى'),
+ (0xFD9C, 'M', u'يمم'),
+ (0xFD9E, 'M', u'بخي'),
+ (0xFD9F, 'M', u'تجي'),
+ (0xFDA0, 'M', u'تجى'),
+ (0xFDA1, 'M', u'تخي'),
+ (0xFDA2, 'M', u'تخى'),
+ (0xFDA3, 'M', u'تمي'),
+ (0xFDA4, 'M', u'تمى'),
+ (0xFDA5, 'M', u'جمي'),
+ (0xFDA6, 'M', u'جحى'),
+ (0xFDA7, 'M', u'جمى'),
+ (0xFDA8, 'M', u'سخى'),
+ (0xFDA9, 'M', u'صحي'),
+ (0xFDAA, 'M', u'شحي'),
+ (0xFDAB, 'M', u'ضحي'),
+ (0xFDAC, 'M', u'لجي'),
+ (0xFDAD, 'M', u'لمي'),
+ (0xFDAE, 'M', u'يحي'),
+ ]
+
+def _seg_49():
+ return [
+ (0xFDAF, 'M', u'يجي'),
+ (0xFDB0, 'M', u'يمي'),
+ (0xFDB1, 'M', u'ممي'),
+ (0xFDB2, 'M', u'قمي'),
+ (0xFDB3, 'M', u'نحي'),
+ (0xFDB4, 'M', u'قمح'),
+ (0xFDB5, 'M', u'لحم'),
+ (0xFDB6, 'M', u'عمي'),
+ (0xFDB7, 'M', u'كمي'),
+ (0xFDB8, 'M', u'نجح'),
+ (0xFDB9, 'M', u'مخي'),
+ (0xFDBA, 'M', u'لجم'),
+ (0xFDBB, 'M', u'كمم'),
+ (0xFDBC, 'M', u'لجم'),
+ (0xFDBD, 'M', u'نجح'),
+ (0xFDBE, 'M', u'جحي'),
+ (0xFDBF, 'M', u'حجي'),
+ (0xFDC0, 'M', u'مجي'),
+ (0xFDC1, 'M', u'فمي'),
+ (0xFDC2, 'M', u'بحي'),
+ (0xFDC3, 'M', u'كمم'),
+ (0xFDC4, 'M', u'عجم'),
+ (0xFDC5, 'M', u'صمم'),
+ (0xFDC6, 'M', u'سخي'),
+ (0xFDC7, 'M', u'نجي'),
+ (0xFDC8, 'X'),
+ (0xFDF0, 'M', u'صلے'),
+ (0xFDF1, 'M', u'قلے'),
+ (0xFDF2, 'M', u'الله'),
+ (0xFDF3, 'M', u'اكبر'),
+ (0xFDF4, 'M', u'محمد'),
+ (0xFDF5, 'M', u'صلعم'),
+ (0xFDF6, 'M', u'رسول'),
+ (0xFDF7, 'M', u'عليه'),
+ (0xFDF8, 'M', u'وسلم'),
+ (0xFDF9, 'M', u'صلى'),
+ (0xFDFA, '3', u'صلى الله عليه وسلم'),
+ (0xFDFB, '3', u'جل جلاله'),
+ (0xFDFC, 'M', u'ریال'),
+ (0xFDFD, 'V'),
+ (0xFDFE, 'X'),
+ (0xFE00, 'I'),
+ (0xFE10, '3', u','),
+ (0xFE11, 'M', u'、'),
+ (0xFE12, 'X'),
+ (0xFE13, '3', u':'),
+ (0xFE14, '3', u';'),
+ (0xFE15, '3', u'!'),
+ (0xFE16, '3', u'?'),
+ (0xFE17, 'M', u'〖'),
+ (0xFE18, 'M', u'〗'),
+ (0xFE19, 'X'),
+ (0xFE20, 'V'),
+ (0xFE30, 'X'),
+ (0xFE31, 'M', u'—'),
+ (0xFE32, 'M', u'–'),
+ (0xFE33, '3', u'_'),
+ (0xFE35, '3', u'('),
+ (0xFE36, '3', u')'),
+ (0xFE37, '3', u'{'),
+ (0xFE38, '3', u'}'),
+ (0xFE39, 'M', u'〔'),
+ (0xFE3A, 'M', u'〕'),
+ (0xFE3B, 'M', u'【'),
+ (0xFE3C, 'M', u'】'),
+ (0xFE3D, 'M', u'《'),
+ (0xFE3E, 'M', u'》'),
+ (0xFE3F, 'M', u'〈'),
+ (0xFE40, 'M', u'〉'),
+ (0xFE41, 'M', u'「'),
+ (0xFE42, 'M', u'」'),
+ (0xFE43, 'M', u'『'),
+ (0xFE44, 'M', u'』'),
+ (0xFE45, 'V'),
+ (0xFE47, '3', u'['),
+ (0xFE48, '3', u']'),
+ (0xFE49, '3', u' ̅'),
+ (0xFE4D, '3', u'_'),
+ (0xFE50, '3', u','),
+ (0xFE51, 'M', u'、'),
+ (0xFE52, 'X'),
+ (0xFE54, '3', u';'),
+ (0xFE55, '3', u':'),
+ (0xFE56, '3', u'?'),
+ (0xFE57, '3', u'!'),
+ (0xFE58, 'M', u'—'),
+ (0xFE59, '3', u'('),
+ (0xFE5A, '3', u')'),
+ (0xFE5B, '3', u'{'),
+ (0xFE5C, '3', u'}'),
+ (0xFE5D, 'M', u'〔'),
+ (0xFE5E, 'M', u'〕'),
+ (0xFE5F, '3', u'#'),
+ (0xFE60, '3', u'&'),
+ (0xFE61, '3', u'*'),
+ (0xFE62, '3', u'+'),
+ (0xFE63, 'M', u'-'),
+ (0xFE64, '3', u'<'),
+ (0xFE65, '3', u'>'),
+ (0xFE66, '3', u'='),
+ ]
+
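+# 0xFE70-0xFEFC (in the next segment) are Arabic presentation forms:
+# positional glyph variants and ligatures that UTS #46 maps back to their
+# base letters, e.g. all four contextual forms of BEH collapse to u'ب',
+# and the LAM-ALEF ligatures expand to their two-letter sequences.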
+def _seg_50():
+ return [
+ (0xFE67, 'X'),
+ (0xFE68, '3', u'\\'),
+ (0xFE69, '3', u'$'),
+ (0xFE6A, '3', u'%'),
+ (0xFE6B, '3', u'@'),
+ (0xFE6C, 'X'),
+ (0xFE70, '3', u' ً'),
+ (0xFE71, 'M', u'ـً'),
+ (0xFE72, '3', u' ٌ'),
+ (0xFE73, 'V'),
+ (0xFE74, '3', u' ٍ'),
+ (0xFE75, 'X'),
+ (0xFE76, '3', u' َ'),
+ (0xFE77, 'M', u'ـَ'),
+ (0xFE78, '3', u' ُ'),
+ (0xFE79, 'M', u'ـُ'),
+ (0xFE7A, '3', u' ِ'),
+ (0xFE7B, 'M', u'ـِ'),
+ (0xFE7C, '3', u' ّ'),
+ (0xFE7D, 'M', u'ـّ'),
+ (0xFE7E, '3', u' ْ'),
+ (0xFE7F, 'M', u'ـْ'),
+ (0xFE80, 'M', u'ء'),
+ (0xFE81, 'M', u'آ'),
+ (0xFE83, 'M', u'أ'),
+ (0xFE85, 'M', u'ؤ'),
+ (0xFE87, 'M', u'إ'),
+ (0xFE89, 'M', u'ئ'),
+ (0xFE8D, 'M', u'ا'),
+ (0xFE8F, 'M', u'ب'),
+ (0xFE93, 'M', u'ة'),
+ (0xFE95, 'M', u'ت'),
+ (0xFE99, 'M', u'ث'),
+ (0xFE9D, 'M', u'ج'),
+ (0xFEA1, 'M', u'ح'),
+ (0xFEA5, 'M', u'خ'),
+ (0xFEA9, 'M', u'د'),
+ (0xFEAB, 'M', u'ذ'),
+ (0xFEAD, 'M', u'ر'),
+ (0xFEAF, 'M', u'ز'),
+ (0xFEB1, 'M', u'س'),
+ (0xFEB5, 'M', u'ش'),
+ (0xFEB9, 'M', u'ص'),
+ (0xFEBD, 'M', u'ض'),
+ (0xFEC1, 'M', u'ط'),
+ (0xFEC5, 'M', u'ظ'),
+ (0xFEC9, 'M', u'ع'),
+ (0xFECD, 'M', u'غ'),
+ (0xFED1, 'M', u'ف'),
+ (0xFED5, 'M', u'ق'),
+ (0xFED9, 'M', u'ك'),
+ (0xFEDD, 'M', u'ل'),
+ (0xFEE1, 'M', u'م'),
+ (0xFEE5, 'M', u'ن'),
+ (0xFEE9, 'M', u'ه'),
+ (0xFEED, 'M', u'و'),
+ (0xFEEF, 'M', u'ى'),
+ (0xFEF1, 'M', u'ي'),
+ (0xFEF5, 'M', u'لآ'),
+ (0xFEF7, 'M', u'لأ'),
+ (0xFEF9, 'M', u'لإ'),
+ (0xFEFB, 'M', u'لا'),
+ (0xFEFD, 'X'),
+ (0xFEFF, 'I'),
+ (0xFF00, 'X'),
+ (0xFF01, '3', u'!'),
+ (0xFF02, '3', u'"'),
+ (0xFF03, '3', u'#'),
+ (0xFF04, '3', u'$'),
+ (0xFF05, '3', u'%'),
+ (0xFF06, '3', u'&'),
+ (0xFF07, '3', u'\''),
+ (0xFF08, '3', u'('),
+ (0xFF09, '3', u')'),
+ (0xFF0A, '3', u'*'),
+ (0xFF0B, '3', u'+'),
+ (0xFF0C, '3', u','),
+ (0xFF0D, 'M', u'-'),
+ (0xFF0E, 'M', u'.'),
+ (0xFF0F, '3', u'/'),
+ (0xFF10, 'M', u'0'),
+ (0xFF11, 'M', u'1'),
+ (0xFF12, 'M', u'2'),
+ (0xFF13, 'M', u'3'),
+ (0xFF14, 'M', u'4'),
+ (0xFF15, 'M', u'5'),
+ (0xFF16, 'M', u'6'),
+ (0xFF17, 'M', u'7'),
+ (0xFF18, 'M', u'8'),
+ (0xFF19, 'M', u'9'),
+ (0xFF1A, '3', u':'),
+ (0xFF1B, '3', u';'),
+ (0xFF1C, '3', u'<'),
+ (0xFF1D, '3', u'='),
+ (0xFF1E, '3', u'>'),
+ (0xFF1F, '3', u'?'),
+ (0xFF20, '3', u'@'),
+ (0xFF21, 'M', u'a'),
+ (0xFF22, 'M', u'b'),
+ (0xFF23, 'M', u'c'),
+ ]
+
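+# 0xFF01-0xFF5E (straddling this segment boundary) are the full-width ASCII
+# variants; each is mapped (status 'M', or '3' for STD3-restricted
+# punctuation) to its ASCII counterpart, with uppercase letters case-folded,
+# so both 0xFF21 and 0xFF41 map to u'a'.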
+def _seg_51():
+ return [
+ (0xFF24, 'M', u'd'),
+ (0xFF25, 'M', u'e'),
+ (0xFF26, 'M', u'f'),
+ (0xFF27, 'M', u'g'),
+ (0xFF28, 'M', u'h'),
+ (0xFF29, 'M', u'i'),
+ (0xFF2A, 'M', u'j'),
+ (0xFF2B, 'M', u'k'),
+ (0xFF2C, 'M', u'l'),
+ (0xFF2D, 'M', u'm'),
+ (0xFF2E, 'M', u'n'),
+ (0xFF2F, 'M', u'o'),
+ (0xFF30, 'M', u'p'),
+ (0xFF31, 'M', u'q'),
+ (0xFF32, 'M', u'r'),
+ (0xFF33, 'M', u's'),
+ (0xFF34, 'M', u't'),
+ (0xFF35, 'M', u'u'),
+ (0xFF36, 'M', u'v'),
+ (0xFF37, 'M', u'w'),
+ (0xFF38, 'M', u'x'),
+ (0xFF39, 'M', u'y'),
+ (0xFF3A, 'M', u'z'),
+ (0xFF3B, '3', u'['),
+ (0xFF3C, '3', u'\\'),
+ (0xFF3D, '3', u']'),
+ (0xFF3E, '3', u'^'),
+ (0xFF3F, '3', u'_'),
+ (0xFF40, '3', u'`'),
+ (0xFF41, 'M', u'a'),
+ (0xFF42, 'M', u'b'),
+ (0xFF43, 'M', u'c'),
+ (0xFF44, 'M', u'd'),
+ (0xFF45, 'M', u'e'),
+ (0xFF46, 'M', u'f'),
+ (0xFF47, 'M', u'g'),
+ (0xFF48, 'M', u'h'),
+ (0xFF49, 'M', u'i'),
+ (0xFF4A, 'M', u'j'),
+ (0xFF4B, 'M', u'k'),
+ (0xFF4C, 'M', u'l'),
+ (0xFF4D, 'M', u'm'),
+ (0xFF4E, 'M', u'n'),
+ (0xFF4F, 'M', u'o'),
+ (0xFF50, 'M', u'p'),
+ (0xFF51, 'M', u'q'),
+ (0xFF52, 'M', u'r'),
+ (0xFF53, 'M', u's'),
+ (0xFF54, 'M', u't'),
+ (0xFF55, 'M', u'u'),
+ (0xFF56, 'M', u'v'),
+ (0xFF57, 'M', u'w'),
+ (0xFF58, 'M', u'x'),
+ (0xFF59, 'M', u'y'),
+ (0xFF5A, 'M', u'z'),
+ (0xFF5B, '3', u'{'),
+ (0xFF5C, '3', u'|'),
+ (0xFF5D, '3', u'}'),
+ (0xFF5E, '3', u'~'),
+ (0xFF5F, 'M', u'⦅'),
+ (0xFF60, 'M', u'⦆'),
+ (0xFF61, 'M', u'.'),
+ (0xFF62, 'M', u'「'),
+ (0xFF63, 'M', u'」'),
+ (0xFF64, 'M', u'、'),
+ (0xFF65, 'M', u'・'),
+ (0xFF66, 'M', u'ヲ'),
+ (0xFF67, 'M', u'ァ'),
+ (0xFF68, 'M', u'ィ'),
+ (0xFF69, 'M', u'ゥ'),
+ (0xFF6A, 'M', u'ェ'),
+ (0xFF6B, 'M', u'ォ'),
+ (0xFF6C, 'M', u'ャ'),
+ (0xFF6D, 'M', u'ュ'),
+ (0xFF6E, 'M', u'ョ'),
+ (0xFF6F, 'M', u'ッ'),
+ (0xFF70, 'M', u'ー'),
+ (0xFF71, 'M', u'ア'),
+ (0xFF72, 'M', u'イ'),
+ (0xFF73, 'M', u'ウ'),
+ (0xFF74, 'M', u'エ'),
+ (0xFF75, 'M', u'オ'),
+ (0xFF76, 'M', u'カ'),
+ (0xFF77, 'M', u'キ'),
+ (0xFF78, 'M', u'ク'),
+ (0xFF79, 'M', u'ケ'),
+ (0xFF7A, 'M', u'コ'),
+ (0xFF7B, 'M', u'サ'),
+ (0xFF7C, 'M', u'シ'),
+ (0xFF7D, 'M', u'ス'),
+ (0xFF7E, 'M', u'セ'),
+ (0xFF7F, 'M', u'ソ'),
+ (0xFF80, 'M', u'タ'),
+ (0xFF81, 'M', u'チ'),
+ (0xFF82, 'M', u'ツ'),
+ (0xFF83, 'M', u'テ'),
+ (0xFF84, 'M', u'ト'),
+ (0xFF85, 'M', u'ナ'),
+ (0xFF86, 'M', u'ニ'),
+ (0xFF87, 'M', u'ヌ'),
+ ]
+
+def _seg_52():
+ return [
+ (0xFF88, 'M', u'ネ'),
+ (0xFF89, 'M', u'ノ'),
+ (0xFF8A, 'M', u'ハ'),
+ (0xFF8B, 'M', u'ヒ'),
+ (0xFF8C, 'M', u'フ'),
+ (0xFF8D, 'M', u'ヘ'),
+ (0xFF8E, 'M', u'ホ'),
+ (0xFF8F, 'M', u'マ'),
+ (0xFF90, 'M', u'ミ'),
+ (0xFF91, 'M', u'ム'),
+ (0xFF92, 'M', u'メ'),
+ (0xFF93, 'M', u'モ'),
+ (0xFF94, 'M', u'ヤ'),
+ (0xFF95, 'M', u'ユ'),
+ (0xFF96, 'M', u'ヨ'),
+ (0xFF97, 'M', u'ラ'),
+ (0xFF98, 'M', u'リ'),
+ (0xFF99, 'M', u'ル'),
+ (0xFF9A, 'M', u'レ'),
+ (0xFF9B, 'M', u'ロ'),
+ (0xFF9C, 'M', u'ワ'),
+ (0xFF9D, 'M', u'ン'),
+ (0xFF9E, 'M', u'゙'),
+ (0xFF9F, 'M', u'゚'),
+ (0xFFA0, 'X'),
+ (0xFFA1, 'M', u'ᄀ'),
+ (0xFFA2, 'M', u'ᄁ'),
+ (0xFFA3, 'M', u'ᆪ'),
+ (0xFFA4, 'M', u'ᄂ'),
+ (0xFFA5, 'M', u'ᆬ'),
+ (0xFFA6, 'M', u'ᆭ'),
+ (0xFFA7, 'M', u'ᄃ'),
+ (0xFFA8, 'M', u'ᄄ'),
+ (0xFFA9, 'M', u'ᄅ'),
+ (0xFFAA, 'M', u'ᆰ'),
+ (0xFFAB, 'M', u'ᆱ'),
+ (0xFFAC, 'M', u'ᆲ'),
+ (0xFFAD, 'M', u'ᆳ'),
+ (0xFFAE, 'M', u'ᆴ'),
+ (0xFFAF, 'M', u'ᆵ'),
+ (0xFFB0, 'M', u'ᄚ'),
+ (0xFFB1, 'M', u'ᄆ'),
+ (0xFFB2, 'M', u'ᄇ'),
+ (0xFFB3, 'M', u'ᄈ'),
+ (0xFFB4, 'M', u'ᄡ'),
+ (0xFFB5, 'M', u'ᄉ'),
+ (0xFFB6, 'M', u'ᄊ'),
+ (0xFFB7, 'M', u'ᄋ'),
+ (0xFFB8, 'M', u'ᄌ'),
+ (0xFFB9, 'M', u'ᄍ'),
+ (0xFFBA, 'M', u'ᄎ'),
+ (0xFFBB, 'M', u'ᄏ'),
+ (0xFFBC, 'M', u'ᄐ'),
+ (0xFFBD, 'M', u'ᄑ'),
+ (0xFFBE, 'M', u'ᄒ'),
+ (0xFFBF, 'X'),
+ (0xFFC2, 'M', u'ᅡ'),
+ (0xFFC3, 'M', u'ᅢ'),
+ (0xFFC4, 'M', u'ᅣ'),
+ (0xFFC5, 'M', u'ᅤ'),
+ (0xFFC6, 'M', u'ᅥ'),
+ (0xFFC7, 'M', u'ᅦ'),
+ (0xFFC8, 'X'),
+ (0xFFCA, 'M', u'ᅧ'),
+ (0xFFCB, 'M', u'ᅨ'),
+ (0xFFCC, 'M', u'ᅩ'),
+ (0xFFCD, 'M', u'ᅪ'),
+ (0xFFCE, 'M', u'ᅫ'),
+ (0xFFCF, 'M', u'ᅬ'),
+ (0xFFD0, 'X'),
+ (0xFFD2, 'M', u'ᅭ'),
+ (0xFFD3, 'M', u'ᅮ'),
+ (0xFFD4, 'M', u'ᅯ'),
+ (0xFFD5, 'M', u'ᅰ'),
+ (0xFFD6, 'M', u'ᅱ'),
+ (0xFFD7, 'M', u'ᅲ'),
+ (0xFFD8, 'X'),
+ (0xFFDA, 'M', u'ᅳ'),
+ (0xFFDB, 'M', u'ᅴ'),
+ (0xFFDC, 'M', u'ᅵ'),
+ (0xFFDD, 'X'),
+ (0xFFE0, 'M', u'¢'),
+ (0xFFE1, 'M', u'£'),
+ (0xFFE2, 'M', u'¬'),
+ (0xFFE3, '3', u' ̄'),
+ (0xFFE4, 'M', u'¦'),
+ (0xFFE5, 'M', u'¥'),
+ (0xFFE6, 'M', u'₩'),
+ (0xFFE7, 'X'),
+ (0xFFE8, 'M', u'│'),
+ (0xFFE9, 'M', u'←'),
+ (0xFFEA, 'M', u'↑'),
+ (0xFFEB, 'M', u'→'),
+ (0xFFEC, 'M', u'↓'),
+ (0xFFED, 'M', u'■'),
+ (0xFFEE, 'M', u'○'),
+ (0xFFEF, 'X'),
+ (0x10000, 'V'),
+ (0x1000C, 'X'),
+ (0x1000D, 'V'),
+ ]
+
+def _seg_53():
+ return [
+ (0x10027, 'X'),
+ (0x10028, 'V'),
+ (0x1003B, 'X'),
+ (0x1003C, 'V'),
+ (0x1003E, 'X'),
+ (0x1003F, 'V'),
+ (0x1004E, 'X'),
+ (0x10050, 'V'),
+ (0x1005E, 'X'),
+ (0x10080, 'V'),
+ (0x100FB, 'X'),
+ (0x10100, 'V'),
+ (0x10103, 'X'),
+ (0x10107, 'V'),
+ (0x10134, 'X'),
+ (0x10137, 'V'),
+ (0x1018F, 'X'),
+ (0x10190, 'V'),
+ (0x1019D, 'X'),
+ (0x101A0, 'V'),
+ (0x101A1, 'X'),
+ (0x101D0, 'V'),
+ (0x101FE, 'X'),
+ (0x10280, 'V'),
+ (0x1029D, 'X'),
+ (0x102A0, 'V'),
+ (0x102D1, 'X'),
+ (0x102E0, 'V'),
+ (0x102FC, 'X'),
+ (0x10300, 'V'),
+ (0x10324, 'X'),
+ (0x1032D, 'V'),
+ (0x1034B, 'X'),
+ (0x10350, 'V'),
+ (0x1037B, 'X'),
+ (0x10380, 'V'),
+ (0x1039E, 'X'),
+ (0x1039F, 'V'),
+ (0x103C4, 'X'),
+ (0x103C8, 'V'),
+ (0x103D6, 'X'),
+ (0x10400, 'M', u'𐐨'),
+ (0x10401, 'M', u'𐐩'),
+ (0x10402, 'M', u'𐐪'),
+ (0x10403, 'M', u'𐐫'),
+ (0x10404, 'M', u'𐐬'),
+ (0x10405, 'M', u'𐐭'),
+ (0x10406, 'M', u'𐐮'),
+ (0x10407, 'M', u'𐐯'),
+ (0x10408, 'M', u'𐐰'),
+ (0x10409, 'M', u'𐐱'),
+ (0x1040A, 'M', u'𐐲'),
+ (0x1040B, 'M', u'𐐳'),
+ (0x1040C, 'M', u'𐐴'),
+ (0x1040D, 'M', u'𐐵'),
+ (0x1040E, 'M', u'𐐶'),
+ (0x1040F, 'M', u'𐐷'),
+ (0x10410, 'M', u'𐐸'),
+ (0x10411, 'M', u'𐐹'),
+ (0x10412, 'M', u'𐐺'),
+ (0x10413, 'M', u'𐐻'),
+ (0x10414, 'M', u'𐐼'),
+ (0x10415, 'M', u'𐐽'),
+ (0x10416, 'M', u'𐐾'),
+ (0x10417, 'M', u'𐐿'),
+ (0x10418, 'M', u'𐑀'),
+ (0x10419, 'M', u'𐑁'),
+ (0x1041A, 'M', u'𐑂'),
+ (0x1041B, 'M', u'𐑃'),
+ (0x1041C, 'M', u'𐑄'),
+ (0x1041D, 'M', u'𐑅'),
+ (0x1041E, 'M', u'𐑆'),
+ (0x1041F, 'M', u'𐑇'),
+ (0x10420, 'M', u'𐑈'),
+ (0x10421, 'M', u'𐑉'),
+ (0x10422, 'M', u'𐑊'),
+ (0x10423, 'M', u'𐑋'),
+ (0x10424, 'M', u'𐑌'),
+ (0x10425, 'M', u'𐑍'),
+ (0x10426, 'M', u'𐑎'),
+ (0x10427, 'M', u'𐑏'),
+ (0x10428, 'V'),
+ (0x1049E, 'X'),
+ (0x104A0, 'V'),
+ (0x104AA, 'X'),
+ (0x104B0, 'M', u'𐓘'),
+ (0x104B1, 'M', u'𐓙'),
+ (0x104B2, 'M', u'𐓚'),
+ (0x104B3, 'M', u'𐓛'),
+ (0x104B4, 'M', u'𐓜'),
+ (0x104B5, 'M', u'𐓝'),
+ (0x104B6, 'M', u'𐓞'),
+ (0x104B7, 'M', u'𐓟'),
+ (0x104B8, 'M', u'𐓠'),
+ (0x104B9, 'M', u'𐓡'),
+ (0x104BA, 'M', u'𐓢'),
+ (0x104BB, 'M', u'𐓣'),
+ (0x104BC, 'M', u'𐓤'),
+ (0x104BD, 'M', u'𐓥'),
+ (0x104BE, 'M', u'𐓦'),
+ ]
+
+def _seg_54():
+ return [
+ (0x104BF, 'M', u'𐓧'),
+ (0x104C0, 'M', u'𐓨'),
+ (0x104C1, 'M', u'𐓩'),
+ (0x104C2, 'M', u'𐓪'),
+ (0x104C3, 'M', u'𐓫'),
+ (0x104C4, 'M', u'𐓬'),
+ (0x104C5, 'M', u'𐓭'),
+ (0x104C6, 'M', u'𐓮'),
+ (0x104C7, 'M', u'𐓯'),
+ (0x104C8, 'M', u'𐓰'),
+ (0x104C9, 'M', u'𐓱'),
+ (0x104CA, 'M', u'𐓲'),
+ (0x104CB, 'M', u'𐓳'),
+ (0x104CC, 'M', u'𐓴'),
+ (0x104CD, 'M', u'𐓵'),
+ (0x104CE, 'M', u'𐓶'),
+ (0x104CF, 'M', u'𐓷'),
+ (0x104D0, 'M', u'𐓸'),
+ (0x104D1, 'M', u'𐓹'),
+ (0x104D2, 'M', u'𐓺'),
+ (0x104D3, 'M', u'𐓻'),
+ (0x104D4, 'X'),
+ (0x104D8, 'V'),
+ (0x104FC, 'X'),
+ (0x10500, 'V'),
+ (0x10528, 'X'),
+ (0x10530, 'V'),
+ (0x10564, 'X'),
+ (0x1056F, 'V'),
+ (0x10570, 'X'),
+ (0x10600, 'V'),
+ (0x10737, 'X'),
+ (0x10740, 'V'),
+ (0x10756, 'X'),
+ (0x10760, 'V'),
+ (0x10768, 'X'),
+ (0x10800, 'V'),
+ (0x10806, 'X'),
+ (0x10808, 'V'),
+ (0x10809, 'X'),
+ (0x1080A, 'V'),
+ (0x10836, 'X'),
+ (0x10837, 'V'),
+ (0x10839, 'X'),
+ (0x1083C, 'V'),
+ (0x1083D, 'X'),
+ (0x1083F, 'V'),
+ (0x10856, 'X'),
+ (0x10857, 'V'),
+ (0x1089F, 'X'),
+ (0x108A7, 'V'),
+ (0x108B0, 'X'),
+ (0x108E0, 'V'),
+ (0x108F3, 'X'),
+ (0x108F4, 'V'),
+ (0x108F6, 'X'),
+ (0x108FB, 'V'),
+ (0x1091C, 'X'),
+ (0x1091F, 'V'),
+ (0x1093A, 'X'),
+ (0x1093F, 'V'),
+ (0x10940, 'X'),
+ (0x10980, 'V'),
+ (0x109B8, 'X'),
+ (0x109BC, 'V'),
+ (0x109D0, 'X'),
+ (0x109D2, 'V'),
+ (0x10A04, 'X'),
+ (0x10A05, 'V'),
+ (0x10A07, 'X'),
+ (0x10A0C, 'V'),
+ (0x10A14, 'X'),
+ (0x10A15, 'V'),
+ (0x10A18, 'X'),
+ (0x10A19, 'V'),
+ (0x10A36, 'X'),
+ (0x10A38, 'V'),
+ (0x10A3B, 'X'),
+ (0x10A3F, 'V'),
+ (0x10A49, 'X'),
+ (0x10A50, 'V'),
+ (0x10A59, 'X'),
+ (0x10A60, 'V'),
+ (0x10AA0, 'X'),
+ (0x10AC0, 'V'),
+ (0x10AE7, 'X'),
+ (0x10AEB, 'V'),
+ (0x10AF7, 'X'),
+ (0x10B00, 'V'),
+ (0x10B36, 'X'),
+ (0x10B39, 'V'),
+ (0x10B56, 'X'),
+ (0x10B58, 'V'),
+ (0x10B73, 'X'),
+ (0x10B78, 'V'),
+ (0x10B92, 'X'),
+ (0x10B99, 'V'),
+ (0x10B9D, 'X'),
+ (0x10BA9, 'V'),
+ (0x10BB0, 'X'),
+ ]
+
+def _seg_55():
+ return [
+ (0x10C00, 'V'),
+ (0x10C49, 'X'),
+ (0x10C80, 'M', u'𐳀'),
+ (0x10C81, 'M', u'𐳁'),
+ (0x10C82, 'M', u'𐳂'),
+ (0x10C83, 'M', u'𐳃'),
+ (0x10C84, 'M', u'𐳄'),
+ (0x10C85, 'M', u'𐳅'),
+ (0x10C86, 'M', u'𐳆'),
+ (0x10C87, 'M', u'𐳇'),
+ (0x10C88, 'M', u'𐳈'),
+ (0x10C89, 'M', u'𐳉'),
+ (0x10C8A, 'M', u'𐳊'),
+ (0x10C8B, 'M', u'𐳋'),
+ (0x10C8C, 'M', u'𐳌'),
+ (0x10C8D, 'M', u'𐳍'),
+ (0x10C8E, 'M', u'𐳎'),
+ (0x10C8F, 'M', u'𐳏'),
+ (0x10C90, 'M', u'𐳐'),
+ (0x10C91, 'M', u'𐳑'),
+ (0x10C92, 'M', u'𐳒'),
+ (0x10C93, 'M', u'𐳓'),
+ (0x10C94, 'M', u'𐳔'),
+ (0x10C95, 'M', u'𐳕'),
+ (0x10C96, 'M', u'𐳖'),
+ (0x10C97, 'M', u'𐳗'),
+ (0x10C98, 'M', u'𐳘'),
+ (0x10C99, 'M', u'𐳙'),
+ (0x10C9A, 'M', u'𐳚'),
+ (0x10C9B, 'M', u'𐳛'),
+ (0x10C9C, 'M', u'𐳜'),
+ (0x10C9D, 'M', u'𐳝'),
+ (0x10C9E, 'M', u'𐳞'),
+ (0x10C9F, 'M', u'𐳟'),
+ (0x10CA0, 'M', u'𐳠'),
+ (0x10CA1, 'M', u'𐳡'),
+ (0x10CA2, 'M', u'𐳢'),
+ (0x10CA3, 'M', u'𐳣'),
+ (0x10CA4, 'M', u'𐳤'),
+ (0x10CA5, 'M', u'𐳥'),
+ (0x10CA6, 'M', u'𐳦'),
+ (0x10CA7, 'M', u'𐳧'),
+ (0x10CA8, 'M', u'𐳨'),
+ (0x10CA9, 'M', u'𐳩'),
+ (0x10CAA, 'M', u'𐳪'),
+ (0x10CAB, 'M', u'𐳫'),
+ (0x10CAC, 'M', u'𐳬'),
+ (0x10CAD, 'M', u'𐳭'),
+ (0x10CAE, 'M', u'𐳮'),
+ (0x10CAF, 'M', u'𐳯'),
+ (0x10CB0, 'M', u'𐳰'),
+ (0x10CB1, 'M', u'𐳱'),
+ (0x10CB2, 'M', u'𐳲'),
+ (0x10CB3, 'X'),
+ (0x10CC0, 'V'),
+ (0x10CF3, 'X'),
+ (0x10CFA, 'V'),
+ (0x10D28, 'X'),
+ (0x10D30, 'V'),
+ (0x10D3A, 'X'),
+ (0x10E60, 'V'),
+ (0x10E7F, 'X'),
+ (0x10E80, 'V'),
+ (0x10EAA, 'X'),
+ (0x10EAB, 'V'),
+ (0x10EAE, 'X'),
+ (0x10EB0, 'V'),
+ (0x10EB2, 'X'),
+ (0x10F00, 'V'),
+ (0x10F28, 'X'),
+ (0x10F30, 'V'),
+ (0x10F5A, 'X'),
+ (0x10FB0, 'V'),
+ (0x10FCC, 'X'),
+ (0x10FE0, 'V'),
+ (0x10FF7, 'X'),
+ (0x11000, 'V'),
+ (0x1104E, 'X'),
+ (0x11052, 'V'),
+ (0x11070, 'X'),
+ (0x1107F, 'V'),
+ (0x110BD, 'X'),
+ (0x110BE, 'V'),
+ (0x110C2, 'X'),
+ (0x110D0, 'V'),
+ (0x110E9, 'X'),
+ (0x110F0, 'V'),
+ (0x110FA, 'X'),
+ (0x11100, 'V'),
+ (0x11135, 'X'),
+ (0x11136, 'V'),
+ (0x11148, 'X'),
+ (0x11150, 'V'),
+ (0x11177, 'X'),
+ (0x11180, 'V'),
+ (0x111E0, 'X'),
+ (0x111E1, 'V'),
+ (0x111F5, 'X'),
+ (0x11200, 'V'),
+ (0x11212, 'X'),
+ ]
+
+def _seg_56():
+ return [
+ (0x11213, 'V'),
+ (0x1123F, 'X'),
+ (0x11280, 'V'),
+ (0x11287, 'X'),
+ (0x11288, 'V'),
+ (0x11289, 'X'),
+ (0x1128A, 'V'),
+ (0x1128E, 'X'),
+ (0x1128F, 'V'),
+ (0x1129E, 'X'),
+ (0x1129F, 'V'),
+ (0x112AA, 'X'),
+ (0x112B0, 'V'),
+ (0x112EB, 'X'),
+ (0x112F0, 'V'),
+ (0x112FA, 'X'),
+ (0x11300, 'V'),
+ (0x11304, 'X'),
+ (0x11305, 'V'),
+ (0x1130D, 'X'),
+ (0x1130F, 'V'),
+ (0x11311, 'X'),
+ (0x11313, 'V'),
+ (0x11329, 'X'),
+ (0x1132A, 'V'),
+ (0x11331, 'X'),
+ (0x11332, 'V'),
+ (0x11334, 'X'),
+ (0x11335, 'V'),
+ (0x1133A, 'X'),
+ (0x1133B, 'V'),
+ (0x11345, 'X'),
+ (0x11347, 'V'),
+ (0x11349, 'X'),
+ (0x1134B, 'V'),
+ (0x1134E, 'X'),
+ (0x11350, 'V'),
+ (0x11351, 'X'),
+ (0x11357, 'V'),
+ (0x11358, 'X'),
+ (0x1135D, 'V'),
+ (0x11364, 'X'),
+ (0x11366, 'V'),
+ (0x1136D, 'X'),
+ (0x11370, 'V'),
+ (0x11375, 'X'),
+ (0x11400, 'V'),
+ (0x1145C, 'X'),
+ (0x1145D, 'V'),
+ (0x11462, 'X'),
+ (0x11480, 'V'),
+ (0x114C8, 'X'),
+ (0x114D0, 'V'),
+ (0x114DA, 'X'),
+ (0x11580, 'V'),
+ (0x115B6, 'X'),
+ (0x115B8, 'V'),
+ (0x115DE, 'X'),
+ (0x11600, 'V'),
+ (0x11645, 'X'),
+ (0x11650, 'V'),
+ (0x1165A, 'X'),
+ (0x11660, 'V'),
+ (0x1166D, 'X'),
+ (0x11680, 'V'),
+ (0x116B9, 'X'),
+ (0x116C0, 'V'),
+ (0x116CA, 'X'),
+ (0x11700, 'V'),
+ (0x1171B, 'X'),
+ (0x1171D, 'V'),
+ (0x1172C, 'X'),
+ (0x11730, 'V'),
+ (0x11740, 'X'),
+ (0x11800, 'V'),
+ (0x1183C, 'X'),
+ (0x118A0, 'M', u'𑣀'),
+ (0x118A1, 'M', u'𑣁'),
+ (0x118A2, 'M', u'𑣂'),
+ (0x118A3, 'M', u'𑣃'),
+ (0x118A4, 'M', u'𑣄'),
+ (0x118A5, 'M', u'𑣅'),
+ (0x118A6, 'M', u'𑣆'),
+ (0x118A7, 'M', u'𑣇'),
+ (0x118A8, 'M', u'𑣈'),
+ (0x118A9, 'M', u'𑣉'),
+ (0x118AA, 'M', u'𑣊'),
+ (0x118AB, 'M', u'𑣋'),
+ (0x118AC, 'M', u'𑣌'),
+ (0x118AD, 'M', u'𑣍'),
+ (0x118AE, 'M', u'𑣎'),
+ (0x118AF, 'M', u'𑣏'),
+ (0x118B0, 'M', u'𑣐'),
+ (0x118B1, 'M', u'𑣑'),
+ (0x118B2, 'M', u'𑣒'),
+ (0x118B3, 'M', u'𑣓'),
+ (0x118B4, 'M', u'𑣔'),
+ (0x118B5, 'M', u'𑣕'),
+ (0x118B6, 'M', u'𑣖'),
+ (0x118B7, 'M', u'𑣗'),
+ ]
+
+def _seg_57():
+ return [
+ (0x118B8, 'M', u'𑣘'),
+ (0x118B9, 'M', u'𑣙'),
+ (0x118BA, 'M', u'𑣚'),
+ (0x118BB, 'M', u'𑣛'),
+ (0x118BC, 'M', u'𑣜'),
+ (0x118BD, 'M', u'𑣝'),
+ (0x118BE, 'M', u'𑣞'),
+ (0x118BF, 'M', u'𑣟'),
+ (0x118C0, 'V'),
+ (0x118F3, 'X'),
+ (0x118FF, 'V'),
+ (0x11907, 'X'),
+ (0x11909, 'V'),
+ (0x1190A, 'X'),
+ (0x1190C, 'V'),
+ (0x11914, 'X'),
+ (0x11915, 'V'),
+ (0x11917, 'X'),
+ (0x11918, 'V'),
+ (0x11936, 'X'),
+ (0x11937, 'V'),
+ (0x11939, 'X'),
+ (0x1193B, 'V'),
+ (0x11947, 'X'),
+ (0x11950, 'V'),
+ (0x1195A, 'X'),
+ (0x119A0, 'V'),
+ (0x119A8, 'X'),
+ (0x119AA, 'V'),
+ (0x119D8, 'X'),
+ (0x119DA, 'V'),
+ (0x119E5, 'X'),
+ (0x11A00, 'V'),
+ (0x11A48, 'X'),
+ (0x11A50, 'V'),
+ (0x11AA3, 'X'),
+ (0x11AC0, 'V'),
+ (0x11AF9, 'X'),
+ (0x11C00, 'V'),
+ (0x11C09, 'X'),
+ (0x11C0A, 'V'),
+ (0x11C37, 'X'),
+ (0x11C38, 'V'),
+ (0x11C46, 'X'),
+ (0x11C50, 'V'),
+ (0x11C6D, 'X'),
+ (0x11C70, 'V'),
+ (0x11C90, 'X'),
+ (0x11C92, 'V'),
+ (0x11CA8, 'X'),
+ (0x11CA9, 'V'),
+ (0x11CB7, 'X'),
+ (0x11D00, 'V'),
+ (0x11D07, 'X'),
+ (0x11D08, 'V'),
+ (0x11D0A, 'X'),
+ (0x11D0B, 'V'),
+ (0x11D37, 'X'),
+ (0x11D3A, 'V'),
+ (0x11D3B, 'X'),
+ (0x11D3C, 'V'),
+ (0x11D3E, 'X'),
+ (0x11D3F, 'V'),
+ (0x11D48, 'X'),
+ (0x11D50, 'V'),
+ (0x11D5A, 'X'),
+ (0x11D60, 'V'),
+ (0x11D66, 'X'),
+ (0x11D67, 'V'),
+ (0x11D69, 'X'),
+ (0x11D6A, 'V'),
+ (0x11D8F, 'X'),
+ (0x11D90, 'V'),
+ (0x11D92, 'X'),
+ (0x11D93, 'V'),
+ (0x11D99, 'X'),
+ (0x11DA0, 'V'),
+ (0x11DAA, 'X'),
+ (0x11EE0, 'V'),
+ (0x11EF9, 'X'),
+ (0x11FB0, 'V'),
+ (0x11FB1, 'X'),
+ (0x11FC0, 'V'),
+ (0x11FF2, 'X'),
+ (0x11FFF, 'V'),
+ (0x1239A, 'X'),
+ (0x12400, 'V'),
+ (0x1246F, 'X'),
+ (0x12470, 'V'),
+ (0x12475, 'X'),
+ (0x12480, 'V'),
+ (0x12544, 'X'),
+ (0x13000, 'V'),
+ (0x1342F, 'X'),
+ (0x14400, 'V'),
+ (0x14647, 'X'),
+ (0x16800, 'V'),
+ (0x16A39, 'X'),
+ (0x16A40, 'V'),
+ (0x16A5F, 'X'),
+ ]
+
+def _seg_58():
+ return [
+ (0x16A60, 'V'),
+ (0x16A6A, 'X'),
+ (0x16A6E, 'V'),
+ (0x16A70, 'X'),
+ (0x16AD0, 'V'),
+ (0x16AEE, 'X'),
+ (0x16AF0, 'V'),
+ (0x16AF6, 'X'),
+ (0x16B00, 'V'),
+ (0x16B46, 'X'),
+ (0x16B50, 'V'),
+ (0x16B5A, 'X'),
+ (0x16B5B, 'V'),
+ (0x16B62, 'X'),
+ (0x16B63, 'V'),
+ (0x16B78, 'X'),
+ (0x16B7D, 'V'),
+ (0x16B90, 'X'),
+ (0x16E40, 'M', u'𖹠'),
+ (0x16E41, 'M', u'𖹡'),
+ (0x16E42, 'M', u'𖹢'),
+ (0x16E43, 'M', u'𖹣'),
+ (0x16E44, 'M', u'𖹤'),
+ (0x16E45, 'M', u'𖹥'),
+ (0x16E46, 'M', u'𖹦'),
+ (0x16E47, 'M', u'𖹧'),
+ (0x16E48, 'M', u'𖹨'),
+ (0x16E49, 'M', u'𖹩'),
+ (0x16E4A, 'M', u'𖹪'),
+ (0x16E4B, 'M', u'𖹫'),
+ (0x16E4C, 'M', u'𖹬'),
+ (0x16E4D, 'M', u'𖹭'),
+ (0x16E4E, 'M', u'𖹮'),
+ (0x16E4F, 'M', u'𖹯'),
+ (0x16E50, 'M', u'𖹰'),
+ (0x16E51, 'M', u'𖹱'),
+ (0x16E52, 'M', u'𖹲'),
+ (0x16E53, 'M', u'𖹳'),
+ (0x16E54, 'M', u'𖹴'),
+ (0x16E55, 'M', u'𖹵'),
+ (0x16E56, 'M', u'𖹶'),
+ (0x16E57, 'M', u'𖹷'),
+ (0x16E58, 'M', u'𖹸'),
+ (0x16E59, 'M', u'𖹹'),
+ (0x16E5A, 'M', u'𖹺'),
+ (0x16E5B, 'M', u'𖹻'),
+ (0x16E5C, 'M', u'𖹼'),
+ (0x16E5D, 'M', u'𖹽'),
+ (0x16E5E, 'M', u'𖹾'),
+ (0x16E5F, 'M', u'𖹿'),
+ (0x16E60, 'V'),
+ (0x16E9B, 'X'),
+ (0x16F00, 'V'),
+ (0x16F4B, 'X'),
+ (0x16F4F, 'V'),
+ (0x16F88, 'X'),
+ (0x16F8F, 'V'),
+ (0x16FA0, 'X'),
+ (0x16FE0, 'V'),
+ (0x16FE5, 'X'),
+ (0x16FF0, 'V'),
+ (0x16FF2, 'X'),
+ (0x17000, 'V'),
+ (0x187F8, 'X'),
+ (0x18800, 'V'),
+ (0x18CD6, 'X'),
+ (0x18D00, 'V'),
+ (0x18D09, 'X'),
+ (0x1B000, 'V'),
+ (0x1B11F, 'X'),
+ (0x1B150, 'V'),
+ (0x1B153, 'X'),
+ (0x1B164, 'V'),
+ (0x1B168, 'X'),
+ (0x1B170, 'V'),
+ (0x1B2FC, 'X'),
+ (0x1BC00, 'V'),
+ (0x1BC6B, 'X'),
+ (0x1BC70, 'V'),
+ (0x1BC7D, 'X'),
+ (0x1BC80, 'V'),
+ (0x1BC89, 'X'),
+ (0x1BC90, 'V'),
+ (0x1BC9A, 'X'),
+ (0x1BC9C, 'V'),
+ (0x1BCA0, 'I'),
+ (0x1BCA4, 'X'),
+ (0x1D000, 'V'),
+ (0x1D0F6, 'X'),
+ (0x1D100, 'V'),
+ (0x1D127, 'X'),
+ (0x1D129, 'V'),
+ (0x1D15E, 'M', u'𝅗𝅥'),
+ (0x1D15F, 'M', u'𝅘𝅥'),
+ (0x1D160, 'M', u'𝅘𝅥𝅮'),
+ (0x1D161, 'M', u'𝅘𝅥𝅯'),
+ (0x1D162, 'M', u'𝅘𝅥𝅰'),
+ (0x1D163, 'M', u'𝅘𝅥𝅱'),
+ (0x1D164, 'M', u'𝅘𝅥𝅲'),
+ (0x1D165, 'V'),
+ ]
+
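+# The Mathematical Alphanumeric Symbols block (0x1D400-0x1D7FF), which fills
+# the next several segments, folds every styled alphabet (bold, italic,
+# script, fraktur, double-struck, sans-serif, monospace) down to plain
+# lowercase ASCII, Greek, or digits; reserved holes such as 0x1D455 are 'X'.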
+def _seg_59():
+ return [
+ (0x1D173, 'X'),
+ (0x1D17B, 'V'),
+ (0x1D1BB, 'M', u'𝆹𝅥'),
+ (0x1D1BC, 'M', u'𝆺𝅥'),
+ (0x1D1BD, 'M', u'𝆹𝅥𝅮'),
+ (0x1D1BE, 'M', u'𝆺𝅥𝅮'),
+ (0x1D1BF, 'M', u'𝆹𝅥𝅯'),
+ (0x1D1C0, 'M', u'𝆺𝅥𝅯'),
+ (0x1D1C1, 'V'),
+ (0x1D1E9, 'X'),
+ (0x1D200, 'V'),
+ (0x1D246, 'X'),
+ (0x1D2E0, 'V'),
+ (0x1D2F4, 'X'),
+ (0x1D300, 'V'),
+ (0x1D357, 'X'),
+ (0x1D360, 'V'),
+ (0x1D379, 'X'),
+ (0x1D400, 'M', u'a'),
+ (0x1D401, 'M', u'b'),
+ (0x1D402, 'M', u'c'),
+ (0x1D403, 'M', u'd'),
+ (0x1D404, 'M', u'e'),
+ (0x1D405, 'M', u'f'),
+ (0x1D406, 'M', u'g'),
+ (0x1D407, 'M', u'h'),
+ (0x1D408, 'M', u'i'),
+ (0x1D409, 'M', u'j'),
+ (0x1D40A, 'M', u'k'),
+ (0x1D40B, 'M', u'l'),
+ (0x1D40C, 'M', u'm'),
+ (0x1D40D, 'M', u'n'),
+ (0x1D40E, 'M', u'o'),
+ (0x1D40F, 'M', u'p'),
+ (0x1D410, 'M', u'q'),
+ (0x1D411, 'M', u'r'),
+ (0x1D412, 'M', u's'),
+ (0x1D413, 'M', u't'),
+ (0x1D414, 'M', u'u'),
+ (0x1D415, 'M', u'v'),
+ (0x1D416, 'M', u'w'),
+ (0x1D417, 'M', u'x'),
+ (0x1D418, 'M', u'y'),
+ (0x1D419, 'M', u'z'),
+ (0x1D41A, 'M', u'a'),
+ (0x1D41B, 'M', u'b'),
+ (0x1D41C, 'M', u'c'),
+ (0x1D41D, 'M', u'd'),
+ (0x1D41E, 'M', u'e'),
+ (0x1D41F, 'M', u'f'),
+ (0x1D420, 'M', u'g'),
+ (0x1D421, 'M', u'h'),
+ (0x1D422, 'M', u'i'),
+ (0x1D423, 'M', u'j'),
+ (0x1D424, 'M', u'k'),
+ (0x1D425, 'M', u'l'),
+ (0x1D426, 'M', u'm'),
+ (0x1D427, 'M', u'n'),
+ (0x1D428, 'M', u'o'),
+ (0x1D429, 'M', u'p'),
+ (0x1D42A, 'M', u'q'),
+ (0x1D42B, 'M', u'r'),
+ (0x1D42C, 'M', u's'),
+ (0x1D42D, 'M', u't'),
+ (0x1D42E, 'M', u'u'),
+ (0x1D42F, 'M', u'v'),
+ (0x1D430, 'M', u'w'),
+ (0x1D431, 'M', u'x'),
+ (0x1D432, 'M', u'y'),
+ (0x1D433, 'M', u'z'),
+ (0x1D434, 'M', u'a'),
+ (0x1D435, 'M', u'b'),
+ (0x1D436, 'M', u'c'),
+ (0x1D437, 'M', u'd'),
+ (0x1D438, 'M', u'e'),
+ (0x1D439, 'M', u'f'),
+ (0x1D43A, 'M', u'g'),
+ (0x1D43B, 'M', u'h'),
+ (0x1D43C, 'M', u'i'),
+ (0x1D43D, 'M', u'j'),
+ (0x1D43E, 'M', u'k'),
+ (0x1D43F, 'M', u'l'),
+ (0x1D440, 'M', u'm'),
+ (0x1D441, 'M', u'n'),
+ (0x1D442, 'M', u'o'),
+ (0x1D443, 'M', u'p'),
+ (0x1D444, 'M', u'q'),
+ (0x1D445, 'M', u'r'),
+ (0x1D446, 'M', u's'),
+ (0x1D447, 'M', u't'),
+ (0x1D448, 'M', u'u'),
+ (0x1D449, 'M', u'v'),
+ (0x1D44A, 'M', u'w'),
+ (0x1D44B, 'M', u'x'),
+ (0x1D44C, 'M', u'y'),
+ (0x1D44D, 'M', u'z'),
+ (0x1D44E, 'M', u'a'),
+ (0x1D44F, 'M', u'b'),
+ (0x1D450, 'M', u'c'),
+ (0x1D451, 'M', u'd'),
+ ]
+
+def _seg_60():
+ return [
+ (0x1D452, 'M', u'e'),
+ (0x1D453, 'M', u'f'),
+ (0x1D454, 'M', u'g'),
+ (0x1D455, 'X'),
+ (0x1D456, 'M', u'i'),
+ (0x1D457, 'M', u'j'),
+ (0x1D458, 'M', u'k'),
+ (0x1D459, 'M', u'l'),
+ (0x1D45A, 'M', u'm'),
+ (0x1D45B, 'M', u'n'),
+ (0x1D45C, 'M', u'o'),
+ (0x1D45D, 'M', u'p'),
+ (0x1D45E, 'M', u'q'),
+ (0x1D45F, 'M', u'r'),
+ (0x1D460, 'M', u's'),
+ (0x1D461, 'M', u't'),
+ (0x1D462, 'M', u'u'),
+ (0x1D463, 'M', u'v'),
+ (0x1D464, 'M', u'w'),
+ (0x1D465, 'M', u'x'),
+ (0x1D466, 'M', u'y'),
+ (0x1D467, 'M', u'z'),
+ (0x1D468, 'M', u'a'),
+ (0x1D469, 'M', u'b'),
+ (0x1D46A, 'M', u'c'),
+ (0x1D46B, 'M', u'd'),
+ (0x1D46C, 'M', u'e'),
+ (0x1D46D, 'M', u'f'),
+ (0x1D46E, 'M', u'g'),
+ (0x1D46F, 'M', u'h'),
+ (0x1D470, 'M', u'i'),
+ (0x1D471, 'M', u'j'),
+ (0x1D472, 'M', u'k'),
+ (0x1D473, 'M', u'l'),
+ (0x1D474, 'M', u'm'),
+ (0x1D475, 'M', u'n'),
+ (0x1D476, 'M', u'o'),
+ (0x1D477, 'M', u'p'),
+ (0x1D478, 'M', u'q'),
+ (0x1D479, 'M', u'r'),
+ (0x1D47A, 'M', u's'),
+ (0x1D47B, 'M', u't'),
+ (0x1D47C, 'M', u'u'),
+ (0x1D47D, 'M', u'v'),
+ (0x1D47E, 'M', u'w'),
+ (0x1D47F, 'M', u'x'),
+ (0x1D480, 'M', u'y'),
+ (0x1D481, 'M', u'z'),
+ (0x1D482, 'M', u'a'),
+ (0x1D483, 'M', u'b'),
+ (0x1D484, 'M', u'c'),
+ (0x1D485, 'M', u'd'),
+ (0x1D486, 'M', u'e'),
+ (0x1D487, 'M', u'f'),
+ (0x1D488, 'M', u'g'),
+ (0x1D489, 'M', u'h'),
+ (0x1D48A, 'M', u'i'),
+ (0x1D48B, 'M', u'j'),
+ (0x1D48C, 'M', u'k'),
+ (0x1D48D, 'M', u'l'),
+ (0x1D48E, 'M', u'm'),
+ (0x1D48F, 'M', u'n'),
+ (0x1D490, 'M', u'o'),
+ (0x1D491, 'M', u'p'),
+ (0x1D492, 'M', u'q'),
+ (0x1D493, 'M', u'r'),
+ (0x1D494, 'M', u's'),
+ (0x1D495, 'M', u't'),
+ (0x1D496, 'M', u'u'),
+ (0x1D497, 'M', u'v'),
+ (0x1D498, 'M', u'w'),
+ (0x1D499, 'M', u'x'),
+ (0x1D49A, 'M', u'y'),
+ (0x1D49B, 'M', u'z'),
+ (0x1D49C, 'M', u'a'),
+ (0x1D49D, 'X'),
+ (0x1D49E, 'M', u'c'),
+ (0x1D49F, 'M', u'd'),
+ (0x1D4A0, 'X'),
+ (0x1D4A2, 'M', u'g'),
+ (0x1D4A3, 'X'),
+ (0x1D4A5, 'M', u'j'),
+ (0x1D4A6, 'M', u'k'),
+ (0x1D4A7, 'X'),
+ (0x1D4A9, 'M', u'n'),
+ (0x1D4AA, 'M', u'o'),
+ (0x1D4AB, 'M', u'p'),
+ (0x1D4AC, 'M', u'q'),
+ (0x1D4AD, 'X'),
+ (0x1D4AE, 'M', u's'),
+ (0x1D4AF, 'M', u't'),
+ (0x1D4B0, 'M', u'u'),
+ (0x1D4B1, 'M', u'v'),
+ (0x1D4B2, 'M', u'w'),
+ (0x1D4B3, 'M', u'x'),
+ (0x1D4B4, 'M', u'y'),
+ (0x1D4B5, 'M', u'z'),
+ (0x1D4B6, 'M', u'a'),
+ (0x1D4B7, 'M', u'b'),
+ (0x1D4B8, 'M', u'c'),
+ ]
+
+def _seg_61():
+ return [
+ (0x1D4B9, 'M', u'd'),
+ (0x1D4BA, 'X'),
+ (0x1D4BB, 'M', u'f'),
+ (0x1D4BC, 'X'),
+ (0x1D4BD, 'M', u'h'),
+ (0x1D4BE, 'M', u'i'),
+ (0x1D4BF, 'M', u'j'),
+ (0x1D4C0, 'M', u'k'),
+ (0x1D4C1, 'M', u'l'),
+ (0x1D4C2, 'M', u'm'),
+ (0x1D4C3, 'M', u'n'),
+ (0x1D4C4, 'X'),
+ (0x1D4C5, 'M', u'p'),
+ (0x1D4C6, 'M', u'q'),
+ (0x1D4C7, 'M', u'r'),
+ (0x1D4C8, 'M', u's'),
+ (0x1D4C9, 'M', u't'),
+ (0x1D4CA, 'M', u'u'),
+ (0x1D4CB, 'M', u'v'),
+ (0x1D4CC, 'M', u'w'),
+ (0x1D4CD, 'M', u'x'),
+ (0x1D4CE, 'M', u'y'),
+ (0x1D4CF, 'M', u'z'),
+ (0x1D4D0, 'M', u'a'),
+ (0x1D4D1, 'M', u'b'),
+ (0x1D4D2, 'M', u'c'),
+ (0x1D4D3, 'M', u'd'),
+ (0x1D4D4, 'M', u'e'),
+ (0x1D4D5, 'M', u'f'),
+ (0x1D4D6, 'M', u'g'),
+ (0x1D4D7, 'M', u'h'),
+ (0x1D4D8, 'M', u'i'),
+ (0x1D4D9, 'M', u'j'),
+ (0x1D4DA, 'M', u'k'),
+ (0x1D4DB, 'M', u'l'),
+ (0x1D4DC, 'M', u'm'),
+ (0x1D4DD, 'M', u'n'),
+ (0x1D4DE, 'M', u'o'),
+ (0x1D4DF, 'M', u'p'),
+ (0x1D4E0, 'M', u'q'),
+ (0x1D4E1, 'M', u'r'),
+ (0x1D4E2, 'M', u's'),
+ (0x1D4E3, 'M', u't'),
+ (0x1D4E4, 'M', u'u'),
+ (0x1D4E5, 'M', u'v'),
+ (0x1D4E6, 'M', u'w'),
+ (0x1D4E7, 'M', u'x'),
+ (0x1D4E8, 'M', u'y'),
+ (0x1D4E9, 'M', u'z'),
+ (0x1D4EA, 'M', u'a'),
+ (0x1D4EB, 'M', u'b'),
+ (0x1D4EC, 'M', u'c'),
+ (0x1D4ED, 'M', u'd'),
+ (0x1D4EE, 'M', u'e'),
+ (0x1D4EF, 'M', u'f'),
+ (0x1D4F0, 'M', u'g'),
+ (0x1D4F1, 'M', u'h'),
+ (0x1D4F2, 'M', u'i'),
+ (0x1D4F3, 'M', u'j'),
+ (0x1D4F4, 'M', u'k'),
+ (0x1D4F5, 'M', u'l'),
+ (0x1D4F6, 'M', u'm'),
+ (0x1D4F7, 'M', u'n'),
+ (0x1D4F8, 'M', u'o'),
+ (0x1D4F9, 'M', u'p'),
+ (0x1D4FA, 'M', u'q'),
+ (0x1D4FB, 'M', u'r'),
+ (0x1D4FC, 'M', u's'),
+ (0x1D4FD, 'M', u't'),
+ (0x1D4FE, 'M', u'u'),
+ (0x1D4FF, 'M', u'v'),
+ (0x1D500, 'M', u'w'),
+ (0x1D501, 'M', u'x'),
+ (0x1D502, 'M', u'y'),
+ (0x1D503, 'M', u'z'),
+ (0x1D504, 'M', u'a'),
+ (0x1D505, 'M', u'b'),
+ (0x1D506, 'X'),
+ (0x1D507, 'M', u'd'),
+ (0x1D508, 'M', u'e'),
+ (0x1D509, 'M', u'f'),
+ (0x1D50A, 'M', u'g'),
+ (0x1D50B, 'X'),
+ (0x1D50D, 'M', u'j'),
+ (0x1D50E, 'M', u'k'),
+ (0x1D50F, 'M', u'l'),
+ (0x1D510, 'M', u'm'),
+ (0x1D511, 'M', u'n'),
+ (0x1D512, 'M', u'o'),
+ (0x1D513, 'M', u'p'),
+ (0x1D514, 'M', u'q'),
+ (0x1D515, 'X'),
+ (0x1D516, 'M', u's'),
+ (0x1D517, 'M', u't'),
+ (0x1D518, 'M', u'u'),
+ (0x1D519, 'M', u'v'),
+ (0x1D51A, 'M', u'w'),
+ (0x1D51B, 'M', u'x'),
+ (0x1D51C, 'M', u'y'),
+ (0x1D51D, 'X'),
+ ]
+
+def _seg_62():
+ return [
+ (0x1D51E, 'M', u'a'),
+ (0x1D51F, 'M', u'b'),
+ (0x1D520, 'M', u'c'),
+ (0x1D521, 'M', u'd'),
+ (0x1D522, 'M', u'e'),
+ (0x1D523, 'M', u'f'),
+ (0x1D524, 'M', u'g'),
+ (0x1D525, 'M', u'h'),
+ (0x1D526, 'M', u'i'),
+ (0x1D527, 'M', u'j'),
+ (0x1D528, 'M', u'k'),
+ (0x1D529, 'M', u'l'),
+ (0x1D52A, 'M', u'm'),
+ (0x1D52B, 'M', u'n'),
+ (0x1D52C, 'M', u'o'),
+ (0x1D52D, 'M', u'p'),
+ (0x1D52E, 'M', u'q'),
+ (0x1D52F, 'M', u'r'),
+ (0x1D530, 'M', u's'),
+ (0x1D531, 'M', u't'),
+ (0x1D532, 'M', u'u'),
+ (0x1D533, 'M', u'v'),
+ (0x1D534, 'M', u'w'),
+ (0x1D535, 'M', u'x'),
+ (0x1D536, 'M', u'y'),
+ (0x1D537, 'M', u'z'),
+ (0x1D538, 'M', u'a'),
+ (0x1D539, 'M', u'b'),
+ (0x1D53A, 'X'),
+ (0x1D53B, 'M', u'd'),
+ (0x1D53C, 'M', u'e'),
+ (0x1D53D, 'M', u'f'),
+ (0x1D53E, 'M', u'g'),
+ (0x1D53F, 'X'),
+ (0x1D540, 'M', u'i'),
+ (0x1D541, 'M', u'j'),
+ (0x1D542, 'M', u'k'),
+ (0x1D543, 'M', u'l'),
+ (0x1D544, 'M', u'm'),
+ (0x1D545, 'X'),
+ (0x1D546, 'M', u'o'),
+ (0x1D547, 'X'),
+ (0x1D54A, 'M', u's'),
+ (0x1D54B, 'M', u't'),
+ (0x1D54C, 'M', u'u'),
+ (0x1D54D, 'M', u'v'),
+ (0x1D54E, 'M', u'w'),
+ (0x1D54F, 'M', u'x'),
+ (0x1D550, 'M', u'y'),
+ (0x1D551, 'X'),
+ (0x1D552, 'M', u'a'),
+ (0x1D553, 'M', u'b'),
+ (0x1D554, 'M', u'c'),
+ (0x1D555, 'M', u'd'),
+ (0x1D556, 'M', u'e'),
+ (0x1D557, 'M', u'f'),
+ (0x1D558, 'M', u'g'),
+ (0x1D559, 'M', u'h'),
+ (0x1D55A, 'M', u'i'),
+ (0x1D55B, 'M', u'j'),
+ (0x1D55C, 'M', u'k'),
+ (0x1D55D, 'M', u'l'),
+ (0x1D55E, 'M', u'm'),
+ (0x1D55F, 'M', u'n'),
+ (0x1D560, 'M', u'o'),
+ (0x1D561, 'M', u'p'),
+ (0x1D562, 'M', u'q'),
+ (0x1D563, 'M', u'r'),
+ (0x1D564, 'M', u's'),
+ (0x1D565, 'M', u't'),
+ (0x1D566, 'M', u'u'),
+ (0x1D567, 'M', u'v'),
+ (0x1D568, 'M', u'w'),
+ (0x1D569, 'M', u'x'),
+ (0x1D56A, 'M', u'y'),
+ (0x1D56B, 'M', u'z'),
+ (0x1D56C, 'M', u'a'),
+ (0x1D56D, 'M', u'b'),
+ (0x1D56E, 'M', u'c'),
+ (0x1D56F, 'M', u'd'),
+ (0x1D570, 'M', u'e'),
+ (0x1D571, 'M', u'f'),
+ (0x1D572, 'M', u'g'),
+ (0x1D573, 'M', u'h'),
+ (0x1D574, 'M', u'i'),
+ (0x1D575, 'M', u'j'),
+ (0x1D576, 'M', u'k'),
+ (0x1D577, 'M', u'l'),
+ (0x1D578, 'M', u'm'),
+ (0x1D579, 'M', u'n'),
+ (0x1D57A, 'M', u'o'),
+ (0x1D57B, 'M', u'p'),
+ (0x1D57C, 'M', u'q'),
+ (0x1D57D, 'M', u'r'),
+ (0x1D57E, 'M', u's'),
+ (0x1D57F, 'M', u't'),
+ (0x1D580, 'M', u'u'),
+ (0x1D581, 'M', u'v'),
+ (0x1D582, 'M', u'w'),
+ (0x1D583, 'M', u'x'),
+ ]
+
+def _seg_63():
+ return [
+ (0x1D584, 'M', u'y'),
+ (0x1D585, 'M', u'z'),
+ (0x1D586, 'M', u'a'),
+ (0x1D587, 'M', u'b'),
+ (0x1D588, 'M', u'c'),
+ (0x1D589, 'M', u'd'),
+ (0x1D58A, 'M', u'e'),
+ (0x1D58B, 'M', u'f'),
+ (0x1D58C, 'M', u'g'),
+ (0x1D58D, 'M', u'h'),
+ (0x1D58E, 'M', u'i'),
+ (0x1D58F, 'M', u'j'),
+ (0x1D590, 'M', u'k'),
+ (0x1D591, 'M', u'l'),
+ (0x1D592, 'M', u'm'),
+ (0x1D593, 'M', u'n'),
+ (0x1D594, 'M', u'o'),
+ (0x1D595, 'M', u'p'),
+ (0x1D596, 'M', u'q'),
+ (0x1D597, 'M', u'r'),
+ (0x1D598, 'M', u's'),
+ (0x1D599, 'M', u't'),
+ (0x1D59A, 'M', u'u'),
+ (0x1D59B, 'M', u'v'),
+ (0x1D59C, 'M', u'w'),
+ (0x1D59D, 'M', u'x'),
+ (0x1D59E, 'M', u'y'),
+ (0x1D59F, 'M', u'z'),
+ (0x1D5A0, 'M', u'a'),
+ (0x1D5A1, 'M', u'b'),
+ (0x1D5A2, 'M', u'c'),
+ (0x1D5A3, 'M', u'd'),
+ (0x1D5A4, 'M', u'e'),
+ (0x1D5A5, 'M', u'f'),
+ (0x1D5A6, 'M', u'g'),
+ (0x1D5A7, 'M', u'h'),
+ (0x1D5A8, 'M', u'i'),
+ (0x1D5A9, 'M', u'j'),
+ (0x1D5AA, 'M', u'k'),
+ (0x1D5AB, 'M', u'l'),
+ (0x1D5AC, 'M', u'm'),
+ (0x1D5AD, 'M', u'n'),
+ (0x1D5AE, 'M', u'o'),
+ (0x1D5AF, 'M', u'p'),
+ (0x1D5B0, 'M', u'q'),
+ (0x1D5B1, 'M', u'r'),
+ (0x1D5B2, 'M', u's'),
+ (0x1D5B3, 'M', u't'),
+ (0x1D5B4, 'M', u'u'),
+ (0x1D5B5, 'M', u'v'),
+ (0x1D5B6, 'M', u'w'),
+ (0x1D5B7, 'M', u'x'),
+ (0x1D5B8, 'M', u'y'),
+ (0x1D5B9, 'M', u'z'),
+ (0x1D5BA, 'M', u'a'),
+ (0x1D5BB, 'M', u'b'),
+ (0x1D5BC, 'M', u'c'),
+ (0x1D5BD, 'M', u'd'),
+ (0x1D5BE, 'M', u'e'),
+ (0x1D5BF, 'M', u'f'),
+ (0x1D5C0, 'M', u'g'),
+ (0x1D5C1, 'M', u'h'),
+ (0x1D5C2, 'M', u'i'),
+ (0x1D5C3, 'M', u'j'),
+ (0x1D5C4, 'M', u'k'),
+ (0x1D5C5, 'M', u'l'),
+ (0x1D5C6, 'M', u'm'),
+ (0x1D5C7, 'M', u'n'),
+ (0x1D5C8, 'M', u'o'),
+ (0x1D5C9, 'M', u'p'),
+ (0x1D5CA, 'M', u'q'),
+ (0x1D5CB, 'M', u'r'),
+ (0x1D5CC, 'M', u's'),
+ (0x1D5CD, 'M', u't'),
+ (0x1D5CE, 'M', u'u'),
+ (0x1D5CF, 'M', u'v'),
+ (0x1D5D0, 'M', u'w'),
+ (0x1D5D1, 'M', u'x'),
+ (0x1D5D2, 'M', u'y'),
+ (0x1D5D3, 'M', u'z'),
+ (0x1D5D4, 'M', u'a'),
+ (0x1D5D5, 'M', u'b'),
+ (0x1D5D6, 'M', u'c'),
+ (0x1D5D7, 'M', u'd'),
+ (0x1D5D8, 'M', u'e'),
+ (0x1D5D9, 'M', u'f'),
+ (0x1D5DA, 'M', u'g'),
+ (0x1D5DB, 'M', u'h'),
+ (0x1D5DC, 'M', u'i'),
+ (0x1D5DD, 'M', u'j'),
+ (0x1D5DE, 'M', u'k'),
+ (0x1D5DF, 'M', u'l'),
+ (0x1D5E0, 'M', u'm'),
+ (0x1D5E1, 'M', u'n'),
+ (0x1D5E2, 'M', u'o'),
+ (0x1D5E3, 'M', u'p'),
+ (0x1D5E4, 'M', u'q'),
+ (0x1D5E5, 'M', u'r'),
+ (0x1D5E6, 'M', u's'),
+ (0x1D5E7, 'M', u't'),
+ ]
+
+def _seg_64():
+ return [
+ (0x1D5E8, 'M', u'u'),
+ (0x1D5E9, 'M', u'v'),
+ (0x1D5EA, 'M', u'w'),
+ (0x1D5EB, 'M', u'x'),
+ (0x1D5EC, 'M', u'y'),
+ (0x1D5ED, 'M', u'z'),
+ (0x1D5EE, 'M', u'a'),
+ (0x1D5EF, 'M', u'b'),
+ (0x1D5F0, 'M', u'c'),
+ (0x1D5F1, 'M', u'd'),
+ (0x1D5F2, 'M', u'e'),
+ (0x1D5F3, 'M', u'f'),
+ (0x1D5F4, 'M', u'g'),
+ (0x1D5F5, 'M', u'h'),
+ (0x1D5F6, 'M', u'i'),
+ (0x1D5F7, 'M', u'j'),
+ (0x1D5F8, 'M', u'k'),
+ (0x1D5F9, 'M', u'l'),
+ (0x1D5FA, 'M', u'm'),
+ (0x1D5FB, 'M', u'n'),
+ (0x1D5FC, 'M', u'o'),
+ (0x1D5FD, 'M', u'p'),
+ (0x1D5FE, 'M', u'q'),
+ (0x1D5FF, 'M', u'r'),
+ (0x1D600, 'M', u's'),
+ (0x1D601, 'M', u't'),
+ (0x1D602, 'M', u'u'),
+ (0x1D603, 'M', u'v'),
+ (0x1D604, 'M', u'w'),
+ (0x1D605, 'M', u'x'),
+ (0x1D606, 'M', u'y'),
+ (0x1D607, 'M', u'z'),
+ (0x1D608, 'M', u'a'),
+ (0x1D609, 'M', u'b'),
+ (0x1D60A, 'M', u'c'),
+ (0x1D60B, 'M', u'd'),
+ (0x1D60C, 'M', u'e'),
+ (0x1D60D, 'M', u'f'),
+ (0x1D60E, 'M', u'g'),
+ (0x1D60F, 'M', u'h'),
+ (0x1D610, 'M', u'i'),
+ (0x1D611, 'M', u'j'),
+ (0x1D612, 'M', u'k'),
+ (0x1D613, 'M', u'l'),
+ (0x1D614, 'M', u'm'),
+ (0x1D615, 'M', u'n'),
+ (0x1D616, 'M', u'o'),
+ (0x1D617, 'M', u'p'),
+ (0x1D618, 'M', u'q'),
+ (0x1D619, 'M', u'r'),
+ (0x1D61A, 'M', u's'),
+ (0x1D61B, 'M', u't'),
+ (0x1D61C, 'M', u'u'),
+ (0x1D61D, 'M', u'v'),
+ (0x1D61E, 'M', u'w'),
+ (0x1D61F, 'M', u'x'),
+ (0x1D620, 'M', u'y'),
+ (0x1D621, 'M', u'z'),
+ (0x1D622, 'M', u'a'),
+ (0x1D623, 'M', u'b'),
+ (0x1D624, 'M', u'c'),
+ (0x1D625, 'M', u'd'),
+ (0x1D626, 'M', u'e'),
+ (0x1D627, 'M', u'f'),
+ (0x1D628, 'M', u'g'),
+ (0x1D629, 'M', u'h'),
+ (0x1D62A, 'M', u'i'),
+ (0x1D62B, 'M', u'j'),
+ (0x1D62C, 'M', u'k'),
+ (0x1D62D, 'M', u'l'),
+ (0x1D62E, 'M', u'm'),
+ (0x1D62F, 'M', u'n'),
+ (0x1D630, 'M', u'o'),
+ (0x1D631, 'M', u'p'),
+ (0x1D632, 'M', u'q'),
+ (0x1D633, 'M', u'r'),
+ (0x1D634, 'M', u's'),
+ (0x1D635, 'M', u't'),
+ (0x1D636, 'M', u'u'),
+ (0x1D637, 'M', u'v'),
+ (0x1D638, 'M', u'w'),
+ (0x1D639, 'M', u'x'),
+ (0x1D63A, 'M', u'y'),
+ (0x1D63B, 'M', u'z'),
+ (0x1D63C, 'M', u'a'),
+ (0x1D63D, 'M', u'b'),
+ (0x1D63E, 'M', u'c'),
+ (0x1D63F, 'M', u'd'),
+ (0x1D640, 'M', u'e'),
+ (0x1D641, 'M', u'f'),
+ (0x1D642, 'M', u'g'),
+ (0x1D643, 'M', u'h'),
+ (0x1D644, 'M', u'i'),
+ (0x1D645, 'M', u'j'),
+ (0x1D646, 'M', u'k'),
+ (0x1D647, 'M', u'l'),
+ (0x1D648, 'M', u'm'),
+ (0x1D649, 'M', u'n'),
+ (0x1D64A, 'M', u'o'),
+ (0x1D64B, 'M', u'p'),
+ ]
+
+def _seg_65():
+ return [
+ (0x1D64C, 'M', u'q'),
+ (0x1D64D, 'M', u'r'),
+ (0x1D64E, 'M', u's'),
+ (0x1D64F, 'M', u't'),
+ (0x1D650, 'M', u'u'),
+ (0x1D651, 'M', u'v'),
+ (0x1D652, 'M', u'w'),
+ (0x1D653, 'M', u'x'),
+ (0x1D654, 'M', u'y'),
+ (0x1D655, 'M', u'z'),
+ (0x1D656, 'M', u'a'),
+ (0x1D657, 'M', u'b'),
+ (0x1D658, 'M', u'c'),
+ (0x1D659, 'M', u'd'),
+ (0x1D65A, 'M', u'e'),
+ (0x1D65B, 'M', u'f'),
+ (0x1D65C, 'M', u'g'),
+ (0x1D65D, 'M', u'h'),
+ (0x1D65E, 'M', u'i'),
+ (0x1D65F, 'M', u'j'),
+ (0x1D660, 'M', u'k'),
+ (0x1D661, 'M', u'l'),
+ (0x1D662, 'M', u'm'),
+ (0x1D663, 'M', u'n'),
+ (0x1D664, 'M', u'o'),
+ (0x1D665, 'M', u'p'),
+ (0x1D666, 'M', u'q'),
+ (0x1D667, 'M', u'r'),
+ (0x1D668, 'M', u's'),
+ (0x1D669, 'M', u't'),
+ (0x1D66A, 'M', u'u'),
+ (0x1D66B, 'M', u'v'),
+ (0x1D66C, 'M', u'w'),
+ (0x1D66D, 'M', u'x'),
+ (0x1D66E, 'M', u'y'),
+ (0x1D66F, 'M', u'z'),
+ (0x1D670, 'M', u'a'),
+ (0x1D671, 'M', u'b'),
+ (0x1D672, 'M', u'c'),
+ (0x1D673, 'M', u'd'),
+ (0x1D674, 'M', u'e'),
+ (0x1D675, 'M', u'f'),
+ (0x1D676, 'M', u'g'),
+ (0x1D677, 'M', u'h'),
+ (0x1D678, 'M', u'i'),
+ (0x1D679, 'M', u'j'),
+ (0x1D67A, 'M', u'k'),
+ (0x1D67B, 'M', u'l'),
+ (0x1D67C, 'M', u'm'),
+ (0x1D67D, 'M', u'n'),
+ (0x1D67E, 'M', u'o'),
+ (0x1D67F, 'M', u'p'),
+ (0x1D680, 'M', u'q'),
+ (0x1D681, 'M', u'r'),
+ (0x1D682, 'M', u's'),
+ (0x1D683, 'M', u't'),
+ (0x1D684, 'M', u'u'),
+ (0x1D685, 'M', u'v'),
+ (0x1D686, 'M', u'w'),
+ (0x1D687, 'M', u'x'),
+ (0x1D688, 'M', u'y'),
+ (0x1D689, 'M', u'z'),
+ (0x1D68A, 'M', u'a'),
+ (0x1D68B, 'M', u'b'),
+ (0x1D68C, 'M', u'c'),
+ (0x1D68D, 'M', u'd'),
+ (0x1D68E, 'M', u'e'),
+ (0x1D68F, 'M', u'f'),
+ (0x1D690, 'M', u'g'),
+ (0x1D691, 'M', u'h'),
+ (0x1D692, 'M', u'i'),
+ (0x1D693, 'M', u'j'),
+ (0x1D694, 'M', u'k'),
+ (0x1D695, 'M', u'l'),
+ (0x1D696, 'M', u'm'),
+ (0x1D697, 'M', u'n'),
+ (0x1D698, 'M', u'o'),
+ (0x1D699, 'M', u'p'),
+ (0x1D69A, 'M', u'q'),
+ (0x1D69B, 'M', u'r'),
+ (0x1D69C, 'M', u's'),
+ (0x1D69D, 'M', u't'),
+ (0x1D69E, 'M', u'u'),
+ (0x1D69F, 'M', u'v'),
+ (0x1D6A0, 'M', u'w'),
+ (0x1D6A1, 'M', u'x'),
+ (0x1D6A2, 'M', u'y'),
+ (0x1D6A3, 'M', u'z'),
+ (0x1D6A4, 'M', u'ı'),
+ (0x1D6A5, 'M', u'ȷ'),
+ (0x1D6A6, 'X'),
+ (0x1D6A8, 'M', u'α'),
+ (0x1D6A9, 'M', u'β'),
+ (0x1D6AA, 'M', u'γ'),
+ (0x1D6AB, 'M', u'δ'),
+ (0x1D6AC, 'M', u'ε'),
+ (0x1D6AD, 'M', u'ζ'),
+ (0x1D6AE, 'M', u'η'),
+ (0x1D6AF, 'M', u'θ'),
+ (0x1D6B0, 'M', u'ι'),
+ ]
+
+def _seg_66():
+ return [
+ (0x1D6B1, 'M', u'κ'),
+ (0x1D6B2, 'M', u'λ'),
+ (0x1D6B3, 'M', u'μ'),
+ (0x1D6B4, 'M', u'ν'),
+ (0x1D6B5, 'M', u'ξ'),
+ (0x1D6B6, 'M', u'ο'),
+ (0x1D6B7, 'M', u'π'),
+ (0x1D6B8, 'M', u'ρ'),
+ (0x1D6B9, 'M', u'θ'),
+ (0x1D6BA, 'M', u'σ'),
+ (0x1D6BB, 'M', u'τ'),
+ (0x1D6BC, 'M', u'υ'),
+ (0x1D6BD, 'M', u'φ'),
+ (0x1D6BE, 'M', u'χ'),
+ (0x1D6BF, 'M', u'ψ'),
+ (0x1D6C0, 'M', u'ω'),
+ (0x1D6C1, 'M', u'∇'),
+ (0x1D6C2, 'M', u'α'),
+ (0x1D6C3, 'M', u'β'),
+ (0x1D6C4, 'M', u'γ'),
+ (0x1D6C5, 'M', u'δ'),
+ (0x1D6C6, 'M', u'ε'),
+ (0x1D6C7, 'M', u'ζ'),
+ (0x1D6C8, 'M', u'η'),
+ (0x1D6C9, 'M', u'θ'),
+ (0x1D6CA, 'M', u'ι'),
+ (0x1D6CB, 'M', u'κ'),
+ (0x1D6CC, 'M', u'λ'),
+ (0x1D6CD, 'M', u'μ'),
+ (0x1D6CE, 'M', u'ν'),
+ (0x1D6CF, 'M', u'ξ'),
+ (0x1D6D0, 'M', u'ο'),
+ (0x1D6D1, 'M', u'π'),
+ (0x1D6D2, 'M', u'ρ'),
+ (0x1D6D3, 'M', u'σ'),
+ (0x1D6D5, 'M', u'τ'),
+ (0x1D6D6, 'M', u'υ'),
+ (0x1D6D7, 'M', u'φ'),
+ (0x1D6D8, 'M', u'χ'),
+ (0x1D6D9, 'M', u'ψ'),
+ (0x1D6DA, 'M', u'ω'),
+ (0x1D6DB, 'M', u'∂'),
+ (0x1D6DC, 'M', u'ε'),
+ (0x1D6DD, 'M', u'θ'),
+ (0x1D6DE, 'M', u'κ'),
+ (0x1D6DF, 'M', u'φ'),
+ (0x1D6E0, 'M', u'ρ'),
+ (0x1D6E1, 'M', u'π'),
+ (0x1D6E2, 'M', u'α'),
+ (0x1D6E3, 'M', u'β'),
+ (0x1D6E4, 'M', u'γ'),
+ (0x1D6E5, 'M', u'δ'),
+ (0x1D6E6, 'M', u'ε'),
+ (0x1D6E7, 'M', u'ζ'),
+ (0x1D6E8, 'M', u'η'),
+ (0x1D6E9, 'M', u'θ'),
+ (0x1D6EA, 'M', u'ι'),
+ (0x1D6EB, 'M', u'κ'),
+ (0x1D6EC, 'M', u'λ'),
+ (0x1D6ED, 'M', u'μ'),
+ (0x1D6EE, 'M', u'ν'),
+ (0x1D6EF, 'M', u'ξ'),
+ (0x1D6F0, 'M', u'ο'),
+ (0x1D6F1, 'M', u'π'),
+ (0x1D6F2, 'M', u'ρ'),
+ (0x1D6F3, 'M', u'θ'),
+ (0x1D6F4, 'M', u'σ'),
+ (0x1D6F5, 'M', u'τ'),
+ (0x1D6F6, 'M', u'υ'),
+ (0x1D6F7, 'M', u'φ'),
+ (0x1D6F8, 'M', u'χ'),
+ (0x1D6F9, 'M', u'ψ'),
+ (0x1D6FA, 'M', u'ω'),
+ (0x1D6FB, 'M', u'∇'),
+ (0x1D6FC, 'M', u'α'),
+ (0x1D6FD, 'M', u'β'),
+ (0x1D6FE, 'M', u'γ'),
+ (0x1D6FF, 'M', u'δ'),
+ (0x1D700, 'M', u'ε'),
+ (0x1D701, 'M', u'ζ'),
+ (0x1D702, 'M', u'η'),
+ (0x1D703, 'M', u'θ'),
+ (0x1D704, 'M', u'ι'),
+ (0x1D705, 'M', u'κ'),
+ (0x1D706, 'M', u'λ'),
+ (0x1D707, 'M', u'μ'),
+ (0x1D708, 'M', u'ν'),
+ (0x1D709, 'M', u'ξ'),
+ (0x1D70A, 'M', u'ο'),
+ (0x1D70B, 'M', u'π'),
+ (0x1D70C, 'M', u'ρ'),
+ (0x1D70D, 'M', u'σ'),
+ (0x1D70F, 'M', u'τ'),
+ (0x1D710, 'M', u'υ'),
+ (0x1D711, 'M', u'φ'),
+ (0x1D712, 'M', u'χ'),
+ (0x1D713, 'M', u'ψ'),
+ (0x1D714, 'M', u'ω'),
+ (0x1D715, 'M', u'∂'),
+ (0x1D716, 'M', u'ε'),
+ ]
+
+def _seg_67():
+ return [
+ (0x1D717, 'M', u'θ'),
+ (0x1D718, 'M', u'κ'),
+ (0x1D719, 'M', u'φ'),
+ (0x1D71A, 'M', u'ρ'),
+ (0x1D71B, 'M', u'π'),
+ (0x1D71C, 'M', u'α'),
+ (0x1D71D, 'M', u'β'),
+ (0x1D71E, 'M', u'γ'),
+ (0x1D71F, 'M', u'δ'),
+ (0x1D720, 'M', u'ε'),
+ (0x1D721, 'M', u'ζ'),
+ (0x1D722, 'M', u'η'),
+ (0x1D723, 'M', u'θ'),
+ (0x1D724, 'M', u'ι'),
+ (0x1D725, 'M', u'κ'),
+ (0x1D726, 'M', u'λ'),
+ (0x1D727, 'M', u'μ'),
+ (0x1D728, 'M', u'ν'),
+ (0x1D729, 'M', u'ξ'),
+ (0x1D72A, 'M', u'ο'),
+ (0x1D72B, 'M', u'π'),
+ (0x1D72C, 'M', u'ρ'),
+ (0x1D72D, 'M', u'θ'),
+ (0x1D72E, 'M', u'σ'),
+ (0x1D72F, 'M', u'τ'),
+ (0x1D730, 'M', u'υ'),
+ (0x1D731, 'M', u'φ'),
+ (0x1D732, 'M', u'χ'),
+ (0x1D733, 'M', u'ψ'),
+ (0x1D734, 'M', u'ω'),
+ (0x1D735, 'M', u'∇'),
+ (0x1D736, 'M', u'α'),
+ (0x1D737, 'M', u'β'),
+ (0x1D738, 'M', u'γ'),
+ (0x1D739, 'M', u'δ'),
+ (0x1D73A, 'M', u'ε'),
+ (0x1D73B, 'M', u'ζ'),
+ (0x1D73C, 'M', u'η'),
+ (0x1D73D, 'M', u'θ'),
+ (0x1D73E, 'M', u'ι'),
+ (0x1D73F, 'M', u'κ'),
+ (0x1D740, 'M', u'λ'),
+ (0x1D741, 'M', u'μ'),
+ (0x1D742, 'M', u'ν'),
+ (0x1D743, 'M', u'ξ'),
+ (0x1D744, 'M', u'ο'),
+ (0x1D745, 'M', u'π'),
+ (0x1D746, 'M', u'ρ'),
+ (0x1D747, 'M', u'σ'),
+ (0x1D749, 'M', u'τ'),
+ (0x1D74A, 'M', u'υ'),
+ (0x1D74B, 'M', u'φ'),
+ (0x1D74C, 'M', u'χ'),
+ (0x1D74D, 'M', u'ψ'),
+ (0x1D74E, 'M', u'ω'),
+ (0x1D74F, 'M', u'∂'),
+ (0x1D750, 'M', u'ε'),
+ (0x1D751, 'M', u'θ'),
+ (0x1D752, 'M', u'κ'),
+ (0x1D753, 'M', u'φ'),
+ (0x1D754, 'M', u'ρ'),
+ (0x1D755, 'M', u'π'),
+ (0x1D756, 'M', u'α'),
+ (0x1D757, 'M', u'β'),
+ (0x1D758, 'M', u'γ'),
+ (0x1D759, 'M', u'δ'),
+ (0x1D75A, 'M', u'ε'),
+ (0x1D75B, 'M', u'ζ'),
+ (0x1D75C, 'M', u'η'),
+ (0x1D75D, 'M', u'θ'),
+ (0x1D75E, 'M', u'ι'),
+ (0x1D75F, 'M', u'κ'),
+ (0x1D760, 'M', u'λ'),
+ (0x1D761, 'M', u'μ'),
+ (0x1D762, 'M', u'ν'),
+ (0x1D763, 'M', u'ξ'),
+ (0x1D764, 'M', u'ο'),
+ (0x1D765, 'M', u'π'),
+ (0x1D766, 'M', u'ρ'),
+ (0x1D767, 'M', u'θ'),
+ (0x1D768, 'M', u'σ'),
+ (0x1D769, 'M', u'τ'),
+ (0x1D76A, 'M', u'υ'),
+ (0x1D76B, 'M', u'φ'),
+ (0x1D76C, 'M', u'χ'),
+ (0x1D76D, 'M', u'ψ'),
+ (0x1D76E, 'M', u'ω'),
+ (0x1D76F, 'M', u'∇'),
+ (0x1D770, 'M', u'α'),
+ (0x1D771, 'M', u'β'),
+ (0x1D772, 'M', u'γ'),
+ (0x1D773, 'M', u'δ'),
+ (0x1D774, 'M', u'ε'),
+ (0x1D775, 'M', u'ζ'),
+ (0x1D776, 'M', u'η'),
+ (0x1D777, 'M', u'θ'),
+ (0x1D778, 'M', u'ι'),
+ (0x1D779, 'M', u'κ'),
+ (0x1D77A, 'M', u'λ'),
+ (0x1D77B, 'M', u'μ'),
+ ]
+
+def _seg_68():
+ return [
+ (0x1D77C, 'M', u'ν'),
+ (0x1D77D, 'M', u'ξ'),
+ (0x1D77E, 'M', u'ο'),
+ (0x1D77F, 'M', u'π'),
+ (0x1D780, 'M', u'ρ'),
+ (0x1D781, 'M', u'σ'),
+ (0x1D783, 'M', u'τ'),
+ (0x1D784, 'M', u'υ'),
+ (0x1D785, 'M', u'φ'),
+ (0x1D786, 'M', u'χ'),
+ (0x1D787, 'M', u'ψ'),
+ (0x1D788, 'M', u'ω'),
+ (0x1D789, 'M', u'∂'),
+ (0x1D78A, 'M', u'ε'),
+ (0x1D78B, 'M', u'θ'),
+ (0x1D78C, 'M', u'κ'),
+ (0x1D78D, 'M', u'φ'),
+ (0x1D78E, 'M', u'ρ'),
+ (0x1D78F, 'M', u'π'),
+ (0x1D790, 'M', u'α'),
+ (0x1D791, 'M', u'β'),
+ (0x1D792, 'M', u'γ'),
+ (0x1D793, 'M', u'δ'),
+ (0x1D794, 'M', u'ε'),
+ (0x1D795, 'M', u'ζ'),
+ (0x1D796, 'M', u'η'),
+ (0x1D797, 'M', u'θ'),
+ (0x1D798, 'M', u'ι'),
+ (0x1D799, 'M', u'κ'),
+ (0x1D79A, 'M', u'λ'),
+ (0x1D79B, 'M', u'μ'),
+ (0x1D79C, 'M', u'ν'),
+ (0x1D79D, 'M', u'ξ'),
+ (0x1D79E, 'M', u'ο'),
+ (0x1D79F, 'M', u'π'),
+ (0x1D7A0, 'M', u'ρ'),
+ (0x1D7A1, 'M', u'θ'),
+ (0x1D7A2, 'M', u'σ'),
+ (0x1D7A3, 'M', u'τ'),
+ (0x1D7A4, 'M', u'υ'),
+ (0x1D7A5, 'M', u'φ'),
+ (0x1D7A6, 'M', u'χ'),
+ (0x1D7A7, 'M', u'ψ'),
+ (0x1D7A8, 'M', u'ω'),
+ (0x1D7A9, 'M', u'∇'),
+ (0x1D7AA, 'M', u'α'),
+ (0x1D7AB, 'M', u'β'),
+ (0x1D7AC, 'M', u'γ'),
+ (0x1D7AD, 'M', u'δ'),
+ (0x1D7AE, 'M', u'ε'),
+ (0x1D7AF, 'M', u'ζ'),
+ (0x1D7B0, 'M', u'η'),
+ (0x1D7B1, 'M', u'θ'),
+ (0x1D7B2, 'M', u'ι'),
+ (0x1D7B3, 'M', u'κ'),
+ (0x1D7B4, 'M', u'λ'),
+ (0x1D7B5, 'M', u'μ'),
+ (0x1D7B6, 'M', u'ν'),
+ (0x1D7B7, 'M', u'ξ'),
+ (0x1D7B8, 'M', u'ο'),
+ (0x1D7B9, 'M', u'π'),
+ (0x1D7BA, 'M', u'ρ'),
+ (0x1D7BB, 'M', u'σ'),
+ (0x1D7BD, 'M', u'τ'),
+ (0x1D7BE, 'M', u'υ'),
+ (0x1D7BF, 'M', u'φ'),
+ (0x1D7C0, 'M', u'χ'),
+ (0x1D7C1, 'M', u'ψ'),
+ (0x1D7C2, 'M', u'ω'),
+ (0x1D7C3, 'M', u'∂'),
+ (0x1D7C4, 'M', u'ε'),
+ (0x1D7C5, 'M', u'θ'),
+ (0x1D7C6, 'M', u'κ'),
+ (0x1D7C7, 'M', u'φ'),
+ (0x1D7C8, 'M', u'ρ'),
+ (0x1D7C9, 'M', u'π'),
+ (0x1D7CA, 'M', u'ϝ'),
+ (0x1D7CC, 'X'),
+ (0x1D7CE, 'M', u'0'),
+ (0x1D7CF, 'M', u'1'),
+ (0x1D7D0, 'M', u'2'),
+ (0x1D7D1, 'M', u'3'),
+ (0x1D7D2, 'M', u'4'),
+ (0x1D7D3, 'M', u'5'),
+ (0x1D7D4, 'M', u'6'),
+ (0x1D7D5, 'M', u'7'),
+ (0x1D7D6, 'M', u'8'),
+ (0x1D7D7, 'M', u'9'),
+ (0x1D7D8, 'M', u'0'),
+ (0x1D7D9, 'M', u'1'),
+ (0x1D7DA, 'M', u'2'),
+ (0x1D7DB, 'M', u'3'),
+ (0x1D7DC, 'M', u'4'),
+ (0x1D7DD, 'M', u'5'),
+ (0x1D7DE, 'M', u'6'),
+ (0x1D7DF, 'M', u'7'),
+ (0x1D7E0, 'M', u'8'),
+ (0x1D7E1, 'M', u'9'),
+ (0x1D7E2, 'M', u'0'),
+ (0x1D7E3, 'M', u'1'),
+ ]
+
+def _seg_69():
+ return [
+ (0x1D7E4, 'M', u'2'),
+ (0x1D7E5, 'M', u'3'),
+ (0x1D7E6, 'M', u'4'),
+ (0x1D7E7, 'M', u'5'),
+ (0x1D7E8, 'M', u'6'),
+ (0x1D7E9, 'M', u'7'),
+ (0x1D7EA, 'M', u'8'),
+ (0x1D7EB, 'M', u'9'),
+ (0x1D7EC, 'M', u'0'),
+ (0x1D7ED, 'M', u'1'),
+ (0x1D7EE, 'M', u'2'),
+ (0x1D7EF, 'M', u'3'),
+ (0x1D7F0, 'M', u'4'),
+ (0x1D7F1, 'M', u'5'),
+ (0x1D7F2, 'M', u'6'),
+ (0x1D7F3, 'M', u'7'),
+ (0x1D7F4, 'M', u'8'),
+ (0x1D7F5, 'M', u'9'),
+ (0x1D7F6, 'M', u'0'),
+ (0x1D7F7, 'M', u'1'),
+ (0x1D7F8, 'M', u'2'),
+ (0x1D7F9, 'M', u'3'),
+ (0x1D7FA, 'M', u'4'),
+ (0x1D7FB, 'M', u'5'),
+ (0x1D7FC, 'M', u'6'),
+ (0x1D7FD, 'M', u'7'),
+ (0x1D7FE, 'M', u'8'),
+ (0x1D7FF, 'M', u'9'),
+ (0x1D800, 'V'),
+ (0x1DA8C, 'X'),
+ (0x1DA9B, 'V'),
+ (0x1DAA0, 'X'),
+ (0x1DAA1, 'V'),
+ (0x1DAB0, 'X'),
+ (0x1E000, 'V'),
+ (0x1E007, 'X'),
+ (0x1E008, 'V'),
+ (0x1E019, 'X'),
+ (0x1E01B, 'V'),
+ (0x1E022, 'X'),
+ (0x1E023, 'V'),
+ (0x1E025, 'X'),
+ (0x1E026, 'V'),
+ (0x1E02B, 'X'),
+ (0x1E100, 'V'),
+ (0x1E12D, 'X'),
+ (0x1E130, 'V'),
+ (0x1E13E, 'X'),
+ (0x1E140, 'V'),
+ (0x1E14A, 'X'),
+ (0x1E14E, 'V'),
+ (0x1E150, 'X'),
+ (0x1E2C0, 'V'),
+ (0x1E2FA, 'X'),
+ (0x1E2FF, 'V'),
+ (0x1E300, 'X'),
+ (0x1E800, 'V'),
+ (0x1E8C5, 'X'),
+ (0x1E8C7, 'V'),
+ (0x1E8D7, 'X'),
+ (0x1E900, 'M', u'𞤢'),
+ (0x1E901, 'M', u'𞤣'),
+ (0x1E902, 'M', u'𞤤'),
+ (0x1E903, 'M', u'𞤥'),
+ (0x1E904, 'M', u'𞤦'),
+ (0x1E905, 'M', u'𞤧'),
+ (0x1E906, 'M', u'𞤨'),
+ (0x1E907, 'M', u'𞤩'),
+ (0x1E908, 'M', u'𞤪'),
+ (0x1E909, 'M', u'𞤫'),
+ (0x1E90A, 'M', u'𞤬'),
+ (0x1E90B, 'M', u'𞤭'),
+ (0x1E90C, 'M', u'𞤮'),
+ (0x1E90D, 'M', u'𞤯'),
+ (0x1E90E, 'M', u'𞤰'),
+ (0x1E90F, 'M', u'𞤱'),
+ (0x1E910, 'M', u'𞤲'),
+ (0x1E911, 'M', u'𞤳'),
+ (0x1E912, 'M', u'𞤴'),
+ (0x1E913, 'M', u'𞤵'),
+ (0x1E914, 'M', u'𞤶'),
+ (0x1E915, 'M', u'𞤷'),
+ (0x1E916, 'M', u'𞤸'),
+ (0x1E917, 'M', u'𞤹'),
+ (0x1E918, 'M', u'𞤺'),
+ (0x1E919, 'M', u'𞤻'),
+ (0x1E91A, 'M', u'𞤼'),
+ (0x1E91B, 'M', u'𞤽'),
+ (0x1E91C, 'M', u'𞤾'),
+ (0x1E91D, 'M', u'𞤿'),
+ (0x1E91E, 'M', u'𞥀'),
+ (0x1E91F, 'M', u'𞥁'),
+ (0x1E920, 'M', u'𞥂'),
+ (0x1E921, 'M', u'𞥃'),
+ (0x1E922, 'V'),
+ (0x1E94C, 'X'),
+ (0x1E950, 'V'),
+ (0x1E95A, 'X'),
+ (0x1E95E, 'V'),
+ (0x1E960, 'X'),
+ ]
+
+def _seg_70():
+ return [
+ (0x1EC71, 'V'),
+ (0x1ECB5, 'X'),
+ (0x1ED01, 'V'),
+ (0x1ED3E, 'X'),
+ (0x1EE00, 'M', u'ا'),
+ (0x1EE01, 'M', u'ب'),
+ (0x1EE02, 'M', u'ج'),
+ (0x1EE03, 'M', u'د'),
+ (0x1EE04, 'X'),
+ (0x1EE05, 'M', u'و'),
+ (0x1EE06, 'M', u'ز'),
+ (0x1EE07, 'M', u'ح'),
+ (0x1EE08, 'M', u'ط'),
+ (0x1EE09, 'M', u'ي'),
+ (0x1EE0A, 'M', u'ك'),
+ (0x1EE0B, 'M', u'ل'),
+ (0x1EE0C, 'M', u'م'),
+ (0x1EE0D, 'M', u'ن'),
+ (0x1EE0E, 'M', u'س'),
+ (0x1EE0F, 'M', u'ع'),
+ (0x1EE10, 'M', u'ف'),
+ (0x1EE11, 'M', u'ص'),
+ (0x1EE12, 'M', u'ق'),
+ (0x1EE13, 'M', u'ر'),
+ (0x1EE14, 'M', u'ش'),
+ (0x1EE15, 'M', u'ت'),
+ (0x1EE16, 'M', u'ث'),
+ (0x1EE17, 'M', u'خ'),
+ (0x1EE18, 'M', u'ذ'),
+ (0x1EE19, 'M', u'ض'),
+ (0x1EE1A, 'M', u'ظ'),
+ (0x1EE1B, 'M', u'غ'),
+ (0x1EE1C, 'M', u'ٮ'),
+ (0x1EE1D, 'M', u'ں'),
+ (0x1EE1E, 'M', u'ڡ'),
+ (0x1EE1F, 'M', u'ٯ'),
+ (0x1EE20, 'X'),
+ (0x1EE21, 'M', u'ب'),
+ (0x1EE22, 'M', u'ج'),
+ (0x1EE23, 'X'),
+ (0x1EE24, 'M', u'ه'),
+ (0x1EE25, 'X'),
+ (0x1EE27, 'M', u'ح'),
+ (0x1EE28, 'X'),
+ (0x1EE29, 'M', u'ي'),
+ (0x1EE2A, 'M', u'ك'),
+ (0x1EE2B, 'M', u'ل'),
+ (0x1EE2C, 'M', u'م'),
+ (0x1EE2D, 'M', u'ن'),
+ (0x1EE2E, 'M', u'س'),
+ (0x1EE2F, 'M', u'ع'),
+ (0x1EE30, 'M', u'ف'),
+ (0x1EE31, 'M', u'ص'),
+ (0x1EE32, 'M', u'ق'),
+ (0x1EE33, 'X'),
+ (0x1EE34, 'M', u'ش'),
+ (0x1EE35, 'M', u'ت'),
+ (0x1EE36, 'M', u'ث'),
+ (0x1EE37, 'M', u'خ'),
+ (0x1EE38, 'X'),
+ (0x1EE39, 'M', u'ض'),
+ (0x1EE3A, 'X'),
+ (0x1EE3B, 'M', u'غ'),
+ (0x1EE3C, 'X'),
+ (0x1EE42, 'M', u'ج'),
+ (0x1EE43, 'X'),
+ (0x1EE47, 'M', u'ح'),
+ (0x1EE48, 'X'),
+ (0x1EE49, 'M', u'ي'),
+ (0x1EE4A, 'X'),
+ (0x1EE4B, 'M', u'ل'),
+ (0x1EE4C, 'X'),
+ (0x1EE4D, 'M', u'ن'),
+ (0x1EE4E, 'M', u'س'),
+ (0x1EE4F, 'M', u'ع'),
+ (0x1EE50, 'X'),
+ (0x1EE51, 'M', u'ص'),
+ (0x1EE52, 'M', u'ق'),
+ (0x1EE53, 'X'),
+ (0x1EE54, 'M', u'ش'),
+ (0x1EE55, 'X'),
+ (0x1EE57, 'M', u'خ'),
+ (0x1EE58, 'X'),
+ (0x1EE59, 'M', u'ض'),
+ (0x1EE5A, 'X'),
+ (0x1EE5B, 'M', u'غ'),
+ (0x1EE5C, 'X'),
+ (0x1EE5D, 'M', u'ں'),
+ (0x1EE5E, 'X'),
+ (0x1EE5F, 'M', u'ٯ'),
+ (0x1EE60, 'X'),
+ (0x1EE61, 'M', u'ب'),
+ (0x1EE62, 'M', u'ج'),
+ (0x1EE63, 'X'),
+ (0x1EE64, 'M', u'ه'),
+ (0x1EE65, 'X'),
+ (0x1EE67, 'M', u'ح'),
+ (0x1EE68, 'M', u'ط'),
+ (0x1EE69, 'M', u'ي'),
+ (0x1EE6A, 'M', u'ك'),
+ ]
+
+def _seg_71():
+ return [
+ (0x1EE6B, 'X'),
+ (0x1EE6C, 'M', u'م'),
+ (0x1EE6D, 'M', u'ن'),
+ (0x1EE6E, 'M', u'س'),
+ (0x1EE6F, 'M', u'ع'),
+ (0x1EE70, 'M', u'ف'),
+ (0x1EE71, 'M', u'ص'),
+ (0x1EE72, 'M', u'ق'),
+ (0x1EE73, 'X'),
+ (0x1EE74, 'M', u'ش'),
+ (0x1EE75, 'M', u'ت'),
+ (0x1EE76, 'M', u'ث'),
+ (0x1EE77, 'M', u'خ'),
+ (0x1EE78, 'X'),
+ (0x1EE79, 'M', u'ض'),
+ (0x1EE7A, 'M', u'ظ'),
+ (0x1EE7B, 'M', u'غ'),
+ (0x1EE7C, 'M', u'ٮ'),
+ (0x1EE7D, 'X'),
+ (0x1EE7E, 'M', u'ڡ'),
+ (0x1EE7F, 'X'),
+ (0x1EE80, 'M', u'ا'),
+ (0x1EE81, 'M', u'ب'),
+ (0x1EE82, 'M', u'ج'),
+ (0x1EE83, 'M', u'د'),
+ (0x1EE84, 'M', u'ه'),
+ (0x1EE85, 'M', u'و'),
+ (0x1EE86, 'M', u'ز'),
+ (0x1EE87, 'M', u'ح'),
+ (0x1EE88, 'M', u'ط'),
+ (0x1EE89, 'M', u'ي'),
+ (0x1EE8A, 'X'),
+ (0x1EE8B, 'M', u'ل'),
+ (0x1EE8C, 'M', u'م'),
+ (0x1EE8D, 'M', u'ن'),
+ (0x1EE8E, 'M', u'س'),
+ (0x1EE8F, 'M', u'ع'),
+ (0x1EE90, 'M', u'ف'),
+ (0x1EE91, 'M', u'ص'),
+ (0x1EE92, 'M', u'ق'),
+ (0x1EE93, 'M', u'ر'),
+ (0x1EE94, 'M', u'ش'),
+ (0x1EE95, 'M', u'ت'),
+ (0x1EE96, 'M', u'ث'),
+ (0x1EE97, 'M', u'خ'),
+ (0x1EE98, 'M', u'ذ'),
+ (0x1EE99, 'M', u'ض'),
+ (0x1EE9A, 'M', u'ظ'),
+ (0x1EE9B, 'M', u'غ'),
+ (0x1EE9C, 'X'),
+ (0x1EEA1, 'M', u'ب'),
+ (0x1EEA2, 'M', u'ج'),
+ (0x1EEA3, 'M', u'د'),
+ (0x1EEA4, 'X'),
+ (0x1EEA5, 'M', u'و'),
+ (0x1EEA6, 'M', u'ز'),
+ (0x1EEA7, 'M', u'ح'),
+ (0x1EEA8, 'M', u'ط'),
+ (0x1EEA9, 'M', u'ي'),
+ (0x1EEAA, 'X'),
+ (0x1EEAB, 'M', u'ل'),
+ (0x1EEAC, 'M', u'م'),
+ (0x1EEAD, 'M', u'ن'),
+ (0x1EEAE, 'M', u'س'),
+ (0x1EEAF, 'M', u'ع'),
+ (0x1EEB0, 'M', u'ف'),
+ (0x1EEB1, 'M', u'ص'),
+ (0x1EEB2, 'M', u'ق'),
+ (0x1EEB3, 'M', u'ر'),
+ (0x1EEB4, 'M', u'ش'),
+ (0x1EEB5, 'M', u'ت'),
+ (0x1EEB6, 'M', u'ث'),
+ (0x1EEB7, 'M', u'خ'),
+ (0x1EEB8, 'M', u'ذ'),
+ (0x1EEB9, 'M', u'ض'),
+ (0x1EEBA, 'M', u'ظ'),
+ (0x1EEBB, 'M', u'غ'),
+ (0x1EEBC, 'X'),
+ (0x1EEF0, 'V'),
+ (0x1EEF2, 'X'),
+ (0x1F000, 'V'),
+ (0x1F02C, 'X'),
+ (0x1F030, 'V'),
+ (0x1F094, 'X'),
+ (0x1F0A0, 'V'),
+ (0x1F0AF, 'X'),
+ (0x1F0B1, 'V'),
+ (0x1F0C0, 'X'),
+ (0x1F0C1, 'V'),
+ (0x1F0D0, 'X'),
+ (0x1F0D1, 'V'),
+ (0x1F0F6, 'X'),
+ (0x1F101, '3', u'0,'),
+ (0x1F102, '3', u'1,'),
+ (0x1F103, '3', u'2,'),
+ (0x1F104, '3', u'3,'),
+ (0x1F105, '3', u'4,'),
+ (0x1F106, '3', u'5,'),
+ (0x1F107, '3', u'6,'),
+ (0x1F108, '3', u'7,'),
+ ]
+
+def _seg_72():
+ return [
+ (0x1F109, '3', u'8,'),
+ (0x1F10A, '3', u'9,'),
+ (0x1F10B, 'V'),
+ (0x1F110, '3', u'(a)'),
+ (0x1F111, '3', u'(b)'),
+ (0x1F112, '3', u'(c)'),
+ (0x1F113, '3', u'(d)'),
+ (0x1F114, '3', u'(e)'),
+ (0x1F115, '3', u'(f)'),
+ (0x1F116, '3', u'(g)'),
+ (0x1F117, '3', u'(h)'),
+ (0x1F118, '3', u'(i)'),
+ (0x1F119, '3', u'(j)'),
+ (0x1F11A, '3', u'(k)'),
+ (0x1F11B, '3', u'(l)'),
+ (0x1F11C, '3', u'(m)'),
+ (0x1F11D, '3', u'(n)'),
+ (0x1F11E, '3', u'(o)'),
+ (0x1F11F, '3', u'(p)'),
+ (0x1F120, '3', u'(q)'),
+ (0x1F121, '3', u'(r)'),
+ (0x1F122, '3', u'(s)'),
+ (0x1F123, '3', u'(t)'),
+ (0x1F124, '3', u'(u)'),
+ (0x1F125, '3', u'(v)'),
+ (0x1F126, '3', u'(w)'),
+ (0x1F127, '3', u'(x)'),
+ (0x1F128, '3', u'(y)'),
+ (0x1F129, '3', u'(z)'),
+ (0x1F12A, 'M', u'〔s〕'),
+ (0x1F12B, 'M', u'c'),
+ (0x1F12C, 'M', u'r'),
+ (0x1F12D, 'M', u'cd'),
+ (0x1F12E, 'M', u'wz'),
+ (0x1F12F, 'V'),
+ (0x1F130, 'M', u'a'),
+ (0x1F131, 'M', u'b'),
+ (0x1F132, 'M', u'c'),
+ (0x1F133, 'M', u'd'),
+ (0x1F134, 'M', u'e'),
+ (0x1F135, 'M', u'f'),
+ (0x1F136, 'M', u'g'),
+ (0x1F137, 'M', u'h'),
+ (0x1F138, 'M', u'i'),
+ (0x1F139, 'M', u'j'),
+ (0x1F13A, 'M', u'k'),
+ (0x1F13B, 'M', u'l'),
+ (0x1F13C, 'M', u'm'),
+ (0x1F13D, 'M', u'n'),
+ (0x1F13E, 'M', u'o'),
+ (0x1F13F, 'M', u'p'),
+ (0x1F140, 'M', u'q'),
+ (0x1F141, 'M', u'r'),
+ (0x1F142, 'M', u's'),
+ (0x1F143, 'M', u't'),
+ (0x1F144, 'M', u'u'),
+ (0x1F145, 'M', u'v'),
+ (0x1F146, 'M', u'w'),
+ (0x1F147, 'M', u'x'),
+ (0x1F148, 'M', u'y'),
+ (0x1F149, 'M', u'z'),
+ (0x1F14A, 'M', u'hv'),
+ (0x1F14B, 'M', u'mv'),
+ (0x1F14C, 'M', u'sd'),
+ (0x1F14D, 'M', u'ss'),
+ (0x1F14E, 'M', u'ppv'),
+ (0x1F14F, 'M', u'wc'),
+ (0x1F150, 'V'),
+ (0x1F16A, 'M', u'mc'),
+ (0x1F16B, 'M', u'md'),
+ (0x1F16C, 'M', u'mr'),
+ (0x1F16D, 'V'),
+ (0x1F190, 'M', u'dj'),
+ (0x1F191, 'V'),
+ (0x1F1AE, 'X'),
+ (0x1F1E6, 'V'),
+ (0x1F200, 'M', u'ほか'),
+ (0x1F201, 'M', u'ココ'),
+ (0x1F202, 'M', u'サ'),
+ (0x1F203, 'X'),
+ (0x1F210, 'M', u'手'),
+ (0x1F211, 'M', u'字'),
+ (0x1F212, 'M', u'双'),
+ (0x1F213, 'M', u'デ'),
+ (0x1F214, 'M', u'二'),
+ (0x1F215, 'M', u'多'),
+ (0x1F216, 'M', u'解'),
+ (0x1F217, 'M', u'天'),
+ (0x1F218, 'M', u'交'),
+ (0x1F219, 'M', u'映'),
+ (0x1F21A, 'M', u'無'),
+ (0x1F21B, 'M', u'料'),
+ (0x1F21C, 'M', u'前'),
+ (0x1F21D, 'M', u'後'),
+ (0x1F21E, 'M', u'再'),
+ (0x1F21F, 'M', u'新'),
+ (0x1F220, 'M', u'初'),
+ (0x1F221, 'M', u'終'),
+ (0x1F222, 'M', u'生'),
+ (0x1F223, 'M', u'販'),
+ ]
+
+def _seg_73():
+ return [
+ (0x1F224, 'M', u'声'),
+ (0x1F225, 'M', u'吹'),
+ (0x1F226, 'M', u'演'),
+ (0x1F227, 'M', u'投'),
+ (0x1F228, 'M', u'捕'),
+ (0x1F229, 'M', u'一'),
+ (0x1F22A, 'M', u'三'),
+ (0x1F22B, 'M', u'遊'),
+ (0x1F22C, 'M', u'左'),
+ (0x1F22D, 'M', u'中'),
+ (0x1F22E, 'M', u'右'),
+ (0x1F22F, 'M', u'指'),
+ (0x1F230, 'M', u'走'),
+ (0x1F231, 'M', u'打'),
+ (0x1F232, 'M', u'禁'),
+ (0x1F233, 'M', u'空'),
+ (0x1F234, 'M', u'合'),
+ (0x1F235, 'M', u'満'),
+ (0x1F236, 'M', u'有'),
+ (0x1F237, 'M', u'月'),
+ (0x1F238, 'M', u'申'),
+ (0x1F239, 'M', u'割'),
+ (0x1F23A, 'M', u'営'),
+ (0x1F23B, 'M', u'配'),
+ (0x1F23C, 'X'),
+ (0x1F240, 'M', u'〔本〕'),
+ (0x1F241, 'M', u'〔三〕'),
+ (0x1F242, 'M', u'〔二〕'),
+ (0x1F243, 'M', u'〔安〕'),
+ (0x1F244, 'M', u'〔点〕'),
+ (0x1F245, 'M', u'〔打〕'),
+ (0x1F246, 'M', u'〔盗〕'),
+ (0x1F247, 'M', u'〔勝〕'),
+ (0x1F248, 'M', u'〔敗〕'),
+ (0x1F249, 'X'),
+ (0x1F250, 'M', u'得'),
+ (0x1F251, 'M', u'可'),
+ (0x1F252, 'X'),
+ (0x1F260, 'V'),
+ (0x1F266, 'X'),
+ (0x1F300, 'V'),
+ (0x1F6D8, 'X'),
+ (0x1F6E0, 'V'),
+ (0x1F6ED, 'X'),
+ (0x1F6F0, 'V'),
+ (0x1F6FD, 'X'),
+ (0x1F700, 'V'),
+ (0x1F774, 'X'),
+ (0x1F780, 'V'),
+ (0x1F7D9, 'X'),
+ (0x1F7E0, 'V'),
+ (0x1F7EC, 'X'),
+ (0x1F800, 'V'),
+ (0x1F80C, 'X'),
+ (0x1F810, 'V'),
+ (0x1F848, 'X'),
+ (0x1F850, 'V'),
+ (0x1F85A, 'X'),
+ (0x1F860, 'V'),
+ (0x1F888, 'X'),
+ (0x1F890, 'V'),
+ (0x1F8AE, 'X'),
+ (0x1F8B0, 'V'),
+ (0x1F8B2, 'X'),
+ (0x1F900, 'V'),
+ (0x1F979, 'X'),
+ (0x1F97A, 'V'),
+ (0x1F9CC, 'X'),
+ (0x1F9CD, 'V'),
+ (0x1FA54, 'X'),
+ (0x1FA60, 'V'),
+ (0x1FA6E, 'X'),
+ (0x1FA70, 'V'),
+ (0x1FA75, 'X'),
+ (0x1FA78, 'V'),
+ (0x1FA7B, 'X'),
+ (0x1FA80, 'V'),
+ (0x1FA87, 'X'),
+ (0x1FA90, 'V'),
+ (0x1FAA9, 'X'),
+ (0x1FAB0, 'V'),
+ (0x1FAB7, 'X'),
+ (0x1FAC0, 'V'),
+ (0x1FAC3, 'X'),
+ (0x1FAD0, 'V'),
+ (0x1FAD7, 'X'),
+ (0x1FB00, 'V'),
+ (0x1FB93, 'X'),
+ (0x1FB94, 'V'),
+ (0x1FBCB, 'X'),
+ (0x1FBF0, 'M', u'0'),
+ (0x1FBF1, 'M', u'1'),
+ (0x1FBF2, 'M', u'2'),
+ (0x1FBF3, 'M', u'3'),
+ (0x1FBF4, 'M', u'4'),
+ (0x1FBF5, 'M', u'5'),
+ (0x1FBF6, 'M', u'6'),
+ (0x1FBF7, 'M', u'7'),
+ (0x1FBF8, 'M', u'8'),
+ (0x1FBF9, 'M', u'9'),
+ ]
+
+def _seg_74():
+ return [
+ (0x1FBFA, 'X'),
+ (0x20000, 'V'),
+ (0x2A6DE, 'X'),
+ (0x2A700, 'V'),
+ (0x2B735, 'X'),
+ (0x2B740, 'V'),
+ (0x2B81E, 'X'),
+ (0x2B820, 'V'),
+ (0x2CEA2, 'X'),
+ (0x2CEB0, 'V'),
+ (0x2EBE1, 'X'),
+ (0x2F800, 'M', u'丽'),
+ (0x2F801, 'M', u'丸'),
+ (0x2F802, 'M', u'乁'),
+ (0x2F803, 'M', u'𠄢'),
+ (0x2F804, 'M', u'你'),
+ (0x2F805, 'M', u'侮'),
+ (0x2F806, 'M', u'侻'),
+ (0x2F807, 'M', u'倂'),
+ (0x2F808, 'M', u'偺'),
+ (0x2F809, 'M', u'備'),
+ (0x2F80A, 'M', u'僧'),
+ (0x2F80B, 'M', u'像'),
+ (0x2F80C, 'M', u'㒞'),
+ (0x2F80D, 'M', u'𠘺'),
+ (0x2F80E, 'M', u'免'),
+ (0x2F80F, 'M', u'兔'),
+ (0x2F810, 'M', u'兤'),
+ (0x2F811, 'M', u'具'),
+ (0x2F812, 'M', u'𠔜'),
+ (0x2F813, 'M', u'㒹'),
+ (0x2F814, 'M', u'內'),
+ (0x2F815, 'M', u'再'),
+ (0x2F816, 'M', u'𠕋'),
+ (0x2F817, 'M', u'冗'),
+ (0x2F818, 'M', u'冤'),
+ (0x2F819, 'M', u'仌'),
+ (0x2F81A, 'M', u'冬'),
+ (0x2F81B, 'M', u'况'),
+ (0x2F81C, 'M', u'𩇟'),
+ (0x2F81D, 'M', u'凵'),
+ (0x2F81E, 'M', u'刃'),
+ (0x2F81F, 'M', u'㓟'),
+ (0x2F820, 'M', u'刻'),
+ (0x2F821, 'M', u'剆'),
+ (0x2F822, 'M', u'割'),
+ (0x2F823, 'M', u'剷'),
+ (0x2F824, 'M', u'㔕'),
+ (0x2F825, 'M', u'勇'),
+ (0x2F826, 'M', u'勉'),
+ (0x2F827, 'M', u'勤'),
+ (0x2F828, 'M', u'勺'),
+ (0x2F829, 'M', u'包'),
+ (0x2F82A, 'M', u'匆'),
+ (0x2F82B, 'M', u'北'),
+ (0x2F82C, 'M', u'卉'),
+ (0x2F82D, 'M', u'卑'),
+ (0x2F82E, 'M', u'博'),
+ (0x2F82F, 'M', u'即'),
+ (0x2F830, 'M', u'卽'),
+ (0x2F831, 'M', u'卿'),
+ (0x2F834, 'M', u'𠨬'),
+ (0x2F835, 'M', u'灰'),
+ (0x2F836, 'M', u'及'),
+ (0x2F837, 'M', u'叟'),
+ (0x2F838, 'M', u'𠭣'),
+ (0x2F839, 'M', u'叫'),
+ (0x2F83A, 'M', u'叱'),
+ (0x2F83B, 'M', u'吆'),
+ (0x2F83C, 'M', u'咞'),
+ (0x2F83D, 'M', u'吸'),
+ (0x2F83E, 'M', u'呈'),
+ (0x2F83F, 'M', u'周'),
+ (0x2F840, 'M', u'咢'),
+ (0x2F841, 'M', u'哶'),
+ (0x2F842, 'M', u'唐'),
+ (0x2F843, 'M', u'啓'),
+ (0x2F844, 'M', u'啣'),
+ (0x2F845, 'M', u'善'),
+ (0x2F847, 'M', u'喙'),
+ (0x2F848, 'M', u'喫'),
+ (0x2F849, 'M', u'喳'),
+ (0x2F84A, 'M', u'嗂'),
+ (0x2F84B, 'M', u'圖'),
+ (0x2F84C, 'M', u'嘆'),
+ (0x2F84D, 'M', u'圗'),
+ (0x2F84E, 'M', u'噑'),
+ (0x2F84F, 'M', u'噴'),
+ (0x2F850, 'M', u'切'),
+ (0x2F851, 'M', u'壮'),
+ (0x2F852, 'M', u'城'),
+ (0x2F853, 'M', u'埴'),
+ (0x2F854, 'M', u'堍'),
+ (0x2F855, 'M', u'型'),
+ (0x2F856, 'M', u'堲'),
+ (0x2F857, 'M', u'報'),
+ (0x2F858, 'M', u'墬'),
+ (0x2F859, 'M', u'𡓤'),
+ (0x2F85A, 'M', u'売'),
+ (0x2F85B, 'M', u'壷'),
+ ]
+
+def _seg_75():
+ return [
+ (0x2F85C, 'M', u'夆'),
+ (0x2F85D, 'M', u'多'),
+ (0x2F85E, 'M', u'夢'),
+ (0x2F85F, 'M', u'奢'),
+ (0x2F860, 'M', u'𡚨'),
+ (0x2F861, 'M', u'𡛪'),
+ (0x2F862, 'M', u'姬'),
+ (0x2F863, 'M', u'娛'),
+ (0x2F864, 'M', u'娧'),
+ (0x2F865, 'M', u'姘'),
+ (0x2F866, 'M', u'婦'),
+ (0x2F867, 'M', u'㛮'),
+ (0x2F868, 'X'),
+ (0x2F869, 'M', u'嬈'),
+ (0x2F86A, 'M', u'嬾'),
+ (0x2F86C, 'M', u'𡧈'),
+ (0x2F86D, 'M', u'寃'),
+ (0x2F86E, 'M', u'寘'),
+ (0x2F86F, 'M', u'寧'),
+ (0x2F870, 'M', u'寳'),
+ (0x2F871, 'M', u'𡬘'),
+ (0x2F872, 'M', u'寿'),
+ (0x2F873, 'M', u'将'),
+ (0x2F874, 'X'),
+ (0x2F875, 'M', u'尢'),
+ (0x2F876, 'M', u'㞁'),
+ (0x2F877, 'M', u'屠'),
+ (0x2F878, 'M', u'屮'),
+ (0x2F879, 'M', u'峀'),
+ (0x2F87A, 'M', u'岍'),
+ (0x2F87B, 'M', u'𡷤'),
+ (0x2F87C, 'M', u'嵃'),
+ (0x2F87D, 'M', u'𡷦'),
+ (0x2F87E, 'M', u'嵮'),
+ (0x2F87F, 'M', u'嵫'),
+ (0x2F880, 'M', u'嵼'),
+ (0x2F881, 'M', u'巡'),
+ (0x2F882, 'M', u'巢'),
+ (0x2F883, 'M', u'㠯'),
+ (0x2F884, 'M', u'巽'),
+ (0x2F885, 'M', u'帨'),
+ (0x2F886, 'M', u'帽'),
+ (0x2F887, 'M', u'幩'),
+ (0x2F888, 'M', u'㡢'),
+ (0x2F889, 'M', u'𢆃'),
+ (0x2F88A, 'M', u'㡼'),
+ (0x2F88B, 'M', u'庰'),
+ (0x2F88C, 'M', u'庳'),
+ (0x2F88D, 'M', u'庶'),
+ (0x2F88E, 'M', u'廊'),
+ (0x2F88F, 'M', u'𪎒'),
+ (0x2F890, 'M', u'廾'),
+ (0x2F891, 'M', u'𢌱'),
+ (0x2F893, 'M', u'舁'),
+ (0x2F894, 'M', u'弢'),
+ (0x2F896, 'M', u'㣇'),
+ (0x2F897, 'M', u'𣊸'),
+ (0x2F898, 'M', u'𦇚'),
+ (0x2F899, 'M', u'形'),
+ (0x2F89A, 'M', u'彫'),
+ (0x2F89B, 'M', u'㣣'),
+ (0x2F89C, 'M', u'徚'),
+ (0x2F89D, 'M', u'忍'),
+ (0x2F89E, 'M', u'志'),
+ (0x2F89F, 'M', u'忹'),
+ (0x2F8A0, 'M', u'悁'),
+ (0x2F8A1, 'M', u'㤺'),
+ (0x2F8A2, 'M', u'㤜'),
+ (0x2F8A3, 'M', u'悔'),
+ (0x2F8A4, 'M', u'𢛔'),
+ (0x2F8A5, 'M', u'惇'),
+ (0x2F8A6, 'M', u'慈'),
+ (0x2F8A7, 'M', u'慌'),
+ (0x2F8A8, 'M', u'慎'),
+ (0x2F8A9, 'M', u'慌'),
+ (0x2F8AA, 'M', u'慺'),
+ (0x2F8AB, 'M', u'憎'),
+ (0x2F8AC, 'M', u'憲'),
+ (0x2F8AD, 'M', u'憤'),
+ (0x2F8AE, 'M', u'憯'),
+ (0x2F8AF, 'M', u'懞'),
+ (0x2F8B0, 'M', u'懲'),
+ (0x2F8B1, 'M', u'懶'),
+ (0x2F8B2, 'M', u'成'),
+ (0x2F8B3, 'M', u'戛'),
+ (0x2F8B4, 'M', u'扝'),
+ (0x2F8B5, 'M', u'抱'),
+ (0x2F8B6, 'M', u'拔'),
+ (0x2F8B7, 'M', u'捐'),
+ (0x2F8B8, 'M', u'𢬌'),
+ (0x2F8B9, 'M', u'挽'),
+ (0x2F8BA, 'M', u'拼'),
+ (0x2F8BB, 'M', u'捨'),
+ (0x2F8BC, 'M', u'掃'),
+ (0x2F8BD, 'M', u'揤'),
+ (0x2F8BE, 'M', u'𢯱'),
+ (0x2F8BF, 'M', u'搢'),
+ (0x2F8C0, 'M', u'揅'),
+ (0x2F8C1, 'M', u'掩'),
+ (0x2F8C2, 'M', u'㨮'),
+ ]
+
+def _seg_76():
+ return [
+ (0x2F8C3, 'M', u'摩'),
+ (0x2F8C4, 'M', u'摾'),
+ (0x2F8C5, 'M', u'撝'),
+ (0x2F8C6, 'M', u'摷'),
+ (0x2F8C7, 'M', u'㩬'),
+ (0x2F8C8, 'M', u'敏'),
+ (0x2F8C9, 'M', u'敬'),
+ (0x2F8CA, 'M', u'𣀊'),
+ (0x2F8CB, 'M', u'旣'),
+ (0x2F8CC, 'M', u'書'),
+ (0x2F8CD, 'M', u'晉'),
+ (0x2F8CE, 'M', u'㬙'),
+ (0x2F8CF, 'M', u'暑'),
+ (0x2F8D0, 'M', u'㬈'),
+ (0x2F8D1, 'M', u'㫤'),
+ (0x2F8D2, 'M', u'冒'),
+ (0x2F8D3, 'M', u'冕'),
+ (0x2F8D4, 'M', u'最'),
+ (0x2F8D5, 'M', u'暜'),
+ (0x2F8D6, 'M', u'肭'),
+ (0x2F8D7, 'M', u'䏙'),
+ (0x2F8D8, 'M', u'朗'),
+ (0x2F8D9, 'M', u'望'),
+ (0x2F8DA, 'M', u'朡'),
+ (0x2F8DB, 'M', u'杞'),
+ (0x2F8DC, 'M', u'杓'),
+ (0x2F8DD, 'M', u'𣏃'),
+ (0x2F8DE, 'M', u'㭉'),
+ (0x2F8DF, 'M', u'柺'),
+ (0x2F8E0, 'M', u'枅'),
+ (0x2F8E1, 'M', u'桒'),
+ (0x2F8E2, 'M', u'梅'),
+ (0x2F8E3, 'M', u'𣑭'),
+ (0x2F8E4, 'M', u'梎'),
+ (0x2F8E5, 'M', u'栟'),
+ (0x2F8E6, 'M', u'椔'),
+ (0x2F8E7, 'M', u'㮝'),
+ (0x2F8E8, 'M', u'楂'),
+ (0x2F8E9, 'M', u'榣'),
+ (0x2F8EA, 'M', u'槪'),
+ (0x2F8EB, 'M', u'檨'),
+ (0x2F8EC, 'M', u'𣚣'),
+ (0x2F8ED, 'M', u'櫛'),
+ (0x2F8EE, 'M', u'㰘'),
+ (0x2F8EF, 'M', u'次'),
+ (0x2F8F0, 'M', u'𣢧'),
+ (0x2F8F1, 'M', u'歔'),
+ (0x2F8F2, 'M', u'㱎'),
+ (0x2F8F3, 'M', u'歲'),
+ (0x2F8F4, 'M', u'殟'),
+ (0x2F8F5, 'M', u'殺'),
+ (0x2F8F6, 'M', u'殻'),
+ (0x2F8F7, 'M', u'𣪍'),
+ (0x2F8F8, 'M', u'𡴋'),
+ (0x2F8F9, 'M', u'𣫺'),
+ (0x2F8FA, 'M', u'汎'),
+ (0x2F8FB, 'M', u'𣲼'),
+ (0x2F8FC, 'M', u'沿'),
+ (0x2F8FD, 'M', u'泍'),
+ (0x2F8FE, 'M', u'汧'),
+ (0x2F8FF, 'M', u'洖'),
+ (0x2F900, 'M', u'派'),
+ (0x2F901, 'M', u'海'),
+ (0x2F902, 'M', u'流'),
+ (0x2F903, 'M', u'浩'),
+ (0x2F904, 'M', u'浸'),
+ (0x2F905, 'M', u'涅'),
+ (0x2F906, 'M', u'𣴞'),
+ (0x2F907, 'M', u'洴'),
+ (0x2F908, 'M', u'港'),
+ (0x2F909, 'M', u'湮'),
+ (0x2F90A, 'M', u'㴳'),
+ (0x2F90B, 'M', u'滋'),
+ (0x2F90C, 'M', u'滇'),
+ (0x2F90D, 'M', u'𣻑'),
+ (0x2F90E, 'M', u'淹'),
+ (0x2F90F, 'M', u'潮'),
+ (0x2F910, 'M', u'𣽞'),
+ (0x2F911, 'M', u'𣾎'),
+ (0x2F912, 'M', u'濆'),
+ (0x2F913, 'M', u'瀹'),
+ (0x2F914, 'M', u'瀞'),
+ (0x2F915, 'M', u'瀛'),
+ (0x2F916, 'M', u'㶖'),
+ (0x2F917, 'M', u'灊'),
+ (0x2F918, 'M', u'災'),
+ (0x2F919, 'M', u'灷'),
+ (0x2F91A, 'M', u'炭'),
+ (0x2F91B, 'M', u'𠔥'),
+ (0x2F91C, 'M', u'煅'),
+ (0x2F91D, 'M', u'𤉣'),
+ (0x2F91E, 'M', u'熜'),
+ (0x2F91F, 'X'),
+ (0x2F920, 'M', u'爨'),
+ (0x2F921, 'M', u'爵'),
+ (0x2F922, 'M', u'牐'),
+ (0x2F923, 'M', u'𤘈'),
+ (0x2F924, 'M', u'犀'),
+ (0x2F925, 'M', u'犕'),
+ (0x2F926, 'M', u'𤜵'),
+ ]
+
+def _seg_77():
+ return [
+ (0x2F927, 'M', u'𤠔'),
+ (0x2F928, 'M', u'獺'),
+ (0x2F929, 'M', u'王'),
+ (0x2F92A, 'M', u'㺬'),
+ (0x2F92B, 'M', u'玥'),
+ (0x2F92C, 'M', u'㺸'),
+ (0x2F92E, 'M', u'瑇'),
+ (0x2F92F, 'M', u'瑜'),
+ (0x2F930, 'M', u'瑱'),
+ (0x2F931, 'M', u'璅'),
+ (0x2F932, 'M', u'瓊'),
+ (0x2F933, 'M', u'㼛'),
+ (0x2F934, 'M', u'甤'),
+ (0x2F935, 'M', u'𤰶'),
+ (0x2F936, 'M', u'甾'),
+ (0x2F937, 'M', u'𤲒'),
+ (0x2F938, 'M', u'異'),
+ (0x2F939, 'M', u'𢆟'),
+ (0x2F93A, 'M', u'瘐'),
+ (0x2F93B, 'M', u'𤾡'),
+ (0x2F93C, 'M', u'𤾸'),
+ (0x2F93D, 'M', u'𥁄'),
+ (0x2F93E, 'M', u'㿼'),
+ (0x2F93F, 'M', u'䀈'),
+ (0x2F940, 'M', u'直'),
+ (0x2F941, 'M', u'𥃳'),
+ (0x2F942, 'M', u'𥃲'),
+ (0x2F943, 'M', u'𥄙'),
+ (0x2F944, 'M', u'𥄳'),
+ (0x2F945, 'M', u'眞'),
+ (0x2F946, 'M', u'真'),
+ (0x2F948, 'M', u'睊'),
+ (0x2F949, 'M', u'䀹'),
+ (0x2F94A, 'M', u'瞋'),
+ (0x2F94B, 'M', u'䁆'),
+ (0x2F94C, 'M', u'䂖'),
+ (0x2F94D, 'M', u'𥐝'),
+ (0x2F94E, 'M', u'硎'),
+ (0x2F94F, 'M', u'碌'),
+ (0x2F950, 'M', u'磌'),
+ (0x2F951, 'M', u'䃣'),
+ (0x2F952, 'M', u'𥘦'),
+ (0x2F953, 'M', u'祖'),
+ (0x2F954, 'M', u'𥚚'),
+ (0x2F955, 'M', u'𥛅'),
+ (0x2F956, 'M', u'福'),
+ (0x2F957, 'M', u'秫'),
+ (0x2F958, 'M', u'䄯'),
+ (0x2F959, 'M', u'穀'),
+ (0x2F95A, 'M', u'穊'),
+ (0x2F95B, 'M', u'穏'),
+ (0x2F95C, 'M', u'𥥼'),
+ (0x2F95D, 'M', u'𥪧'),
+ (0x2F95F, 'X'),
+ (0x2F960, 'M', u'䈂'),
+ (0x2F961, 'M', u'𥮫'),
+ (0x2F962, 'M', u'篆'),
+ (0x2F963, 'M', u'築'),
+ (0x2F964, 'M', u'䈧'),
+ (0x2F965, 'M', u'𥲀'),
+ (0x2F966, 'M', u'糒'),
+ (0x2F967, 'M', u'䊠'),
+ (0x2F968, 'M', u'糨'),
+ (0x2F969, 'M', u'糣'),
+ (0x2F96A, 'M', u'紀'),
+ (0x2F96B, 'M', u'𥾆'),
+ (0x2F96C, 'M', u'絣'),
+ (0x2F96D, 'M', u'䌁'),
+ (0x2F96E, 'M', u'緇'),
+ (0x2F96F, 'M', u'縂'),
+ (0x2F970, 'M', u'繅'),
+ (0x2F971, 'M', u'䌴'),
+ (0x2F972, 'M', u'𦈨'),
+ (0x2F973, 'M', u'𦉇'),
+ (0x2F974, 'M', u'䍙'),
+ (0x2F975, 'M', u'𦋙'),
+ (0x2F976, 'M', u'罺'),
+ (0x2F977, 'M', u'𦌾'),
+ (0x2F978, 'M', u'羕'),
+ (0x2F979, 'M', u'翺'),
+ (0x2F97A, 'M', u'者'),
+ (0x2F97B, 'M', u'𦓚'),
+ (0x2F97C, 'M', u'𦔣'),
+ (0x2F97D, 'M', u'聠'),
+ (0x2F97E, 'M', u'𦖨'),
+ (0x2F97F, 'M', u'聰'),
+ (0x2F980, 'M', u'𣍟'),
+ (0x2F981, 'M', u'䏕'),
+ (0x2F982, 'M', u'育'),
+ (0x2F983, 'M', u'脃'),
+ (0x2F984, 'M', u'䐋'),
+ (0x2F985, 'M', u'脾'),
+ (0x2F986, 'M', u'媵'),
+ (0x2F987, 'M', u'𦞧'),
+ (0x2F988, 'M', u'𦞵'),
+ (0x2F989, 'M', u'𣎓'),
+ (0x2F98A, 'M', u'𣎜'),
+ (0x2F98B, 'M', u'舁'),
+ (0x2F98C, 'M', u'舄'),
+ (0x2F98D, 'M', u'辞'),
+ ]
+
+def _seg_78():
+ return [
+ (0x2F98E, 'M', u'䑫'),
+ (0x2F98F, 'M', u'芑'),
+ (0x2F990, 'M', u'芋'),
+ (0x2F991, 'M', u'芝'),
+ (0x2F992, 'M', u'劳'),
+ (0x2F993, 'M', u'花'),
+ (0x2F994, 'M', u'芳'),
+ (0x2F995, 'M', u'芽'),
+ (0x2F996, 'M', u'苦'),
+ (0x2F997, 'M', u'𦬼'),
+ (0x2F998, 'M', u'若'),
+ (0x2F999, 'M', u'茝'),
+ (0x2F99A, 'M', u'荣'),
+ (0x2F99B, 'M', u'莭'),
+ (0x2F99C, 'M', u'茣'),
+ (0x2F99D, 'M', u'莽'),
+ (0x2F99E, 'M', u'菧'),
+ (0x2F99F, 'M', u'著'),
+ (0x2F9A0, 'M', u'荓'),
+ (0x2F9A1, 'M', u'菊'),
+ (0x2F9A2, 'M', u'菌'),
+ (0x2F9A3, 'M', u'菜'),
+ (0x2F9A4, 'M', u'𦰶'),
+ (0x2F9A5, 'M', u'𦵫'),
+ (0x2F9A6, 'M', u'𦳕'),
+ (0x2F9A7, 'M', u'䔫'),
+ (0x2F9A8, 'M', u'蓱'),
+ (0x2F9A9, 'M', u'蓳'),
+ (0x2F9AA, 'M', u'蔖'),
+ (0x2F9AB, 'M', u'𧏊'),
+ (0x2F9AC, 'M', u'蕤'),
+ (0x2F9AD, 'M', u'𦼬'),
+ (0x2F9AE, 'M', u'䕝'),
+ (0x2F9AF, 'M', u'䕡'),
+ (0x2F9B0, 'M', u'𦾱'),
+ (0x2F9B1, 'M', u'𧃒'),
+ (0x2F9B2, 'M', u'䕫'),
+ (0x2F9B3, 'M', u'虐'),
+ (0x2F9B4, 'M', u'虜'),
+ (0x2F9B5, 'M', u'虧'),
+ (0x2F9B6, 'M', u'虩'),
+ (0x2F9B7, 'M', u'蚩'),
+ (0x2F9B8, 'M', u'蚈'),
+ (0x2F9B9, 'M', u'蜎'),
+ (0x2F9BA, 'M', u'蛢'),
+ (0x2F9BB, 'M', u'蝹'),
+ (0x2F9BC, 'M', u'蜨'),
+ (0x2F9BD, 'M', u'蝫'),
+ (0x2F9BE, 'M', u'螆'),
+ (0x2F9BF, 'X'),
+ (0x2F9C0, 'M', u'蟡'),
+ (0x2F9C1, 'M', u'蠁'),
+ (0x2F9C2, 'M', u'䗹'),
+ (0x2F9C3, 'M', u'衠'),
+ (0x2F9C4, 'M', u'衣'),
+ (0x2F9C5, 'M', u'𧙧'),
+ (0x2F9C6, 'M', u'裗'),
+ (0x2F9C7, 'M', u'裞'),
+ (0x2F9C8, 'M', u'䘵'),
+ (0x2F9C9, 'M', u'裺'),
+ (0x2F9CA, 'M', u'㒻'),
+ (0x2F9CB, 'M', u'𧢮'),
+ (0x2F9CC, 'M', u'𧥦'),
+ (0x2F9CD, 'M', u'䚾'),
+ (0x2F9CE, 'M', u'䛇'),
+ (0x2F9CF, 'M', u'誠'),
+ (0x2F9D0, 'M', u'諭'),
+ (0x2F9D1, 'M', u'變'),
+ (0x2F9D2, 'M', u'豕'),
+ (0x2F9D3, 'M', u'𧲨'),
+ (0x2F9D4, 'M', u'貫'),
+ (0x2F9D5, 'M', u'賁'),
+ (0x2F9D6, 'M', u'贛'),
+ (0x2F9D7, 'M', u'起'),
+ (0x2F9D8, 'M', u'𧼯'),
+ (0x2F9D9, 'M', u'𠠄'),
+ (0x2F9DA, 'M', u'跋'),
+ (0x2F9DB, 'M', u'趼'),
+ (0x2F9DC, 'M', u'跰'),
+ (0x2F9DD, 'M', u'𠣞'),
+ (0x2F9DE, 'M', u'軔'),
+ (0x2F9DF, 'M', u'輸'),
+ (0x2F9E0, 'M', u'𨗒'),
+ (0x2F9E1, 'M', u'𨗭'),
+ (0x2F9E2, 'M', u'邔'),
+ (0x2F9E3, 'M', u'郱'),
+ (0x2F9E4, 'M', u'鄑'),
+ (0x2F9E5, 'M', u'𨜮'),
+ (0x2F9E6, 'M', u'鄛'),
+ (0x2F9E7, 'M', u'鈸'),
+ (0x2F9E8, 'M', u'鋗'),
+ (0x2F9E9, 'M', u'鋘'),
+ (0x2F9EA, 'M', u'鉼'),
+ (0x2F9EB, 'M', u'鏹'),
+ (0x2F9EC, 'M', u'鐕'),
+ (0x2F9ED, 'M', u'𨯺'),
+ (0x2F9EE, 'M', u'開'),
+ (0x2F9EF, 'M', u'䦕'),
+ (0x2F9F0, 'M', u'閷'),
+ (0x2F9F1, 'M', u'𨵷'),
+ ]
+
+def _seg_79():
+ return [
+ (0x2F9F2, 'M', u'䧦'),
+ (0x2F9F3, 'M', u'雃'),
+ (0x2F9F4, 'M', u'嶲'),
+ (0x2F9F5, 'M', u'霣'),
+ (0x2F9F6, 'M', u'𩅅'),
+ (0x2F9F7, 'M', u'𩈚'),
+ (0x2F9F8, 'M', u'䩮'),
+ (0x2F9F9, 'M', u'䩶'),
+ (0x2F9FA, 'M', u'韠'),
+ (0x2F9FB, 'M', u'𩐊'),
+ (0x2F9FC, 'M', u'䪲'),
+ (0x2F9FD, 'M', u'𩒖'),
+ (0x2F9FE, 'M', u'頋'),
+ (0x2FA00, 'M', u'頩'),
+ (0x2FA01, 'M', u'𩖶'),
+ (0x2FA02, 'M', u'飢'),
+ (0x2FA03, 'M', u'䬳'),
+ (0x2FA04, 'M', u'餩'),
+ (0x2FA05, 'M', u'馧'),
+ (0x2FA06, 'M', u'駂'),
+ (0x2FA07, 'M', u'駾'),
+ (0x2FA08, 'M', u'䯎'),
+ (0x2FA09, 'M', u'𩬰'),
+ (0x2FA0A, 'M', u'鬒'),
+ (0x2FA0B, 'M', u'鱀'),
+ (0x2FA0C, 'M', u'鳽'),
+ (0x2FA0D, 'M', u'䳎'),
+ (0x2FA0E, 'M', u'䳭'),
+ (0x2FA0F, 'M', u'鵧'),
+ (0x2FA10, 'M', u'𪃎'),
+ (0x2FA11, 'M', u'䳸'),
+ (0x2FA12, 'M', u'𪄅'),
+ (0x2FA13, 'M', u'𪈎'),
+ (0x2FA14, 'M', u'𪊑'),
+ (0x2FA15, 'M', u'麻'),
+ (0x2FA16, 'M', u'䵖'),
+ (0x2FA17, 'M', u'黹'),
+ (0x2FA18, 'M', u'黾'),
+ (0x2FA19, 'M', u'鼅'),
+ (0x2FA1A, 'M', u'鼏'),
+ (0x2FA1B, 'M', u'鼖'),
+ (0x2FA1C, 'M', u'鼻'),
+ (0x2FA1D, 'M', u'𪘀'),
+ (0x2FA1E, 'X'),
+ (0x30000, 'V'),
+ (0x3134B, 'X'),
+ (0xE0100, 'I'),
+ (0xE01F0, 'X'),
+ ]
+
+uts46data = tuple(
+ _seg_0()
+ + _seg_1()
+ + _seg_2()
+ + _seg_3()
+ + _seg_4()
+ + _seg_5()
+ + _seg_6()
+ + _seg_7()
+ + _seg_8()
+ + _seg_9()
+ + _seg_10()
+ + _seg_11()
+ + _seg_12()
+ + _seg_13()
+ + _seg_14()
+ + _seg_15()
+ + _seg_16()
+ + _seg_17()
+ + _seg_18()
+ + _seg_19()
+ + _seg_20()
+ + _seg_21()
+ + _seg_22()
+ + _seg_23()
+ + _seg_24()
+ + _seg_25()
+ + _seg_26()
+ + _seg_27()
+ + _seg_28()
+ + _seg_29()
+ + _seg_30()
+ + _seg_31()
+ + _seg_32()
+ + _seg_33()
+ + _seg_34()
+ + _seg_35()
+ + _seg_36()
+ + _seg_37()
+ + _seg_38()
+ + _seg_39()
+ + _seg_40()
+ + _seg_41()
+ + _seg_42()
+ + _seg_43()
+ + _seg_44()
+ + _seg_45()
+ + _seg_46()
+ + _seg_47()
+ + _seg_48()
+ + _seg_49()
+ + _seg_50()
+ + _seg_51()
+ + _seg_52()
+ + _seg_53()
+ + _seg_54()
+ + _seg_55()
+ + _seg_56()
+ + _seg_57()
+ + _seg_58()
+ + _seg_59()
+ + _seg_60()
+ + _seg_61()
+ + _seg_62()
+ + _seg_63()
+ + _seg_64()
+ + _seg_65()
+ + _seg_66()
+ + _seg_67()
+ + _seg_68()
+ + _seg_69()
+ + _seg_70()
+ + _seg_71()
+ + _seg_72()
+ + _seg_73()
+ + _seg_74()
+ + _seg_75()
+ + _seg_76()
+ + _seg_77()
+ + _seg_78()
+ + _seg_79()
+)
diff --git a/venv/Lib/site-packages/msgpack-1.0.0.dist-info/COPYING b/venv/Lib/site-packages/msgpack-1.0.0.dist-info/COPYING
new file mode 100644
index 000000000..f067af3aa
--- /dev/null
+++ b/venv/Lib/site-packages/msgpack-1.0.0.dist-info/COPYING
@@ -0,0 +1,14 @@
+Copyright (C) 2008-2011 INADA Naoki
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
diff --git a/venv/Lib/site-packages/msgpack-1.0.0.dist-info/INSTALLER b/venv/Lib/site-packages/msgpack-1.0.0.dist-info/INSTALLER
new file mode 100644
index 000000000..a1b589e38
--- /dev/null
+++ b/venv/Lib/site-packages/msgpack-1.0.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/Lib/site-packages/msgpack-1.0.0.dist-info/METADATA b/venv/Lib/site-packages/msgpack-1.0.0.dist-info/METADATA
new file mode 100644
index 000000000..58e2e171f
--- /dev/null
+++ b/venv/Lib/site-packages/msgpack-1.0.0.dist-info/METADATA
@@ -0,0 +1,299 @@
+Metadata-Version: 2.1
+Name: msgpack
+Version: 1.0.0
+Summary: MessagePack (de)serializer.
+Home-page: https://msgpack.org/
+Author: Inada Naoki
+Author-email: songofacandy@gmail.com
+License: Apache 2.0
+Project-URL: Documentation, https://msgpack-python.readthedocs.io/
+Project-URL: Source, https://github.com/msgpack/msgpack-python
+Project-URL: Tracker, https://github.com/msgpack/msgpack-python/issues
+Platform: UNKNOWN
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Description-Content-Type: text/markdown
+
+# MessagePack for Python
+
+[![Build Status](https://travis-ci.org/msgpack/msgpack-python.svg?branch=master)](https://travis-ci.org/msgpack/msgpack-python)
+[![Documentation Status](https://readthedocs.org/projects/msgpack-python/badge/?version=latest)](https://msgpack-python.readthedocs.io/en/latest/?badge=latest)
+
+## What's this
+
+[MessagePack](https://msgpack.org/) is an efficient binary serialization format.
+It lets you exchange data among multiple languages like JSON.
+But it's faster and smaller.
+This package provides CPython bindings for reading and writing MessagePack data.
+
+
+## Very important notes for existing users
+
+### PyPI package name
+
+TL;DR: When upgrading from msgpack-0.4 or earlier, don't do `pip install -U msgpack-python`.
+Do `pip uninstall msgpack-python; pip install msgpack` instead.
+
+Package name on PyPI was changed to msgpack from 0.5.
+I uploaded a transitional package (msgpack-python 0.5, which depends on msgpack)
+for a smooth transition from msgpack-python to msgpack.
+
+Sadly, this doesn't work for an upgrade install. After `pip install -U msgpack-python`,
+msgpack is removed, and `import msgpack` fails.
+
+
+### Compatibility with the old format
+
+You can use the `use_bin_type=False` option to pack `bytes` objects into the
+raw type from the old msgpack spec, instead of the bin type from the new spec.
+
+You can unpack the old msgpack format using the `raw=True` option.
+It unpacks the msgpack str (raw) type into Python bytes.
+
+See the note below for details.
+
+
+### Major breaking changes in msgpack 1.0
+
+* Python 2
+
+ * The extension module does not support Python 2 anymore.
+ The pure Python implementation (`msgpack.fallback`) is used for Python 2.
+
+* Packer
+
+  * `use_bin_type=True` by default. bytes are encoded in the bin type in msgpack.
+    **If you are still using Python 2, you must use unicode for all string types.**
+    You can use `use_bin_type=False` to encode into the old msgpack format.
+  * The `encoding` option is removed. UTF-8 is always used.
+
+* Unpacker
+
+  * `raw=False` by default. It assumes str types are valid UTF-8 strings
+    and decodes them to Python str (unicode) objects.
+  * The `encoding` option is removed. You can use `raw=True` to support the old format.
+  * Default value of `max_buffer_size` is changed from 0 to 100 MiB.
+  * Default value of `strict_map_key` is changed to True to avoid hashdos.
+    You need to pass `strict_map_key=False` if you have data containing map keys
+    whose type is not bytes or str (see the sketch after this list).
+
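+A minimal sketch of the new `strict_map_key` default (the traceback is abbreviated
+and the exact error message may differ between the C and pure-Python implementations):
+
+```pycon
+ >>> import msgpack
+ >>> packed = msgpack.packb({1: "one"}, use_bin_type=True)
+ >>> msgpack.unpackb(packed, raw=False)  # strict_map_key=True is the new default
+ Traceback (most recent call last):
+   ...
+ ValueError: ... is not allowed for map key
+ >>> msgpack.unpackb(packed, raw=False, strict_map_key=False)
+ {1: 'one'}
+```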
+
+## Install
+
+
+ $ pip install msgpack
+
+
+### Pure Python implementation
+
+The extension module in msgpack (`msgpack._cmsgpack`) does not support
+Python 2 or PyPy.
+
+But msgpack provides a pure Python implementation (`msgpack.fallback`)
+for PyPy and Python 2.
+
+Since [pip](https://pip.pypa.io/) uses the pure Python implementation,
+Python 2 support will not be dropped in the foreseeable future.
+
+
+### Windows
+
+When you can't use a binary distribution, you need to install Visual Studio
+or the Windows SDK on Windows.
+Without the extension, the pure Python implementation runs slowly on CPython.
+
+
+## How to use
+
+NOTE: In the examples below, I use `raw=False` and `use_bin_type=True` for users
+still on msgpack < 1.0. These options are the defaults from msgpack 1.0, so you can omit them.
+
+
+### One-shot pack & unpack
+
+Use `packb` for packing and `unpackb` for unpacking.
+msgpack provides `dumps` and `loads` as aliases for compatibility with
+`json` and `pickle`.
+
+`pack` and `dump` pack to a file-like object;
+`unpack` and `load` unpack from a file-like object (see the sketch after the example below).
+
+```pycon
+ >>> import msgpack
+ >>> msgpack.packb([1, 2, 3], use_bin_type=True)
+ b'\x93\x01\x02\x03'
+ >>> msgpack.unpackb(_, raw=False)
+ [1, 2, 3]
+```
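+
+The file-like variants work the same way; a minimal sketch using an in-memory buffer:
+
+```py
+ import msgpack
+ from io import BytesIO
+
+ buf = BytesIO()
+ msgpack.pack([1, 2, 3], buf, use_bin_type=True)  # dump is an alias of pack
+ buf.seek(0)
+ print(msgpack.unpack(buf, raw=False))            # load is an alias; prints [1, 2, 3]
+```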
+
+`unpackb` unpacks a msgpack array to a Python list, but it can also unpack to a tuple:
+
+```pycon
+ >>> msgpack.unpackb(b'\x93\x01\x02\x03', use_list=False, raw=False)
+ (1, 2, 3)
+```
+
+You should always specify the `use_list` keyword argument for backward compatibility.
+See the performance tips on the `use_list` option below.
+
+Read the docstring for other options.
+
+
+### Streaming unpacking
+
+`Unpacker` is a "streaming unpacker". It unpacks multiple objects from one
+stream (or from bytes provided through its `feed` method).
+
+```py
+ import msgpack
+ from io import BytesIO
+
+ buf = BytesIO()
+ for i in range(100):
+ buf.write(msgpack.packb(i, use_bin_type=True))
+
+ buf.seek(0)
+
+ unpacker = msgpack.Unpacker(buf, raw=False)
+ for unpacked in unpacker:
+ print(unpacked)
+```
+
+
+### Packing/unpacking of custom data type
+
+It is also possible to pack/unpack custom data types. Here is an example for
+`datetime.datetime`.
+
+```py
+ import datetime
+ import msgpack
+
+ useful_dict = {
+ "id": 1,
+ "created": datetime.datetime.now(),
+ }
+
+ def decode_datetime(obj):
+     if '__datetime__' in obj:
+         obj = datetime.datetime.strptime(obj["as_str"], "%Y%m%dT%H:%M:%S.%f")
+     return obj
+
+ def encode_datetime(obj):
+ if isinstance(obj, datetime.datetime):
+ return {'__datetime__': True, 'as_str': obj.strftime("%Y%m%dT%H:%M:%S.%f")}
+ return obj
+
+
+ packed_dict = msgpack.packb(useful_dict, default=encode_datetime, use_bin_type=True)
+ this_dict_again = msgpack.unpackb(packed_dict, object_hook=decode_datetime, raw=False)
+```
+
+`Unpacker`'s `object_hook` callback receives a dict; the
+`object_pairs_hook` callback may instead be used to receive a list of
+key-value pairs.
+
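+A minimal sketch of `object_pairs_hook`, which receives the map as a list of
+`(key, value)` tuples (here fed straight into `OrderedDict`):
+
+```py
+ import msgpack
+ from collections import OrderedDict
+
+ packed = msgpack.packb({"b": 2, "a": 1}, use_bin_type=True)
+
+ # object_pairs_hook is called with [(key, value), ...] instead of a dict.
+ decoded = msgpack.unpackb(packed, object_pairs_hook=OrderedDict, raw=False)
+```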
+
+### Extended types
+
+It is also possible to pack/unpack custom data types using the **ext** type.
+
+```pycon
+ >>> import msgpack
+ >>> import array
+ >>> def default(obj):
+ ... if isinstance(obj, array.array) and obj.typecode == 'd':
+ ... return msgpack.ExtType(42, obj.tostring())
+ ... raise TypeError("Unknown type: %r" % (obj,))
+ ...
+ >>> def ext_hook(code, data):
+ ... if code == 42:
+ ... a = array.array('d')
+ ... a.fromstring(data)
+ ... return a
+ ...     return msgpack.ExtType(code, data)
+ ...
+ >>> data = array.array('d', [1.2, 3.4])
+ >>> packed = msgpack.packb(data, default=default, use_bin_type=True)
+ >>> unpacked = msgpack.unpackb(packed, ext_hook=ext_hook, raw=False)
+ >>> data == unpacked
+ True
+```
+
+
+### Advanced unpacking control
+
+As an alternative to iteration, `Unpacker` objects provide `unpack`,
+`skip`, `read_array_header` and `read_map_header` methods. The former two
+read an entire message from the stream, respectively de-serialising and returning
+the result, or ignoring it. The latter two methods return the number of elements
+in the upcoming container, so that each element in an array, or key-value pair
+in a map, can be unpacked or skipped individually.
+
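+A minimal sketch of walking one message element by element:
+
+```py
+ import msgpack
+ from io import BytesIO
+
+ packed = msgpack.packb([1, [2, 3], {"a": 4}], use_bin_type=True)
+ unpacker = msgpack.Unpacker(BytesIO(packed), raw=False)
+
+ n = unpacker.read_array_header()  # the outer array holds 3 elements
+ first = unpacker.unpack()         # 1
+ unpacker.skip()                   # ignore [2, 3] entirely
+ last = unpacker.unpack()          # {'a': 4}
+```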
+
+## Notes
+
+### string and binary type
+
+Early versions of msgpack didn't distinguish string and binary types.
+The type for representing both string and binary types was named **raw**.
+
+You can pack into and unpack from this old spec using the `use_bin_type=False`
+and `raw=True` options.
+
+```pycon
+ >>> import msgpack
+ >>> msgpack.unpackb(msgpack.packb([b'spam', u'eggs'], use_bin_type=False), raw=True)
+ [b'spam', b'eggs']
+ >>> msgpack.unpackb(msgpack.packb([b'spam', u'eggs'], use_bin_type=True), raw=False)
+ [b'spam', 'eggs']
+```
+
+### ext type
+
+To use the **ext** type, pass a `msgpack.ExtType` object to the packer.
+
+```pycon
+ >>> import msgpack
+ >>> packed = msgpack.packb(msgpack.ExtType(42, b'xyzzy'))
+ >>> msgpack.unpackb(packed)
+ ExtType(code=42, data=b'xyzzy')
+```
+
+You can use it with `default` and `ext_hook`; see the Extended types section above.
+
+
+### Security
+
+When unpacking data received from an unreliable source, msgpack provides
+two security options.
+
+`max_buffer_size` (default: `100*1024*1024`) limits the internal buffer size.
+It is used to limit the preallocated list size too.
+
+`strict_map_key` (default: `True`) limits the type of map keys to bytes and str.
+While the msgpack spec doesn't limit the types of map keys,
+there is a risk of hash-collision DoS (hashdos).
+If you need to support other types for map keys, use `strict_map_key=False`.
+
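+For example, a hypothetical receive loop that caps the buffer and keeps the strict
+map-key check (`untrusted_socket` and `handle` are placeholder names):
+
+```py
+ import msgpack
+
+ unpacker = msgpack.Unpacker(raw=False, max_buffer_size=1024 * 1024)
+ while True:
+     buf = untrusted_socket.recv(4096)  # placeholder source of untrusted bytes
+     if not buf:
+         break
+     unpacker.feed(buf)                 # raises BufferFull past max_buffer_size
+     for obj in unpacker:
+         handle(obj)                    # placeholder consumer
+```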
+
+### Performance tips
+
+CPython's GC runs as the number of allocated objects grows, so unpacking a
+large message may trigger useless GC passes.
+You can use `gc.disable()` while unpacking a large message.
+
+Lists are the default sequence type in Python, but tuples are lighter than lists.
+You can use `use_list=False` when unpacking and performance is important
+(a sketch combining both tips follows).
+
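+A minimal sketch combining both tips (`big_payload` is a placeholder for a large
+msgpack message):
+
+```py
+ import gc
+ import msgpack
+
+ gc.disable()  # avoid GC passes while many objects are allocated
+ try:
+     data = msgpack.unpackb(big_payload, use_list=False, raw=False)  # tuples, not lists
+ finally:
+     gc.enable()
+```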
+
diff --git a/venv/Lib/site-packages/msgpack-1.0.0.dist-info/RECORD b/venv/Lib/site-packages/msgpack-1.0.0.dist-info/RECORD
new file mode 100644
index 000000000..cce4999af
--- /dev/null
+++ b/venv/Lib/site-packages/msgpack-1.0.0.dist-info/RECORD
@@ -0,0 +1,17 @@
+msgpack-1.0.0.dist-info/COPYING,sha256=T73_QuukWTwW96fqcHiIOytzSHCh4rY2KEzi3OYr9Pc,628
+msgpack-1.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+msgpack-1.0.0.dist-info/METADATA,sha256=hRAVJbKdktOK2pPWhVokgS8ID5WfluRuc-iqb82ZJAE,9398
+msgpack-1.0.0.dist-info/RECORD,,
+msgpack-1.0.0.dist-info/WHEEL,sha256=ZFeOeZQCWkgYx9PG5WAxk1yIHroxd2erWFNpu0USMOg,102
+msgpack-1.0.0.dist-info/top_level.txt,sha256=2tykSY1pXdiA2xYTDR6jPw0qI5ZGxRihyhf4S5hZyXk,8
+msgpack/__init__.py,sha256=OhoFouHD7wOYMP2PN-Hlyk9RAZw39V-iPTDRsmkoIns,1172
+msgpack/__pycache__/__init__.cpython-36.pyc,,
+msgpack/__pycache__/_version.cpython-36.pyc,,
+msgpack/__pycache__/exceptions.cpython-36.pyc,,
+msgpack/__pycache__/ext.cpython-36.pyc,,
+msgpack/__pycache__/fallback.cpython-36.pyc,,
+msgpack/_cmsgpack.cp36-win32.pyd,sha256=L7NxGkhWBJbBmN4yCjR0fTHkjrIH6BuYrM_TdhpHL_Y,108032
+msgpack/_version.py,sha256=MV-0jAzw7knOtkCPPik1WoQu00ZjgtJ_Dnifwq6hjdk,21
+msgpack/exceptions.py,sha256=2fCtczricqQgdT3NtW6cTqmZn3WA7GQtmlPuT-NhLyM,1129
+msgpack/ext.py,sha256=D_ZK6tFXxA4FSm9JiJVPfhwiICHXr_6LcHZAgJjjBxs,6225
+msgpack/fallback.py,sha256=9KtOi1INOHWeJSBLe6gIhBFdwMDL4Mxk6KWyPS29X2U,38196
diff --git a/venv/Lib/site-packages/msgpack-1.0.0.dist-info/WHEEL b/venv/Lib/site-packages/msgpack-1.0.0.dist-info/WHEEL
new file mode 100644
index 000000000..f2456e30b
--- /dev/null
+++ b/venv/Lib/site-packages/msgpack-1.0.0.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.34.2)
+Root-Is-Purelib: false
+Tag: cp36-cp36m-win32
+
diff --git a/venv/Lib/site-packages/msgpack-1.0.0.dist-info/top_level.txt b/venv/Lib/site-packages/msgpack-1.0.0.dist-info/top_level.txt
new file mode 100644
index 000000000..3aae276bc
--- /dev/null
+++ b/venv/Lib/site-packages/msgpack-1.0.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+msgpack
diff --git a/venv/Lib/site-packages/msgpack/__init__.py b/venv/Lib/site-packages/msgpack/__init__.py
new file mode 100644
index 000000000..d6705e22b
--- /dev/null
+++ b/venv/Lib/site-packages/msgpack/__init__.py
@@ -0,0 +1,54 @@
+# coding: utf-8
+from ._version import version
+from .exceptions import *
+from .ext import ExtType, Timestamp
+
+import os
+import sys
+
+
+if os.environ.get("MSGPACK_PUREPYTHON") or sys.version_info[0] == 2:
+ from .fallback import Packer, unpackb, Unpacker
+else:
+ try:
+ from ._cmsgpack import Packer, unpackb, Unpacker
+ except ImportError:
+ from .fallback import Packer, unpackb, Unpacker
+
+
+def pack(o, stream, **kwargs):
+ """
+ Pack object `o` and write it to `stream`
+
+ See :class:`Packer` for options.
+ """
+ packer = Packer(**kwargs)
+ stream.write(packer.pack(o))
+
+
+def packb(o, **kwargs):
+ """
+ Pack object `o` and return packed bytes
+
+ See :class:`Packer` for options.
+ """
+ return Packer(**kwargs).pack(o)
+
+
+def unpack(stream, **kwargs):
+ """
+ Unpack an object from `stream`.
+
+ Raises `ExtraData` when `stream` contains extra bytes.
+ See :class:`Unpacker` for options.
+ """
+ data = stream.read()
+ return unpackb(data, **kwargs)
+
+
+# alias for compatibility to simplejson/marshal/pickle.
+load = unpack
+loads = unpackb
+
+dump = pack
+dumps = packb
diff --git a/venv/Lib/site-packages/msgpack/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/msgpack/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..594e6d607
Binary files /dev/null and b/venv/Lib/site-packages/msgpack/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/msgpack/__pycache__/_version.cpython-36.pyc b/venv/Lib/site-packages/msgpack/__pycache__/_version.cpython-36.pyc
new file mode 100644
index 000000000..3a2a839c1
Binary files /dev/null and b/venv/Lib/site-packages/msgpack/__pycache__/_version.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/msgpack/__pycache__/exceptions.cpython-36.pyc b/venv/Lib/site-packages/msgpack/__pycache__/exceptions.cpython-36.pyc
new file mode 100644
index 000000000..4567c20f6
Binary files /dev/null and b/venv/Lib/site-packages/msgpack/__pycache__/exceptions.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/msgpack/__pycache__/ext.cpython-36.pyc b/venv/Lib/site-packages/msgpack/__pycache__/ext.cpython-36.pyc
new file mode 100644
index 000000000..69c79a516
Binary files /dev/null and b/venv/Lib/site-packages/msgpack/__pycache__/ext.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/msgpack/__pycache__/fallback.cpython-36.pyc b/venv/Lib/site-packages/msgpack/__pycache__/fallback.cpython-36.pyc
new file mode 100644
index 000000000..2fa23ab8c
Binary files /dev/null and b/venv/Lib/site-packages/msgpack/__pycache__/fallback.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/msgpack/_cmsgpack.cp36-win32.pyd b/venv/Lib/site-packages/msgpack/_cmsgpack.cp36-win32.pyd
new file mode 100644
index 000000000..d560c44ec
Binary files /dev/null and b/venv/Lib/site-packages/msgpack/_cmsgpack.cp36-win32.pyd differ
diff --git a/venv/Lib/site-packages/msgpack/_version.py b/venv/Lib/site-packages/msgpack/_version.py
new file mode 100644
index 000000000..9f55cf50d
--- /dev/null
+++ b/venv/Lib/site-packages/msgpack/_version.py
@@ -0,0 +1 @@
+version = (1, 0, 0)
diff --git a/venv/Lib/site-packages/msgpack/exceptions.py b/venv/Lib/site-packages/msgpack/exceptions.py
new file mode 100644
index 000000000..d6d2615cf
--- /dev/null
+++ b/venv/Lib/site-packages/msgpack/exceptions.py
@@ -0,0 +1,48 @@
+class UnpackException(Exception):
+ """Base class for some exceptions raised while unpacking.
+
+    NOTE: unpack may raise exceptions other than subclasses of
+    UnpackException. If you want to catch all errors, catch
+    Exception instead.
+ """
+
+
+class BufferFull(UnpackException):
+ pass
+
+
+class OutOfData(UnpackException):
+ pass
+
+
+class FormatError(ValueError, UnpackException):
+ """Invalid msgpack format"""
+
+
+class StackError(ValueError, UnpackException):
+ """Too nested"""
+
+
+# Deprecated. Use ValueError instead
+UnpackValueError = ValueError
+
+
+class ExtraData(UnpackValueError):
+ """ExtraData is raised when there is trailing data.
+
+    This exception is raised only during one-shot (not streaming)
+    unpacking.
+ """
+
+ def __init__(self, unpacked, extra):
+ self.unpacked = unpacked
+ self.extra = extra
+
+ def __str__(self):
+ return "unpack(b) received extra data."
+
+
+# Deprecated. Use Exception instead to catch all exception during packing.
+PackException = Exception
+PackValueError = ValueError
+PackOverflowError = OverflowError
diff --git a/venv/Lib/site-packages/msgpack/ext.py b/venv/Lib/site-packages/msgpack/ext.py
new file mode 100644
index 000000000..8341c68b8
--- /dev/null
+++ b/venv/Lib/site-packages/msgpack/ext.py
@@ -0,0 +1,191 @@
+# coding: utf-8
+from collections import namedtuple
+import datetime
+import sys
+import struct
+
+
+PY2 = sys.version_info[0] == 2
+
+if PY2:
+ int_types = (int, long)
+ _utc = None
+else:
+ int_types = int
+ try:
+ _utc = datetime.timezone.utc
+ except AttributeError:
+ _utc = datetime.timezone(datetime.timedelta(0))
+
+
+class ExtType(namedtuple("ExtType", "code data")):
+ """ExtType represents ext type in msgpack."""
+
+ def __new__(cls, code, data):
+ if not isinstance(code, int):
+ raise TypeError("code must be int")
+ if not isinstance(data, bytes):
+ raise TypeError("data must be bytes")
+ if not 0 <= code <= 127:
+ raise ValueError("code must be 0~127")
+ return super(ExtType, cls).__new__(cls, code, data)
+
+
+class Timestamp(object):
+ """Timestamp represents the Timestamp extension type in msgpack.
+
+ When built with Cython, msgpack uses C methods to pack and unpack `Timestamp`. When using pure-Python
+ msgpack, :func:`to_bytes` and :func:`from_bytes` are used to pack and unpack `Timestamp`.
+
+ This class is immutable: Do not override seconds and nanoseconds.
+ """
+
+ __slots__ = ["seconds", "nanoseconds"]
+
+ def __init__(self, seconds, nanoseconds=0):
+ """Initialize a Timestamp object.
+
+ :param int seconds:
+ Number of seconds since the UNIX epoch (00:00:00 UTC Jan 1 1970, minus leap seconds).
+ May be negative.
+
+ :param int nanoseconds:
+ Number of nanoseconds to add to `seconds` to get fractional time.
+ Maximum is 999_999_999. Default is 0.
+
+ Note: Negative times (before the UNIX epoch) are represented as negative seconds + positive ns.
+ """
+ if not isinstance(seconds, int_types):
+            raise TypeError("seconds must be an integer")
+ if not isinstance(nanoseconds, int_types):
+ raise TypeError("nanoseconds must be an integer")
+ if not (0 <= nanoseconds < 10 ** 9):
+ raise ValueError(
+                "nanoseconds must be a non-negative integer less than 1000000000."
+ )
+ self.seconds = seconds
+ self.nanoseconds = nanoseconds
+
+ def __repr__(self):
+ """String representation of Timestamp."""
+ return "Timestamp(seconds={0}, nanoseconds={1})".format(
+ self.seconds, self.nanoseconds
+ )
+
+ def __eq__(self, other):
+ """Check for equality with another Timestamp object"""
+ if type(other) is self.__class__:
+ return (
+ self.seconds == other.seconds and self.nanoseconds == other.nanoseconds
+ )
+ return False
+
+ def __ne__(self, other):
+ """not-equals method (see :func:`__eq__()`)"""
+ return not self.__eq__(other)
+
+ def __hash__(self):
+ return hash((self.seconds, self.nanoseconds))
+
+ @staticmethod
+ def from_bytes(b):
+ """Unpack bytes into a `Timestamp` object.
+
+ Used for pure-Python msgpack unpacking.
+
+ :param b: Payload from msgpack ext message with code -1
+ :type b: bytes
+
+ :returns: Timestamp object unpacked from msgpack ext payload
+ :rtype: Timestamp
+ """
+ if len(b) == 4:
+ seconds = struct.unpack("!L", b)[0]
+ nanoseconds = 0
+ elif len(b) == 8:
+ data64 = struct.unpack("!Q", b)[0]
+ seconds = data64 & 0x00000003FFFFFFFF
+ nanoseconds = data64 >> 34
+ elif len(b) == 12:
+ nanoseconds, seconds = struct.unpack("!Iq", b)
+ else:
+ raise ValueError(
+ "Timestamp type can only be created from 32, 64, or 96-bit byte objects"
+ )
+ return Timestamp(seconds, nanoseconds)
+
+ def to_bytes(self):
+ """Pack this Timestamp object into bytes.
+
+ Used for pure-Python msgpack packing.
+
+ :returns data: Payload for EXT message with code -1 (timestamp type)
+ :rtype: bytes
+ """
+ if (self.seconds >> 34) == 0: # seconds is non-negative and fits in 34 bits
+ data64 = self.nanoseconds << 34 | self.seconds
+ if data64 & 0xFFFFFFFF00000000 == 0:
+ # nanoseconds is zero and seconds < 2**32, so timestamp 32
+ data = struct.pack("!L", data64)
+ else:
+ # timestamp 64
+ data = struct.pack("!Q", data64)
+ else:
+ # timestamp 96
+ data = struct.pack("!Iq", self.nanoseconds, self.seconds)
+ return data
+
+ @staticmethod
+ def from_unix(unix_sec):
+ """Create a Timestamp from posix timestamp in seconds.
+
+        :param unix_sec: Posix timestamp in seconds.
+        :type unix_sec: int or float.
+ """
+ seconds = int(unix_sec // 1)
+ nanoseconds = int((unix_sec % 1) * 10 ** 9)
+ return Timestamp(seconds, nanoseconds)
+
+ def to_unix(self):
+ """Get the timestamp as a floating-point value.
+
+ :returns: posix timestamp
+ :rtype: float
+ """
+ return self.seconds + self.nanoseconds / 1e9
+
+ @staticmethod
+ def from_unix_nano(unix_ns):
+ """Create a Timestamp from posix timestamp in nanoseconds.
+
+ :param int unix_ns: Posix timestamp in nanoseconds.
+ :rtype: Timestamp
+ """
+ return Timestamp(*divmod(unix_ns, 10 ** 9))
+
+ def to_unix_nano(self):
+ """Get the timestamp as a unixtime in nanoseconds.
+
+ :returns: posix timestamp in nanoseconds
+ :rtype: int
+ """
+ return self.seconds * 10 ** 9 + self.nanoseconds
+
+ def to_datetime(self):
+ """Get the timestamp as a UTC datetime.
+
+ Python 2 is not supported.
+
+ :rtype: datetime.
+ """
+ return datetime.datetime.fromtimestamp(self.to_unix(), _utc)
+
+ @staticmethod
+ def from_datetime(dt):
+ """Create a Timestamp from datetime with tzinfo.
+
+ Python 2 is not supported.
+
+ :rtype: Timestamp
+ """
+ return Timestamp.from_unix(dt.timestamp())
diff --git a/venv/Lib/site-packages/msgpack/fallback.py b/venv/Lib/site-packages/msgpack/fallback.py
new file mode 100644
index 000000000..9f6665b3e
--- /dev/null
+++ b/venv/Lib/site-packages/msgpack/fallback.py
@@ -0,0 +1,1063 @@
+"""Fallback pure Python implementation of msgpack"""
+
+from datetime import datetime as _DateTime
+import sys
+import struct
+
+
+PY2 = sys.version_info[0] == 2
+if PY2:
+ int_types = (int, long)
+
+ def dict_iteritems(d):
+ return d.iteritems()
+
+
+else:
+ int_types = int
+ unicode = str
+ xrange = range
+
+ def dict_iteritems(d):
+ return d.items()
+
+
+if sys.version_info < (3, 5):
+ # Ugly hack...
+ RecursionError = RuntimeError
+
+ def _is_recursionerror(e):
+ return (
+ len(e.args) == 1
+ and isinstance(e.args[0], str)
+ and e.args[0].startswith("maximum recursion depth exceeded")
+ )
+
+
+else:
+
+ def _is_recursionerror(e):
+ return True
+
+
+if hasattr(sys, "pypy_version_info"):
+    # BytesIO is slow on PyPy, StringIO is faster. However: PyPy's own
+    # StringBuilder is fastest.
+ from __pypy__ import newlist_hint
+
+ try:
+ from __pypy__.builders import BytesBuilder as StringBuilder
+ except ImportError:
+ from __pypy__.builders import StringBuilder
+ USING_STRINGBUILDER = True
+
+ class StringIO(object):
+ def __init__(self, s=b""):
+ if s:
+ self.builder = StringBuilder(len(s))
+ self.builder.append(s)
+ else:
+ self.builder = StringBuilder()
+
+ def write(self, s):
+ if isinstance(s, memoryview):
+ s = s.tobytes()
+ elif isinstance(s, bytearray):
+ s = bytes(s)
+ self.builder.append(s)
+
+ def getvalue(self):
+ return self.builder.build()
+
+
+else:
+ USING_STRINGBUILDER = False
+ from io import BytesIO as StringIO
+
+ newlist_hint = lambda size: []
+
+
+from .exceptions import BufferFull, OutOfData, ExtraData, FormatError, StackError
+
+from .ext import ExtType, Timestamp
+
+
+EX_SKIP = 0
+EX_CONSTRUCT = 1
+EX_READ_ARRAY_HEADER = 2
+EX_READ_MAP_HEADER = 3
+
+TYPE_IMMEDIATE = 0
+TYPE_ARRAY = 1
+TYPE_MAP = 2
+TYPE_RAW = 3
+TYPE_BIN = 4
+TYPE_EXT = 5
+
+DEFAULT_RECURSE_LIMIT = 511
+
+
+def _check_type_strict(obj, t, type=type, tuple=tuple):
+ if type(t) is tuple:
+ return type(obj) in t
+ else:
+ return type(obj) is t
+
+
+def _get_data_from_buffer(obj):
+ view = memoryview(obj)
+ if view.itemsize != 1:
+ raise ValueError("cannot unpack from multi-byte object")
+ return view
+
+
+def unpackb(packed, **kwargs):
+ """
+ Unpack an object from `packed`.
+
+ Raises ``ExtraData`` when *packed* contains extra bytes.
+ Raises ``ValueError`` when *packed* is incomplete.
+ Raises ``FormatError`` when *packed* is not valid msgpack.
+    Raises ``StackError`` when *packed* contains too deeply nested data.
+ Other exceptions can be raised during unpacking.
+
+ See :class:`Unpacker` for options.
+ """
+ unpacker = Unpacker(None, max_buffer_size=len(packed), **kwargs)
+ unpacker.feed(packed)
+ try:
+ ret = unpacker._unpack()
+ except OutOfData:
+ raise ValueError("Unpack failed: incomplete input")
+ except RecursionError as e:
+ if _is_recursionerror(e):
+ raise StackError
+ raise
+ if unpacker._got_extradata():
+ raise ExtraData(ret, unpacker._get_extradata())
+ return ret
+
+
+if sys.version_info < (2, 7, 6):
+
+ def _unpack_from(f, b, o=0):
+ """Explicit type cast for legacy struct.unpack_from"""
+ return struct.unpack_from(f, bytes(b), o)
+
+
+else:
+ _unpack_from = struct.unpack_from
+
+
+class Unpacker(object):
+ """Streaming unpacker.
+
+ Arguments:
+
+ :param file_like:
+ File-like object having `.read(n)` method.
+ If specified, unpacker reads serialized data from it and :meth:`feed()` is not usable.
+
+ :param int read_size:
+ Used as `file_like.read(read_size)`. (default: `min(16*1024, max_buffer_size)`)
+
+ :param bool use_list:
+ If true, unpack msgpack array to Python list.
+ Otherwise, unpack to Python tuple. (default: True)
+
+ :param bool raw:
+ If true, unpack msgpack raw to Python bytes.
+ Otherwise, unpack to Python str by decoding with UTF-8 encoding (default).
+
+ :param int timestamp:
+ Control how timestamp type is unpacked:
+
+ 0 - Timestamp
+ 1 - float (Seconds from the EPOCH)
+ 2 - int (Nanoseconds from the EPOCH)
+ 3 - datetime.datetime (UTC). Python 2 is not supported.
+
+ :param bool strict_map_key:
+ If true (default), only str or bytes are accepted for map (dict) keys.
+
+ :param callable object_hook:
+ When specified, it should be callable.
+ Unpacker calls it with a dict argument after unpacking msgpack map.
+ (See also simplejson)
+
+ :param callable object_pairs_hook:
+ When specified, it should be callable.
+ Unpacker calls it with a list of key-value pairs after unpacking msgpack map.
+ (See also simplejson)
+
+ :param str unicode_errors:
+ The error handler for decoding unicode. (default: 'strict')
+ This option should be used only when you have msgpack data which
+ contains invalid UTF-8 string.
+
+ :param int max_buffer_size:
+        Limits the size of data waiting to be unpacked. 0 means 2**31-1.
+ The default value is 100*1024*1024 (100MiB).
+ Raises `BufferFull` exception when it is insufficient.
+ You should set this parameter when unpacking data from untrusted source.
+
+ :param int max_str_len:
+ Deprecated, use *max_buffer_size* instead.
+ Limits max length of str. (default: max_buffer_size)
+
+ :param int max_bin_len:
+ Deprecated, use *max_buffer_size* instead.
+ Limits max length of bin. (default: max_buffer_size)
+
+ :param int max_array_len:
+ Limits max length of array.
+ (default: max_buffer_size)
+
+ :param int max_map_len:
+ Limits max length of map.
+ (default: max_buffer_size//2)
+
+ :param int max_ext_len:
+ Deprecated, use *max_buffer_size* instead.
+ Limits max size of ext type. (default: max_buffer_size)
+
+ Example of streaming deserialize from file-like object::
+
+ unpacker = Unpacker(file_like)
+ for o in unpacker:
+ process(o)
+
+ Example of streaming deserialize from socket::
+
+ unpacker = Unpacker()
+ while True:
+ buf = sock.recv(1024**2)
+ if not buf:
+ break
+ unpacker.feed(buf)
+ for o in unpacker:
+ process(o)
+
+ Raises ``ExtraData`` when *packed* contains extra bytes.
+ Raises ``OutOfData`` when *packed* is incomplete.
+ Raises ``FormatError`` when *packed* is not valid msgpack.
+ Raises ``StackError`` when *packed* contains too deeply nested data.
+ Other exceptions can be raised during unpacking.
+ """
+
+ def __init__(
+ self,
+ file_like=None,
+ read_size=0,
+ use_list=True,
+ raw=False,
+ timestamp=0,
+ strict_map_key=True,
+ object_hook=None,
+ object_pairs_hook=None,
+ list_hook=None,
+ unicode_errors=None,
+ max_buffer_size=100 * 1024 * 1024,
+ ext_hook=ExtType,
+ max_str_len=-1,
+ max_bin_len=-1,
+ max_array_len=-1,
+ max_map_len=-1,
+ max_ext_len=-1,
+ ):
+ if unicode_errors is None:
+ unicode_errors = "strict"
+
+ if file_like is None:
+ self._feeding = True
+ else:
+ if not callable(file_like.read):
+ raise TypeError("`file_like.read` must be callable")
+ self.file_like = file_like
+ self._feeding = False
+
+ #: array of bytes fed.
+ self._buffer = bytearray()
+ #: Position we are currently reading from.
+ self._buff_i = 0
+
+ # When Unpacker is used as an iterable, between the calls to next(),
+ # the buffer is not "consumed" completely, for efficiency's sake.
+ # Instead, it is done sloppily. To make sure we raise BufferFull at
+ # the correct moments, we have to keep track of how sloppy we were.
+ # Furthermore, when the buffer is incomplete (that is: in the case
+ # we raise an OutOfData) we need to rollback the buffer to the correct
+ # state, which _buf_checkpoint records.
+ self._buf_checkpoint = 0
+
+ if not max_buffer_size:
+ max_buffer_size = 2 ** 31 - 1
+ if max_str_len == -1:
+ max_str_len = max_buffer_size
+ if max_bin_len == -1:
+ max_bin_len = max_buffer_size
+ if max_array_len == -1:
+ max_array_len = max_buffer_size
+ if max_map_len == -1:
+ max_map_len = max_buffer_size // 2
+ if max_ext_len == -1:
+ max_ext_len = max_buffer_size
+
+ self._max_buffer_size = max_buffer_size
+ if read_size > self._max_buffer_size:
+ raise ValueError("read_size must be smaller than max_buffer_size")
+ self._read_size = read_size or min(self._max_buffer_size, 16 * 1024)
+ self._raw = bool(raw)
+ self._strict_map_key = bool(strict_map_key)
+ self._unicode_errors = unicode_errors
+ self._use_list = use_list
+ if not (0 <= timestamp <= 3):
+ raise ValueError("timestamp must be 0..3")
+ self._timestamp = timestamp
+ self._list_hook = list_hook
+ self._object_hook = object_hook
+ self._object_pairs_hook = object_pairs_hook
+ self._ext_hook = ext_hook
+ self._max_str_len = max_str_len
+ self._max_bin_len = max_bin_len
+ self._max_array_len = max_array_len
+ self._max_map_len = max_map_len
+ self._max_ext_len = max_ext_len
+ self._stream_offset = 0
+
+ if list_hook is not None and not callable(list_hook):
+ raise TypeError("`list_hook` is not callable")
+ if object_hook is not None and not callable(object_hook):
+ raise TypeError("`object_hook` is not callable")
+ if object_pairs_hook is not None and not callable(object_pairs_hook):
+ raise TypeError("`object_pairs_hook` is not callable")
+ if object_hook is not None and object_pairs_hook is not None:
+ raise TypeError(
+ "object_pairs_hook and object_hook are mutually exclusive"
+ )
+ if not callable(ext_hook):
+ raise TypeError("`ext_hook` is not callable")
+
+ def feed(self, next_bytes):
+ assert self._feeding
+ view = _get_data_from_buffer(next_bytes)
+ if len(self._buffer) - self._buff_i + len(view) > self._max_buffer_size:
+ raise BufferFull
+
+ # Strip the already-consumed part of the buffer before appending new data.
+ if self._buf_checkpoint > 0:
+ del self._buffer[: self._buf_checkpoint]
+ self._buff_i -= self._buf_checkpoint
+ self._buf_checkpoint = 0
+
+ # Use extend here: INPLACE_ADD += doesn't reliably typecast memoryview in jython
+ self._buffer.extend(view)
+
+ def _consume(self):
+ """ Gets rid of the used parts of the buffer. """
+ self._stream_offset += self._buff_i - self._buf_checkpoint
+ self._buf_checkpoint = self._buff_i
+
+ def _got_extradata(self):
+ return self._buff_i < len(self._buffer)
+
+ def _get_extradata(self):
+ return self._buffer[self._buff_i :]
+
+ def read_bytes(self, n):
+ ret = self._read(n)
+ self._consume()
+ return ret
+
+ def _read(self, n):
+ # (int) -> bytearray
+ self._reserve(n)
+ i = self._buff_i
+ self._buff_i = i + n
+ return self._buffer[i : i + n]
+
+ def _reserve(self, n):
+ remain_bytes = len(self._buffer) - self._buff_i - n
+
+ # Fast path: buffer has n bytes already
+ if remain_bytes >= 0:
+ return
+
+ if self._feeding:
+ self._buff_i = self._buf_checkpoint
+ raise OutOfData
+
+ # Strip buffer before checkpoint before reading file.
+ if self._buf_checkpoint > 0:
+ del self._buffer[: self._buf_checkpoint]
+ self._buff_i -= self._buf_checkpoint
+ self._buf_checkpoint = 0
+
+ # Read from file
+ remain_bytes = -remain_bytes
+ while remain_bytes > 0:
+ to_read_bytes = max(self._read_size, remain_bytes)
+ read_data = self.file_like.read(to_read_bytes)
+ if not read_data:
+ break
+ assert isinstance(read_data, bytes)
+ self._buffer += read_data
+ remain_bytes -= len(read_data)
+
+ if len(self._buffer) < n + self._buff_i:
+ self._buff_i = 0 # rollback
+ raise OutOfData
+
+ def _read_header(self, execute=EX_CONSTRUCT):
+ typ = TYPE_IMMEDIATE
+ n = 0
+ obj = None
+ self._reserve(1)
+ b = self._buffer[self._buff_i]
+ self._buff_i += 1
+ if b & 0b10000000 == 0:
+ obj = b
+ elif b & 0b11100000 == 0b11100000:
+ obj = -1 - (b ^ 0xFF)
+ elif b & 0b11100000 == 0b10100000:
+ n = b & 0b00011111
+ typ = TYPE_RAW
+ if n > self._max_str_len:
+ raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
+ obj = self._read(n)
+ elif b & 0b11110000 == 0b10010000:
+ n = b & 0b00001111
+ typ = TYPE_ARRAY
+ if n > self._max_array_len:
+ raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len)
+ elif b & 0b11110000 == 0b10000000:
+ n = b & 0b00001111
+ typ = TYPE_MAP
+ if n > self._max_map_len:
+ raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len)
+ elif b == 0xC0:
+ obj = None
+ elif b == 0xC2:
+ obj = False
+ elif b == 0xC3:
+ obj = True
+ elif b == 0xC4:
+ typ = TYPE_BIN
+ self._reserve(1)
+ n = self._buffer[self._buff_i]
+ self._buff_i += 1
+ if n > self._max_bin_len:
+ raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len))
+ obj = self._read(n)
+ elif b == 0xC5:
+ typ = TYPE_BIN
+ self._reserve(2)
+ n = _unpack_from(">H", self._buffer, self._buff_i)[0]
+ self._buff_i += 2
+ if n > self._max_bin_len:
+ raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len))
+ obj = self._read(n)
+ elif b == 0xC6:
+ typ = TYPE_BIN
+ self._reserve(4)
+ n = _unpack_from(">I", self._buffer, self._buff_i)[0]
+ self._buff_i += 4
+ if n > self._max_bin_len:
+ raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len))
+ obj = self._read(n)
+ elif b == 0xC7: # ext 8
+ typ = TYPE_EXT
+ self._reserve(2)
+ L, n = _unpack_from("Bb", self._buffer, self._buff_i)
+ self._buff_i += 2
+ if L > self._max_ext_len:
+ raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len))
+ obj = self._read(L)
+ elif b == 0xC8: # ext 16
+ typ = TYPE_EXT
+ self._reserve(3)
+ L, n = _unpack_from(">Hb", self._buffer, self._buff_i)
+ self._buff_i += 3
+ if L > self._max_ext_len:
+ raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len))
+ obj = self._read(L)
+ elif b == 0xC9: # ext 32
+ typ = TYPE_EXT
+ self._reserve(5)
+ L, n = _unpack_from(">Ib", self._buffer, self._buff_i)
+ self._buff_i += 5
+ if L > self._max_ext_len:
+ raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len))
+ obj = self._read(L)
+ elif b == 0xCA:
+ self._reserve(4)
+ obj = _unpack_from(">f", self._buffer, self._buff_i)[0]
+ self._buff_i += 4
+ elif b == 0xCB:
+ self._reserve(8)
+ obj = _unpack_from(">d", self._buffer, self._buff_i)[0]
+ self._buff_i += 8
+ elif b == 0xCC:
+ self._reserve(1)
+ obj = self._buffer[self._buff_i]
+ self._buff_i += 1
+ elif b == 0xCD:
+ self._reserve(2)
+ obj = _unpack_from(">H", self._buffer, self._buff_i)[0]
+ self._buff_i += 2
+ elif b == 0xCE:
+ self._reserve(4)
+ obj = _unpack_from(">I", self._buffer, self._buff_i)[0]
+ self._buff_i += 4
+ elif b == 0xCF:
+ self._reserve(8)
+ obj = _unpack_from(">Q", self._buffer, self._buff_i)[0]
+ self._buff_i += 8
+ elif b == 0xD0:
+ self._reserve(1)
+ obj = _unpack_from("b", self._buffer, self._buff_i)[0]
+ self._buff_i += 1
+ elif b == 0xD1:
+ self._reserve(2)
+ obj = _unpack_from(">h", self._buffer, self._buff_i)[0]
+ self._buff_i += 2
+ elif b == 0xD2:
+ self._reserve(4)
+ obj = _unpack_from(">i", self._buffer, self._buff_i)[0]
+ self._buff_i += 4
+ elif b == 0xD3:
+ self._reserve(8)
+ obj = _unpack_from(">q", self._buffer, self._buff_i)[0]
+ self._buff_i += 8
+ elif b == 0xD4: # fixext 1
+ typ = TYPE_EXT
+ if self._max_ext_len < 1:
+ raise ValueError("%s exceeds max_ext_len(%s)" % (1, self._max_ext_len))
+ self._reserve(2)
+ n, obj = _unpack_from("b1s", self._buffer, self._buff_i)
+ self._buff_i += 2
+ elif b == 0xD5: # fixext 2
+ typ = TYPE_EXT
+ if self._max_ext_len < 2:
+ raise ValueError("%s exceeds max_ext_len(%s)" % (2, self._max_ext_len))
+ self._reserve(3)
+ n, obj = _unpack_from("b2s", self._buffer, self._buff_i)
+ self._buff_i += 3
+ elif b == 0xD6: # fixext 4
+ typ = TYPE_EXT
+ if self._max_ext_len < 4:
+ raise ValueError("%s exceeds max_ext_len(%s)" % (4, self._max_ext_len))
+ self._reserve(5)
+ n, obj = _unpack_from("b4s", self._buffer, self._buff_i)
+ self._buff_i += 5
+ elif b == 0xD7: # fixext 8
+ typ = TYPE_EXT
+ if self._max_ext_len < 8:
+ raise ValueError("%s exceeds max_ext_len(%s)" % (8, self._max_ext_len))
+ self._reserve(9)
+ n, obj = _unpack_from("b8s", self._buffer, self._buff_i)
+ self._buff_i += 9
+ elif b == 0xD8: # fixext 16
+ typ = TYPE_EXT
+ if self._max_ext_len < 16:
+ raise ValueError("%s exceeds max_ext_len(%s)" % (16, self._max_ext_len))
+ self._reserve(17)
+ n, obj = _unpack_from("b16s", self._buffer, self._buff_i)
+ self._buff_i += 17
+ elif b == 0xD9:
+ typ = TYPE_RAW
+ self._reserve(1)
+ n = self._buffer[self._buff_i]
+ self._buff_i += 1
+ if n > self._max_str_len:
+ raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
+ obj = self._read(n)
+ elif b == 0xDA:
+ typ = TYPE_RAW
+ self._reserve(2)
+ (n,) = _unpack_from(">H", self._buffer, self._buff_i)
+ self._buff_i += 2
+ if n > self._max_str_len:
+ raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
+ obj = self._read(n)
+ elif b == 0xDB:
+ typ = TYPE_RAW
+ self._reserve(4)
+ (n,) = _unpack_from(">I", self._buffer, self._buff_i)
+ self._buff_i += 4
+ if n > self._max_str_len:
+ raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
+ obj = self._read(n)
+ elif b == 0xDC:
+ typ = TYPE_ARRAY
+ self._reserve(2)
+ (n,) = _unpack_from(">H", self._buffer, self._buff_i)
+ self._buff_i += 2
+ if n > self._max_array_len:
+ raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len)
+ elif b == 0xDD:
+ typ = TYPE_ARRAY
+ self._reserve(4)
+ (n,) = _unpack_from(">I", self._buffer, self._buff_i)
+ self._buff_i += 4
+ if n > self._max_array_len:
+ raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len)
+ elif b == 0xDE:
+ self._reserve(2)
+ (n,) = _unpack_from(">H", self._buffer, self._buff_i)
+ self._buff_i += 2
+ if n > self._max_map_len:
+ raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len)
+ typ = TYPE_MAP
+ elif b == 0xDF:
+ self._reserve(4)
+ (n,) = _unpack_from(">I", self._buffer, self._buff_i)
+ self._buff_i += 4
+ if n > self._max_map_len:
+ raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len)
+ typ = TYPE_MAP
+ else:
+ raise FormatError("Unknown header: 0x%x" % b)
+ return typ, n, obj
+
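+ # NOTE (editor): worked example of the dispatch above, for orientation
+ # only. For the buffer b"\x82" (a fixmap) _read_header() returns
+ # (TYPE_MAP, 2, None): the low nibble carries the pair count, and the
+ # caller decodes the two key/value pairs with further _unpack() calls.
+ # For b"\xcd\x01\x00" (uint 16) it returns (TYPE_IMMEDIATE, 0, 256) with
+ # the value already decoded.
+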
+ def _unpack(self, execute=EX_CONSTRUCT):
+ typ, n, obj = self._read_header(execute)
+
+ if execute == EX_READ_ARRAY_HEADER:
+ if typ != TYPE_ARRAY:
+ raise ValueError("Expected array")
+ return n
+ if execute == EX_READ_MAP_HEADER:
+ if typ != TYPE_MAP:
+ raise ValueError("Expected map")
+ return n
+ # TODO should we eliminate the recursion?
+ if typ == TYPE_ARRAY:
+ if execute == EX_SKIP:
+ for i in xrange(n):
+ # TODO check whether we need to call `list_hook`
+ self._unpack(EX_SKIP)
+ return
+ ret = newlist_hint(n)
+ for i in xrange(n):
+ ret.append(self._unpack(EX_CONSTRUCT))
+ if self._list_hook is not None:
+ ret = self._list_hook(ret)
+ # TODO is the interaction between `list_hook` and `use_list` ok?
+ return ret if self._use_list else tuple(ret)
+ if typ == TYPE_MAP:
+ if execute == EX_SKIP:
+ for i in xrange(n):
+ # TODO check whether we need to call hooks
+ self._unpack(EX_SKIP)
+ self._unpack(EX_SKIP)
+ return
+ if self._object_pairs_hook is not None:
+ ret = self._object_pairs_hook(
+ (self._unpack(EX_CONSTRUCT), self._unpack(EX_CONSTRUCT))
+ for _ in xrange(n)
+ )
+ else:
+ ret = {}
+ for _ in xrange(n):
+ key = self._unpack(EX_CONSTRUCT)
+ if self._strict_map_key and type(key) not in (unicode, bytes):
+ raise ValueError(
+ "%s is not allowed for map key" % str(type(key))
+ )
+ if not PY2 and type(key) is str:
+ key = sys.intern(key)
+ ret[key] = self._unpack(EX_CONSTRUCT)
+ if self._object_hook is not None:
+ ret = self._object_hook(ret)
+ return ret
+ if execute == EX_SKIP:
+ return
+ if typ == TYPE_RAW:
+ if self._raw:
+ obj = bytes(obj)
+ else:
+ obj = obj.decode("utf_8", self._unicode_errors)
+ return obj
+ if typ == TYPE_BIN:
+ return bytes(obj)
+ if typ == TYPE_EXT:
+ if n == -1: # timestamp
+ ts = Timestamp.from_bytes(bytes(obj))
+ if self._timestamp == 1:
+ return ts.to_unix()
+ elif self._timestamp == 2:
+ return ts.to_unix_nano()
+ elif self._timestamp == 3:
+ return ts.to_datetime()
+ else:
+ return ts
+ else:
+ return self._ext_hook(n, bytes(obj))
+ assert typ == TYPE_IMMEDIATE
+ return obj
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ try:
+ ret = self._unpack(EX_CONSTRUCT)
+ self._consume()
+ return ret
+ except OutOfData:
+ self._consume()
+ raise StopIteration
+ except RecursionError:
+ raise StackError
+
+ next = __next__
+
+ def skip(self):
+ self._unpack(EX_SKIP)
+ self._consume()
+
+ def unpack(self):
+ try:
+ ret = self._unpack(EX_CONSTRUCT)
+ except RecursionError:
+ raise StackError
+ self._consume()
+ return ret
+
+ def read_array_header(self):
+ ret = self._unpack(EX_READ_ARRAY_HEADER)
+ self._consume()
+ return ret
+
+ def read_map_header(self):
+ ret = self._unpack(EX_READ_MAP_HEADER)
+ self._consume()
+ return ret
+
+ def tell(self):
+ return self._stream_offset
+
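+# NOTE (editor): illustrative sketch, not upstream msgpack code. It mirrors
+# the feed()/iterate pattern from the Unpacker docstring with an in-memory
+# source instead of a socket; the chunk values are hypothetical.
+def _example_streaming(chunks=(b"\x92\x01", b"\x02", b"\xc3")):
+    unpacker = Unpacker(use_list=False)
+    out = []
+    for chunk in chunks:
+        unpacker.feed(chunk)
+        out.extend(unpacker) # drains every fully buffered object
+    return out # [(1, 2), True] for the default chunks
+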
+
+class Packer(object):
+ """
+ MessagePack Packer
+
+ Usage:
+
+ packer = Packer()
+ astream.write(packer.pack(a))
+ astream.write(packer.pack(b))
+
+ Packer's constructor has some keyword arguments:
+
+ :param callable default:
+ Convert user type to builtin type that Packer supports.
+ See also simplejson's document.
+
+ :param bool use_single_float:
+ Use single precision float type for float. (default: False)
+
+ :param bool autoreset:
+ Reset buffer after each pack and return its content as `bytes`. (default: True).
+ If set to false, use `bytes()` to get the content and `.reset()` to clear the buffer.
+
+ :param bool use_bin_type:
+ Use bin type introduced in msgpack spec 2.0 for bytes.
+ It also enables str8 type for unicode. (default: True)
+
+ :param bool strict_types:
+ If set to true, types will be checked to be exact. Derived classes
+ from serializable types will not be serialized and will be
+ treated as unsupported type and forwarded to default.
+ Additionally tuples will not be serialized as lists.
+ This is useful when trying to implement accurate serialization
+ for python types.
+
+ :param bool datetime:
+ If set to true, datetime with tzinfo is packed into Timestamp type.
+ Note that the tzinfo is stripped in the timestamp.
+ You can get UTC datetime with `timestamp=3` option of the Unpacker.
+ (Python 2 is not supported).
+
+ :param str unicode_errors:
+ The error handler for encoding unicode. (default: 'strict')
+ DO NOT USE THIS!! This option is kept for very specific usage.
+ """
+
+ def __init__(
+ self,
+ default=None,
+ use_single_float=False,
+ autoreset=True,
+ use_bin_type=True,
+ strict_types=False,
+ datetime=False,
+ unicode_errors=None,
+ ):
+ self._strict_types = strict_types
+ self._use_float = use_single_float
+ self._autoreset = autoreset
+ self._use_bin_type = use_bin_type
+ self._buffer = StringIO()
+ if PY2 and datetime:
+ raise ValueError("datetime is not supported in Python 2")
+ self._datetime = bool(datetime)
+ self._unicode_errors = unicode_errors or "strict"
+ if default is not None:
+ if not callable(default):
+ raise TypeError("default must be callable")
+ self._default = default
+
+ def _pack(
+ self,
+ obj,
+ nest_limit=DEFAULT_RECURSE_LIMIT,
+ check=isinstance,
+ check_type_strict=_check_type_strict,
+ ):
+ default_used = False
+ if self._strict_types:
+ check = check_type_strict
+ list_types = list
+ else:
+ list_types = (list, tuple)
+ while True:
+ if nest_limit < 0:
+ raise ValueError("recursion limit exceeded")
+ if obj is None:
+ return self._buffer.write(b"\xc0")
+ if check(obj, bool):
+ if obj:
+ return self._buffer.write(b"\xc3")
+ return self._buffer.write(b"\xc2")
+ if check(obj, int_types):
+ if 0 <= obj < 0x80:
+ return self._buffer.write(struct.pack("B", obj))
+ if -0x20 <= obj < 0:
+ return self._buffer.write(struct.pack("b", obj))
+ if 0x80 <= obj <= 0xFF:
+ return self._buffer.write(struct.pack("BB", 0xCC, obj))
+ if -0x80 <= obj < 0:
+ return self._buffer.write(struct.pack(">Bb", 0xD0, obj))
+ if 0xFF < obj <= 0xFFFF:
+ return self._buffer.write(struct.pack(">BH", 0xCD, obj))
+ if -0x8000 <= obj < -0x80:
+ return self._buffer.write(struct.pack(">Bh", 0xD1, obj))
+ if 0xFFFF < obj <= 0xFFFFFFFF:
+ return self._buffer.write(struct.pack(">BI", 0xCE, obj))
+ if -0x80000000 <= obj < -0x8000:
+ return self._buffer.write(struct.pack(">Bi", 0xD2, obj))
+ if 0xFFFFFFFF < obj <= 0xFFFFFFFFFFFFFFFF:
+ return self._buffer.write(struct.pack(">BQ", 0xCF, obj))
+ if -0x8000000000000000 <= obj < -0x80000000:
+ return self._buffer.write(struct.pack(">Bq", 0xD3, obj))
+ if not default_used and self._default is not None:
+ obj = self._default(obj)
+ default_used = True
+ continue
+ raise OverflowError("Integer value out of range")
+ if check(obj, (bytes, bytearray)):
+ n = len(obj)
+ if n >= 2 ** 32:
+ raise ValueError("%s is too large" % type(obj).__name__)
+ self._pack_bin_header(n)
+ return self._buffer.write(obj)
+ if check(obj, unicode):
+ obj = obj.encode("utf-8", self._unicode_errors)
+ n = len(obj)
+ if n >= 2 ** 32:
+ raise ValueError("String is too large")
+ self._pack_raw_header(n)
+ return self._buffer.write(obj)
+ if check(obj, memoryview):
+ n = len(obj) * obj.itemsize
+ if n >= 2 ** 32:
+ raise ValueError("Memoryview is too large")
+ self._pack_bin_header(n)
+ return self._buffer.write(obj)
+ if check(obj, float):
+ if self._use_float:
+ return self._buffer.write(struct.pack(">Bf", 0xCA, obj))
+ return self._buffer.write(struct.pack(">Bd", 0xCB, obj))
+ if check(obj, (ExtType, Timestamp)):
+ if check(obj, Timestamp):
+ code = -1
+ data = obj.to_bytes()
+ else:
+ code = obj.code
+ data = obj.data
+ assert isinstance(code, int)
+ assert isinstance(data, bytes)
+ L = len(data)
+ if L == 1:
+ self._buffer.write(b"\xd4")
+ elif L == 2:
+ self._buffer.write(b"\xd5")
+ elif L == 4:
+ self._buffer.write(b"\xd6")
+ elif L == 8:
+ self._buffer.write(b"\xd7")
+ elif L == 16:
+ self._buffer.write(b"\xd8")
+ elif L <= 0xFF:
+ self._buffer.write(struct.pack(">BB", 0xC7, L))
+ elif L <= 0xFFFF:
+ self._buffer.write(struct.pack(">BH", 0xC8, L))
+ else:
+ self._buffer.write(struct.pack(">BI", 0xC9, L))
+ self._buffer.write(struct.pack("b", code))
+ self._buffer.write(data)
+ return
+ if check(obj, list_types):
+ n = len(obj)
+ self._pack_array_header(n)
+ for i in xrange(n):
+ self._pack(obj[i], nest_limit - 1)
+ return
+ if check(obj, dict):
+ return self._pack_map_pairs(
+ len(obj), dict_iteritems(obj), nest_limit - 1
+ )
+
+ if self._datetime and check(obj, _DateTime):
+ obj = Timestamp.from_datetime(obj)
+ default_used = 1
+ continue
+
+ if not default_used and self._default is not None:
+ obj = self._default(obj)
+ default_used = 1
+ continue
+ raise TypeError("Cannot serialize %r" % (obj,))
+
+ def pack(self, obj):
+ try:
+ self._pack(obj)
+ except BaseException:
+ self._buffer = StringIO() # force reset
+ raise
+ if self._autoreset:
+ ret = self._buffer.getvalue()
+ self._buffer = StringIO()
+ return ret
+
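+ # NOTE (editor): illustrative sketch, not upstream msgpack code. The
+ # integer ladder in _pack() picks the smallest encoding; the expected
+ # bytes follow the msgpack spec.
+ #
+ # p = Packer()
+ # assert p.pack(5) == b"\x05" # positive fixint
+ # assert p.pack(-3) == b"\xfd" # negative fixint
+ # assert p.pack(300) == b"\xcd\x01\x2c" # uint 16 marker 0xcd
+ # assert p.pack(1.5) == b"\xcb" + struct.pack(">d", 1.5) # float 64
+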
+ def pack_map_pairs(self, pairs):
+ self._pack_map_pairs(len(pairs), pairs)
+ if self._autoreset:
+ ret = self._buffer.getvalue()
+ self._buffer = StringIO()
+ return ret
+
+ def pack_array_header(self, n):
+ if n >= 2 ** 32:
+ raise ValueError
+ self._pack_array_header(n)
+ if self._autoreset:
+ ret = self._buffer.getvalue()
+ self._buffer = StringIO()
+ return ret
+
+ def pack_map_header(self, n):
+ if n >= 2 ** 32:
+ raise ValueError
+ self._pack_map_header(n)
+ if self._autoreset:
+ ret = self._buffer.getvalue()
+ self._buffer = StringIO()
+ return ret
+
+ def pack_ext_type(self, typecode, data):
+ if not isinstance(typecode, int):
+ raise TypeError("typecode must have int type.")
+ if not 0 <= typecode <= 127:
+ raise ValueError("typecode should be 0-127")
+ if not isinstance(data, bytes):
+ raise TypeError("data must have bytes type")
+ L = len(data)
+ if L > 0xFFFFFFFF:
+ raise ValueError("Too large data")
+ if L == 1:
+ self._buffer.write(b"\xd4")
+ elif L == 2:
+ self._buffer.write(b"\xd5")
+ elif L == 4:
+ self._buffer.write(b"\xd6")
+ elif L == 8:
+ self._buffer.write(b"\xd7")
+ elif L == 16:
+ self._buffer.write(b"\xd8")
+ elif L <= 0xFF:
+ self._buffer.write(b"\xc7" + struct.pack("B", L))
+ elif L <= 0xFFFF:
+ self._buffer.write(b"\xc8" + struct.pack(">H", L))
+ else:
+ self._buffer.write(b"\xc9" + struct.pack(">I", L))
+ self._buffer.write(struct.pack("B", typecode))
+ self._buffer.write(data)
+
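+ # NOTE (editor): illustrative sketch, not upstream msgpack code. A 4-byte
+ # payload selects the fixext 4 marker (0xd6) in the branches above:
+ #
+ # p = Packer()
+ # p.pack_ext_type(42, b"\x00\x01\x02\x03")
+ # assert p.bytes() == b"\xd6\x2a\x00\x01\x02\x03" # marker, code, data
+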
+ def _pack_array_header(self, n):
+ if n <= 0x0F:
+ return self._buffer.write(struct.pack("B", 0x90 + n))
+ if n <= 0xFFFF:
+ return self._buffer.write(struct.pack(">BH", 0xDC, n))
+ if n <= 0xFFFFFFFF:
+ return self._buffer.write(struct.pack(">BI", 0xDD, n))
+ raise ValueError("Array is too large")
+
+ def _pack_map_header(self, n):
+ if n <= 0x0F:
+ return self._buffer.write(struct.pack("B", 0x80 + n))
+ if n <= 0xFFFF:
+ return self._buffer.write(struct.pack(">BH", 0xDE, n))
+ if n <= 0xFFFFFFFF:
+ return self._buffer.write(struct.pack(">BI", 0xDF, n))
+ raise ValueError("Dict is too large")
+
+ def _pack_map_pairs(self, n, pairs, nest_limit=DEFAULT_RECURSE_LIMIT):
+ self._pack_map_header(n)
+ for (k, v) in pairs:
+ self._pack(k, nest_limit - 1)
+ self._pack(v, nest_limit - 1)
+
+ def _pack_raw_header(self, n):
+ if n <= 0x1F:
+ self._buffer.write(struct.pack("B", 0xA0 + n))
+ elif self._use_bin_type and n <= 0xFF:
+ self._buffer.write(struct.pack(">BB", 0xD9, n))
+ elif n <= 0xFFFF:
+ self._buffer.write(struct.pack(">BH", 0xDA, n))
+ elif n <= 0xFFFFFFFF:
+ self._buffer.write(struct.pack(">BI", 0xDB, n))
+ else:
+ raise ValueError("Raw is too large")
+
+ def _pack_bin_header(self, n):
+ if not self._use_bin_type:
+ return self._pack_raw_header(n)
+ elif n <= 0xFF:
+ return self._buffer.write(struct.pack(">BB", 0xC4, n))
+ elif n <= 0xFFFF:
+ return self._buffer.write(struct.pack(">BH", 0xC5, n))
+ elif n <= 0xFFFFFFFF:
+ return self._buffer.write(struct.pack(">BI", 0xC6, n))
+ else:
+ raise ValueError("Bin is too large")
+
+ def bytes(self):
+ """Return internal buffer contents as bytes object"""
+ return self._buffer.getvalue()
+
+ def reset(self):
+ """Reset internal buffer.
+
+ This method is useful only when autoreset=False.
+ """
+ self._buffer = StringIO()
+
+ def getbuffer(self):
+ """Return view of internal buffer."""
+ if USING_STRINGBUILDER or PY2:
+ return memoryview(self.bytes())
+ else:
+ return self._buffer.getbuffer()
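+
+# NOTE (editor): illustrative sketch, not upstream msgpack code, showing the
+# autoreset=False workflow described in reset() above:
+#
+# p = Packer(autoreset=False)
+# p.pack(1)
+# p.pack("a")
+# assert p.bytes() == b"\x01\xa1a" # both objects accumulate in one buffer
+# p.reset() # start a fresh buffer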
diff --git a/venv/Lib/site-packages/pycparser-2.20.dist-info/INSTALLER b/venv/Lib/site-packages/pycparser-2.20.dist-info/INSTALLER
new file mode 100644
index 000000000..a1b589e38
--- /dev/null
+++ b/venv/Lib/site-packages/pycparser-2.20.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/Lib/site-packages/pycparser-2.20.dist-info/LICENSE b/venv/Lib/site-packages/pycparser-2.20.dist-info/LICENSE
new file mode 100644
index 000000000..79b7547b6
--- /dev/null
+++ b/venv/Lib/site-packages/pycparser-2.20.dist-info/LICENSE
@@ -0,0 +1,27 @@
+pycparser -- A C parser in Python
+
+Copyright (c) 2008-2017, Eli Bendersky
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+* Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+* Neither the name of Eli Bendersky nor the names of its contributors may
+ be used to endorse or promote products derived from this software without
+ specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
+GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/venv/Lib/site-packages/pycparser-2.20.dist-info/METADATA b/venv/Lib/site-packages/pycparser-2.20.dist-info/METADATA
new file mode 100644
index 000000000..a3939e065
--- /dev/null
+++ b/venv/Lib/site-packages/pycparser-2.20.dist-info/METADATA
@@ -0,0 +1,27 @@
+Metadata-Version: 2.1
+Name: pycparser
+Version: 2.20
+Summary: C parser in Python
+Home-page: https://github.com/eliben/pycparser
+Author: Eli Bendersky
+Author-email: eliben@gmail.com
+Maintainer: Eli Bendersky
+License: BSD
+Platform: Cross Platform
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
+
+
+pycparser is a complete parser of the C language, written in
+pure Python using the PLY parsing library.
+It parses C code into an AST and can serve as a front-end for
+C compilers or analysis tools.
+
+
diff --git a/venv/Lib/site-packages/pycparser-2.20.dist-info/RECORD b/venv/Lib/site-packages/pycparser-2.20.dist-info/RECORD
new file mode 100644
index 000000000..c5126895b
--- /dev/null
+++ b/venv/Lib/site-packages/pycparser-2.20.dist-info/RECORD
@@ -0,0 +1,41 @@
+pycparser-2.20.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+pycparser-2.20.dist-info/LICENSE,sha256=PHZimICuwvhXjtkUcBpP-eXai2CsuLfsZ1q_g8kMUWg,1536
+pycparser-2.20.dist-info/METADATA,sha256=5_RDLTEfmg8dh29oc053jTNp_OL82PllsggkGQTU_Ds,907
+pycparser-2.20.dist-info/RECORD,,
+pycparser-2.20.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110
+pycparser-2.20.dist-info/top_level.txt,sha256=c-lPcS74L_8KoH7IE6PQF5ofyirRQNV4VhkbSFIPeWM,10
+pycparser/__init__.py,sha256=O2ajDXgU2_NI52hUFV8WeAjCR5L-sclmaXerpcxqgPo,2815
+pycparser/__pycache__/__init__.cpython-36.pyc,,
+pycparser/__pycache__/_ast_gen.cpython-36.pyc,,
+pycparser/__pycache__/_build_tables.cpython-36.pyc,,
+pycparser/__pycache__/ast_transforms.cpython-36.pyc,,
+pycparser/__pycache__/c_ast.cpython-36.pyc,,
+pycparser/__pycache__/c_generator.cpython-36.pyc,,
+pycparser/__pycache__/c_lexer.cpython-36.pyc,,
+pycparser/__pycache__/c_parser.cpython-36.pyc,,
+pycparser/__pycache__/lextab.cpython-36.pyc,,
+pycparser/__pycache__/plyparser.cpython-36.pyc,,
+pycparser/__pycache__/yacctab.cpython-36.pyc,,
+pycparser/_ast_gen.py,sha256=_LbRr_kKa2EHeb7y0gV525JV29nzCUbTH4oZ-9I4qIs,10607
+pycparser/_build_tables.py,sha256=oZCd3Plhq-vkV-QuEsaahcf-jUI6-HgKsrAL9gvFzuU,1039
+pycparser/_c_ast.cfg,sha256=1W8-DHa5RqZvyhha_0b4VvKL0CEYv9W0xFs_YwiyEHY,4206
+pycparser/ast_transforms.py,sha256=93ENKENTlugzFehnrQ0fdprijVdNt_ACCPygMxH4v7Q,3648
+pycparser/c_ast.py,sha256=JdDyC3QUZBfX9wVu2ENOrQQPbc737Jmf8Vtozhukayo,30233
+pycparser/c_generator.py,sha256=AwzNyE_rOFK2gzK0J5pCWDqfk7V8KL54ITFRf9m4GlY,15365
+pycparser/c_lexer.py,sha256=GWPUkwFe6F00gTAKIPAx4xs8-J-at_oGwEHnrKF4teM,16208
+pycparser/c_parser.py,sha256=w74N4tFGQ3TETIqUwClZIcbl-V4hFeJSPG2halVgUVs,69746
+pycparser/lextab.py,sha256=FyjRIsaq2wViDqJNYScURuc7GDW5F12VuYxOJLh1j4g,7011
+pycparser/ply/__init__.py,sha256=q4s86QwRsYRa20L9ueSxfh-hPihpftBjDOvYa2_SS2Y,102
+pycparser/ply/__pycache__/__init__.cpython-36.pyc,,
+pycparser/ply/__pycache__/cpp.cpython-36.pyc,,
+pycparser/ply/__pycache__/ctokens.cpython-36.pyc,,
+pycparser/ply/__pycache__/lex.cpython-36.pyc,,
+pycparser/ply/__pycache__/yacc.cpython-36.pyc,,
+pycparser/ply/__pycache__/ygen.cpython-36.pyc,,
+pycparser/ply/cpp.py,sha256=UtC3ylTWp5_1MKA-PLCuwKQR8zSOnlGuGGIdzj8xS98,33282
+pycparser/ply/ctokens.py,sha256=MKksnN40TehPhgVfxCJhjj_BjL943apreABKYz-bl0Y,3177
+pycparser/ply/lex.py,sha256=7Qol57x702HZwjA3ZLp-84CUEWq1EehW-N67Wzghi-M,42918
+pycparser/ply/yacc.py,sha256=eatSDkRLgRr6X3-hoDk_SQQv065R0BdL2K7fQ54CgVM,137323
+pycparser/ply/ygen.py,sha256=2JYNeYtrPz1JzLSLO3d4GsS8zJU8jY_I_CR1VI9gWrA,2251
+pycparser/plyparser.py,sha256=saGNjpsgncQz-hHEh45f28BLqopTxHffaJg_9BCZhi8,4873
+pycparser/yacctab.py,sha256=KOewsHNgbSYaYrLvDJr7K3jXj-7qou0ngyNEnhDmyB4,169715
diff --git a/venv/Lib/site-packages/pycparser-2.20.dist-info/WHEEL b/venv/Lib/site-packages/pycparser-2.20.dist-info/WHEEL
new file mode 100644
index 000000000..ef99c6cf3
--- /dev/null
+++ b/venv/Lib/site-packages/pycparser-2.20.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.34.2)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/venv/Lib/site-packages/pycparser-2.20.dist-info/top_level.txt b/venv/Lib/site-packages/pycparser-2.20.dist-info/top_level.txt
new file mode 100644
index 000000000..dc1c9e101
--- /dev/null
+++ b/venv/Lib/site-packages/pycparser-2.20.dist-info/top_level.txt
@@ -0,0 +1 @@
+pycparser
diff --git a/venv/Lib/site-packages/pycparser/__init__.py b/venv/Lib/site-packages/pycparser/__init__.py
new file mode 100644
index 000000000..6e86e9f6a
--- /dev/null
+++ b/venv/Lib/site-packages/pycparser/__init__.py
@@ -0,0 +1,90 @@
+#-----------------------------------------------------------------
+# pycparser: __init__.py
+#
+# This package file exports some convenience functions for
+# interacting with pycparser
+#
+# Eli Bendersky [https://eli.thegreenplace.net/]
+# License: BSD
+#-----------------------------------------------------------------
+__all__ = ['c_lexer', 'c_parser', 'c_ast']
+__version__ = '2.20'
+
+import io
+from subprocess import check_output
+from .c_parser import CParser
+
+
+def preprocess_file(filename, cpp_path='cpp', cpp_args=''):
+ """ Preprocess a file using cpp.
+
+ filename:
+ Name of the file you want to preprocess.
+
+ cpp_path:
+ cpp_args:
+ Refer to the documentation of parse_file for the meaning of these
+ arguments.
+
+ When successful, returns the preprocessed file's contents.
+ Errors from cpp will be printed out.
+ """
+ path_list = [cpp_path]
+ if isinstance(cpp_args, list):
+ path_list += cpp_args
+ elif cpp_args != '':
+ path_list += [cpp_args]
+ path_list += [filename]
+
+ try:
+ # Note the use of universal_newlines to treat all newlines
+ # as \n for Python's purposes
+ text = check_output(path_list, universal_newlines=True)
+ except OSError as e:
+ raise RuntimeError("Unable to invoke 'cpp'. " +
+ 'Make sure its path was passed correctly\n' +
+ ('Original error: %s' % e))
+
+ return text
+
+
+def parse_file(filename, use_cpp=False, cpp_path='cpp', cpp_args='',
+ parser=None):
+ """ Parse a C file using pycparser.
+
+ filename:
+ Name of the file you want to parse.
+
+ use_cpp:
+ Set to True if you want to execute the C pre-processor
+ on the file prior to parsing it.
+
+ cpp_path:
+ If use_cpp is True, this is the path to 'cpp' on your
+ system. If no path is provided, it attempts to just
+ execute 'cpp', so it must be in your PATH.
+
+ cpp_args:
+ If use_cpp is True, set this to the command line arguments strings
+ to cpp. Be careful with quotes - it's best to pass a raw string
+ (r'') here. For example:
+ r'-I../utils/fake_libc_include'
+ If several arguments are required, pass a list of strings.
+
+ parser:
+ Optional parser object to be used instead of the default CParser
+
+ When successful, an AST is returned. ParseError can be
+ thrown if the file doesn't parse successfully.
+
+ Errors from cpp will be printed out.
+ """
+ if use_cpp:
+ text = preprocess_file(filename, cpp_path, cpp_args)
+ else:
+ with io.open(filename) as f:
+ text = f.read()
+
+ if parser is None:
+ parser = CParser()
+ return parser.parse(text, filename)
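+
+# NOTE (editor): illustrative sketch, not part of pycparser. Parsing from a
+# string with CParser directly avoids needing an on-disk file; the snippet
+# is hypothetical.
+#
+# parser = CParser()
+# ast = parser.parse("int x = 1;", filename="<string>")
+# ast.show() # FileAST -> Decl 'x' -> TypeDecl -> IdentifierType ['int']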
diff --git a/venv/Lib/site-packages/pycparser/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pycparser/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 000000000..127558142
Binary files /dev/null and b/venv/Lib/site-packages/pycparser/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/pycparser/__pycache__/_ast_gen.cpython-36.pyc b/venv/Lib/site-packages/pycparser/__pycache__/_ast_gen.cpython-36.pyc
new file mode 100644
index 000000000..7c8b8c535
Binary files /dev/null and b/venv/Lib/site-packages/pycparser/__pycache__/_ast_gen.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/pycparser/__pycache__/_build_tables.cpython-36.pyc b/venv/Lib/site-packages/pycparser/__pycache__/_build_tables.cpython-36.pyc
new file mode 100644
index 000000000..77b2414c7
Binary files /dev/null and b/venv/Lib/site-packages/pycparser/__pycache__/_build_tables.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/pycparser/__pycache__/ast_transforms.cpython-36.pyc b/venv/Lib/site-packages/pycparser/__pycache__/ast_transforms.cpython-36.pyc
new file mode 100644
index 000000000..a4955babc
Binary files /dev/null and b/venv/Lib/site-packages/pycparser/__pycache__/ast_transforms.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/pycparser/__pycache__/c_ast.cpython-36.pyc b/venv/Lib/site-packages/pycparser/__pycache__/c_ast.cpython-36.pyc
new file mode 100644
index 000000000..55f121dd8
Binary files /dev/null and b/venv/Lib/site-packages/pycparser/__pycache__/c_ast.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/pycparser/__pycache__/c_generator.cpython-36.pyc b/venv/Lib/site-packages/pycparser/__pycache__/c_generator.cpython-36.pyc
new file mode 100644
index 000000000..937197da2
Binary files /dev/null and b/venv/Lib/site-packages/pycparser/__pycache__/c_generator.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/pycparser/__pycache__/c_lexer.cpython-36.pyc b/venv/Lib/site-packages/pycparser/__pycache__/c_lexer.cpython-36.pyc
new file mode 100644
index 000000000..21e4f6754
Binary files /dev/null and b/venv/Lib/site-packages/pycparser/__pycache__/c_lexer.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/pycparser/__pycache__/c_parser.cpython-36.pyc b/venv/Lib/site-packages/pycparser/__pycache__/c_parser.cpython-36.pyc
new file mode 100644
index 000000000..009804be5
Binary files /dev/null and b/venv/Lib/site-packages/pycparser/__pycache__/c_parser.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/pycparser/__pycache__/lextab.cpython-36.pyc b/venv/Lib/site-packages/pycparser/__pycache__/lextab.cpython-36.pyc
new file mode 100644
index 000000000..91c4b278d
Binary files /dev/null and b/venv/Lib/site-packages/pycparser/__pycache__/lextab.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/pycparser/__pycache__/plyparser.cpython-36.pyc b/venv/Lib/site-packages/pycparser/__pycache__/plyparser.cpython-36.pyc
new file mode 100644
index 000000000..c94df93ce
Binary files /dev/null and b/venv/Lib/site-packages/pycparser/__pycache__/plyparser.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/pycparser/__pycache__/yacctab.cpython-36.pyc b/venv/Lib/site-packages/pycparser/__pycache__/yacctab.cpython-36.pyc
new file mode 100644
index 000000000..6ada9700e
Binary files /dev/null and b/venv/Lib/site-packages/pycparser/__pycache__/yacctab.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/pycparser/_ast_gen.py b/venv/Lib/site-packages/pycparser/_ast_gen.py
new file mode 100644
index 000000000..5ec2d3df1
--- /dev/null
+++ b/venv/Lib/site-packages/pycparser/_ast_gen.py
@@ -0,0 +1,338 @@
+#-----------------------------------------------------------------
+# _ast_gen.py
+#
+# Generates the AST Node classes from a specification given in
+# a configuration file
+#
+# The design of this module was inspired by astgen.py from the
+# Python 2.5 code-base.
+#
+# Eli Bendersky [https://eli.thegreenplace.net/]
+# License: BSD
+#-----------------------------------------------------------------
+import pprint
+from string import Template
+
+
+class ASTCodeGenerator(object):
+ def __init__(self, cfg_filename='_c_ast.cfg'):
+ """ Initialize the code generator from a configuration
+ file.
+ """
+ self.cfg_filename = cfg_filename
+ self.node_cfg = [NodeCfg(name, contents)
+ for (name, contents) in self.parse_cfgfile(cfg_filename)]
+
+ def generate(self, file=None):
+ """ Generates the code into file, an open file buffer.
+ """
+ src = Template(_PROLOGUE_COMMENT).substitute(
+ cfg_filename=self.cfg_filename)
+
+ src += _PROLOGUE_CODE
+ for node_cfg in self.node_cfg:
+ src += node_cfg.generate_source() + '\n\n'
+
+ file.write(src)
+
+ def parse_cfgfile(self, filename):
+ """ Parse the configuration file and yield pairs of
+ (name, contents) for each node.
+ """
+ with open(filename, "r") as f:
+ for line in f:
+ line = line.strip()
+ if not line or line.startswith('#'):
+ continue
+ colon_i = line.find(':')
+ lbracket_i = line.find('[')
+ rbracket_i = line.find(']')
+ if colon_i < 1 or lbracket_i <= colon_i or rbracket_i <= lbracket_i:
+ raise RuntimeError("Invalid line in %s:\n%s\n" % (filename, line))
+
+ name = line[:colon_i]
+ val = line[lbracket_i + 1:rbracket_i]
+ vallist = [v.strip() for v in val.split(',')] if val else []
+ yield name, vallist
+
+
+class NodeCfg(object):
+ """ Node configuration.
+
+ name: node name
+ contents: a list of contents - attributes and child nodes
+ See comment at the top of the configuration file for details.
+ """
+
+ def __init__(self, name, contents):
+ self.name = name
+ self.all_entries = []
+ self.attr = []
+ self.child = []
+ self.seq_child = []
+
+ for entry in contents:
+ clean_entry = entry.rstrip('*')
+ self.all_entries.append(clean_entry)
+
+ if entry.endswith('**'):
+ self.seq_child.append(clean_entry)
+ elif entry.endswith('*'):
+ self.child.append(clean_entry)
+ else:
+ self.attr.append(entry)
+
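+ # NOTE (editor): worked example of the suffix rules above, illustrative
+ # only. The cfg entry `Case: [expr*, stmts**]` produces
+ # child=['expr'], seq_child=['stmts'], attr=[] and
+ # all_entries=['expr', 'stmts']; a bare name with no '*' goes to attr.
+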
+ def generate_source(self):
+ src = self._gen_init()
+ src += '\n' + self._gen_children()
+ src += '\n' + self._gen_iter()
+
+ src += '\n' + self._gen_attr_names()
+ return src
+
+ def _gen_init(self):
+ src = "class %s(Node):\n" % self.name
+
+ if self.all_entries:
+ args = ', '.join(self.all_entries)
+ slots = ', '.join("'{0}'".format(e) for e in self.all_entries)
+ slots += ", 'coord', '__weakref__'"
+ arglist = '(self, %s, coord=None)' % args
+ else:
+ slots = "'coord', '__weakref__'"
+ arglist = '(self, coord=None)'
+
+ src += " __slots__ = (%s)\n" % slots
+ src += " def __init__%s:\n" % arglist
+
+ for name in self.all_entries + ['coord']:
+ src += " self.%s = %s\n" % (name, name)
+
+ return src
+
+ def _gen_children(self):
+ src = ' def children(self):\n'
+
+ if self.all_entries:
+ src += ' nodelist = []\n'
+
+ for child in self.child:
+ src += (
+ ' if self.%(child)s is not None:' +
+ ' nodelist.append(("%(child)s", self.%(child)s))\n') % (
+ dict(child=child))
+
+ for seq_child in self.seq_child:
+ src += (
+ ' for i, child in enumerate(self.%(child)s or []):\n'
+ ' nodelist.append(("%(child)s[%%d]" %% i, child))\n') % (
+ dict(child=seq_child))
+
+ src += ' return tuple(nodelist)\n'
+ else:
+ src += ' return ()\n'
+
+ return src
+
+ def _gen_iter(self):
+ src = ' def __iter__(self):\n'
+
+ if self.all_entries:
+ for child in self.child:
+ src += (
+ ' if self.%(child)s is not None:\n' +
+ ' yield self.%(child)s\n') % (dict(child=child))
+
+ for seq_child in self.seq_child:
+ src += (
+ ' for child in (self.%(child)s or []):\n'
+ ' yield child\n') % (dict(child=seq_child))
+
+ if not (self.child or self.seq_child):
+ # Empty generator
+ src += (
+ ' return\n' +
+ ' yield\n')
+ else:
+ # Empty generator
+ src += (
+ ' return\n' +
+ ' yield\n')
+
+ return src
+
+ def _gen_attr_names(self):
+ src = " attr_names = (" + ''.join("%r, " % nm for nm in self.attr) + ')'
+ return src
+
+
+_PROLOGUE_COMMENT = \
+r'''#-----------------------------------------------------------------
+# ** ATTENTION **
+# This code was automatically generated from the file:
+# $cfg_filename
+#
+# Do not modify it directly. Modify the configuration file and
+# run the generator again.
+# ** ** *** ** **
+#
+# pycparser: c_ast.py
+#
+# AST Node classes.
+#
+# Eli Bendersky [https://eli.thegreenplace.net/]
+# License: BSD
+#-----------------------------------------------------------------
+
+'''
+
+_PROLOGUE_CODE = r'''
+import sys
+
+def _repr(obj):
+ """
+ Get the representation of an object, with dedicated pprint-like format for lists.
+ """
+ if isinstance(obj, list):
+ return '[' + (',\n '.join((_repr(e).replace('\n', '\n ') for e in obj))) + '\n]'
+ else:
+ return repr(obj)
+
+class Node(object):
+ __slots__ = ()
+ """ Abstract base class for AST nodes.
+ """
+ def __repr__(self):
+ """ Generates a python representation of the current node
+ """
+ result = self.__class__.__name__ + '('
+
+ indent = ''
+ separator = ''
+ for name in self.__slots__[:-2]:
+ result += separator
+ result += indent
+ result += name + '=' + (_repr(getattr(self, name)).replace('\n', '\n ' + (' ' * (len(name) + len(self.__class__.__name__)))))
+
+ separator = ','
+ indent = '\n ' + (' ' * len(self.__class__.__name__))
+
+ result += indent + ')'
+
+ return result
+
+ def children(self):
+ """ A sequence of all children that are Nodes
+ """
+ pass
+
+ def show(self, buf=sys.stdout, offset=0, attrnames=False, nodenames=False, showcoord=False, _my_node_name=None):
+ """ Pretty print the Node and all its attributes and
+ children (recursively) to a buffer.
+
+ buf:
+ Open IO buffer into which the Node is printed.
+
+ offset:
+ Initial offset (amount of leading spaces)
+
+ attrnames:
+ True if you want to see the attribute names in
+ name=value pairs. False to only see the values.
+
+ nodenames:
+ True if you want to see the actual node names
+ within their parents.
+
+ showcoord:
+ True if you want the coordinates of each Node to be
+ displayed.
+ """
+ lead = ' ' * offset
+ if nodenames and _my_node_name is not None:
+ buf.write(lead + self.__class__.__name__+ ' <' + _my_node_name + '>: ')
+ else:
+ buf.write(lead + self.__class__.__name__+ ': ')
+
+ if self.attr_names:
+ if attrnames:
+ nvlist = [(n, getattr(self,n)) for n in self.attr_names]
+ attrstr = ', '.join('%s=%s' % nv for nv in nvlist)
+ else:
+ vlist = [getattr(self, n) for n in self.attr_names]
+ attrstr = ', '.join('%s' % v for v in vlist)
+ buf.write(attrstr)
+
+ if showcoord:
+ buf.write(' (at %s)' % self.coord)
+ buf.write('\n')
+
+ for (child_name, child) in self.children():
+ child.show(
+ buf,
+ offset=offset + 2,
+ attrnames=attrnames,
+ nodenames=nodenames,
+ showcoord=showcoord,
+ _my_node_name=child_name)
+
+
+class NodeVisitor(object):
+ """ A base NodeVisitor class for visiting c_ast nodes.
+ Subclass it and define your own visit_XXX methods, where
+ XXX is the class name you want to visit with these
+ methods.
+
+ For example:
+
+ class ConstantVisitor(NodeVisitor):
+ def __init__(self):
+ self.values = []
+
+ def visit_Constant(self, node):
+ self.values.append(node.value)
+
+ Creates a list of values of all the constant nodes
+ encountered below the given node. To use it:
+
+ cv = ConstantVisitor()
+ cv.visit(node)
+
+ Notes:
+
+ * generic_visit() will be called for AST nodes for which
+ no visit_XXX method was defined.
+ * The children of nodes for which a visit_XXX was
+ defined will not be visited - if you need this, call
+ generic_visit() on the node.
+ You can use:
+ NodeVisitor.generic_visit(self, node)
+ * Modeled after Python's own AST visiting facilities
+ (the ast module of Python 3.0)
+ """
+
+ _method_cache = None
+
+ def visit(self, node):
+ """ Visit a node.
+ """
+
+ if self._method_cache is None:
+ self._method_cache = {}
+
+ visitor = self._method_cache.get(node.__class__.__name__, None)
+ if visitor is None:
+ method = 'visit_' + node.__class__.__name__
+ visitor = getattr(self, method, self.generic_visit)
+ self._method_cache[node.__class__.__name__] = visitor
+
+ return visitor(node)
+
+ def generic_visit(self, node):
+ """ Called if no explicit visitor function exists for a
+ node. Implements preorder visiting of the node.
+ """
+ for c in node:
+ self.visit(c)
+
+'''
diff --git a/venv/Lib/site-packages/pycparser/_build_tables.py b/venv/Lib/site-packages/pycparser/_build_tables.py
new file mode 100644
index 000000000..958381ad0
--- /dev/null
+++ b/venv/Lib/site-packages/pycparser/_build_tables.py
@@ -0,0 +1,37 @@
+#-----------------------------------------------------------------
+# pycparser: _build_tables.py
+#
+# A dummy for generating the lexing/parsing tables and
+# compiling them into .pyc for faster execution in optimized mode.
+# Also generates AST code from the configuration file.
+# Should be called from the pycparser directory.
+#
+# Eli Bendersky [https://eli.thegreenplace.net/]
+# License: BSD
+#-----------------------------------------------------------------
+
+# Insert '.' and '..' as first entries to the search path for modules.
+# Restricted environments like embeddable python do not include the
+# current working directory on startup.
+import sys
+sys.path[0:0] = ['.', '..']
+
+# Generate c_ast.py
+from _ast_gen import ASTCodeGenerator
+ast_gen = ASTCodeGenerator('_c_ast.cfg')
+ast_gen.generate(open('c_ast.py', 'w'))
+
+from pycparser import c_parser
+
+# Generates the tables
+#
+c_parser.CParser(
+ lex_optimize=True,
+ yacc_debug=False,
+ yacc_optimize=True)
+
+# Load to compile into .pyc
+#
+import lextab
+import yacctab
+import c_ast
diff --git a/venv/Lib/site-packages/pycparser/_c_ast.cfg b/venv/Lib/site-packages/pycparser/_c_ast.cfg
new file mode 100644
index 000000000..b93d50bb6
--- /dev/null
+++ b/venv/Lib/site-packages/pycparser/_c_ast.cfg
@@ -0,0 +1,191 @@
+#-----------------------------------------------------------------
+# pycparser: _c_ast.cfg
+#
+# Defines the AST Node classes used in pycparser.
+#
+# Each entry is a Node sub-class name, listing the attributes
+# and child nodes of the class:
+# * - a child node
+# ** - a sequence of child nodes
+# - an attribute
+#
+# Eli Bendersky [https://eli.thegreenplace.net/]
+# License: BSD
+#-----------------------------------------------------------------
+
+# ArrayDecl is a nested declaration of an array with the given type.
+# dim: the dimension (for example, constant 42)
+# dim_quals: list of dimension qualifiers, to support C99's allowing 'const'
+# and 'static' within the array dimension in function declarations.
+ArrayDecl: [type*, dim*, dim_quals]
+
+ArrayRef: [name*, subscript*]
+
+# op: =, +=, /= etc.
+#
+Assignment: [op, lvalue*, rvalue*]
+
+BinaryOp: [op, left*, right*]
+
+Break: []
+
+Case: [expr*, stmts**]
+
+Cast: [to_type*, expr*]
+
+# Compound statement in C99 is a list of block items (declarations or
+# statements).
+#
+Compound: [block_items**]
+
+# Compound literal (anonymous aggregate) for C99.
+# (type-name) {initializer_list}
+# type: the typename
+# init: InitList for the initializer list
+#
+CompoundLiteral: [type*, init*]
+
+# type: int, char, float, etc. see CLexer for constant token types
+#
+Constant: [type, value]
+
+Continue: []
+
+# name: the variable being declared
+# quals: list of qualifiers (const, volatile)
+# funcspec: list function specifiers (i.e. inline in C99)
+# storage: list of storage specifiers (extern, register, etc.)
+# type: declaration type (probably nested with all the modifiers)
+# init: initialization value, or None
+# bitsize: bit field size, or None
+#
+Decl: [name, quals, storage, funcspec, type*, init*, bitsize*]
+
+DeclList: [decls**]
+
+Default: [stmts**]
+
+DoWhile: [cond*, stmt*]
+
+# Represents the ellipsis (...) parameter in a function
+# declaration
+#
+EllipsisParam: []
+
+# An empty statement (a semicolon ';' on its own)
+#
+EmptyStatement: []
+
+# Enumeration type specifier
+# name: an optional ID
+# values: an EnumeratorList
+#
+Enum: [name, values*]
+
+# A name/value pair for enumeration values
+#
+Enumerator: [name, value*]
+
+# A list of enumerators
+#
+EnumeratorList: [enumerators**]
+
+# A list of expressions separated by the comma operator.
+#
+ExprList: [exprs**]
+
+# This is the top of the AST, representing a single C file (a
+# translation unit in K&R jargon). It contains a list of
+# "external-declaration"s, which is either declarations (Decl),
+# Typedef or function definitions (FuncDef).
+#
+FileAST: [ext**]
+
+# for (init; cond; next) stmt
+#
+For: [init*, cond*, next*, stmt*]
+
+# name: Id
+# args: ExprList
+#
+FuncCall: [name*, args*]
+
+# type (args)
+#
+FuncDecl: [args*, type*]
+
+# Function definition: a declarator for the function name and
+# a body, which is a compound statement.
+# There's an optional list of parameter declarations for old
+# K&R-style definitions
+#
+FuncDef: [decl*, param_decls**, body*]
+
+Goto: [name]
+
+ID: [name]
+
+# Holder for types that are a simple identifier (e.g. the built-in
+# types void, char etc. and typedef-defined types)
+#
+IdentifierType: [names]
+
+If: [cond*, iftrue*, iffalse*]
+
+# An initialization list used for compound literals.
+#
+InitList: [exprs**]
+
+Label: [name, stmt*]
+
+# A named initializer for C99.
+# The name of a NamedInitializer is a sequence of Nodes, because
+# names can be hierarchical and contain constant expressions.
+#
+NamedInitializer: [name**, expr*]
+
+# a list of comma separated function parameter declarations
+#
+ParamList: [params**]
+
+PtrDecl: [quals, type*]
+
+Return: [expr*]
+
+# name: struct tag name
+# decls: declaration of members
+#
+Struct: [name, decls**]
+
+# type: . or ->
+# name.field or name->field
+#
+StructRef: [name*, type, field*]
+
+Switch: [cond*, stmt*]
+
+# cond ? iftrue : iffalse
+#
+TernaryOp: [cond*, iftrue*, iffalse*]
+
+# A base type declaration
+#
+TypeDecl: [declname, quals, type*]
+
+# A typedef declaration.
+# Very similar to Decl, but without some attributes
+#
+Typedef: [name, quals, storage, type*]
+
+Typename: [name, quals, type*]
+
+UnaryOp: [op, expr*]
+
+# name: union tag name
+# decls: declaration of members
+#
+Union: [name, decls**]
+
+While: [cond*, stmt*]
+
+Pragma: [string]
diff --git a/venv/Lib/site-packages/pycparser/ast_transforms.py b/venv/Lib/site-packages/pycparser/ast_transforms.py
new file mode 100644
index 000000000..0aeb88f0e
--- /dev/null
+++ b/venv/Lib/site-packages/pycparser/ast_transforms.py
@@ -0,0 +1,106 @@
+#------------------------------------------------------------------------------
+# pycparser: ast_transforms.py
+#
+# Some utilities used by the parser to create a friendlier AST.
+#
+# Eli Bendersky [https://eli.thegreenplace.net/]
+# License: BSD
+#------------------------------------------------------------------------------
+
+from . import c_ast
+
+
+def fix_switch_cases(switch_node):
+ """ The 'case' statements in a 'switch' come out of parsing with one
+ child node, so subsequent statements are just tucked to the parent
+ Compound. Additionally, consecutive (fall-through) case statements
+ come out messy. This is a peculiarity of the C grammar. The following:
+
+ switch (myvar) {
+ case 10:
+ k = 10;
+ p = k + 1;
+ return 10;
+ case 20:
+ case 30:
+ return 20;
+ default:
+ break;
+ }
+
+ Creates this tree (pseudo-dump):
+
+ Switch
+ ID: myvar
+ Compound:
+ Case 10:
+ k = 10
+ p = k + 1
+ return 10
+ Case 20:
+ Case 30:
+ return 20
+ Default:
+ break
+
+ The goal of this transform is to fix this mess, turning it into the
+ following:
+
+ Switch
+ ID: myvar
+ Compound:
+ Case 10:
+ k = 10
+ p = k + 1
+ return 10
+ Case 20:
+ Case 30:
+ return 20
+ Default:
+ break
+
+ A fixed AST node is returned. The argument may be modified.
+ """
+ assert isinstance(switch_node, c_ast.Switch)
+ if not isinstance(switch_node.stmt, c_ast.Compound):
+ return switch_node
+
+ # The new Compound child for the Switch, which will collect children in the
+ # correct order
+ new_compound = c_ast.Compound([], switch_node.stmt.coord)
+
+ # The last Case/Default node
+ last_case = None
+
+ # Goes over the children of the Compound below the Switch, adding them
+ # either directly below new_compound or below the last Case as appropriate
+ # (for `switch(cond) {}`, block_items would have been None)
+ for child in (switch_node.stmt.block_items or []):
+ if isinstance(child, (c_ast.Case, c_ast.Default)):
+ # If it's a Case/Default:
+ # 1. Add it to the Compound and mark as "last case"
+ # 2. If its immediate child is also a Case or Default, promote it
+ # to a sibling.
+ new_compound.block_items.append(child)
+ _extract_nested_case(child, new_compound.block_items)
+ last_case = new_compound.block_items[-1]
+ else:
+ # Other statements are added as children to the last case, if it
+ # exists.
+ if last_case is None:
+ new_compound.block_items.append(child)
+ else:
+ last_case.stmts.append(child)
+
+ switch_node.stmt = new_compound
+ return switch_node
+
+
+def _extract_nested_case(case_node, stmts_list):
+ """ Recursively extract consecutive Case statements that are made nested
+ by the parser and add them to the stmts_list.
+ """
+ if isinstance(case_node.stmts[0], (c_ast.Case, c_ast.Default)):
+ stmts_list.append(case_node.stmts.pop())
+ _extract_nested_case(stmts_list[-1], stmts_list)
+
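+# NOTE (editor): illustrative check, not upstream pycparser code. It builds a
+# tiny Switch by hand and verifies a trailing statement is tucked under the
+# preceding Case, as the fix_switch_cases() docstring describes:
+#
+# sw = c_ast.Switch(
+#     c_ast.ID('myvar'),
+#     c_ast.Compound([
+#         c_ast.Case(c_ast.Constant('int', '10'), [c_ast.Break()]),
+#         c_ast.Return(None),
+#     ]))
+# fixed = fix_switch_cases(sw)
+# assert isinstance(fixed.stmt.block_items[0].stmts[1], c_ast.Return)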
diff --git a/venv/Lib/site-packages/pycparser/c_ast.py b/venv/Lib/site-packages/pycparser/c_ast.py
new file mode 100644
index 000000000..b7bbbeed2
--- /dev/null
+++ b/venv/Lib/site-packages/pycparser/c_ast.py
@@ -0,0 +1,1084 @@
+#-----------------------------------------------------------------
+# ** ATTENTION **
+# This code was automatically generated from the file:
+# _c_ast.cfg
+#
+# Do not modify it directly. Modify the configuration file and
+# run the generator again.
+# ** ** *** ** **
+#
+# pycparser: c_ast.py
+#
+# AST Node classes.
+#
+# Eli Bendersky [https://eli.thegreenplace.net/]
+# License: BSD
+#-----------------------------------------------------------------
+
+
+import sys
+
+def _repr(obj):
+ """
+ Get the representation of an object, with dedicated pprint-like format for lists.
+ """
+ if isinstance(obj, list):
+ return '[' + (',\n '.join((_repr(e).replace('\n', '\n ') for e in obj))) + '\n]'
+ else:
+ return repr(obj)
+
+class Node(object):
+ __slots__ = ()
+ """ Abstract base class for AST nodes.
+ """
+ def __repr__(self):
+ """ Generates a python representation of the current node
+ """
+ result = self.__class__.__name__ + '('
+
+ indent = ''
+ separator = ''
+ for name in self.__slots__[:-2]:
+ result += separator
+ result += indent
+ result += name + '=' + (_repr(getattr(self, name)).replace('\n', '\n ' + (' ' * (len(name) + len(self.__class__.__name__)))))
+
+ separator = ','
+ indent = '\n ' + (' ' * len(self.__class__.__name__))
+
+ result += indent + ')'
+
+ return result
+
+ def children(self):
+ """ A sequence of all children that are Nodes
+ """
+ pass
+
+ def show(self, buf=sys.stdout, offset=0, attrnames=False, nodenames=False, showcoord=False, _my_node_name=None):
+ """ Pretty print the Node and all its attributes and
+ children (recursively) to a buffer.
+
+ buf:
+ Open IO buffer into which the Node is printed.
+
+ offset:
+ Initial offset (amount of leading spaces)
+
+ attrnames:
+ True if you want to see the attribute names in
+ name=value pairs. False to only see the values.
+
+ nodenames:
+ True if you want to see the actual node names
+ within their parents.
+
+ showcoord:
+                True if you want the coordinates of each Node to be
+                displayed.
+ """
+ lead = ' ' * offset
+ if nodenames and _my_node_name is not None:
+ buf.write(lead + self.__class__.__name__+ ' <' + _my_node_name + '>: ')
+ else:
+ buf.write(lead + self.__class__.__name__+ ': ')
+
+ if self.attr_names:
+ if attrnames:
+ nvlist = [(n, getattr(self,n)) for n in self.attr_names]
+ attrstr = ', '.join('%s=%s' % nv for nv in nvlist)
+ else:
+ vlist = [getattr(self, n) for n in self.attr_names]
+ attrstr = ', '.join('%s' % v for v in vlist)
+ buf.write(attrstr)
+
+ if showcoord:
+ buf.write(' (at %s)' % self.coord)
+ buf.write('\n')
+
+ for (child_name, child) in self.children():
+ child.show(
+ buf,
+ offset=offset + 2,
+ attrnames=attrnames,
+ nodenames=nodenames,
+ showcoord=showcoord,
+ _my_node_name=child_name)
+
+
+class NodeVisitor(object):
+ """ A base NodeVisitor class for visiting c_ast nodes.
+ Subclass it and define your own visit_XXX methods, where
+ XXX is the class name you want to visit with these
+ methods.
+
+ For example:
+
+ class ConstantVisitor(NodeVisitor):
+ def __init__(self):
+ self.values = []
+
+ def visit_Constant(self, node):
+ self.values.append(node.value)
+
+ Creates a list of values of all the constant nodes
+ encountered below the given node. To use it:
+
+ cv = ConstantVisitor()
+ cv.visit(node)
+
+ Notes:
+
+ * generic_visit() will be called for AST nodes for which
+ no visit_XXX method was defined.
+ * The children of nodes for which a visit_XXX was
+ defined will not be visited - if you need this, call
+ generic_visit() on the node.
+ You can use:
+ NodeVisitor.generic_visit(self, node)
+ * Modeled after Python's own AST visiting facilities
+ (the ast module of Python 3.0)
+ """
+
+ _method_cache = None
+
+ def visit(self, node):
+ """ Visit a node.
+ """
+
+ if self._method_cache is None:
+ self._method_cache = {}
+
+ visitor = self._method_cache.get(node.__class__.__name__, None)
+ if visitor is None:
+ method = 'visit_' + node.__class__.__name__
+ visitor = getattr(self, method, self.generic_visit)
+ self._method_cache[node.__class__.__name__] = visitor
+
+ return visitor(node)
+
+ def generic_visit(self, node):
+ """ Called if no explicit visitor function exists for a
+ node. Implements preorder visiting of the node.
+ """
+ for c in node:
+ self.visit(c)
+
+class ArrayDecl(Node):
+ __slots__ = ('type', 'dim', 'dim_quals', 'coord', '__weakref__')
+ def __init__(self, type, dim, dim_quals, coord=None):
+ self.type = type
+ self.dim = dim
+ self.dim_quals = dim_quals
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.type is not None: nodelist.append(("type", self.type))
+ if self.dim is not None: nodelist.append(("dim", self.dim))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.type is not None:
+ yield self.type
+ if self.dim is not None:
+ yield self.dim
+
+ attr_names = ('dim_quals', )
+
+class ArrayRef(Node):
+ __slots__ = ('name', 'subscript', 'coord', '__weakref__')
+ def __init__(self, name, subscript, coord=None):
+ self.name = name
+ self.subscript = subscript
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.name is not None: nodelist.append(("name", self.name))
+ if self.subscript is not None: nodelist.append(("subscript", self.subscript))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.name is not None:
+ yield self.name
+ if self.subscript is not None:
+ yield self.subscript
+
+ attr_names = ()
+
+class Assignment(Node):
+ __slots__ = ('op', 'lvalue', 'rvalue', 'coord', '__weakref__')
+ def __init__(self, op, lvalue, rvalue, coord=None):
+ self.op = op
+ self.lvalue = lvalue
+ self.rvalue = rvalue
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.lvalue is not None: nodelist.append(("lvalue", self.lvalue))
+ if self.rvalue is not None: nodelist.append(("rvalue", self.rvalue))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.lvalue is not None:
+ yield self.lvalue
+ if self.rvalue is not None:
+ yield self.rvalue
+
+ attr_names = ('op', )
+
+class BinaryOp(Node):
+ __slots__ = ('op', 'left', 'right', 'coord', '__weakref__')
+ def __init__(self, op, left, right, coord=None):
+ self.op = op
+ self.left = left
+ self.right = right
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.left is not None: nodelist.append(("left", self.left))
+ if self.right is not None: nodelist.append(("right", self.right))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.left is not None:
+ yield self.left
+ if self.right is not None:
+ yield self.right
+
+ attr_names = ('op', )
+
+class Break(Node):
+ __slots__ = ('coord', '__weakref__')
+ def __init__(self, coord=None):
+ self.coord = coord
+
+ def children(self):
+ return ()
+
+ def __iter__(self):
+ return
+ yield
+
+ attr_names = ()
+
+class Case(Node):
+ __slots__ = ('expr', 'stmts', 'coord', '__weakref__')
+ def __init__(self, expr, stmts, coord=None):
+ self.expr = expr
+ self.stmts = stmts
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.expr is not None: nodelist.append(("expr", self.expr))
+ for i, child in enumerate(self.stmts or []):
+ nodelist.append(("stmts[%d]" % i, child))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.expr is not None:
+ yield self.expr
+ for child in (self.stmts or []):
+ yield child
+
+ attr_names = ()
+
+class Cast(Node):
+ __slots__ = ('to_type', 'expr', 'coord', '__weakref__')
+ def __init__(self, to_type, expr, coord=None):
+ self.to_type = to_type
+ self.expr = expr
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.to_type is not None: nodelist.append(("to_type", self.to_type))
+ if self.expr is not None: nodelist.append(("expr", self.expr))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.to_type is not None:
+ yield self.to_type
+ if self.expr is not None:
+ yield self.expr
+
+ attr_names = ()
+
+class Compound(Node):
+ __slots__ = ('block_items', 'coord', '__weakref__')
+ def __init__(self, block_items, coord=None):
+ self.block_items = block_items
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ for i, child in enumerate(self.block_items or []):
+ nodelist.append(("block_items[%d]" % i, child))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ for child in (self.block_items or []):
+ yield child
+
+ attr_names = ()
+
+class CompoundLiteral(Node):
+ __slots__ = ('type', 'init', 'coord', '__weakref__')
+ def __init__(self, type, init, coord=None):
+ self.type = type
+ self.init = init
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.type is not None: nodelist.append(("type", self.type))
+ if self.init is not None: nodelist.append(("init", self.init))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.type is not None:
+ yield self.type
+ if self.init is not None:
+ yield self.init
+
+ attr_names = ()
+
+class Constant(Node):
+ __slots__ = ('type', 'value', 'coord', '__weakref__')
+ def __init__(self, type, value, coord=None):
+ self.type = type
+ self.value = value
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ return tuple(nodelist)
+
+ def __iter__(self):
+ return
+ yield
+
+ attr_names = ('type', 'value', )
+
+class Continue(Node):
+ __slots__ = ('coord', '__weakref__')
+ def __init__(self, coord=None):
+ self.coord = coord
+
+ def children(self):
+ return ()
+
+ def __iter__(self):
+ return
+ yield
+
+ attr_names = ()
+
+class Decl(Node):
+ __slots__ = ('name', 'quals', 'storage', 'funcspec', 'type', 'init', 'bitsize', 'coord', '__weakref__')
+ def __init__(self, name, quals, storage, funcspec, type, init, bitsize, coord=None):
+ self.name = name
+ self.quals = quals
+ self.storage = storage
+ self.funcspec = funcspec
+ self.type = type
+ self.init = init
+ self.bitsize = bitsize
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.type is not None: nodelist.append(("type", self.type))
+ if self.init is not None: nodelist.append(("init", self.init))
+ if self.bitsize is not None: nodelist.append(("bitsize", self.bitsize))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.type is not None:
+ yield self.type
+ if self.init is not None:
+ yield self.init
+ if self.bitsize is not None:
+ yield self.bitsize
+
+ attr_names = ('name', 'quals', 'storage', 'funcspec', )
+
+class DeclList(Node):
+ __slots__ = ('decls', 'coord', '__weakref__')
+ def __init__(self, decls, coord=None):
+ self.decls = decls
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ for i, child in enumerate(self.decls or []):
+ nodelist.append(("decls[%d]" % i, child))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ for child in (self.decls or []):
+ yield child
+
+ attr_names = ()
+
+class Default(Node):
+ __slots__ = ('stmts', 'coord', '__weakref__')
+ def __init__(self, stmts, coord=None):
+ self.stmts = stmts
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ for i, child in enumerate(self.stmts or []):
+ nodelist.append(("stmts[%d]" % i, child))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ for child in (self.stmts or []):
+ yield child
+
+ attr_names = ()
+
+class DoWhile(Node):
+ __slots__ = ('cond', 'stmt', 'coord', '__weakref__')
+ def __init__(self, cond, stmt, coord=None):
+ self.cond = cond
+ self.stmt = stmt
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.cond is not None: nodelist.append(("cond", self.cond))
+ if self.stmt is not None: nodelist.append(("stmt", self.stmt))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.cond is not None:
+ yield self.cond
+ if self.stmt is not None:
+ yield self.stmt
+
+ attr_names = ()
+
+class EllipsisParam(Node):
+ __slots__ = ('coord', '__weakref__')
+ def __init__(self, coord=None):
+ self.coord = coord
+
+ def children(self):
+ return ()
+
+ def __iter__(self):
+ return
+ yield
+
+ attr_names = ()
+
+class EmptyStatement(Node):
+ __slots__ = ('coord', '__weakref__')
+ def __init__(self, coord=None):
+ self.coord = coord
+
+ def children(self):
+ return ()
+
+ def __iter__(self):
+ return
+ yield
+
+ attr_names = ()
+
+class Enum(Node):
+ __slots__ = ('name', 'values', 'coord', '__weakref__')
+ def __init__(self, name, values, coord=None):
+ self.name = name
+ self.values = values
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.values is not None: nodelist.append(("values", self.values))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.values is not None:
+ yield self.values
+
+ attr_names = ('name', )
+
+class Enumerator(Node):
+ __slots__ = ('name', 'value', 'coord', '__weakref__')
+ def __init__(self, name, value, coord=None):
+ self.name = name
+ self.value = value
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.value is not None: nodelist.append(("value", self.value))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.value is not None:
+ yield self.value
+
+ attr_names = ('name', )
+
+class EnumeratorList(Node):
+ __slots__ = ('enumerators', 'coord', '__weakref__')
+ def __init__(self, enumerators, coord=None):
+ self.enumerators = enumerators
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ for i, child in enumerate(self.enumerators or []):
+ nodelist.append(("enumerators[%d]" % i, child))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ for child in (self.enumerators or []):
+ yield child
+
+ attr_names = ()
+
+class ExprList(Node):
+ __slots__ = ('exprs', 'coord', '__weakref__')
+ def __init__(self, exprs, coord=None):
+ self.exprs = exprs
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ for i, child in enumerate(self.exprs or []):
+ nodelist.append(("exprs[%d]" % i, child))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ for child in (self.exprs or []):
+ yield child
+
+ attr_names = ()
+
+class FileAST(Node):
+ __slots__ = ('ext', 'coord', '__weakref__')
+ def __init__(self, ext, coord=None):
+ self.ext = ext
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ for i, child in enumerate(self.ext or []):
+ nodelist.append(("ext[%d]" % i, child))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ for child in (self.ext or []):
+ yield child
+
+ attr_names = ()
+
+class For(Node):
+ __slots__ = ('init', 'cond', 'next', 'stmt', 'coord', '__weakref__')
+ def __init__(self, init, cond, next, stmt, coord=None):
+ self.init = init
+ self.cond = cond
+ self.next = next
+ self.stmt = stmt
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.init is not None: nodelist.append(("init", self.init))
+ if self.cond is not None: nodelist.append(("cond", self.cond))
+ if self.next is not None: nodelist.append(("next", self.next))
+ if self.stmt is not None: nodelist.append(("stmt", self.stmt))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.init is not None:
+ yield self.init
+ if self.cond is not None:
+ yield self.cond
+ if self.next is not None:
+ yield self.next
+ if self.stmt is not None:
+ yield self.stmt
+
+ attr_names = ()
+
+class FuncCall(Node):
+ __slots__ = ('name', 'args', 'coord', '__weakref__')
+ def __init__(self, name, args, coord=None):
+ self.name = name
+ self.args = args
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.name is not None: nodelist.append(("name", self.name))
+ if self.args is not None: nodelist.append(("args", self.args))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.name is not None:
+ yield self.name
+ if self.args is not None:
+ yield self.args
+
+ attr_names = ()
+
+class FuncDecl(Node):
+ __slots__ = ('args', 'type', 'coord', '__weakref__')
+ def __init__(self, args, type, coord=None):
+ self.args = args
+ self.type = type
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.args is not None: nodelist.append(("args", self.args))
+ if self.type is not None: nodelist.append(("type", self.type))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.args is not None:
+ yield self.args
+ if self.type is not None:
+ yield self.type
+
+ attr_names = ()
+
+class FuncDef(Node):
+ __slots__ = ('decl', 'param_decls', 'body', 'coord', '__weakref__')
+ def __init__(self, decl, param_decls, body, coord=None):
+ self.decl = decl
+ self.param_decls = param_decls
+ self.body = body
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.decl is not None: nodelist.append(("decl", self.decl))
+ if self.body is not None: nodelist.append(("body", self.body))
+ for i, child in enumerate(self.param_decls or []):
+ nodelist.append(("param_decls[%d]" % i, child))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.decl is not None:
+ yield self.decl
+ if self.body is not None:
+ yield self.body
+ for child in (self.param_decls or []):
+ yield child
+
+ attr_names = ()
+
+class Goto(Node):
+ __slots__ = ('name', 'coord', '__weakref__')
+ def __init__(self, name, coord=None):
+ self.name = name
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ return tuple(nodelist)
+
+ def __iter__(self):
+ return
+ yield
+
+ attr_names = ('name', )
+
+class ID(Node):
+ __slots__ = ('name', 'coord', '__weakref__')
+ def __init__(self, name, coord=None):
+ self.name = name
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ return tuple(nodelist)
+
+ def __iter__(self):
+ return
+ yield
+
+ attr_names = ('name', )
+
+class IdentifierType(Node):
+ __slots__ = ('names', 'coord', '__weakref__')
+ def __init__(self, names, coord=None):
+ self.names = names
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ return tuple(nodelist)
+
+ def __iter__(self):
+ return
+ yield
+
+ attr_names = ('names', )
+
+class If(Node):
+ __slots__ = ('cond', 'iftrue', 'iffalse', 'coord', '__weakref__')
+ def __init__(self, cond, iftrue, iffalse, coord=None):
+ self.cond = cond
+ self.iftrue = iftrue
+ self.iffalse = iffalse
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.cond is not None: nodelist.append(("cond", self.cond))
+ if self.iftrue is not None: nodelist.append(("iftrue", self.iftrue))
+ if self.iffalse is not None: nodelist.append(("iffalse", self.iffalse))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.cond is not None:
+ yield self.cond
+ if self.iftrue is not None:
+ yield self.iftrue
+ if self.iffalse is not None:
+ yield self.iffalse
+
+ attr_names = ()
+
+class InitList(Node):
+ __slots__ = ('exprs', 'coord', '__weakref__')
+ def __init__(self, exprs, coord=None):
+ self.exprs = exprs
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ for i, child in enumerate(self.exprs or []):
+ nodelist.append(("exprs[%d]" % i, child))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ for child in (self.exprs or []):
+ yield child
+
+ attr_names = ()
+
+class Label(Node):
+ __slots__ = ('name', 'stmt', 'coord', '__weakref__')
+ def __init__(self, name, stmt, coord=None):
+ self.name = name
+ self.stmt = stmt
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.stmt is not None: nodelist.append(("stmt", self.stmt))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.stmt is not None:
+ yield self.stmt
+
+ attr_names = ('name', )
+
+class NamedInitializer(Node):
+ __slots__ = ('name', 'expr', 'coord', '__weakref__')
+ def __init__(self, name, expr, coord=None):
+ self.name = name
+ self.expr = expr
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.expr is not None: nodelist.append(("expr", self.expr))
+ for i, child in enumerate(self.name or []):
+ nodelist.append(("name[%d]" % i, child))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.expr is not None:
+ yield self.expr
+ for child in (self.name or []):
+ yield child
+
+ attr_names = ()
+
+class ParamList(Node):
+ __slots__ = ('params', 'coord', '__weakref__')
+ def __init__(self, params, coord=None):
+ self.params = params
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ for i, child in enumerate(self.params or []):
+ nodelist.append(("params[%d]" % i, child))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ for child in (self.params or []):
+ yield child
+
+ attr_names = ()
+
+class PtrDecl(Node):
+ __slots__ = ('quals', 'type', 'coord', '__weakref__')
+ def __init__(self, quals, type, coord=None):
+ self.quals = quals
+ self.type = type
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.type is not None: nodelist.append(("type", self.type))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.type is not None:
+ yield self.type
+
+ attr_names = ('quals', )
+
+class Return(Node):
+ __slots__ = ('expr', 'coord', '__weakref__')
+ def __init__(self, expr, coord=None):
+ self.expr = expr
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.expr is not None: nodelist.append(("expr", self.expr))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.expr is not None:
+ yield self.expr
+
+ attr_names = ()
+
+class Struct(Node):
+ __slots__ = ('name', 'decls', 'coord', '__weakref__')
+ def __init__(self, name, decls, coord=None):
+ self.name = name
+ self.decls = decls
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ for i, child in enumerate(self.decls or []):
+ nodelist.append(("decls[%d]" % i, child))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ for child in (self.decls or []):
+ yield child
+
+ attr_names = ('name', )
+
+class StructRef(Node):
+ __slots__ = ('name', 'type', 'field', 'coord', '__weakref__')
+ def __init__(self, name, type, field, coord=None):
+ self.name = name
+ self.type = type
+ self.field = field
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.name is not None: nodelist.append(("name", self.name))
+ if self.field is not None: nodelist.append(("field", self.field))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.name is not None:
+ yield self.name
+ if self.field is not None:
+ yield self.field
+
+ attr_names = ('type', )
+
+class Switch(Node):
+ __slots__ = ('cond', 'stmt', 'coord', '__weakref__')
+ def __init__(self, cond, stmt, coord=None):
+ self.cond = cond
+ self.stmt = stmt
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.cond is not None: nodelist.append(("cond", self.cond))
+ if self.stmt is not None: nodelist.append(("stmt", self.stmt))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.cond is not None:
+ yield self.cond
+ if self.stmt is not None:
+ yield self.stmt
+
+ attr_names = ()
+
+class TernaryOp(Node):
+ __slots__ = ('cond', 'iftrue', 'iffalse', 'coord', '__weakref__')
+ def __init__(self, cond, iftrue, iffalse, coord=None):
+ self.cond = cond
+ self.iftrue = iftrue
+ self.iffalse = iffalse
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.cond is not None: nodelist.append(("cond", self.cond))
+ if self.iftrue is not None: nodelist.append(("iftrue", self.iftrue))
+ if self.iffalse is not None: nodelist.append(("iffalse", self.iffalse))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.cond is not None:
+ yield self.cond
+ if self.iftrue is not None:
+ yield self.iftrue
+ if self.iffalse is not None:
+ yield self.iffalse
+
+ attr_names = ()
+
+class TypeDecl(Node):
+ __slots__ = ('declname', 'quals', 'type', 'coord', '__weakref__')
+ def __init__(self, declname, quals, type, coord=None):
+ self.declname = declname
+ self.quals = quals
+ self.type = type
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.type is not None: nodelist.append(("type", self.type))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.type is not None:
+ yield self.type
+
+ attr_names = ('declname', 'quals', )
+
+class Typedef(Node):
+ __slots__ = ('name', 'quals', 'storage', 'type', 'coord', '__weakref__')
+ def __init__(self, name, quals, storage, type, coord=None):
+ self.name = name
+ self.quals = quals
+ self.storage = storage
+ self.type = type
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.type is not None: nodelist.append(("type", self.type))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.type is not None:
+ yield self.type
+
+ attr_names = ('name', 'quals', 'storage', )
+
+class Typename(Node):
+ __slots__ = ('name', 'quals', 'type', 'coord', '__weakref__')
+ def __init__(self, name, quals, type, coord=None):
+ self.name = name
+ self.quals = quals
+ self.type = type
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.type is not None: nodelist.append(("type", self.type))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.type is not None:
+ yield self.type
+
+ attr_names = ('name', 'quals', )
+
+class UnaryOp(Node):
+ __slots__ = ('op', 'expr', 'coord', '__weakref__')
+ def __init__(self, op, expr, coord=None):
+ self.op = op
+ self.expr = expr
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.expr is not None: nodelist.append(("expr", self.expr))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.expr is not None:
+ yield self.expr
+
+ attr_names = ('op', )
+
+class Union(Node):
+ __slots__ = ('name', 'decls', 'coord', '__weakref__')
+ def __init__(self, name, decls, coord=None):
+ self.name = name
+ self.decls = decls
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ for i, child in enumerate(self.decls or []):
+ nodelist.append(("decls[%d]" % i, child))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ for child in (self.decls or []):
+ yield child
+
+ attr_names = ('name', )
+
+class While(Node):
+ __slots__ = ('cond', 'stmt', 'coord', '__weakref__')
+ def __init__(self, cond, stmt, coord=None):
+ self.cond = cond
+ self.stmt = stmt
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ if self.cond is not None: nodelist.append(("cond", self.cond))
+ if self.stmt is not None: nodelist.append(("stmt", self.stmt))
+ return tuple(nodelist)
+
+ def __iter__(self):
+ if self.cond is not None:
+ yield self.cond
+ if self.stmt is not None:
+ yield self.stmt
+
+ attr_names = ()
+
+class Pragma(Node):
+ __slots__ = ('string', 'coord', '__weakref__')
+ def __init__(self, string, coord=None):
+ self.string = string
+ self.coord = coord
+
+ def children(self):
+ nodelist = []
+ return tuple(nodelist)
+
+ def __iter__(self):
+ return
+ yield
+
+ attr_names = ('string', )
+
diff --git a/venv/Lib/site-packages/pycparser/c_generator.py b/venv/Lib/site-packages/pycparser/c_generator.py
new file mode 100644
index 000000000..973d24a8c
--- /dev/null
+++ b/venv/Lib/site-packages/pycparser/c_generator.py
@@ -0,0 +1,444 @@
+#------------------------------------------------------------------------------
+# pycparser: c_generator.py
+#
+# C code generator from pycparser AST nodes.
+#
+# Eli Bendersky [https://eli.thegreenplace.net/]
+# License: BSD
+#------------------------------------------------------------------------------
+from . import c_ast
+
+
+class CGenerator(object):
+ """ Uses the same visitor pattern as c_ast.NodeVisitor, but modified to
+ return a value from each visit method, using string accumulation in
+ generic_visit.
+ """
+ def __init__(self):
+ # Statements start with indentation of self.indent_level spaces, using
+ # the _make_indent method
+ #
+ self.indent_level = 0
+
+ def _make_indent(self):
+ return ' ' * self.indent_level
+
+ def visit(self, node):
+ method = 'visit_' + node.__class__.__name__
+ return getattr(self, method, self.generic_visit)(node)
+
+ def generic_visit(self, node):
+ #~ print('generic:', type(node))
+ if node is None:
+ return ''
+ else:
+ return ''.join(self.visit(c) for c_name, c in node.children())
+
+ def visit_Constant(self, n):
+ return n.value
+
+ def visit_ID(self, n):
+ return n.name
+
+ def visit_Pragma(self, n):
+ ret = '#pragma'
+ if n.string:
+ ret += ' ' + n.string
+ return ret
+
+ def visit_ArrayRef(self, n):
+ arrref = self._parenthesize_unless_simple(n.name)
+ return arrref + '[' + self.visit(n.subscript) + ']'
+
+ def visit_StructRef(self, n):
+ sref = self._parenthesize_unless_simple(n.name)
+ return sref + n.type + self.visit(n.field)
+
+ def visit_FuncCall(self, n):
+ fref = self._parenthesize_unless_simple(n.name)
+ return fref + '(' + self.visit(n.args) + ')'
+
+ def visit_UnaryOp(self, n):
+ operand = self._parenthesize_unless_simple(n.expr)
+ if n.op == 'p++':
+ return '%s++' % operand
+ elif n.op == 'p--':
+ return '%s--' % operand
+ elif n.op == 'sizeof':
+ # Always parenthesize the argument of sizeof since it can be
+ # a name.
+ return 'sizeof(%s)' % self.visit(n.expr)
+ else:
+ return '%s%s' % (n.op, operand)
+
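+    # Note (editorial, not in upstream): the parser encodes postfix
+    # increment/decrement as 'p++'/'p--' in UnaryOp.op, which is why
+    # visit_UnaryOp special-cases them above.
+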
+ def visit_BinaryOp(self, n):
+ lval_str = self._parenthesize_if(n.left,
+ lambda d: not self._is_simple_node(d))
+ rval_str = self._parenthesize_if(n.right,
+ lambda d: not self._is_simple_node(d))
+ return '%s %s %s' % (lval_str, n.op, rval_str)
+
+ def visit_Assignment(self, n):
+ rval_str = self._parenthesize_if(
+ n.rvalue,
+ lambda n: isinstance(n, c_ast.Assignment))
+ return '%s %s %s' % (self.visit(n.lvalue), n.op, rval_str)
+
+ def visit_IdentifierType(self, n):
+ return ' '.join(n.names)
+
+ def _visit_expr(self, n):
+ if isinstance(n, c_ast.InitList):
+ return '{' + self.visit(n) + '}'
+ elif isinstance(n, c_ast.ExprList):
+ return '(' + self.visit(n) + ')'
+ else:
+ return self.visit(n)
+
+ def visit_Decl(self, n, no_type=False):
+ # no_type is used when a Decl is part of a DeclList, where the type is
+ # explicitly only for the first declaration in a list.
+ #
+ s = n.name if no_type else self._generate_decl(n)
+ if n.bitsize: s += ' : ' + self.visit(n.bitsize)
+ if n.init:
+ s += ' = ' + self._visit_expr(n.init)
+ return s
+
+ def visit_DeclList(self, n):
+ s = self.visit(n.decls[0])
+ if len(n.decls) > 1:
+ s += ', ' + ', '.join(self.visit_Decl(decl, no_type=True)
+ for decl in n.decls[1:])
+ return s
+
+ def visit_Typedef(self, n):
+ s = ''
+ if n.storage: s += ' '.join(n.storage) + ' '
+ s += self._generate_type(n.type)
+ return s
+
+ def visit_Cast(self, n):
+ s = '(' + self._generate_type(n.to_type, emit_declname=False) + ')'
+ return s + ' ' + self._parenthesize_unless_simple(n.expr)
+
+ def visit_ExprList(self, n):
+ visited_subexprs = []
+ for expr in n.exprs:
+ visited_subexprs.append(self._visit_expr(expr))
+ return ', '.join(visited_subexprs)
+
+ def visit_InitList(self, n):
+ visited_subexprs = []
+ for expr in n.exprs:
+ visited_subexprs.append(self._visit_expr(expr))
+ return ', '.join(visited_subexprs)
+
+ def visit_Enum(self, n):
+ return self._generate_struct_union_enum(n, name='enum')
+
+ def visit_Enumerator(self, n):
+ if not n.value:
+ return '{indent}{name},\n'.format(
+ indent=self._make_indent(),
+ name=n.name,
+ )
+ else:
+ return '{indent}{name} = {value},\n'.format(
+ indent=self._make_indent(),
+ name=n.name,
+ value=self.visit(n.value),
+ )
+
+ def visit_FuncDef(self, n):
+ decl = self.visit(n.decl)
+ self.indent_level = 0
+ body = self.visit(n.body)
+ if n.param_decls:
+ knrdecls = ';\n'.join(self.visit(p) for p in n.param_decls)
+ return decl + '\n' + knrdecls + ';\n' + body + '\n'
+ else:
+ return decl + '\n' + body + '\n'
+
+ def visit_FileAST(self, n):
+ s = ''
+ for ext in n.ext:
+ if isinstance(ext, c_ast.FuncDef):
+ s += self.visit(ext)
+ elif isinstance(ext, c_ast.Pragma):
+ s += self.visit(ext) + '\n'
+ else:
+ s += self.visit(ext) + ';\n'
+ return s
+
+ def visit_Compound(self, n):
+ s = self._make_indent() + '{\n'
+ self.indent_level += 2
+ if n.block_items:
+ s += ''.join(self._generate_stmt(stmt) for stmt in n.block_items)
+ self.indent_level -= 2
+ s += self._make_indent() + '}\n'
+ return s
+
+ def visit_CompoundLiteral(self, n):
+ return '(' + self.visit(n.type) + '){' + self.visit(n.init) + '}'
+
+
+ def visit_EmptyStatement(self, n):
+ return ';'
+
+ def visit_ParamList(self, n):
+ return ', '.join(self.visit(param) for param in n.params)
+
+ def visit_Return(self, n):
+ s = 'return'
+ if n.expr: s += ' ' + self.visit(n.expr)
+ return s + ';'
+
+ def visit_Break(self, n):
+ return 'break;'
+
+ def visit_Continue(self, n):
+ return 'continue;'
+
+ def visit_TernaryOp(self, n):
+ s = '(' + self._visit_expr(n.cond) + ') ? '
+ s += '(' + self._visit_expr(n.iftrue) + ') : '
+ s += '(' + self._visit_expr(n.iffalse) + ')'
+ return s
+
+ def visit_If(self, n):
+ s = 'if ('
+ if n.cond: s += self.visit(n.cond)
+ s += ')\n'
+ s += self._generate_stmt(n.iftrue, add_indent=True)
+ if n.iffalse:
+ s += self._make_indent() + 'else\n'
+ s += self._generate_stmt(n.iffalse, add_indent=True)
+ return s
+
+ def visit_For(self, n):
+ s = 'for ('
+ if n.init: s += self.visit(n.init)
+ s += ';'
+ if n.cond: s += ' ' + self.visit(n.cond)
+ s += ';'
+ if n.next: s += ' ' + self.visit(n.next)
+ s += ')\n'
+ s += self._generate_stmt(n.stmt, add_indent=True)
+ return s
+
+ def visit_While(self, n):
+ s = 'while ('
+ if n.cond: s += self.visit(n.cond)
+ s += ')\n'
+ s += self._generate_stmt(n.stmt, add_indent=True)
+ return s
+
+ def visit_DoWhile(self, n):
+ s = 'do\n'
+ s += self._generate_stmt(n.stmt, add_indent=True)
+ s += self._make_indent() + 'while ('
+ if n.cond: s += self.visit(n.cond)
+ s += ');'
+ return s
+
+ def visit_Switch(self, n):
+ s = 'switch (' + self.visit(n.cond) + ')\n'
+ s += self._generate_stmt(n.stmt, add_indent=True)
+ return s
+
+ def visit_Case(self, n):
+ s = 'case ' + self.visit(n.expr) + ':\n'
+ for stmt in n.stmts:
+ s += self._generate_stmt(stmt, add_indent=True)
+ return s
+
+ def visit_Default(self, n):
+ s = 'default:\n'
+ for stmt in n.stmts:
+ s += self._generate_stmt(stmt, add_indent=True)
+ return s
+
+ def visit_Label(self, n):
+ return n.name + ':\n' + self._generate_stmt(n.stmt)
+
+ def visit_Goto(self, n):
+ return 'goto ' + n.name + ';'
+
+ def visit_EllipsisParam(self, n):
+ return '...'
+
+ def visit_Struct(self, n):
+ return self._generate_struct_union_enum(n, 'struct')
+
+ def visit_Typename(self, n):
+ return self._generate_type(n.type)
+
+ def visit_Union(self, n):
+ return self._generate_struct_union_enum(n, 'union')
+
+ def visit_NamedInitializer(self, n):
+ s = ''
+ for name in n.name:
+ if isinstance(name, c_ast.ID):
+ s += '.' + name.name
+ else:
+ s += '[' + self.visit(name) + ']'
+ s += ' = ' + self._visit_expr(n.expr)
+ return s
+
+ def visit_FuncDecl(self, n):
+ return self._generate_type(n)
+
+ def visit_ArrayDecl(self, n):
+ return self._generate_type(n, emit_declname=False)
+
+ def visit_TypeDecl(self, n):
+ return self._generate_type(n, emit_declname=False)
+
+ def visit_PtrDecl(self, n):
+ return self._generate_type(n, emit_declname=False)
+
+ def _generate_struct_union_enum(self, n, name):
+ """ Generates code for structs, unions, and enums. name should be
+ 'struct', 'union', or 'enum'.
+ """
+ if name in ('struct', 'union'):
+ members = n.decls
+ body_function = self._generate_struct_union_body
+ else:
+ assert name == 'enum'
+ members = None if n.values is None else n.values.enumerators
+ body_function = self._generate_enum_body
+ s = name + ' ' + (n.name or '')
+ if members is not None:
+ # None means no members
+ # Empty sequence means an empty list of members
+ s += '\n'
+ s += self._make_indent()
+ self.indent_level += 2
+ s += '{\n'
+ s += body_function(members)
+ self.indent_level -= 2
+ s += self._make_indent() + '}'
+ return s
+
+ def _generate_struct_union_body(self, members):
+ return ''.join(self._generate_stmt(decl) for decl in members)
+
+ def _generate_enum_body(self, members):
+ # `[:-2] + '\n'` removes the final `,` from the enumerator list
+ return ''.join(self.visit(value) for value in members)[:-2] + '\n'
+
+ def _generate_stmt(self, n, add_indent=False):
+ """ Generation from a statement node. This method exists as a wrapper
+ for individual visit_* methods to handle different treatment of
+ some statements in this context.
+ """
+ typ = type(n)
+ if add_indent: self.indent_level += 2
+ indent = self._make_indent()
+ if add_indent: self.indent_level -= 2
+
+ if typ in (
+ c_ast.Decl, c_ast.Assignment, c_ast.Cast, c_ast.UnaryOp,
+ c_ast.BinaryOp, c_ast.TernaryOp, c_ast.FuncCall, c_ast.ArrayRef,
+ c_ast.StructRef, c_ast.Constant, c_ast.ID, c_ast.Typedef,
+ c_ast.ExprList):
+ # These can also appear in an expression context so no semicolon
+ # is added to them automatically
+ #
+ return indent + self.visit(n) + ';\n'
+ elif typ in (c_ast.Compound,):
+ # No extra indentation required before the opening brace of a
+ # compound - because it consists of multiple lines it has to
+ # compute its own indentation.
+ #
+ return self.visit(n)
+ else:
+ return indent + self.visit(n) + '\n'
+
+ def _generate_decl(self, n):
+ """ Generation from a Decl node.
+ """
+ s = ''
+ if n.funcspec: s = ' '.join(n.funcspec) + ' '
+ if n.storage: s += ' '.join(n.storage) + ' '
+ s += self._generate_type(n.type)
+ return s
+
+ def _generate_type(self, n, modifiers=[], emit_declname = True):
+ """ Recursive generation from a type node. n is the type node.
+ modifiers collects the PtrDecl, ArrayDecl and FuncDecl modifiers
+ encountered on the way down to a TypeDecl, to allow proper
+ generation from it.
+ """
+ typ = type(n)
+ #~ print(n, modifiers)
+
+ if typ == c_ast.TypeDecl:
+ s = ''
+ if n.quals: s += ' '.join(n.quals) + ' '
+ s += self.visit(n.type)
+
+ nstr = n.declname if n.declname and emit_declname else ''
+ # Resolve modifiers.
+ # Wrap in parens to distinguish pointer to array and pointer to
+ # function syntax.
+ #
+ for i, modifier in enumerate(modifiers):
+ if isinstance(modifier, c_ast.ArrayDecl):
+ if (i != 0 and
+ isinstance(modifiers[i - 1], c_ast.PtrDecl)):
+ nstr = '(' + nstr + ')'
+ nstr += '['
+ if modifier.dim_quals:
+ nstr += ' '.join(modifier.dim_quals) + ' '
+ nstr += self.visit(modifier.dim) + ']'
+ elif isinstance(modifier, c_ast.FuncDecl):
+ if (i != 0 and
+ isinstance(modifiers[i - 1], c_ast.PtrDecl)):
+ nstr = '(' + nstr + ')'
+ nstr += '(' + self.visit(modifier.args) + ')'
+ elif isinstance(modifier, c_ast.PtrDecl):
+ if modifier.quals:
+ nstr = '* %s%s' % (' '.join(modifier.quals),
+ ' ' + nstr if nstr else '')
+ else:
+ nstr = '*' + nstr
+ if nstr: s += ' ' + nstr
+ return s
+ elif typ == c_ast.Decl:
+ return self._generate_decl(n.type)
+ elif typ == c_ast.Typename:
+ return self._generate_type(n.type, emit_declname = emit_declname)
+ elif typ == c_ast.IdentifierType:
+ return ' '.join(n.names) + ' '
+ elif typ in (c_ast.ArrayDecl, c_ast.PtrDecl, c_ast.FuncDecl):
+ return self._generate_type(n.type, modifiers + [n],
+ emit_declname = emit_declname)
+ else:
+ return self.visit(n)
+
+ def _parenthesize_if(self, n, condition):
+ """ Visits 'n' and returns its string representation, parenthesized
+ if the condition function applied to the node returns True.
+ """
+ s = self._visit_expr(n)
+ if condition(n):
+ return '(' + s + ')'
+ else:
+ return s
+
+ def _parenthesize_unless_simple(self, n):
+ """ Common use case for _parenthesize_if
+ """
+ return self._parenthesize_if(n, lambda d: not self._is_simple_node(d))
+
+ def _is_simple_node(self, n):
+ """ Returns True for nodes that are "simple" - i.e. nodes that always
+ have higher precedence than operators.
+ """
+ return isinstance(n, (c_ast.Constant, c_ast.ID, c_ast.ArrayRef,
+ c_ast.StructRef, c_ast.FuncCall))
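+
+# Illustrative usage sketch (not part of upstream pycparser): round-trip a
+# small snippet through the parser and this generator.
+#
+#     from pycparser import c_parser, c_generator
+#     ast = c_parser.CParser().parse('int foo(int x) { return x + 1; }')
+#     print(c_generator.CGenerator().visit(ast))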
diff --git a/venv/Lib/site-packages/pycparser/c_lexer.py b/venv/Lib/site-packages/pycparser/c_lexer.py
new file mode 100644
index 000000000..045d24eb2
--- /dev/null
+++ b/venv/Lib/site-packages/pycparser/c_lexer.py
@@ -0,0 +1,514 @@
+#------------------------------------------------------------------------------
+# pycparser: c_lexer.py
+#
+# CLexer class: lexer for the C language
+#
+# Eli Bendersky [https://eli.thegreenplace.net/]
+# License: BSD
+#------------------------------------------------------------------------------
+import re
+import sys
+
+from .ply import lex
+from .ply.lex import TOKEN
+
+
+class CLexer(object):
+ """ A lexer for the C language. After building it, set the
+ input text with input(), and call token() to get new
+ tokens.
+
+ The public attribute filename can be set to an initial
+ filename, but the lexer will update it upon #line
+ directives.
+ """
+ def __init__(self, error_func, on_lbrace_func, on_rbrace_func,
+ type_lookup_func):
+ """ Create a new Lexer.
+
+ error_func:
+ An error function. Will be called with an error
+ message, line and column as arguments, in case of
+ an error during lexing.
+
+ on_lbrace_func, on_rbrace_func:
+ Called when an LBRACE or RBRACE is encountered
+ (likely to push/pop type_lookup_func's scope)
+
+ type_lookup_func:
+ A type lookup function. Given a string, it must
+ return True IFF this string is a name of a type
+ that was defined with a typedef earlier.
+ """
+ self.error_func = error_func
+ self.on_lbrace_func = on_lbrace_func
+ self.on_rbrace_func = on_rbrace_func
+ self.type_lookup_func = type_lookup_func
+ self.filename = ''
+
+ # Keeps track of the last token returned from self.token()
+ self.last_token = None
+
+        # Allow either "# line" or "# <num>" to support GCC's
+ # cpp output
+ #
+ self.line_pattern = re.compile(r'([ \t]*line\W)|([ \t]*\d+)')
+ self.pragma_pattern = re.compile(r'[ \t]*pragma\W')
+
+ def build(self, **kwargs):
+ """ Builds the lexer from the specification. Must be
+ called after the lexer object is created.
+
+ This method exists separately, because the PLY
+ manual warns against calling lex.lex inside
+ __init__
+ """
+ self.lexer = lex.lex(object=self, **kwargs)
+
+ def reset_lineno(self):
+ """ Resets the internal line number counter of the lexer.
+ """
+ self.lexer.lineno = 1
+
+ def input(self, text):
+ self.lexer.input(text)
+
+ def token(self):
+ self.last_token = self.lexer.token()
+ return self.last_token
+
+ def find_tok_column(self, token):
+ """ Find the column of the token in its line.
+ """
+ last_cr = self.lexer.lexdata.rfind('\n', 0, token.lexpos)
+ return token.lexpos - last_cr
+
+ ######################-- PRIVATE --######################
+
+ ##
+ ## Internal auxiliary methods
+ ##
+ def _error(self, msg, token):
+ location = self._make_tok_location(token)
+ self.error_func(msg, location[0], location[1])
+ self.lexer.skip(1)
+
+ def _make_tok_location(self, token):
+ return (token.lineno, self.find_tok_column(token))
+
+ ##
+ ## Reserved keywords
+ ##
+ keywords = (
+ '_BOOL', '_COMPLEX', 'AUTO', 'BREAK', 'CASE', 'CHAR', 'CONST',
+ 'CONTINUE', 'DEFAULT', 'DO', 'DOUBLE', 'ELSE', 'ENUM', 'EXTERN',
+ 'FLOAT', 'FOR', 'GOTO', 'IF', 'INLINE', 'INT', 'LONG',
+ 'REGISTER', 'OFFSETOF',
+ 'RESTRICT', 'RETURN', 'SHORT', 'SIGNED', 'SIZEOF', 'STATIC', 'STRUCT',
+ 'SWITCH', 'TYPEDEF', 'UNION', 'UNSIGNED', 'VOID',
+ 'VOLATILE', 'WHILE', '__INT128',
+ )
+
+ keyword_map = {}
+ for keyword in keywords:
+ if keyword == '_BOOL':
+ keyword_map['_Bool'] = keyword
+ elif keyword == '_COMPLEX':
+ keyword_map['_Complex'] = keyword
+ else:
+ keyword_map[keyword.lower()] = keyword
+
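+    # Illustrative note (not in upstream): e.g. keyword_map['int'] == 'INT'
+    # and keyword_map['_Bool'] == '_BOOL'.
+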
+ ##
+ ## All the tokens recognized by the lexer
+ ##
+ tokens = keywords + (
+ # Identifiers
+ 'ID',
+
+ # Type identifiers (identifiers previously defined as
+ # types with typedef)
+ 'TYPEID',
+
+ # constants
+ 'INT_CONST_DEC', 'INT_CONST_OCT', 'INT_CONST_HEX', 'INT_CONST_BIN', 'INT_CONST_CHAR',
+ 'FLOAT_CONST', 'HEX_FLOAT_CONST',
+ 'CHAR_CONST',
+ 'WCHAR_CONST',
+
+ # String literals
+ 'STRING_LITERAL',
+ 'WSTRING_LITERAL',
+
+ # Operators
+ 'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'MOD',
+ 'OR', 'AND', 'NOT', 'XOR', 'LSHIFT', 'RSHIFT',
+ 'LOR', 'LAND', 'LNOT',
+ 'LT', 'LE', 'GT', 'GE', 'EQ', 'NE',
+
+ # Assignment
+ 'EQUALS', 'TIMESEQUAL', 'DIVEQUAL', 'MODEQUAL',
+ 'PLUSEQUAL', 'MINUSEQUAL',
+ 'LSHIFTEQUAL','RSHIFTEQUAL', 'ANDEQUAL', 'XOREQUAL',
+ 'OREQUAL',
+
+ # Increment/decrement
+ 'PLUSPLUS', 'MINUSMINUS',
+
+ # Structure dereference (->)
+ 'ARROW',
+
+ # Conditional operator (?)
+ 'CONDOP',
+
+        # Delimiters
+ 'LPAREN', 'RPAREN', # ( )
+ 'LBRACKET', 'RBRACKET', # [ ]
+ 'LBRACE', 'RBRACE', # { }
+        'COMMA', 'PERIOD',          # , .
+ 'SEMI', 'COLON', # ; :
+
+ # Ellipsis (...)
+ 'ELLIPSIS',
+
+ # pre-processor
+ 'PPHASH', # '#'
+ 'PPPRAGMA', # 'pragma'
+ 'PPPRAGMASTR',
+ )
+
+ ##
+ ## Regexes for use in tokens
+ ##
+
+ # valid C identifiers (K&R2: A.2.3), plus '$' (supported by some compilers)
+ identifier = r'[a-zA-Z_$][0-9a-zA-Z_$]*'
+
+ hex_prefix = '0[xX]'
+ hex_digits = '[0-9a-fA-F]+'
+ bin_prefix = '0[bB]'
+ bin_digits = '[01]+'
+
+ # integer constants (K&R2: A.2.5.1)
+ integer_suffix_opt = r'(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?'
+ decimal_constant = '(0'+integer_suffix_opt+')|([1-9][0-9]*'+integer_suffix_opt+')'
+ octal_constant = '0[0-7]*'+integer_suffix_opt
+ hex_constant = hex_prefix+hex_digits+integer_suffix_opt
+ bin_constant = bin_prefix+bin_digits+integer_suffix_opt
+
+ bad_octal_constant = '0[0-7]*[89]'
+
+ # character constants (K&R2: A.2.5.2)
+ # Note: a-zA-Z and '.-~^_!=&;,' are allowed as escape chars to support #line
+ # directives with Windows paths as filenames (..\..\dir\file)
+ # For the same reason, decimal_escape allows all digit sequences. We want to
+ # parse all correct code, even if it means to sometimes parse incorrect
+ # code.
+ #
+ # The original regexes were taken verbatim from the C syntax definition,
+ # and were later modified to avoid worst-case exponential running time.
+ #
+ # simple_escape = r"""([a-zA-Z._~!=&\^\-\\?'"])"""
+ # decimal_escape = r"""(\d+)"""
+ # hex_escape = r"""(x[0-9a-fA-F]+)"""
+ # bad_escape = r"""([\\][^a-zA-Z._~^!=&\^\-\\?'"x0-7])"""
+ #
+ # The following modifications were made to avoid the ambiguity that allowed backtracking:
+ # (https://github.com/eliben/pycparser/issues/61)
+ #
+ # - \x was removed from simple_escape, unless it was not followed by a hex digit, to avoid ambiguity with hex_escape.
+    # - hex_escape allows one or more hex characters, but requires that the next character (if any) is not hex
+    # - decimal_escape allows one or more decimal characters, but requires that the next character (if any) is not a decimal
+ # - bad_escape does not allow any decimals (8-9), to avoid conflicting with the permissive decimal_escape.
+ #
+ # Without this change, python's `re` module would recursively try parsing each ambiguous escape sequence in multiple ways.
+ # e.g. `\123` could be parsed as `\1`+`23`, `\12`+`3`, and `\123`.
+
+ simple_escape = r"""([a-wyzA-Z._~!=&\^\-\\?'"]|x(?![0-9a-fA-F]))"""
+ decimal_escape = r"""(\d+)(?!\d)"""
+ hex_escape = r"""(x[0-9a-fA-F]+)(?![0-9a-fA-F])"""
+ bad_escape = r"""([\\][^a-zA-Z._~^!=&\^\-\\?'"x0-9])"""
+
+ escape_sequence = r"""(\\("""+simple_escape+'|'+decimal_escape+'|'+hex_escape+'))'
+
+    # This complicated regex with lookahead might be slow for strings, so, since all of the valid escapes (including \x) allow
+    # 0 or more non-escaped characters after the first character, simple_escape+decimal_escape+hex_escape was simplified to
+
+ escape_sequence_start_in_string = r"""(\\[0-9a-zA-Z._~!=&\^\-\\?'"])"""
+
+ cconst_char = r"""([^'\\\n]|"""+escape_sequence+')'
+ char_const = "'"+cconst_char+"'"
+ wchar_const = 'L'+char_const
+ multicharacter_constant = "'"+cconst_char+"{2,4}'"
+ unmatched_quote = "('"+cconst_char+"*\\n)|('"+cconst_char+"*$)"
+ bad_char_const = r"""('"""+cconst_char+"""[^'\n]+')|('')|('"""+bad_escape+r"""[^'\n]*')"""
+
+ # string literals (K&R2: A.2.6)
+ string_char = r"""([^"\\\n]|"""+escape_sequence_start_in_string+')'
+ string_literal = '"'+string_char+'*"'
+ wstring_literal = 'L'+string_literal
+ bad_string_literal = '"'+string_char+'*'+bad_escape+string_char+'*"'
+
+ # floating constants (K&R2: A.2.5.3)
+ exponent_part = r"""([eE][-+]?[0-9]+)"""
+ fractional_constant = r"""([0-9]*\.[0-9]+)|([0-9]+\.)"""
+ floating_constant = '(((('+fractional_constant+')'+exponent_part+'?)|([0-9]+'+exponent_part+'))[FfLl]?)'
+ binary_exponent_part = r'''([pP][+-]?[0-9]+)'''
+ hex_fractional_constant = '((('+hex_digits+r""")?\."""+hex_digits+')|('+hex_digits+r"""\.))"""
+ hex_floating_constant = '('+hex_prefix+'('+hex_digits+'|'+hex_fractional_constant+')'+binary_exponent_part+'[FfLl]?)'
+
+ ##
+ ## Lexer states: used for preprocessor \n-terminated directives
+ ##
+ states = (
+ # ppline: preprocessor line directives
+ #
+ ('ppline', 'exclusive'),
+
+ # pppragma: pragma
+ #
+ ('pppragma', 'exclusive'),
+ )
+
+ def t_PPHASH(self, t):
+ r'[ \t]*\#'
+ if self.line_pattern.match(t.lexer.lexdata, pos=t.lexer.lexpos):
+ t.lexer.begin('ppline')
+ self.pp_line = self.pp_filename = None
+ elif self.pragma_pattern.match(t.lexer.lexdata, pos=t.lexer.lexpos):
+ t.lexer.begin('pppragma')
+ else:
+ t.type = 'PPHASH'
+ return t
+
+ ##
+ ## Rules for the ppline state
+ ##
+ @TOKEN(string_literal)
+ def t_ppline_FILENAME(self, t):
+ if self.pp_line is None:
+ self._error('filename before line number in #line', t)
+ else:
+ self.pp_filename = t.value.lstrip('"').rstrip('"')
+
+ @TOKEN(decimal_constant)
+ def t_ppline_LINE_NUMBER(self, t):
+ if self.pp_line is None:
+ self.pp_line = t.value
+ else:
+ # Ignore: GCC's cpp sometimes inserts a numeric flag
+ # after the file name
+ pass
+
+ def t_ppline_NEWLINE(self, t):
+ r'\n'
+ if self.pp_line is None:
+ self._error('line number missing in #line', t)
+ else:
+ self.lexer.lineno = int(self.pp_line)
+
+ if self.pp_filename is not None:
+ self.filename = self.pp_filename
+
+ t.lexer.begin('INITIAL')
+
+ def t_ppline_PPLINE(self, t):
+ r'line'
+ pass
+
+ t_ppline_ignore = ' \t'
+
+ def t_ppline_error(self, t):
+ self._error('invalid #line directive', t)
+
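+    # Illustrative note (not in upstream): for a directive like
+    # '#line 7 "foo.c"', the rules above set self.lexer.lineno to 7 and
+    # self.filename to 'foo.c' once the terminating newline is reached.
+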
+ ##
+ ## Rules for the pppragma state
+ ##
+ def t_pppragma_NEWLINE(self, t):
+ r'\n'
+ t.lexer.lineno += 1
+ t.lexer.begin('INITIAL')
+
+ def t_pppragma_PPPRAGMA(self, t):
+ r'pragma'
+ return t
+
+ t_pppragma_ignore = ' \t'
+
+ def t_pppragma_STR(self, t):
+ '.+'
+ t.type = 'PPPRAGMASTR'
+ return t
+
+ def t_pppragma_error(self, t):
+ self._error('invalid #pragma directive', t)
+
+ ##
+ ## Rules for the normal state
+ ##
+ t_ignore = ' \t'
+
+ # Newlines
+ def t_NEWLINE(self, t):
+ r'\n+'
+ t.lexer.lineno += t.value.count("\n")
+
+ # Operators
+ t_PLUS = r'\+'
+ t_MINUS = r'-'
+ t_TIMES = r'\*'
+ t_DIVIDE = r'/'
+ t_MOD = r'%'
+ t_OR = r'\|'
+ t_AND = r'&'
+ t_NOT = r'~'
+ t_XOR = r'\^'
+ t_LSHIFT = r'<<'
+ t_RSHIFT = r'>>'
+ t_LOR = r'\|\|'
+ t_LAND = r'&&'
+ t_LNOT = r'!'
+ t_LT = r'<'
+ t_GT = r'>'
+ t_LE = r'<='
+ t_GE = r'>='
+ t_EQ = r'=='
+ t_NE = r'!='
+
+ # Assignment operators
+ t_EQUALS = r'='
+ t_TIMESEQUAL = r'\*='
+ t_DIVEQUAL = r'/='
+ t_MODEQUAL = r'%='
+ t_PLUSEQUAL = r'\+='
+ t_MINUSEQUAL = r'-='
+ t_LSHIFTEQUAL = r'<<='
+ t_RSHIFTEQUAL = r'>>='
+ t_ANDEQUAL = r'&='
+ t_OREQUAL = r'\|='
+ t_XOREQUAL = r'\^='
+
+ # Increment/decrement
+ t_PLUSPLUS = r'\+\+'
+ t_MINUSMINUS = r'--'
+
+ # ->
+ t_ARROW = r'->'
+
+ # ?
+ t_CONDOP = r'\?'
+
+    # Delimiters
+ t_LPAREN = r'\('
+ t_RPAREN = r'\)'
+ t_LBRACKET = r'\['
+ t_RBRACKET = r'\]'
+ t_COMMA = r','
+ t_PERIOD = r'\.'
+ t_SEMI = r';'
+ t_COLON = r':'
+ t_ELLIPSIS = r'\.\.\.'
+
+ # Scope delimiters
+ # To see why on_lbrace_func is needed, consider:
+ # typedef char TT;
+ # void foo(int TT) { TT = 10; }
+ # TT x = 5;
+ # Outside the function, TT is a typedef, but inside (starting and ending
+ # with the braces) it's a parameter. The trouble begins with yacc's
+ # lookahead token. If we open a new scope in brace_open, then TT has
+ # already been read and incorrectly interpreted as TYPEID. So, we need
+ # to open and close scopes from within the lexer.
+ # Similar for the TT immediately outside the end of the function.
+ #
+ @TOKEN(r'\{')
+ def t_LBRACE(self, t):
+ self.on_lbrace_func()
+ return t
+ @TOKEN(r'\}')
+ def t_RBRACE(self, t):
+ self.on_rbrace_func()
+ return t
+
+ t_STRING_LITERAL = string_literal
+
+ # The following floating and integer constants are defined as
+ # functions to impose a strict order (otherwise, decimal
+ # is placed before the others because its regex is longer,
+ # and this is bad)
+ #
+ @TOKEN(floating_constant)
+ def t_FLOAT_CONST(self, t):
+ return t
+
+ @TOKEN(hex_floating_constant)
+ def t_HEX_FLOAT_CONST(self, t):
+ return t
+
+ @TOKEN(hex_constant)
+ def t_INT_CONST_HEX(self, t):
+ return t
+
+ @TOKEN(bin_constant)
+ def t_INT_CONST_BIN(self, t):
+ return t
+
+ @TOKEN(bad_octal_constant)
+ def t_BAD_CONST_OCT(self, t):
+ msg = "Invalid octal constant"
+ self._error(msg, t)
+
+ @TOKEN(octal_constant)
+ def t_INT_CONST_OCT(self, t):
+ return t
+
+ @TOKEN(decimal_constant)
+ def t_INT_CONST_DEC(self, t):
+ return t
+
+ # Must come before bad_char_const, to prevent it from
+ # catching valid char constants as invalid
+ #
+ @TOKEN(multicharacter_constant)
+ def t_INT_CONST_CHAR(self, t):
+ return t
+
+ @TOKEN(char_const)
+ def t_CHAR_CONST(self, t):
+ return t
+
+ @TOKEN(wchar_const)
+ def t_WCHAR_CONST(self, t):
+ return t
+
+ @TOKEN(unmatched_quote)
+ def t_UNMATCHED_QUOTE(self, t):
+ msg = "Unmatched '"
+ self._error(msg, t)
+
+ @TOKEN(bad_char_const)
+ def t_BAD_CHAR_CONST(self, t):
+ msg = "Invalid char constant %s" % t.value
+ self._error(msg, t)
+
+ @TOKEN(wstring_literal)
+ def t_WSTRING_LITERAL(self, t):
+ return t
+
+ # unmatched string literals are caught by the preprocessor
+
+ @TOKEN(bad_string_literal)
+ def t_BAD_STRING_LITERAL(self, t):
+ msg = "String contains invalid escape code"
+ self._error(msg, t)
+
+ @TOKEN(identifier)
+ def t_ID(self, t):
+ t.type = self.keyword_map.get(t.value, "ID")
+ if t.type == 'ID' and self.type_lookup_func(t.value):
+ t.type = "TYPEID"
+ return t
+
+ def t_error(self, t):
+ msg = 'Illegal character %s' % repr(t.value[0])
+ self._error(msg, t)
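+
+# Illustrative sketch (not part of upstream pycparser): driving the lexer
+# standalone, with no-op scope callbacks and no typedef lookup.
+#
+#     def on_error(msg, line, column):
+#         print('%s at %d:%d' % (msg, line, column))
+#
+#     clex = CLexer(on_error, lambda: None, lambda: None, lambda name: False)
+#     clex.build()
+#     clex.input('int x = 42;')
+#     tok = clex.token()
+#     while tok is not None:
+#         print(tok.type, tok.value)
+#         tok = clex.token()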
diff --git a/venv/Lib/site-packages/pycparser/c_parser.py b/venv/Lib/site-packages/pycparser/c_parser.py
new file mode 100644
index 000000000..744ede8ac
--- /dev/null
+++ b/venv/Lib/site-packages/pycparser/c_parser.py
@@ -0,0 +1,1863 @@
+#------------------------------------------------------------------------------
+# pycparser: c_parser.py
+#
+# CParser class: Parser and AST builder for the C language
+#
+# Eli Bendersky [https://eli.thegreenplace.net/]
+# License: BSD
+#------------------------------------------------------------------------------
+import re
+
+from .ply import yacc
+
+from . import c_ast
+from .c_lexer import CLexer
+from .plyparser import PLYParser, Coord, ParseError, parameterized, template
+from .ast_transforms import fix_switch_cases
+
+
+@template
+class CParser(PLYParser):
+ def __init__(
+ self,
+ lex_optimize=True,
+ lexer=CLexer,
+ lextab='pycparser.lextab',
+ yacc_optimize=True,
+ yacctab='pycparser.yacctab',
+ yacc_debug=False,
+ taboutputdir=''):
+ """ Create a new CParser.
+
+ Some arguments for controlling the debug/optimization
+ level of the parser are provided. The defaults are
+ tuned for release/performance mode.
+ The simple rules for using them are:
+ *) When tweaking CParser/CLexer, set these to False
+ *) When releasing a stable parser, set to True
+
+ lex_optimize:
+ Set to False when you're modifying the lexer.
+ Otherwise, changes in the lexer won't be used, if
+ some lextab.py file exists.
+ When releasing with a stable lexer, set to True
+ to save the re-generation of the lexer table on
+ each run.
+
+ lexer:
+ Set this parameter to define the lexer to use if
+ you're not using the default CLexer.
+
+ lextab:
+ Points to the lex table that's used for optimized
+ mode. Only if you're modifying the lexer and want
+ some tests to avoid re-generating the table, make
+ this point to a local lex table file (that's been
+ earlier generated with lex_optimize=True)
+
+ yacc_optimize:
+ Set to False when you're modifying the parser.
+ Otherwise, changes in the parser won't be used, if
+ some parsetab.py file exists.
+ When releasing with a stable parser, set to True
+ to save the re-generation of the parser table on
+ each run.
+
+ yacctab:
+ Points to the yacc table that's used for optimized
+ mode. Only if you're modifying the parser, make
+ this point to a local yacc table file
+
+ yacc_debug:
+ Generate a parser.out file that explains how yacc
+ built the parsing table from the grammar.
+
+ taboutputdir:
+ Set this parameter to control the location of generated
+ lextab and yacctab files.
+ """
+ self.clex = lexer(
+ error_func=self._lex_error_func,
+ on_lbrace_func=self._lex_on_lbrace_func,
+ on_rbrace_func=self._lex_on_rbrace_func,
+ type_lookup_func=self._lex_type_lookup_func)
+
+ self.clex.build(
+ optimize=lex_optimize,
+ lextab=lextab,
+ outputdir=taboutputdir)
+ self.tokens = self.clex.tokens
+
+ rules_with_opt = [
+ 'abstract_declarator',
+ 'assignment_expression',
+ 'declaration_list',
+ 'declaration_specifiers_no_type',
+ 'designation',
+ 'expression',
+ 'identifier_list',
+ 'init_declarator_list',
+ 'id_init_declarator_list',
+ 'initializer_list',
+ 'parameter_type_list',
+ 'block_item_list',
+ 'type_qualifier_list',
+ 'struct_declarator_list'
+ ]
+
+ for rule in rules_with_opt:
+ self._create_opt_rule(rule)
+
+ self.cparser = yacc.yacc(
+ module=self,
+ start='translation_unit_or_empty',
+ debug=yacc_debug,
+ optimize=yacc_optimize,
+ tabmodule=yacctab,
+ outputdir=taboutputdir)
+
+ # Stack of scopes for keeping track of symbols. _scope_stack[-1] is
+ # the current (topmost) scope. Each scope is a dictionary that
+ # specifies whether a name is a type. If _scope_stack[n][name] is
+ # True, 'name' is currently a type in the scope. If it's False,
+ # 'name' is used in the scope but not as a type (for instance, if we
+        # saw: int name;).
+ # If 'name' is not a key in _scope_stack[n] then 'name' was not defined
+ # in this scope at all.
+ self._scope_stack = [dict()]
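+        # Illustrative note (not in upstream): after parsing
+        #     typedef int T; int x;
+        # at file scope, the stack would be [{'T': True, 'x': False}].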
+
+ # Keeps track of the last token given to yacc (the lookahead token)
+ self._last_yielded_token = None
+
+ def parse(self, text, filename='', debuglevel=0):
+ """ Parses C code and returns an AST.
+
+ text:
+ A string containing the C source code
+
+ filename:
+ Name of the file being parsed (for meaningful
+ error messages)
+
+ debuglevel:
+ Debug level to yacc
+ """
+ self.clex.filename = filename
+ self.clex.reset_lineno()
+ self._scope_stack = [dict()]
+ self._last_yielded_token = None
+ return self.cparser.parse(
+ input=text,
+ lexer=self.clex,
+ debug=debuglevel)
+
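+    # Illustrative usage (not part of upstream pycparser):
+    #
+    #     parser = CParser()
+    #     ast = parser.parse('int main(void) { return 0; }', filename='<demo>')
+    #     ast.show()
+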
+ ######################-- PRIVATE --######################
+
+ def _push_scope(self):
+ self._scope_stack.append(dict())
+
+ def _pop_scope(self):
+ assert len(self._scope_stack) > 1
+ self._scope_stack.pop()
+
+ def _add_typedef_name(self, name, coord):
+ """ Add a new typedef name (ie a TYPEID) to the current scope
+ """
+ if not self._scope_stack[-1].get(name, True):
+ self._parse_error(
+ "Typedef %r previously declared as non-typedef "
+ "in this scope" % name, coord)
+ self._scope_stack[-1][name] = True
+
+ def _add_identifier(self, name, coord):
+ """ Add a new object, function, or enum member name (ie an ID) to the
+ current scope
+ """
+ if self._scope_stack[-1].get(name, False):
+ self._parse_error(
+ "Non-typedef %r previously declared as typedef "
+ "in this scope" % name, coord)
+ self._scope_stack[-1][name] = False
+
+ def _is_type_in_scope(self, name):
+ """ Is *name* a typedef-name in the current scope?
+ """
+ for scope in reversed(self._scope_stack):
+ # If name is an identifier in this scope it shadows typedefs in
+ # higher scopes.
+ in_scope = scope.get(name)
+ if in_scope is not None: return in_scope
+ return False
+
+ def _lex_error_func(self, msg, line, column):
+ self._parse_error(msg, self._coord(line, column))
+
+ def _lex_on_lbrace_func(self):
+ self._push_scope()
+
+ def _lex_on_rbrace_func(self):
+ self._pop_scope()
+
+ def _lex_type_lookup_func(self, name):
+ """ Looks up types that were previously defined with
+ typedef.
+ Passed to the lexer for recognizing identifiers that
+ are types.
+ """
+ is_type = self._is_type_in_scope(name)
+ return is_type
+
+ def _get_yacc_lookahead_token(self):
+ """ We need access to yacc's lookahead token in certain cases.
+ This is the last token yacc requested from the lexer, so we
+ ask the lexer.
+ """
+ return self.clex.last_token
+
+ # To understand what's going on here, read sections A.8.5 and
+ # A.8.6 of K&R2 very carefully.
+ #
+ # A C type consists of a basic type declaration, with a list
+ # of modifiers. For example:
+ #
+ # int *c[5];
+ #
+ # The basic declaration here is 'int c', and the pointer and
+ # the array are the modifiers.
+ #
+ # Basic declarations are represented by TypeDecl (from module c_ast) and the
+ # modifiers are FuncDecl, PtrDecl and ArrayDecl.
+ #
+ # The standard states that whenever a new modifier is parsed, it should be
+ # added to the end of the list of modifiers. For example:
+ #
+ # K&R2 A.8.6.2: Array Declarators
+ #
+ # In a declaration T D where D has the form
+ # D1 [constant-expression-opt]
+ # and the type of the identifier in the declaration T D1 is
+ # "type-modifier T", the type of the
+ # identifier of D is "type-modifier array of T"
+ #
+ # This is what this method does. The declarator it receives
+ # can be a list of declarators ending with TypeDecl. It
+ # tacks the modifier to the end of this list, just before
+ # the TypeDecl.
+ #
+ # Additionally, the modifier may be a list itself. This is
+ # useful for pointers, that can come as a chain from the rule
+ # p_pointer. In this case, the whole modifier list is spliced
+ # into the new location.
+ def _type_modify_decl(self, decl, modifier):
+ """ Tacks a type modifier on a declarator, and returns
+ the modified declarator.
+
+ Note: the declarator and modifier may be modified
+ """
+ #~ print '****'
+ #~ decl.show(offset=3)
+ #~ modifier.show(offset=3)
+ #~ print '****'
+
+ modifier_head = modifier
+ modifier_tail = modifier
+
+ # The modifier may be a nested list. Reach its tail.
+ #
+ while modifier_tail.type:
+ modifier_tail = modifier_tail.type
+
+ # If the decl is a basic type, just tack the modifier onto
+ # it
+ #
+ if isinstance(decl, c_ast.TypeDecl):
+ modifier_tail.type = decl
+ return modifier
+ else:
+ # Otherwise, the decl is a list of modifiers. Reach
+ # its tail and splice the modifier onto the tail,
+ # pointing to the underlying basic type.
+ #
+ decl_tail = decl
+
+ while not isinstance(decl_tail.type, c_ast.TypeDecl):
+ decl_tail = decl_tail.type
+
+ modifier_tail.type = decl_tail.type
+ decl_tail.type = modifier_head
+ return decl
+
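+ # For illustration (a sketch, not from the original source): for the
+ # declaration 'int *c[5];' above, the declarator arrives as
+ # ArrayDecl -> TypeDecl('c'), and splicing in the PtrDecl modifier
+ # yields ArrayDecl -> PtrDecl -> TypeDecl('c'), i.e. "array of 5
+ # pointers to int" once the basic type is attached.
+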
+ # Due to the order in which declarators are constructed,
+ # they have to be fixed in order to look like a normal AST.
+ #
+ # When a declaration arrives from syntax construction, it has
+ # these problems:
+ # * The innermost TypeDecl has no type (because the basic
+ # type is only known at the uppermost declaration level)
+ # * The declaration has no variable name, since that is saved
+ # in the innermost TypeDecl
+ # * The typename of the declaration is a list of type
+ # specifiers, and not a node. Here, basic identifier types
+ # should be separated from more complex types like enums
+ # and structs.
+ #
+ # This method fixes these problems.
+ #
+ def _fix_decl_name_type(self, decl, typename):
+ """ Fixes a declaration. Modifies decl.
+ """
+ # Reach the underlying basic type
+ #
+ type = decl
+ while not isinstance(type, c_ast.TypeDecl):
+ type = type.type
+
+ decl.name = type.declname
+ type.quals = decl.quals
+
+ # The typename is a list of types. If any type in this
+ # list isn't an IdentifierType, it must be the only
+ # type in the list (it's illegal to declare "int enum ..")
+ # If all the types are basic, they're collected in the
+ # IdentifierType holder.
+ #
+ for tn in typename:
+ if not isinstance(tn, c_ast.IdentifierType):
+ if len(typename) > 1:
+ self._parse_error(
+ "Invalid multiple types specified", tn.coord)
+ else:
+ type.type = tn
+ return decl
+
+ if not typename:
+ # Functions default to returning int
+ #
+ if not isinstance(decl.type, c_ast.FuncDecl):
+ self._parse_error(
+ "Missing type in declaration", decl.coord)
+ type.type = c_ast.IdentifierType(
+ ['int'],
+ coord=decl.coord)
+ else:
+ # At this point, we know that typename is a list of IdentifierType
+ # nodes. Concatenate all the names into a single list.
+ #
+ type.type = c_ast.IdentifierType(
+ [name for id in typename for name in id.names],
+ coord=typename[0].coord)
+ return decl
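+
+ # For illustration (a sketch, not from the original source): for
+ # 'unsigned long x;', typename arrives as two IdentifierType nodes
+ # (['unsigned'], ['long']); their names are concatenated, so the
+ # TypeDecl ends up holding IdentifierType(['unsigned', 'long']).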
+
+ def _add_declaration_specifier(self, declspec, newspec, kind, append=False):
+ """ Declaration specifiers are represented by a dictionary
+ with the entries:
+ * qual: a list of type qualifiers
+ * storage: a list of storage type qualifiers
+ * type: a list of type specifiers
+ * function: a list of function specifiers
+
+ This method is given a declaration specifier, and a
+ new specifier of a given kind.
+ If `append` is True, the new specifier is added to the end of
+ the specifiers list, otherwise it's added at the beginning.
+ Returns the declaration specifier, with the new
+ specifier incorporated.
+ """
+ spec = declspec or dict(qual=[], storage=[], type=[], function=[])
+
+ if append:
+ spec[kind].append(newspec)
+ else:
+ spec[kind].insert(0, newspec)
+
+ return spec
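+
+ # For illustration (a sketch, not from the original source): parsing
+ # 'static const int' builds up, one call at a time, a dict like
+ #   {'qual': ['const'], 'storage': ['static'],
+ #    'type': [IdentifierType(['int'])], 'function': []}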
+
+ def _build_declarations(self, spec, decls, typedef_namespace=False):
+ """ Builds a list of declarations all sharing the given specifiers.
+ If typedef_namespace is true, each declared name is added
+ to the "typedef namespace", which also includes objects,
+ functions, and enum constants.
+ """
+ is_typedef = 'typedef' in spec['storage']
+ declarations = []
+
+ # Bit-fields are allowed to be unnamed.
+ #
+ if decls[0].get('bitsize') is not None:
+ pass
+
+ # When redeclaring typedef names as identifiers in inner scopes, a
+ # problem can occur where the identifier gets grouped into
+ # spec['type'], leaving decl as None. This can only occur for the
+ # first declarator.
+ #
+ elif decls[0]['decl'] is None:
+ if len(spec['type']) < 2 or len(spec['type'][-1].names) != 1 or \
+ not self._is_type_in_scope(spec['type'][-1].names[0]):
+ coord = '?'
+ for t in spec['type']:
+ if hasattr(t, 'coord'):
+ coord = t.coord
+ break
+ self._parse_error('Invalid declaration', coord)
+
+ # Make this look as if it came from "direct_declarator:ID"
+ decls[0]['decl'] = c_ast.TypeDecl(
+ declname=spec['type'][-1].names[0],
+ type=None,
+ quals=None,
+ coord=spec['type'][-1].coord)
+ # Remove the "new" type's name from the end of spec['type']
+ del spec['type'][-1]
+
+ # A similar problem can occur where the declaration ends up looking
+ # like an abstract declarator. Give it a name if this is the case.
+ #
+ elif not isinstance(decls[0]['decl'],
+ (c_ast.Struct, c_ast.Union, c_ast.IdentifierType)):
+ decls_0_tail = decls[0]['decl']
+ while not isinstance(decls_0_tail, c_ast.TypeDecl):
+ decls_0_tail = decls_0_tail.type
+ if decls_0_tail.declname is None:
+ decls_0_tail.declname = spec['type'][-1].names[0]
+ del spec['type'][-1]
+
+ for decl in decls:
+ assert decl['decl'] is not None
+ if is_typedef:
+ declaration = c_ast.Typedef(
+ name=None,
+ quals=spec['qual'],
+ storage=spec['storage'],
+ type=decl['decl'],
+ coord=decl['decl'].coord)
+ else:
+ declaration = c_ast.Decl(
+ name=None,
+ quals=spec['qual'],
+ storage=spec['storage'],
+ funcspec=spec['function'],
+ type=decl['decl'],
+ init=decl.get('init'),
+ bitsize=decl.get('bitsize'),
+ coord=decl['decl'].coord)
+
+ if isinstance(declaration.type,
+ (c_ast.Struct, c_ast.Union, c_ast.IdentifierType)):
+ fixed_decl = declaration
+ else:
+ fixed_decl = self._fix_decl_name_type(declaration, spec['type'])
+
+ # Add the type name defined by typedef to a
+ # symbol table (for usage in the lexer)
+ #
+ if typedef_namespace:
+ if is_typedef:
+ self._add_typedef_name(fixed_decl.name, fixed_decl.coord)
+ else:
+ self._add_identifier(fixed_decl.name, fixed_decl.coord)
+
+ declarations.append(fixed_decl)
+
+ return declarations
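+
+ # For illustration (a sketch, not from the original source):
+ # 'int x, *px;' comes in as one spec with two declarators and is
+ # returned as two Decl nodes, one for 'x' (TypeDecl) and one for
+ # 'px' (PtrDecl -> TypeDecl), both sharing the ['int'] specifiers.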
+
+ def _build_function_definition(self, spec, decl, param_decls, body):
+ """ Builds a function definition.
+ """
+ assert 'typedef' not in spec['storage']
+
+ declaration = self._build_declarations(
+ spec=spec,
+ decls=[dict(decl=decl, init=None)],
+ typedef_namespace=True)[0]
+
+ return c_ast.FuncDef(
+ decl=declaration,
+ param_decls=param_decls,
+ body=body,
+ coord=decl.coord)
+
+ def _select_struct_union_class(self, token):
+ """ Given a token (either STRUCT or UNION), selects the
+ appropriate AST class.
+ """
+ if token == 'struct':
+ return c_ast.Struct
+ else:
+ return c_ast.Union
+
+ ##
+ ## Precedence and associativity of operators
+ ##
+ precedence = (
+ ('left', 'LOR'),
+ ('left', 'LAND'),
+ ('left', 'OR'),
+ ('left', 'XOR'),
+ ('left', 'AND'),
+ ('left', 'EQ', 'NE'),
+ ('left', 'GT', 'GE', 'LT', 'LE'),
+ ('left', 'RSHIFT', 'LSHIFT'),
+ ('left', 'PLUS', 'MINUS'),
+ ('left', 'TIMES', 'DIVIDE', 'MOD')
+ )
+
+ ##
+ ## Grammar productions
+ ## Implementation of the BNF defined in K&R2 A.13
+ ##
+
+ # Wrapper around a translation unit, to allow for empty input.
+ # Not strictly part of the C99 Grammar, but useful in practice.
+ #
+ def p_translation_unit_or_empty(self, p):
+ """ translation_unit_or_empty : translation_unit
+ | empty
+ """
+ if p[1] is None:
+ p[0] = c_ast.FileAST([])
+ else:
+ p[0] = c_ast.FileAST(p[1])
+
+ def p_translation_unit_1(self, p):
+ """ translation_unit : external_declaration
+ """
+ # Note: external_declaration is already a list
+ #
+ p[0] = p[1]
+
+ def p_translation_unit_2(self, p):
+ """ translation_unit : translation_unit external_declaration
+ """
+ p[1].extend(p[2])
+ p[0] = p[1]
+
+ # Declarations always come as lists (because they can be
+ # several in one line), so we wrap the function definition
+ # into a list as well, to make the return value of
+ # external_declaration homogeneous.
+ #
+ def p_external_declaration_1(self, p):
+ """ external_declaration : function_definition
+ """
+ p[0] = [p[1]]
+
+ def p_external_declaration_2(self, p):
+ """ external_declaration : declaration
+ """
+ p[0] = p[1]
+
+ def p_external_declaration_3(self, p):
+ """ external_declaration : pp_directive
+ | pppragma_directive
+ """
+ p[0] = [p[1]]
+
+ def p_external_declaration_4(self, p):
+ """ external_declaration : SEMI
+ """
+ p[0] = []
+
+ def p_pp_directive(self, p):
+ """ pp_directive : PPHASH
+ """
+ self._parse_error('Directives not supported yet',
+ self._token_coord(p, 1))
+
+ def p_pppragma_directive(self, p):
+ """ pppragma_directive : PPPRAGMA
+ | PPPRAGMA PPPRAGMASTR
+ """
+ if len(p) == 3:
+ p[0] = c_ast.Pragma(p[2], self._token_coord(p, 2))
+ else:
+ p[0] = c_ast.Pragma("", self._token_coord(p, 1))
+
+ # In function definitions, the declarator can be followed by
+ # a declaration list, for old "K&R style" function definitions.
+ #
+ def p_function_definition_1(self, p):
+ """ function_definition : id_declarator declaration_list_opt compound_statement
+ """
+ # no declaration specifiers - 'int' becomes the default type
+ spec = dict(
+ qual=[],
+ storage=[],
+ type=[c_ast.IdentifierType(['int'],
+ coord=self._token_coord(p, 1))],
+ function=[])
+
+ p[0] = self._build_function_definition(
+ spec=spec,
+ decl=p[1],
+ param_decls=p[2],
+ body=p[3])
+
+ def p_function_definition_2(self, p):
+ """ function_definition : declaration_specifiers id_declarator declaration_list_opt compound_statement
+ """
+ spec = p[1]
+
+ p[0] = self._build_function_definition(
+ spec=spec,
+ decl=p[2],
+ param_decls=p[3],
+ body=p[4])
+
+ def p_statement(self, p):
+ """ statement : labeled_statement
+ | expression_statement
+ | compound_statement
+ | selection_statement
+ | iteration_statement
+ | jump_statement
+ | pppragma_directive
+ """
+ p[0] = p[1]
+
+ # A pragma is generally considered a decorator rather than an actual statement.
+ # Still, for the purposes of analyzing an abstract syntax tree of C code,
+ # pragmas should not be ignored; previously they were treated as statements.
+ # This presents a problem for constructs that take a statement such as labeled_statements,
+ # selection_statements, and iteration_statements, causing a misleading structure
+ # in the AST. For example, consider the following C code.
+ #
+ # for (int i = 0; i < 3; i++)
+ # #pragma omp critical
+ # sum += 1;
+ #
+ # This code will compile and execute "sum += 1;" as the body of the for loop.
+ # Previous implementations of pycparser would render the AST for this
+ # block of code as follows:
+ #
+ # For:
+ # DeclList:
+ # Decl: i, [], [], []
+ # TypeDecl: i, []
+ # IdentifierType: ['int']
+ # Constant: int, 0
+ # BinaryOp: <
+ # ID: i
+ # Constant: int, 3
+ # UnaryOp: p++
+ # ID: i
+ # Pragma: omp critical
+ # Assignment: +=
+ # ID: sum
+ # Constant: int, 1
+ #
+ # This AST misleadingly takes the Pragma as the body of the loop and the
+ # assignment then becomes a sibling of the loop.
+ #
+ # To solve edge cases like these, the pragmacomp_or_statement rule groups
+ # a pragma and its following statement (which would otherwise be orphaned)
+ # using a compound block, effectively turning the above code into:
+ #
+ # for (int i = 0; i < 3; i++) {
+ # #pragma omp critical
+ # sum += 1;
+ # }
+ def p_pragmacomp_or_statement(self, p):
+ """ pragmacomp_or_statement : pppragma_directive statement
+ | statement
+ """
+ if isinstance(p[1], c_ast.Pragma) and len(p) == 3:
+ p[0] = c_ast.Compound(
+ block_items=[p[1], p[2]],
+ coord=self._token_coord(p, 1))
+ else:
+ p[0] = p[1]
+
+ # In C, declarations can come several in a line:
+ # int x, *px, romulo = 5;
+ #
+ # However, for the AST, we will split them to separate Decl
+ # nodes.
+ #
+ # This rule splits its declarations and always returns a list
+ # of Decl nodes, even if it's one element long.
+ #
+ def p_decl_body(self, p):
+ """ decl_body : declaration_specifiers init_declarator_list_opt
+ | declaration_specifiers_no_type id_init_declarator_list_opt
+ """
+ spec = p[1]
+
+ # p[2] (init_declarator_list_opt) is either a list or None
+ #
+ if p[2] is None:
+ # By the standard, you must have at least one declarator unless
+ # declaring a structure tag, a union tag, or the members of an
+ # enumeration.
+ #
+ ty = spec['type']
+ s_u_or_e = (c_ast.Struct, c_ast.Union, c_ast.Enum)
+ if len(ty) == 1 and isinstance(ty[0], s_u_or_e):
+ decls = [c_ast.Decl(
+ name=None,
+ quals=spec['qual'],
+ storage=spec['storage'],
+ funcspec=spec['function'],
+ type=ty[0],
+ init=None,
+ bitsize=None,
+ coord=ty[0].coord)]
+
+ # However, this case can also occur on redeclared identifiers in
+ # an inner scope. The trouble is that the redeclared type's name
+ # gets grouped into declaration_specifiers; _build_declarations
+ # compensates for this.
+ #
+ else:
+ decls = self._build_declarations(
+ spec=spec,
+ decls=[dict(decl=None, init=None)],
+ typedef_namespace=True)
+
+ else:
+ decls = self._build_declarations(
+ spec=spec,
+ decls=p[2],
+ typedef_namespace=True)
+
+ p[0] = decls
+
+ # The declaration has been split to a decl_body sub-rule and
+ # SEMI, because having them in a single rule created a problem
+ # for defining typedefs.
+ #
+ # If a typedef line was directly followed by a line using the
+ # type defined with the typedef, the type would not be
+ # recognized. This is because to reduce the declaration rule,
+ # the parser's lookahead asked for the token after SEMI, which
+ # was the type from the next line, and the lexer had no chance
+ # to see the updated type symbol table.
+ #
+ # Splitting solves this problem, because after seeing SEMI,
+ # the parser reduces decl_body, which actually adds the new
+ # type into the table to be seen by the lexer before the next
+ # line is reached.
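+ #
+ # For example (illustrative):
+ #   typedef int T;
+ #   T x;
+ # With a single combined rule, the lexer would tokenize 'T' on the
+ # second line before the typedef was registered, returning ID
+ # instead of TYPEID.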
+ def p_declaration(self, p):
+ """ declaration : decl_body SEMI
+ """
+ p[0] = p[1]
+
+ # Since each declaration is a list of declarations, this
+ # rule will combine all the declarations and return a single
+ # list
+ #
+ def p_declaration_list(self, p):
+ """ declaration_list : declaration
+ | declaration_list declaration
+ """
+ p[0] = p[1] if len(p) == 2 else p[1] + p[2]
+
+ # To know when declaration-specifiers end and declarators begin,
+ # we require declaration-specifiers to have at least one
+ # type-specifier, and disallow typedef-names after we've seen any
+ # type-specifier. These are both required by the spec.
+ #
+ def p_declaration_specifiers_no_type_1(self, p):
+ """ declaration_specifiers_no_type : type_qualifier declaration_specifiers_no_type_opt
+ """
+ p[0] = self._add_declaration_specifier(p[2], p[1], 'qual')
+
+ def p_declaration_specifiers_no_type_2(self, p):
+ """ declaration_specifiers_no_type : storage_class_specifier declaration_specifiers_no_type_opt
+ """
+ p[0] = self._add_declaration_specifier(p[2], p[1], 'storage')
+
+ def p_declaration_specifiers_no_type_3(self, p):
+ """ declaration_specifiers_no_type : function_specifier declaration_specifiers_no_type_opt
+ """
+ p[0] = self._add_declaration_specifier(p[2], p[1], 'function')
+
+ def p_declaration_specifiers_1(self, p):
+ """ declaration_specifiers : declaration_specifiers type_qualifier
+ """
+ p[0] = self._add_declaration_specifier(p[1], p[2], 'qual', append=True)
+
+ def p_declaration_specifiers_2(self, p):
+ """ declaration_specifiers : declaration_specifiers storage_class_specifier
+ """
+ p[0] = self._add_declaration_specifier(p[1], p[2], 'storage', append=True)
+
+ def p_declaration_specifiers_3(self, p):
+ """ declaration_specifiers : declaration_specifiers function_specifier
+ """
+ p[0] = self._add_declaration_specifier(p[1], p[2], 'function', append=True)
+
+ def p_declaration_specifiers_4(self, p):
+ """ declaration_specifiers : declaration_specifiers type_specifier_no_typeid
+ """
+ p[0] = self._add_declaration_specifier(p[1], p[2], 'type', append=True)
+
+ def p_declaration_specifiers_5(self, p):
+ """ declaration_specifiers : type_specifier
+ """
+ p[0] = self._add_declaration_specifier(None, p[1], 'type')
+
+ def p_declaration_specifiers_6(self, p):
+ """ declaration_specifiers : declaration_specifiers_no_type type_specifier
+ """
+ p[0] = self._add_declaration_specifier(p[1], p[2], 'type', append=True)
+
+ def p_storage_class_specifier(self, p):
+ """ storage_class_specifier : AUTO
+ | REGISTER
+ | STATIC
+ | EXTERN
+ | TYPEDEF
+ """
+ p[0] = p[1]
+
+ def p_function_specifier(self, p):
+ """ function_specifier : INLINE
+ """
+ p[0] = p[1]
+
+ def p_type_specifier_no_typeid(self, p):
+ """ type_specifier_no_typeid : VOID
+ | _BOOL
+ | CHAR
+ | SHORT
+ | INT
+ | LONG
+ | FLOAT
+ | DOUBLE
+ | _COMPLEX
+ | SIGNED
+ | UNSIGNED
+ | __INT128
+ """
+ p[0] = c_ast.IdentifierType([p[1]], coord=self._token_coord(p, 1))
+
+ def p_type_specifier(self, p):
+ """ type_specifier : typedef_name
+ | enum_specifier
+ | struct_or_union_specifier
+ | type_specifier_no_typeid
+ """
+ p[0] = p[1]
+
+ def p_type_qualifier(self, p):
+ """ type_qualifier : CONST
+ | RESTRICT
+ | VOLATILE
+ """
+ p[0] = p[1]
+
+ def p_init_declarator_list(self, p):
+ """ init_declarator_list : init_declarator
+ | init_declarator_list COMMA init_declarator
+ """
+ p[0] = p[1] + [p[3]] if len(p) == 4 else [p[1]]
+
+ # Returns a {decl=<declarator> : init=<initializer>} dictionary
+ # If there's no initializer, uses None
+ #
+ def p_init_declarator(self, p):
+ """ init_declarator : declarator
+ | declarator EQUALS initializer
+ """
+ p[0] = dict(decl=p[1], init=(p[3] if len(p) > 2 else None))
+
+ def p_id_init_declarator_list(self, p):
+ """ id_init_declarator_list : id_init_declarator
+ | id_init_declarator_list COMMA init_declarator
+ """
+ p[0] = p[1] + [p[3]] if len(p) == 4 else [p[1]]
+
+ def p_id_init_declarator(self, p):
+ """ id_init_declarator : id_declarator
+ | id_declarator EQUALS initializer
+ """
+ p[0] = dict(decl=p[1], init=(p[3] if len(p) > 2 else None))
+
+ # Require at least one type specifier in a specifier-qualifier-list
+ #
+ def p_specifier_qualifier_list_1(self, p):
+ """ specifier_qualifier_list : specifier_qualifier_list type_specifier_no_typeid
+ """
+ p[0] = self._add_declaration_specifier(p[1], p[2], 'type', append=True)
+
+ def p_specifier_qualifier_list_2(self, p):
+ """ specifier_qualifier_list : specifier_qualifier_list type_qualifier
+ """
+ p[0] = self._add_declaration_specifier(p[1], p[2], 'qual', append=True)
+
+ def p_specifier_qualifier_list_3(self, p):
+ """ specifier_qualifier_list : type_specifier
+ """
+ p[0] = self._add_declaration_specifier(None, p[1], 'type')
+
+ def p_specifier_qualifier_list_4(self, p):
+ """ specifier_qualifier_list : type_qualifier_list type_specifier
+ """
+ spec = dict(qual=p[1], storage=[], type=[], function=[])
+ p[0] = self._add_declaration_specifier(spec, p[2], 'type', append=True)
+
+ # TYPEID is allowed here (and in other struct/enum-related tag names) because
+ # struct/enum tags reside in their own namespace and can be named the same as types.
+ #
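+ # For example (illustrative):
+ #   typedef int Node;
+ #   struct Node { int v; };   /* legal: tag and typedef coexist */
+ #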
+ def p_struct_or_union_specifier_1(self, p):
+ """ struct_or_union_specifier : struct_or_union ID
+ | struct_or_union TYPEID
+ """
+ klass = self._select_struct_union_class(p[1])
+ # None means no list of members
+ p[0] = klass(
+ name=p[2],
+ decls=None,
+ coord=self._token_coord(p, 2))
+
+ def p_struct_or_union_specifier_2(self, p):
+ """ struct_or_union_specifier : struct_or_union brace_open struct_declaration_list brace_close
+ | struct_or_union brace_open brace_close
+ """
+ klass = self._select_struct_union_class(p[1])
+ if len(p) == 4:
+ # Empty sequence means an empty list of members
+ p[0] = klass(
+ name=None,
+ decls=[],
+ coord=self._token_coord(p, 2))
+ else:
+ p[0] = klass(
+ name=None,
+ decls=p[3],
+ coord=self._token_coord(p, 2))
+
+ def p_struct_or_union_specifier_3(self, p):
+ """ struct_or_union_specifier : struct_or_union ID brace_open struct_declaration_list brace_close
+ | struct_or_union ID brace_open brace_close
+ | struct_or_union TYPEID brace_open struct_declaration_list brace_close
+ | struct_or_union TYPEID brace_open brace_close
+ """
+ klass = self._select_struct_union_class(p[1])
+ if len(p) == 5:
+ # Empty sequence means an empty list of members
+ p[0] = klass(
+ name=p[2],
+ decls=[],
+ coord=self._token_coord(p, 2))
+ else:
+ p[0] = klass(
+ name=p[2],
+ decls=p[4],
+ coord=self._token_coord(p, 2))
+
+ def p_struct_or_union(self, p):
+ """ struct_or_union : STRUCT
+ | UNION
+ """
+ p[0] = p[1]
+
+ # Combine all declarations into a single list
+ #
+ def p_struct_declaration_list(self, p):
+ """ struct_declaration_list : struct_declaration
+ | struct_declaration_list struct_declaration
+ """
+ if len(p) == 2:
+ p[0] = p[1] or []
+ else:
+ p[0] = p[1] + (p[2] or [])
+
+ def p_struct_declaration_1(self, p):
+ """ struct_declaration : specifier_qualifier_list struct_declarator_list_opt SEMI
+ """
+ spec = p[1]
+ assert 'typedef' not in spec['storage']
+
+ if p[2] is not None:
+ decls = self._build_declarations(
+ spec=spec,
+ decls=p[2])
+
+ elif len(spec['type']) == 1:
+ # Anonymous struct/union, gcc extension, C1x feature.
+ # Although the standard only allows structs/unions here, I see no
+ # reason to disallow other types since some compilers have typedefs
+ # here, and pycparser isn't about rejecting all invalid code.
+ #
+ node = spec['type'][0]
+ if isinstance(node, c_ast.Node):
+ decl_type = node
+ else:
+ decl_type = c_ast.IdentifierType(node)
+
+ decls = self._build_declarations(
+ spec=spec,
+ decls=[dict(decl=decl_type)])
+
+ else:
+ # Structure/union members can have the same names as typedefs.
+ # The trouble is that the member's name gets grouped into
+ # specifier_qualifier_list; _build_declarations compensates.
+ #
+ decls = self._build_declarations(
+ spec=spec,
+ decls=[dict(decl=None, init=None)])
+
+ p[0] = decls
+
+ def p_struct_declaration_2(self, p):
+ """ struct_declaration : SEMI
+ """
+ p[0] = None
+
+ def p_struct_declaration_3(self, p):
+ """ struct_declaration : pppragma_directive
+ """
+ p[0] = [p[1]]
+
+ def p_struct_declarator_list(self, p):
+ """ struct_declarator_list : struct_declarator
+ | struct_declarator_list COMMA struct_declarator
+ """
+ p[0] = p[1] + [p[3]] if len(p) == 4 else [p[1]]
+
+ # struct_declarator passes up a dict with the keys: decl (for
+ # the underlying declarator) and bitsize (for the bitsize)
+ #
+ def p_struct_declarator_1(self, p):
+ """ struct_declarator : declarator
+ """
+ p[0] = {'decl': p[1], 'bitsize': None}
+
+ def p_struct_declarator_2(self, p):
+ """ struct_declarator : declarator COLON constant_expression
+ | COLON constant_expression
+ """
+ if len(p) > 3:
+ p[0] = {'decl': p[1], 'bitsize': p[3]}
+ else:
+ p[0] = {'decl': c_ast.TypeDecl(None, None, None), 'bitsize': p[2]}
+
+ def p_enum_specifier_1(self, p):
+ """ enum_specifier : ENUM ID
+ | ENUM TYPEID
+ """
+ p[0] = c_ast.Enum(p[2], None, self._token_coord(p, 1))
+
+ def p_enum_specifier_2(self, p):
+ """ enum_specifier : ENUM brace_open enumerator_list brace_close
+ """
+ p[0] = c_ast.Enum(None, p[3], self._token_coord(p, 1))
+
+ def p_enum_specifier_3(self, p):
+ """ enum_specifier : ENUM ID brace_open enumerator_list brace_close
+ | ENUM TYPEID brace_open enumerator_list brace_close
+ """
+ p[0] = c_ast.Enum(p[2], p[4], self._token_coord(p, 1))
+
+ def p_enumerator_list(self, p):
+ """ enumerator_list : enumerator
+ | enumerator_list COMMA
+ | enumerator_list COMMA enumerator
+ """
+ if len(p) == 2:
+ p[0] = c_ast.EnumeratorList([p[1]], p[1].coord)
+ elif len(p) == 3:
+ p[0] = p[1]
+ else:
+ p[1].enumerators.append(p[3])
+ p[0] = p[1]
+
+ def p_enumerator(self, p):
+ """ enumerator : ID
+ | ID EQUALS constant_expression
+ """
+ if len(p) == 2:
+ enumerator = c_ast.Enumerator(
+ p[1], None,
+ self._token_coord(p, 1))
+ else:
+ enumerator = c_ast.Enumerator(
+ p[1], p[3],
+ self._token_coord(p, 1))
+ self._add_identifier(enumerator.name, enumerator.coord)
+
+ p[0] = enumerator
+
+ def p_declarator(self, p):
+ """ declarator : id_declarator
+ | typeid_declarator
+ """
+ p[0] = p[1]
+
+ @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
+ def p_xxx_declarator_1(self, p):
+ """ xxx_declarator : direct_xxx_declarator
+ """
+ p[0] = p[1]
+
+ @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
+ def p_xxx_declarator_2(self, p):
+ """ xxx_declarator : pointer direct_xxx_declarator
+ """
+ p[0] = self._type_modify_decl(p[2], p[1])
+
+ @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
+ def p_direct_xxx_declarator_1(self, p):
+ """ direct_xxx_declarator : yyy
+ """
+ p[0] = c_ast.TypeDecl(
+ declname=p[1],
+ type=None,
+ quals=None,
+ coord=self._token_coord(p, 1))
+
+ @parameterized(('id', 'ID'), ('typeid', 'TYPEID'))
+ def p_direct_xxx_declarator_2(self, p):
+ """ direct_xxx_declarator : LPAREN xxx_declarator RPAREN
+ """
+ p[0] = p[2]
+
+ @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
+ def p_direct_xxx_declarator_3(self, p):
+ """ direct_xxx_declarator : direct_xxx_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET
+ """
+ quals = (p[3] if len(p) > 5 else []) or []
+ # Accept dimension qualifiers
+ # Per C99 6.7.5.3 p7
+ arr = c_ast.ArrayDecl(
+ type=None,
+ dim=p[4] if len(p) > 5 else p[3],
+ dim_quals=quals,
+ coord=p[1].coord)
+
+ p[0] = self._type_modify_decl(decl=p[1], modifier=arr)
+
+ @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
+ def p_direct_xxx_declarator_4(self, p):
+ """ direct_xxx_declarator : direct_xxx_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET
+ | direct_xxx_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET
+ """
+ # Using slice notation for PLY objects doesn't work in Python 3 for the
+ # version of PLY embedded with pycparser; see PLY Google Code issue 30.
+ # Work around that here by listing the two elements separately.
+ listed_quals = [item if isinstance(item, list) else [item]
+ for item in [p[3], p[4]]]
+ dim_quals = [qual for sublist in listed_quals for qual in sublist
+ if qual is not None]
+ arr = c_ast.ArrayDecl(
+ type=None,
+ dim=p[5],
+ dim_quals=dim_quals,
+ coord=p[1].coord)
+
+ p[0] = self._type_modify_decl(decl=p[1], modifier=arr)
+
+ # Special for VLAs
+ #
+ @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
+ def p_direct_xxx_declarator_5(self, p):
+ """ direct_xxx_declarator : direct_xxx_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET
+ """
+ arr = c_ast.ArrayDecl(
+ type=None,
+ dim=c_ast.ID(p[4], self._token_coord(p, 4)),
+ dim_quals=p[3] if p[3] is not None else [],
+ coord=p[1].coord)
+
+ p[0] = self._type_modify_decl(decl=p[1], modifier=arr)
+
+ @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
+ def p_direct_xxx_declarator_6(self, p):
+ """ direct_xxx_declarator : direct_xxx_declarator LPAREN parameter_type_list RPAREN
+ | direct_xxx_declarator LPAREN identifier_list_opt RPAREN
+ """
+ func = c_ast.FuncDecl(
+ args=p[3],
+ type=None,
+ coord=p[1].coord)
+
+ # To see why _get_yacc_lookahead_token is needed, consider:
+ # typedef char TT;
+ # void foo(int TT) { TT = 10; }
+ # Outside the function, TT is a typedef, but inside (starting and
+ # ending with the braces) it's a parameter. The trouble begins with
+ # yacc's lookahead token. We don't know if we're declaring or
+ # defining a function until we see LBRACE, but if we wait for yacc to
+ # trigger a rule on that token, then TT will have already been read
+ # and incorrectly interpreted as TYPEID. We need to add the
+ # parameters to the scope the moment the lexer sees LBRACE.
+ #
+ if self._get_yacc_lookahead_token().type == "LBRACE":
+ if func.args is not None:
+ for param in func.args.params:
+ if isinstance(param, c_ast.EllipsisParam): break
+ self._add_identifier(param.name, param.coord)
+
+ p[0] = self._type_modify_decl(decl=p[1], modifier=func)
+
+ def p_pointer(self, p):
+ """ pointer : TIMES type_qualifier_list_opt
+ | TIMES type_qualifier_list_opt pointer
+ """
+ coord = self._token_coord(p, 1)
+ # Pointer decls nest from inside out. This is important when different
+ # levels have different qualifiers. For example:
+ #
+ # char * const * p;
+ #
+ # Means "pointer to const pointer to char"
+ #
+ # While:
+ #
+ # char ** const p;
+ #
+ # Means "const pointer to pointer to char"
+ #
+ # So when we construct PtrDecl nestings, the leftmost pointer goes in
+ # as the most nested type.
+ nested_type = c_ast.PtrDecl(quals=p[2] or [], type=None, coord=coord)
+ if len(p) > 3:
+ tail_type = p[3]
+ while tail_type.type is not None:
+ tail_type = tail_type.type
+ tail_type.type = nested_type
+ p[0] = p[3]
+ else:
+ p[0] = nested_type
+
+ def p_type_qualifier_list(self, p):
+ """ type_qualifier_list : type_qualifier
+ | type_qualifier_list type_qualifier
+ """
+ p[0] = [p[1]] if len(p) == 2 else p[1] + [p[2]]
+
+ def p_parameter_type_list(self, p):
+ """ parameter_type_list : parameter_list
+ | parameter_list COMMA ELLIPSIS
+ """
+ if len(p) > 2:
+ p[1].params.append(c_ast.EllipsisParam(self._token_coord(p, 3)))
+
+ p[0] = p[1]
+
+ def p_parameter_list(self, p):
+ """ parameter_list : parameter_declaration
+ | parameter_list COMMA parameter_declaration
+ """
+ if len(p) == 2: # single parameter
+ p[0] = c_ast.ParamList([p[1]], p[1].coord)
+ else:
+ p[1].params.append(p[3])
+ p[0] = p[1]
+
+ # From ISO/IEC 9899:TC2, 6.7.5.3.11:
+ # "If, in a parameter declaration, an identifier can be treated either
+ # as a typedef name or as a parameter name, it shall be taken as a
+ # typedef name."
+ #
+ # Inside a parameter declaration, once we've reduced declaration specifiers,
+ # if we shift in an LPAREN and see a TYPEID, it could be either an abstract
+ # declarator or a declarator nested inside parens. This rule tells us to
+ # always treat it as an abstract declarator. Therefore, we only accept
+ # `id_declarator`s and `typeid_noparen_declarator`s.
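+ #
+ # For example (illustrative), after 'typedef int T;', the prototype
+ # 'int f(T);' declares one unnamed parameter of type T, not a
+ # parameter named T of type int.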
+ def p_parameter_declaration_1(self, p):
+ """ parameter_declaration : declaration_specifiers id_declarator
+ | declaration_specifiers typeid_noparen_declarator
+ """
+ spec = p[1]
+ if not spec['type']:
+ spec['type'] = [c_ast.IdentifierType(['int'],
+ coord=self._token_coord(p, 1))]
+ p[0] = self._build_declarations(
+ spec=spec,
+ decls=[dict(decl=p[2])])[0]
+
+ def p_parameter_declaration_2(self, p):
+ """ parameter_declaration : declaration_specifiers abstract_declarator_opt
+ """
+ spec = p[1]
+ if not spec['type']:
+ spec['type'] = [c_ast.IdentifierType(['int'],
+ coord=self._token_coord(p, 1))]
+
+ # Parameters can have the same names as typedefs. The trouble is that
+ # the parameter's name gets grouped into declaration_specifiers, making
+ # it look like an old-style declaration; compensate.
+ #
+ if len(spec['type']) > 1 and len(spec['type'][-1].names) == 1 and \
+ self._is_type_in_scope(spec['type'][-1].names[0]):
+ decl = self._build_declarations(
+ spec=spec,
+ decls=[dict(decl=p[2], init=None)])[0]
+
+ # This truly is an old-style parameter declaration
+ #
+ else:
+ decl = c_ast.Typename(
+ name='',
+ quals=spec['qual'],
+ type=p[2] or c_ast.TypeDecl(None, None, None),
+ coord=self._token_coord(p, 2))
+ typename = spec['type']
+ decl = self._fix_decl_name_type(decl, typename)
+
+ p[0] = decl
+
+ def p_identifier_list(self, p):
+ """ identifier_list : identifier
+ | identifier_list COMMA identifier
+ """
+ if len(p) == 2: # single parameter
+ p[0] = c_ast.ParamList([p[1]], p[1].coord)
+ else:
+ p[1].params.append(p[3])
+ p[0] = p[1]
+
+ def p_initializer_1(self, p):
+ """ initializer : assignment_expression
+ """
+ p[0] = p[1]
+
+ def p_initializer_2(self, p):
+ """ initializer : brace_open initializer_list_opt brace_close
+ | brace_open initializer_list COMMA brace_close
+ """
+ if p[2] is None:
+ p[0] = c_ast.InitList([], self._token_coord(p, 1))
+ else:
+ p[0] = p[2]
+
+ def p_initializer_list(self, p):
+ """ initializer_list : designation_opt initializer
+ | initializer_list COMMA designation_opt initializer
+ """
+ if len(p) == 3: # single initializer
+ init = p[2] if p[1] is None else c_ast.NamedInitializer(p[1], p[2])
+ p[0] = c_ast.InitList([init], p[2].coord)
+ else:
+ init = p[4] if p[3] is None else c_ast.NamedInitializer(p[3], p[4])
+ p[1].exprs.append(init)
+ p[0] = p[1]
+
+ def p_designation(self, p):
+ """ designation : designator_list EQUALS
+ """
+ p[0] = p[1]
+
+ # Designators are represented as a list of nodes, in the order in which
+ # they're written in the code.
+ #
+ def p_designator_list(self, p):
+ """ designator_list : designator
+ | designator_list designator
+ """
+ p[0] = [p[1]] if len(p) == 2 else p[1] + [p[2]]
+
+ def p_designator(self, p):
+ """ designator : LBRACKET constant_expression RBRACKET
+ | PERIOD identifier
+ """
+ p[0] = p[2]
+
+ def p_type_name(self, p):
+ """ type_name : specifier_qualifier_list abstract_declarator_opt
+ """
+ typename = c_ast.Typename(
+ name='',
+ quals=p[1]['qual'],
+ type=p[2] or c_ast.TypeDecl(None, None, None),
+ coord=self._token_coord(p, 2))
+
+ p[0] = self._fix_decl_name_type(typename, p[1]['type'])
+
+ def p_abstract_declarator_1(self, p):
+ """ abstract_declarator : pointer
+ """
+ dummytype = c_ast.TypeDecl(None, None, None)
+ p[0] = self._type_modify_decl(
+ decl=dummytype,
+ modifier=p[1])
+
+ def p_abstract_declarator_2(self, p):
+ """ abstract_declarator : pointer direct_abstract_declarator
+ """
+ p[0] = self._type_modify_decl(p[2], p[1])
+
+ def p_abstract_declarator_3(self, p):
+ """ abstract_declarator : direct_abstract_declarator
+ """
+ p[0] = p[1]
+
+ # Creating and using direct_abstract_declarator_opt here
+ # instead of listing both direct_abstract_declarator and the
+ # lack of it in the beginning of _1 and _2 caused two
+ # shift/reduce errors.
+ #
+ def p_direct_abstract_declarator_1(self, p):
+ """ direct_abstract_declarator : LPAREN abstract_declarator RPAREN """
+ p[0] = p[2]
+
+ def p_direct_abstract_declarator_2(self, p):
+ """ direct_abstract_declarator : direct_abstract_declarator LBRACKET assignment_expression_opt RBRACKET
+ """
+ arr = c_ast.ArrayDecl(
+ type=None,
+ dim=p[3],
+ dim_quals=[],
+ coord=p[1].coord)
+
+ p[0] = self._type_modify_decl(decl=p[1], modifier=arr)
+
+ def p_direct_abstract_declarator_3(self, p):
+ """ direct_abstract_declarator : LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET
+ """
+ quals = (p[2] if len(p) > 4 else []) or []
+ p[0] = c_ast.ArrayDecl(
+ type=c_ast.TypeDecl(None, None, None),
+ dim=p[3] if len(p) > 4 else p[2],
+ dim_quals=quals,
+ coord=self._token_coord(p, 1))
+
+ def p_direct_abstract_declarator_4(self, p):
+ """ direct_abstract_declarator : direct_abstract_declarator LBRACKET TIMES RBRACKET
+ """
+ arr = c_ast.ArrayDecl(
+ type=None,
+ dim=c_ast.ID(p[3], self._token_coord(p, 3)),
+ dim_quals=[],
+ coord=p[1].coord)
+
+ p[0] = self._type_modify_decl(decl=p[1], modifier=arr)
+
+ def p_direct_abstract_declarator_5(self, p):
+ """ direct_abstract_declarator : LBRACKET TIMES RBRACKET
+ """
+ p[0] = c_ast.ArrayDecl(
+ type=c_ast.TypeDecl(None, None, None),
+ dim=c_ast.ID(p[3], self._token_coord(p, 3)),
+ dim_quals=[],
+ coord=self._token_coord(p, 1))
+
+ def p_direct_abstract_declarator_6(self, p):
+ """ direct_abstract_declarator : direct_abstract_declarator LPAREN parameter_type_list_opt RPAREN
+ """
+ func = c_ast.FuncDecl(
+ args=p[3],
+ type=None,
+ coord=p[1].coord)
+
+ p[0] = self._type_modify_decl(decl=p[1], modifier=func)
+
+ def p_direct_abstract_declarator_7(self, p):
+ """ direct_abstract_declarator : LPAREN parameter_type_list_opt RPAREN
+ """
+ p[0] = c_ast.FuncDecl(
+ args=p[2],
+ type=c_ast.TypeDecl(None, None, None),
+ coord=self._token_coord(p, 1))
+
+ # declaration is a list, statement isn't. To make it consistent, block_item
+ # will always be a list
+ #
+ def p_block_item(self, p):
+ """ block_item : declaration
+ | statement
+ """
+ p[0] = p[1] if isinstance(p[1], list) else [p[1]]
+
+ # Since we made block_item a list, this just combines lists
+ #
+ def p_block_item_list(self, p):
+ """ block_item_list : block_item
+ | block_item_list block_item
+ """
+ # Empty block items (plain ';') produce [None], so ignore them
+ p[0] = p[1] if (len(p) == 2 or p[2] == [None]) else p[1] + p[2]
+
+ def p_compound_statement_1(self, p):
+ """ compound_statement : brace_open block_item_list_opt brace_close """
+ p[0] = c_ast.Compound(
+ block_items=p[2],
+ coord=self._token_coord(p, 1))
+
+ def p_labeled_statement_1(self, p):
+ """ labeled_statement : ID COLON pragmacomp_or_statement """
+ p[0] = c_ast.Label(p[1], p[3], self._token_coord(p, 1))
+
+ def p_labeled_statement_2(self, p):
+ """ labeled_statement : CASE constant_expression COLON pragmacomp_or_statement """
+ p[0] = c_ast.Case(p[2], [p[4]], self._token_coord(p, 1))
+
+ def p_labeled_statement_3(self, p):
+ """ labeled_statement : DEFAULT COLON pragmacomp_or_statement """
+ p[0] = c_ast.Default([p[3]], self._token_coord(p, 1))
+
+ def p_selection_statement_1(self, p):
+ """ selection_statement : IF LPAREN expression RPAREN pragmacomp_or_statement """
+ p[0] = c_ast.If(p[3], p[5], None, self._token_coord(p, 1))
+
+ def p_selection_statement_2(self, p):
+ """ selection_statement : IF LPAREN expression RPAREN statement ELSE pragmacomp_or_statement """
+ p[0] = c_ast.If(p[3], p[5], p[7], self._token_coord(p, 1))
+
+ def p_selection_statement_3(self, p):
+ """ selection_statement : SWITCH LPAREN expression RPAREN pragmacomp_or_statement """
+ p[0] = fix_switch_cases(
+ c_ast.Switch(p[3], p[5], self._token_coord(p, 1)))
+
+ def p_iteration_statement_1(self, p):
+ """ iteration_statement : WHILE LPAREN expression RPAREN pragmacomp_or_statement """
+ p[0] = c_ast.While(p[3], p[5], self._token_coord(p, 1))
+
+ def p_iteration_statement_2(self, p):
+ """ iteration_statement : DO pragmacomp_or_statement WHILE LPAREN expression RPAREN SEMI """
+ p[0] = c_ast.DoWhile(p[5], p[2], self._token_coord(p, 1))
+
+ def p_iteration_statement_3(self, p):
+ """ iteration_statement : FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement """
+ p[0] = c_ast.For(p[3], p[5], p[7], p[9], self._token_coord(p, 1))
+
+ def p_iteration_statement_4(self, p):
+ """ iteration_statement : FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement """
+ p[0] = c_ast.For(c_ast.DeclList(p[3], self._token_coord(p, 1)),
+ p[4], p[6], p[8], self._token_coord(p, 1))
+
+ def p_jump_statement_1(self, p):
+ """ jump_statement : GOTO ID SEMI """
+ p[0] = c_ast.Goto(p[2], self._token_coord(p, 1))
+
+ def p_jump_statement_2(self, p):
+ """ jump_statement : BREAK SEMI """
+ p[0] = c_ast.Break(self._token_coord(p, 1))
+
+ def p_jump_statement_3(self, p):
+ """ jump_statement : CONTINUE SEMI """
+ p[0] = c_ast.Continue(self._token_coord(p, 1))
+
+ def p_jump_statement_4(self, p):
+ """ jump_statement : RETURN expression SEMI
+ | RETURN SEMI
+ """
+ p[0] = c_ast.Return(p[2] if len(p) == 4 else None, self._token_coord(p, 1))
+
+ def p_expression_statement(self, p):
+ """ expression_statement : expression_opt SEMI """
+ if p[1] is None:
+ p[0] = c_ast.EmptyStatement(self._token_coord(p, 2))
+ else:
+ p[0] = p[1]
+
+ def p_expression(self, p):
+ """ expression : assignment_expression
+ | expression COMMA assignment_expression
+ """
+ if len(p) == 2:
+ p[0] = p[1]
+ else:
+ if not isinstance(p[1], c_ast.ExprList):
+ p[1] = c_ast.ExprList([p[1]], p[1].coord)
+
+ p[1].exprs.append(p[3])
+ p[0] = p[1]
+
+ def p_typedef_name(self, p):
+ """ typedef_name : TYPEID """
+ p[0] = c_ast.IdentifierType([p[1]], coord=self._token_coord(p, 1))
+
+ def p_assignment_expression(self, p):
+ """ assignment_expression : conditional_expression
+ | unary_expression assignment_operator assignment_expression
+ """
+ if len(p) == 2:
+ p[0] = p[1]
+ else:
+ p[0] = c_ast.Assignment(p[2], p[1], p[3], p[1].coord)
+
+ # K&R2 defines these as many separate rules, to encode
+ # precedence and associativity. Why work hard ? I'll just use
+ # the built in precedence/associativity specification feature
+ # of PLY. (see precedence declaration above)
+ #
+ def p_assignment_operator(self, p):
+ """ assignment_operator : EQUALS
+ | XOREQUAL
+ | TIMESEQUAL
+ | DIVEQUAL
+ | MODEQUAL
+ | PLUSEQUAL
+ | MINUSEQUAL
+ | LSHIFTEQUAL
+ | RSHIFTEQUAL
+ | ANDEQUAL
+ | OREQUAL
+ """
+ p[0] = p[1]
+
+ def p_constant_expression(self, p):
+ """ constant_expression : conditional_expression """
+ p[0] = p[1]
+
+ def p_conditional_expression(self, p):
+ """ conditional_expression : binary_expression
+ | binary_expression CONDOP expression COLON conditional_expression
+ """
+ if len(p) == 2:
+ p[0] = p[1]
+ else:
+ p[0] = c_ast.TernaryOp(p[1], p[3], p[5], p[1].coord)
+
+ def p_binary_expression(self, p):
+ """ binary_expression : cast_expression
+ | binary_expression TIMES binary_expression
+ | binary_expression DIVIDE binary_expression
+ | binary_expression MOD binary_expression
+ | binary_expression PLUS binary_expression
+ | binary_expression MINUS binary_expression
+ | binary_expression RSHIFT binary_expression
+ | binary_expression LSHIFT binary_expression
+ | binary_expression LT binary_expression
+ | binary_expression LE binary_expression
+ | binary_expression GE binary_expression
+ | binary_expression GT binary_expression
+ | binary_expression EQ binary_expression
+ | binary_expression NE binary_expression
+ | binary_expression AND binary_expression
+ | binary_expression OR binary_expression
+ | binary_expression XOR binary_expression
+ | binary_expression LAND binary_expression
+ | binary_expression LOR binary_expression
+ """
+ if len(p) == 2:
+ p[0] = p[1]
+ else:
+ p[0] = c_ast.BinaryOp(p[2], p[1], p[3], p[1].coord)
+
+ def p_cast_expression_1(self, p):
+ """ cast_expression : unary_expression """
+ p[0] = p[1]
+
+ def p_cast_expression_2(self, p):
+ """ cast_expression : LPAREN type_name RPAREN cast_expression """
+ p[0] = c_ast.Cast(p[2], p[4], self._token_coord(p, 1))
+
+ def p_unary_expression_1(self, p):
+ """ unary_expression : postfix_expression """
+ p[0] = p[1]
+
+ def p_unary_expression_2(self, p):
+ """ unary_expression : PLUSPLUS unary_expression
+ | MINUSMINUS unary_expression
+ | unary_operator cast_expression
+ """
+ p[0] = c_ast.UnaryOp(p[1], p[2], p[2].coord)
+
+ def p_unary_expression_3(self, p):
+ """ unary_expression : SIZEOF unary_expression
+ | SIZEOF LPAREN type_name RPAREN
+ """
+ p[0] = c_ast.UnaryOp(
+ p[1],
+ p[2] if len(p) == 3 else p[3],
+ self._token_coord(p, 1))
+
+ def p_unary_operator(self, p):
+ """ unary_operator : AND
+ | TIMES
+ | PLUS
+ | MINUS
+ | NOT
+ | LNOT
+ """
+ p[0] = p[1]
+
+ def p_postfix_expression_1(self, p):
+ """ postfix_expression : primary_expression """
+ p[0] = p[1]
+
+ def p_postfix_expression_2(self, p):
+ """ postfix_expression : postfix_expression LBRACKET expression RBRACKET """
+ p[0] = c_ast.ArrayRef(p[1], p[3], p[1].coord)
+
+ def p_postfix_expression_3(self, p):
+ """ postfix_expression : postfix_expression LPAREN argument_expression_list RPAREN
+ | postfix_expression LPAREN RPAREN
+ """
+ p[0] = c_ast.FuncCall(p[1], p[3] if len(p) == 5 else None, p[1].coord)
+
+ def p_postfix_expression_4(self, p):
+ """ postfix_expression : postfix_expression PERIOD ID
+ | postfix_expression PERIOD TYPEID
+ | postfix_expression ARROW ID
+ | postfix_expression ARROW TYPEID
+ """
+ field = c_ast.ID(p[3], self._token_coord(p, 3))
+ p[0] = c_ast.StructRef(p[1], p[2], field, p[1].coord)
+
+ def p_postfix_expression_5(self, p):
+ """ postfix_expression : postfix_expression PLUSPLUS
+ | postfix_expression MINUSMINUS
+ """
+ p[0] = c_ast.UnaryOp('p' + p[2], p[1], p[1].coord)
+
+ def p_postfix_expression_6(self, p):
+ """ postfix_expression : LPAREN type_name RPAREN brace_open initializer_list brace_close
+ | LPAREN type_name RPAREN brace_open initializer_list COMMA brace_close
+ """
+ p[0] = c_ast.CompoundLiteral(p[2], p[5])
+
+ def p_primary_expression_1(self, p):
+ """ primary_expression : identifier """
+ p[0] = p[1]
+
+ def p_primary_expression_2(self, p):
+ """ primary_expression : constant """
+ p[0] = p[1]
+
+ def p_primary_expression_3(self, p):
+ """ primary_expression : unified_string_literal
+ | unified_wstring_literal
+ """
+ p[0] = p[1]
+
+ def p_primary_expression_4(self, p):
+ """ primary_expression : LPAREN expression RPAREN """
+ p[0] = p[2]
+
+ def p_primary_expression_5(self, p):
+ """ primary_expression : OFFSETOF LPAREN type_name COMMA offsetof_member_designator RPAREN
+ """
+ coord = self._token_coord(p, 1)
+ p[0] = c_ast.FuncCall(c_ast.ID(p[1], coord),
+ c_ast.ExprList([p[3], p[5]], coord),
+ coord)
+
+ def p_offsetof_member_designator(self, p):
+ """ offsetof_member_designator : identifier
+ | offsetof_member_designator PERIOD identifier
+ | offsetof_member_designator LBRACKET expression RBRACKET
+ """
+ if len(p) == 2:
+ p[0] = p[1]
+ elif len(p) == 4:
+ p[0] = c_ast.StructRef(p[1], p[2], p[3], p[1].coord)
+ elif len(p) == 5:
+ p[0] = c_ast.ArrayRef(p[1], p[3], p[1].coord)
+ else:
+ raise NotImplementedError("Unexpected parsing state. len(p): %u" % len(p))
+
+ def p_argument_expression_list(self, p):
+ """ argument_expression_list : assignment_expression
+ | argument_expression_list COMMA assignment_expression
+ """
+ if len(p) == 2: # single expr
+ p[0] = c_ast.ExprList([p[1]], p[1].coord)
+ else:
+ p[1].exprs.append(p[3])
+ p[0] = p[1]
+
+ def p_identifier(self, p):
+ """ identifier : ID """
+ p[0] = c_ast.ID(p[1], self._token_coord(p, 1))
+
+ def p_constant_1(self, p):
+ """ constant : INT_CONST_DEC
+ | INT_CONST_OCT
+ | INT_CONST_HEX
+ | INT_CONST_BIN
+ | INT_CONST_CHAR
+ """
+ uCount = 0
+ lCount = 0
+ for x in p[1][-3:]:
+ if x in ('l', 'L'):
+ lCount += 1
+ elif x in ('u', 'U'):
+ uCount += 1
+ if uCount > 1:
+ raise ValueError('Constant cannot have more than one u/U suffix.')
+ elif lCount > 2:
+ raise ValueError('Constant cannot have more than two l/L suffixes.')
+ prefix = 'unsigned ' * uCount + 'long ' * lCount
+ p[0] = c_ast.Constant(
+ prefix + 'int', p[1], self._token_coord(p, 1))
+
+ def p_constant_2(self, p):
+ """ constant : FLOAT_CONST
+ | HEX_FLOAT_CONST
+ """
+ if 'x' in p[1].lower():
+ t = 'float'
+ else:
+ if p[1][-1] in ('f', 'F'):
+ t = 'float'
+ elif p[1][-1] in ('l', 'L'):
+ t = 'long double'
+ else:
+ t = 'double'
+
+ p[0] = c_ast.Constant(
+ t, p[1], self._token_coord(p, 1))
+
+ def p_constant_3(self, p):
+ """ constant : CHAR_CONST
+ | WCHAR_CONST
+ """
+ p[0] = c_ast.Constant(
+ 'char', p[1], self._token_coord(p, 1))
+
+ # The "unified" string and wstring literal rules are for supporting
+ # concatenation of adjacent string literals.
+ # I.e. "hello " "world" is seen by the C compiler as a single string literal
+ # with the value "hello world"
+ #
+ def p_unified_string_literal(self, p):
+ """ unified_string_literal : STRING_LITERAL
+ | unified_string_literal STRING_LITERAL
+ """
+ if len(p) == 2: # single literal
+ p[0] = c_ast.Constant(
+ 'string', p[1], self._token_coord(p, 1))
+ else:
+ p[1].value = p[1].value[:-1] + p[2][1:]
+ p[0] = p[1]
+
+ def p_unified_wstring_literal(self, p):
+ """ unified_wstring_literal : WSTRING_LITERAL
+ | unified_wstring_literal WSTRING_LITERAL
+ """
+ if len(p) == 2: # single literal
+ p[0] = c_ast.Constant(
+ 'string', p[1], self._token_coord(p, 1))
+ else:
+ p[1].value = p[1].value.rstrip()[:-1] + p[2][2:]
+ p[0] = p[1]
+
+ def p_brace_open(self, p):
+ """ brace_open : LBRACE
+ """
+ p[0] = p[1]
+ p.set_lineno(0, p.lineno(1))
+
+ def p_brace_close(self, p):
+ """ brace_close : RBRACE
+ """
+ p[0] = p[1]
+ p.set_lineno(0, p.lineno(1))
+
+ def p_empty(self, p):
+ 'empty : '
+ p[0] = None
+
+ def p_error(self, p):
+ # If error recovery is added here in the future, make sure
+ # _get_yacc_lookahead_token still works!
+ #
+ if p:
+ self._parse_error(
+ 'before: %s' % p.value,
+ self._coord(lineno=p.lineno,
+ column=self.clex.find_tok_column(p)))
+ else:
+ self._parse_error('At end of input', self.clex.filename)
diff --git a/venv/Lib/site-packages/pycparser/lextab.py b/venv/Lib/site-packages/pycparser/lextab.py
new file mode 100644
index 000000000..eb3ae07d5
--- /dev/null
+++ b/venv/Lib/site-packages/pycparser/lextab.py
@@ -0,0 +1,10 @@
+# lextab.py. This file automatically created by PLY (version 3.10). Don't edit!
+_tabversion = '3.10'
+_lextokens = set(('VOID', 'LBRACKET', 'WCHAR_CONST', 'FLOAT_CONST', 'MINUS', 'RPAREN', 'LONG', 'PLUS', 'ELLIPSIS', 'GT', 'GOTO', 'ENUM', 'PERIOD', 'GE', 'INT_CONST_DEC', 'ARROW', '__INT128', 'HEX_FLOAT_CONST', 'DOUBLE', 'MINUSEQUAL', 'INT_CONST_OCT', 'TIMESEQUAL', 'OR', 'SHORT', 'RETURN', 'RSHIFTEQUAL', 'RESTRICT', 'STATIC', 'SIZEOF', 'UNSIGNED', 'UNION', 'COLON', 'WSTRING_LITERAL', 'DIVIDE', 'FOR', 'PLUSPLUS', 'EQUALS', 'ELSE', 'INLINE', 'EQ', 'AND', 'TYPEID', 'LBRACE', 'PPHASH', 'INT', 'SIGNED', 'CONTINUE', 'NOT', 'OREQUAL', 'MOD', 'RSHIFT', 'DEFAULT', 'CHAR', 'WHILE', 'DIVEQUAL', 'EXTERN', 'CASE', 'LAND', 'REGISTER', 'MODEQUAL', 'NE', 'SWITCH', 'INT_CONST_HEX', '_COMPLEX', 'PPPRAGMASTR', 'PLUSEQUAL', 'STRUCT', 'CONDOP', 'BREAK', 'VOLATILE', 'PPPRAGMA', 'ANDEQUAL', 'INT_CONST_BIN', 'DO', 'LNOT', 'CONST', 'LOR', 'CHAR_CONST', 'LSHIFT', 'RBRACE', '_BOOL', 'LE', 'SEMI', 'LT', 'COMMA', 'OFFSETOF', 'TYPEDEF', 'XOR', 'AUTO', 'TIMES', 'LPAREN', 'MINUSMINUS', 'ID', 'IF', 'STRING_LITERAL', 'FLOAT', 'XOREQUAL', 'LSHIFTEQUAL', 'RBRACKET'))
+_lexreflags = 64
+_lexliterals = ''
+_lexstateinfo = {'ppline': 'exclusive', 'pppragma': 'exclusive', 'INITIAL': 'inclusive'}
+_lexstatere = {'ppline': [('(?P<ppline_FILENAME>"([^"\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*")|(?P<ppline_LINE_NUMBER>(0(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|([1-9][0-9]*(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?))|(?P<ppline_NEWLINE>\\n)|(?P<ppline_PPLINE>line)', [None, ('t_ppline_FILENAME', 'FILENAME'), None, None, None, None, None, None, ('t_ppline_LINE_NUMBER', 'LINE_NUMBER'), None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ('t_ppline_NEWLINE', 'NEWLINE'), ('t_ppline_PPLINE', 'PPLINE')])], 'pppragma': [('(?P<pppragma_NEWLINE>\\n)|(?P<pppragma_PPPRAGMA>pragma)|(?P<pppragma_STR>.+)', [None, ('t_pppragma_NEWLINE', 'NEWLINE'), ('t_pppragma_PPPRAGMA', 'PPPRAGMA'), ('t_pppragma_STR', 'STR')])], 'INITIAL': [('(?P<PPHASH>[ \\t]*\\#)|(?P<NEWLINE>\\n+)|(?P<LBRACE>\\{)|(?P<RBRACE>\\})|(?P<FLOAT_CONST>((((([0-9]*\\.[0-9]+)|([0-9]+\\.))([eE][-+]?[0-9]+)?)|([0-9]+([eE][-+]?[0-9]+)))[FfLl]?))|(?P<HEX_FLOAT_CONST>(0[xX]([0-9a-fA-F]+|((([0-9a-fA-F]+)?\\.[0-9a-fA-F]+)|([0-9a-fA-F]+\\.)))([pP][+-]?[0-9]+)[FfLl]?))|(?P<INT_CONST_HEX>0[xX][0-9a-fA-F]+(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)', [None, ('t_PPHASH', 'PPHASH'), ('t_NEWLINE', 'NEWLINE'), ('t_LBRACE', 'LBRACE'), ('t_RBRACE', 'RBRACE'), ('t_FLOAT_CONST', 'FLOAT_CONST'), None, None, None, None, None, None, None, None, None, ('t_HEX_FLOAT_CONST', 'HEX_FLOAT_CONST'), None, None, None, None, None, None, None, ('t_INT_CONST_HEX', 'INT_CONST_HEX')]), ('(?P<INT_CONST_BIN>0[bB][01]+(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|(?P<BAD_CONST_OCT>0[0-7]*[89])|(?P<INT_CONST_OCT>0[0-7]*(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|(?P<INT_CONST_DEC>(0(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|([1-9][0-9]*(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?))|(?P<CHAR_CONST>\'([^\'\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))\')|(?P<WCHAR_CONST>L\'([^\'\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))\')|(?P<UNMATCHED_QUOTE>(\'([^\'\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*\\n)|(\'([^\'\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*$))|(?P<BAD_CHAR_CONST>(\'([^\'\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))[^\'\\n]+\')|(\'\')|(\'([\\\\][^a-zA-Z._~^!=&\\^\\-\\\\?\'"x0-7])[^\'\\n]*\'))', [None, ('t_INT_CONST_BIN', 'INT_CONST_BIN'), None, None, None, None, None, None, None, ('t_BAD_CONST_OCT', 'BAD_CONST_OCT'), ('t_INT_CONST_OCT', 'INT_CONST_OCT'), None, None, None, None, None, None, None, ('t_INT_CONST_DEC', 'INT_CONST_DEC'), None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ('t_CHAR_CONST', 'CHAR_CONST'), None, None, None, None, None, None, ('t_WCHAR_CONST', 'WCHAR_CONST'), None, None, None, None, None, None, ('t_UNMATCHED_QUOTE', 'UNMATCHED_QUOTE'), None, None, None, None, None, None, None, None, None, None, None, None, None, None, ('t_BAD_CHAR_CONST', 'BAD_CHAR_CONST')]), ('(?P<WSTRING_LITERAL>L"([^"\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*")|(?P<BAD_STRING_LITERAL>"([^"\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*?([\\\\][^a-zA-Z._~^!=&\\^\\-\\\\?\'"x0-7])([^"\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*")|(?P<ID>[a-zA-Z_$][0-9a-zA-Z_$]*)|(?P<STRING_LITERAL>"([^"\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*")|(?P<ELLIPSIS>\\.\\.\\.)|(?P<PLUSPLUS>\\+\\+)|(?P<LOR>\\|\\|)|(?P