Added delete option to database storage.

Batuhan Berk Başoğlu 2020-10-12 12:10:01 -04:00
parent 308604a33c
commit 963b5bc68b
1868 changed files with 192402 additions and 13278 deletions

View file

@@ -1,4 +1,5 @@
import pyrebase
import firebase_admin
firebaseConfig = {
'apiKey': "AIzaSyAdL0W5HscjEDFPK4BDi6Cnc7FLa30GPYY",
@@ -8,7 +9,8 @@ firebaseConfig = {
'storageBucket': "vehicleantitheftrecognition.appspot.com",
'messagingSenderId': "163692530359",
'appId': "1:163692530359:web:b6dc7ccfc56a79afb11b32",
'measurementId': "G-EPWP2LK89Q"
'measurementId': "G-EPWP2LK89Q",
'serviceAccount': 'vehicleantitheftrecognition-firebase-adminsdk-krrgw-05da515de5.json'
}
firebase = pyrebase.initialize_app(firebaseConfig)
@@ -19,7 +21,7 @@ storage = firebase.storage()
class DBHelper:
    # Create account function which creates new authentication info.
def createaccount(username, password, confirmpassword):
def createAccount(username, password, confirmpassword):
email = username + "@hotmail.com"
if password == confirmpassword:
auth.create_user_with_email_and_password(email,password)
@@ -46,7 +48,7 @@ class DBHelper:
db.child("Users").child(userID).remove()
# Returns the first name or else an empty string.
def getfirstname(userID):
def getFirstName(userID):
firstname = ""
users = db.child("Users").get()
for user in users.each():
@@ -55,7 +57,7 @@ class DBHelper:
return firstname
# Returns the last name or else an empty string.
def getlastname(userID):
def getLastName(userID):
lastname = ""
users = db.child("Users").get()
for user in users.each():
@@ -64,7 +66,7 @@ class DBHelper:
return lastname
# Returns the e-mail or else an empty string.
def getemail(userID):
def getEmail(userID):
email = ""
users = db.child("Users").get()
for user in users.each():
@@ -73,7 +75,7 @@ class DBHelper:
return email
# Returns the phone or else an empty string.
def getphone(userID):
def getPhone(userID):
phone = ""
users = db.child("Users").get()
for user in users.each():
@@ -82,7 +84,7 @@ class DBHelper:
return phone
# Returns the address or else an empty string.
def getaddress(userID):
def getAddress(userID):
address = ""
users = db.child("Users").get()
for user in users.each():
@@ -91,19 +93,24 @@ class DBHelper:
return address
    # Uploads the photo of a user; input should be something like "example.png".
def uploaduserphoto(userphoto):
def uploadUserPhoto(userphoto):
userphoto_str = str(userphoto)
storage.child("Photos_of_Users/" + str(userphoto)).put("Photos_of_Users/" + str(userphoto))
    # Uploads the photo of a thief; input should be something like "example.png".
def uploadthiefphoto(userphoto):
def uploadThiefPhoto(userphoto):
userphoto_str = str(userphoto)
storage.child("Photos_of_Thieves/" + str(userphoto)).put("Photos_of_Thieves/" + str(userphoto))
# Downloads all the user photos.
def downloadalluserphotos(self):
def downloadAllUserphotos(self):
storage.child("Photos_of_Users").download("Storage_from_Database")
# Downloads all the thief photos.
def downloadallthiefphotos(self):
def downloadAllThiefphotos(self):
storage.child("Photos_of_Thieves").download("Storage_from_Thieves")
    # Deletes the photo of the specified user.
def deleteUserPhoto(userphoto):
storage.delete('Photos_of_Users/' + userphoto)
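
A usage sketch of the new delete option (the module name DBHelper and the photo filename are assumptions for illustration):

from DBHelper import DBHelper  # hypothetical module name for the file above

# Upload a user photo, then remove it again with the new delete helper.
DBHelper.uploadUserPhoto("example.png")
DBHelper.deleteUserPhoto("example.png")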

View file

@@ -0,0 +1,12 @@
{
"type": "service_account",
"project_id": "vehicleantitheftrecognition",
"private_key_id": "05da515de56c8ac1f4882cca95e59b528327689d",
"private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDEuXY99rdd3tUr\n478TMukS5x7nWXDHTFHZcxFXIyjkgjcm7a4JieJCuf7VtteAoJQ+8q4qLxypwkOA\nmVpGFXtP9xQCeoiL/lsZGUQ2GAZVmOOumlRMbDJ6bJANe4V7QnmdfLInjTks/ahr\nw6xBNBmbOXxk7adF3Qj3rp/1iUG54AHOkBvQkz53zCRVi2yX7PiwvARDErgj3LNn\nEoc/95P7HXgzuwGfwfziGqN1EtxcKzDsVi5Sg5L2cyjm5V17sEkFRKA2zXqjdOrM\nOQINL4UW4mcU7zyj8Iyu9IFTzWgEVS64+O2+mA9T2Rf325LNa46yqdtoQA8A40oZ\nYrV089bLAgMBAAECggEAAZ94vEDMI81yJhng5QBR2pgRhzMTsnE3aPssCh3+I7R3\nTq499fzZ4wG0eoA2/Zaq4my6HHBQZON3ZG40x8iRld1MoEtGfnErazpBd84cRJnY\nTFkgQj56RCGf0UdS7etkeXTNO3T3eHQ3RCb/Gs1FO3numXPkd2JqxTBUSCrztzGA\nMmk5ikE3giL+ZVB3TaKwkuDLNzxaQSKR+tHGrBPndb6exVfYEROk8HBeTmE+UfBY\nnKXM1uAk7dGXkjq3dFekv0CmEBydebx862/pWgo/a4o7eGDwmPPlxgu3mMJ7fVjB\nji70iusIDc7QCPWYCVCFW8BkZTNy/lGIGNMAEK5w0QKBgQDsl5FPJCOdERQCylcF\nxh6ECw6LqxioFRovXgtsfn/eXoLVCh3Vulqu6zL78qn0f7JQNdUMt9x++wIpNMWb\nV0GHJZVUnqzrvpTZdHWYVDj4tD9WC8vOjbCf5pDfrN4bTh0q4SJasm8tFcHUNcpW\nechUCQMdtxVoi2jgwHBJWlwXnQKBgQDU3KzASyuPZip2oELx+jbXBZ840P/8aekh\nNQUit2jG0twM7Qgk1M929fhY3xyAFKjptk8vLFZcmfi0p0FeNUYnF4bmORuz+bng\nxEW4+dzrf9eeekQcspUf8kwgUkzuuePwJJ4LDuL6d3I/wzClU7B74BAaDqzs9gTK\nFiOPSvb/hwKBgQDqcMKXppr3sA2hOjmDSi/A7mfqdI+JNufsmgc96hfxFLwWOOEZ\ngYEMpZmu2WYaFlNucfl8kdCXr0kT5ewOIyeWsOJJqLZ3IDHFTUadvI97urisHiJF\nuleUC1fxnQ22BvCWJeLx9rB9/3pDO04V5LViuE9zKZG4N7SkSWy68yQgbQKBgQCo\n0PsQ5oz9hYFX43I0hsTHc2X7oYXjofuKoooYJm2qgcCTX8l9rGl9Z0Y29Xuc+MWd\n1UCnoPo9Jr/gRmXJWWbxye7q14/pBL0uTXseYMuc2h8fSMiMGfW7CGbnm134VuU3\np1LQYlYRXnn1p9AEzoLBO8qJX+o1ZEgYHcbF9iY+MQKBgFWBolVPSddIGB/btoOA\nVncnhDSziXdbyEprCO0omsRLG+ciJwWXv4Rd+ftGD7fvFLFFBH/JR9L2lU9eKcF3\nBZA5KMb66zmQ2nVzRiJEUZmE6vxp8E7nXlfxTy76euZSkFDKvhhbYU1XLcPj2ES3\nMMy1jE3LrpNrAT6iHvsz/pt0\n-----END PRIVATE KEY-----\n",
"client_email": "firebase-adminsdk-krrgw@vehicleantitheftrecognition.iam.gserviceaccount.com",
"client_id": "105455250176735842254",
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
"token_uri": "https://oauth2.googleapis.com/token",
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/firebase-adminsdk-krrgw%40vehicleantitheftrecognition.iam.gserviceaccount.com"
}

View file

@@ -0,0 +1,44 @@
==============
CacheControl
==============
.. image:: https://img.shields.io/pypi/v/cachecontrol.svg
:target: https://pypi.python.org/pypi/cachecontrol
:alt: Latest Version
.. image:: https://travis-ci.org/ionrock/cachecontrol.png?branch=master
:target: https://travis-ci.org/ionrock/cachecontrol
CacheControl is a port of the caching algorithms in httplib2_ for use with
the requests_ Session object.
It was written because httplib2's better support for caching is often
mitigated by its lack of thread safety. The same is true of requests in
terms of caching.
Quickstart
==========
.. code-block:: python
import requests
from cachecontrol import CacheControl
sess = requests.session()
cached_sess = CacheControl(sess)
response = cached_sess.get('http://google.com')
If the URL contains any caching-based headers, it will cache the
result in a simple dictionary.
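For illustration, a variant of the quickstart (not from the original README) that persists responses to disk with the bundled FileCache; the directory name is an assumption, and this backend needs the optional lockfile dependency:

.. code-block:: python

    import requests

    from cachecontrol import CacheControl
    from cachecontrol.caches import FileCache

    # Same wrapper as above, but cached responses survive restarts.
    sess = CacheControl(requests.session(), cache=FileCache('.web_cache'))
    response = sess.get('http://google.com')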
For more info, check out the docs_
.. _docs: http://cachecontrol.readthedocs.org/en/latest/
.. _httplib2: https://github.com/jcgregorio/httplib2
.. _requests: http://docs.python-requests.org/

View file

@@ -0,0 +1,74 @@
Metadata-Version: 2.0
Name: CacheControl
Version: 0.12.6
Summary: httplib2 caching for requests
Home-page: https://github.com/ionrock/cachecontrol
Author: Eric Larson
Author-email: eric@ionrock.org
License: UNKNOWN
Description-Content-Type: UNKNOWN
Keywords: requests http caching web
Platform: UNKNOWN
Classifier: Development Status :: 4 - Beta
Classifier: Environment :: Web Environment
Classifier: License :: OSI Approved :: Apache Software License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Topic :: Internet :: WWW/HTTP
Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
Requires-Dist: requests
Requires-Dist: msgpack (>=0.5.2)
Provides-Extra: filecache
Requires-Dist: lockfile (>=0.9); extra == 'filecache'
Provides-Extra: redis
Requires-Dist: redis (>=2.10.5); extra == 'redis'
==============
CacheControl
==============
.. image:: https://img.shields.io/pypi/v/cachecontrol.svg
:target: https://pypi.python.org/pypi/cachecontrol
:alt: Latest Version
.. image:: https://travis-ci.org/ionrock/cachecontrol.png?branch=master
:target: https://travis-ci.org/ionrock/cachecontrol
CacheControl is a port of the caching algorithms in httplib2_ for use with
the requests_ Session object.
It was written because httplib2's better support for caching is often
mitigated by its lack of thread safety. The same is true of requests in
terms of caching.
Quickstart
==========
.. code-block:: python
import requests
from cachecontrol import CacheControl
sess = requests.session()
cached_sess = CacheControl(sess)
response = cached_sess.get('http://google.com')
If the URL contains any caching-based headers, it will cache the
result in a simple dictionary.
For more info, check out the docs_
.. _docs: http://cachecontrol.readthedocs.org/en/latest/
.. _httplib2: https://github.com/jcgregorio/httplib2
.. _requests: http://docs.python-requests.org/

View file

@@ -0,0 +1,35 @@
../../Scripts/doesitcache.exe,sha256=oUrPldHAhd6L96tleSYm9TnI_wQAlAXhZcgsF_m648I,97232
CacheControl-0.12.6.dist-info/DESCRIPTION.rst,sha256=AVvOiHd6xGEt-8qj3nBO0wevsy94ATbiolgWP-hAdOw,1090
CacheControl-0.12.6.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
CacheControl-0.12.6.dist-info/METADATA,sha256=KdpEZki2tVLM6jOflJwcwdg_7YOT-HA08g1lF3BSU8A,2196
CacheControl-0.12.6.dist-info/RECORD,,
CacheControl-0.12.6.dist-info/WHEEL,sha256=kdsN-5OJAZIiHN-iO4Rhl82KyS0bDWf4uBwMbkNafr8,110
CacheControl-0.12.6.dist-info/entry_points.txt,sha256=HjCekaRCv8kfNqP5WehMR29IWxIA5VrhoOeKrCykCLc,56
CacheControl-0.12.6.dist-info/metadata.json,sha256=fj7gcSEiRyF5g5Nt5ShwF7GP7QAPJFTqZ0YwhYlxMZE,1380
CacheControl-0.12.6.dist-info/top_level.txt,sha256=vGYWzpbe3h6gkakV4f7iCK2x3KyK3oMkV5pe5v25-d4,13
cachecontrol/__init__.py,sha256=pJtAaUxOsMPnytI1A3juAJkXYDr8krdSnsg4Yg3OBEg,302
cachecontrol/__pycache__/__init__.cpython-36.pyc,,
cachecontrol/__pycache__/_cmd.cpython-36.pyc,,
cachecontrol/__pycache__/adapter.cpython-36.pyc,,
cachecontrol/__pycache__/cache.cpython-36.pyc,,
cachecontrol/__pycache__/compat.cpython-36.pyc,,
cachecontrol/__pycache__/controller.cpython-36.pyc,,
cachecontrol/__pycache__/filewrapper.cpython-36.pyc,,
cachecontrol/__pycache__/heuristics.cpython-36.pyc,,
cachecontrol/__pycache__/serialize.cpython-36.pyc,,
cachecontrol/__pycache__/wrapper.cpython-36.pyc,,
cachecontrol/_cmd.py,sha256=88j4P3JlJGqg6xAXR4btN9fYruXUH4CE-M93Sie5IB8,1242
cachecontrol/adapter.py,sha256=ctnbSXDOj0V0NaxJP2jFauOYRDHaNYMP9QCE8kB4kfk,4870
cachecontrol/cache.py,sha256=1fc4wJP8HYt1ycnJXeEw5pCpeBL2Cqxx6g9Fb0AYDWQ,805
cachecontrol/caches/__init__.py,sha256=-gHNKYvaeD0kOk5M74eOrsSgIKUtC6i6GfbmugGweEo,86
cachecontrol/caches/__pycache__/__init__.cpython-36.pyc,,
cachecontrol/caches/__pycache__/file_cache.cpython-36.pyc,,
cachecontrol/caches/__pycache__/redis_cache.cpython-36.pyc,,
cachecontrol/caches/file_cache.py,sha256=nYVKsJtXh6gJXvdn1iWyrhxvkwpQrK-eKoMRzuiwkKk,4153
cachecontrol/caches/redis_cache.py,sha256=yZP1PoUgAvxEZZrCVwImZ-5pFKU41v5HYJf1rfbXYmM,844
cachecontrol/compat.py,sha256=Fn_aYzqNbN0bK9gUn8SQUzMLxQ_ruGnsEMvryYDFh3o,647
cachecontrol/controller.py,sha256=fpLmIvxce2mKVFmtDFiiyydqU_pPbCucYLC9qP-LqvY,14137
cachecontrol/filewrapper.py,sha256=vACKO8Llzu_ZWyjV1Fxn1MA4TGU60N5N3GSrAFdAY2Q,2533
cachecontrol/heuristics.py,sha256=BFGHJ3yQcxvZizfo90LLZ04T_Z5XSCXvFotrp7Us0sc,4070
cachecontrol/serialize.py,sha256=Jms7OS4GB2JFUzuMPlmQtuCDzcjjE-2ijrHpUXC2BV0,7062
cachecontrol/wrapper.py,sha256=5LX0uJwkNQUtYSEw3aGmGu9WY8wGipd81mJ8lG0d0M4,690

View file

@@ -1,5 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.26.0)
Generator: bdist_wheel (0.30.0)
Root-Is-Purelib: true
Tag: py2-none-any
Tag: py3-none-any

View file

@@ -0,0 +1,3 @@
[console_scripts]
doesitcache = cachecontrol._cmd:main

View file

@@ -0,0 +1 @@
{"classifiers": ["Development Status :: 4 - Beta", "Environment :: Web Environment", "License :: OSI Approved :: Apache Software License", "Operating System :: OS Independent", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Topic :: Internet :: WWW/HTTP"], "description_content_type": "UNKNOWN", "extensions": {"python.commands": {"wrap_console": {"doesitcache": "cachecontrol._cmd:main"}}, "python.details": {"contacts": [{"email": "eric@ionrock.org", "name": "Eric Larson", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://github.com/ionrock/cachecontrol"}}, "python.exports": {"console_scripts": {"doesitcache": "cachecontrol._cmd:main"}}}, "extras": ["filecache", "redis"], "generator": "bdist_wheel (0.30.0)", "keywords": ["requests", "http", "caching", "web"], "metadata_version": "2.0", "name": "CacheControl", "requires_python": ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*", "run_requires": [{"extra": "filecache", "requires": ["lockfile (>=0.9)"]}, {"requires": ["msgpack (>=0.5.2)", "requests"]}, {"extra": "redis", "requires": ["redis (>=2.10.5)"]}], "summary": "httplib2 caching for requests", "version": "0.12.6"}

View file

@@ -0,0 +1 @@
cachecontrol

Binary file not shown.

View file

@@ -0,0 +1,36 @@
"""Retain apiclient as an alias for googleapiclient."""
from six import iteritems
import googleapiclient
from googleapiclient import channel
from googleapiclient import discovery
from googleapiclient import errors
from googleapiclient import http
from googleapiclient import mimeparse
from googleapiclient import model
try:
from googleapiclient import sample_tools
except ImportError:
# Silently ignore, because the vast majority of consumers won't use it and
# it has deep dependence on oauth2client, an optional dependency.
sample_tools = None
from googleapiclient import schema
_SUBMODULES = {
"channel": channel,
"discovery": discovery,
"errors": errors,
"http": http,
"mimeparse": mimeparse,
"model": model,
"sample_tools": sample_tools,
"schema": schema,
}
import sys
for module_name, module in iteritems(_SUBMODULES):
sys.modules["apiclient.%s" % module_name] = module

View file

@@ -0,0 +1,11 @@
"""CacheControl import Interface.
Make it easy to import from cachecontrol without long namespaces.
"""
__author__ = "Eric Larson"
__email__ = "eric@ionrock.org"
__version__ = "0.12.6"
from .wrapper import CacheControl
from .adapter import CacheControlAdapter
from .controller import CacheController

View file

@@ -0,0 +1,57 @@
import logging
import requests
from cachecontrol.adapter import CacheControlAdapter
from cachecontrol.cache import DictCache
from cachecontrol.controller import logger
from argparse import ArgumentParser
def setup_logging():
logger.setLevel(logging.DEBUG)
handler = logging.StreamHandler()
logger.addHandler(handler)
def get_session():
adapter = CacheControlAdapter(
DictCache(), cache_etags=True, serializer=None, heuristic=None
)
sess = requests.Session()
sess.mount("http://", adapter)
sess.mount("https://", adapter)
sess.cache_controller = adapter.controller
return sess
def get_args():
parser = ArgumentParser()
parser.add_argument("url", help="The URL to try and cache")
return parser.parse_args()
def main(args=None):
args = get_args()
sess = get_session()
# Make a request to get a response
resp = sess.get(args.url)
# Turn on logging
setup_logging()
# try setting the cache
sess.cache_controller.cache_response(resp.request, resp.raw)
# Now try to get it
if sess.cache_controller.cached_request(resp.request):
print("Cached!")
else:
print("Not cached :(")
if __name__ == "__main__":
main()
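# Command-line sketch (illustrative; `doesitcache` is the console script
# registered for this module in entry_points.txt earlier in this commit):
#
#   $ doesitcache https://example.com
#   Cached!    # or "Not cached :(" depending on the response headers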

View file

@@ -0,0 +1,133 @@
import types
import functools
import zlib
from requests.adapters import HTTPAdapter
from .controller import CacheController
from .cache import DictCache
from .filewrapper import CallbackFileWrapper
class CacheControlAdapter(HTTPAdapter):
invalidating_methods = {"PUT", "DELETE"}
def __init__(
self,
cache=None,
cache_etags=True,
controller_class=None,
serializer=None,
heuristic=None,
cacheable_methods=None,
*args,
**kw
):
super(CacheControlAdapter, self).__init__(*args, **kw)
self.cache = DictCache() if cache is None else cache
self.heuristic = heuristic
self.cacheable_methods = cacheable_methods or ("GET",)
controller_factory = controller_class or CacheController
self.controller = controller_factory(
self.cache, cache_etags=cache_etags, serializer=serializer
)
def send(self, request, cacheable_methods=None, **kw):
"""
Send a request. Use the request information to see if it
exists in the cache and cache the response if we need to and can.
"""
cacheable = cacheable_methods or self.cacheable_methods
if request.method in cacheable:
try:
cached_response = self.controller.cached_request(request)
except zlib.error:
cached_response = None
if cached_response:
return self.build_response(request, cached_response, from_cache=True)
# check for etags and add headers if appropriate
request.headers.update(self.controller.conditional_headers(request))
resp = super(CacheControlAdapter, self).send(request, **kw)
return resp
def build_response(
self, request, response, from_cache=False, cacheable_methods=None
):
"""
Build a response by making a request or using the cache.
This will end up calling send and returning a potentially
cached response
"""
cacheable = cacheable_methods or self.cacheable_methods
if not from_cache and request.method in cacheable:
# Check for any heuristics that might update headers
# before trying to cache.
if self.heuristic:
response = self.heuristic.apply(response)
# apply any expiration heuristics
if response.status == 304:
# We must have sent an ETag request. This could mean
# that we've been expired already or that we simply
# have an etag. In either case, we want to try and
# update the cache if that is the case.
cached_response = self.controller.update_cached_response(
request, response
)
if cached_response is not response:
from_cache = True
# We are done with the server response, read a
# possible response body (compliant servers will
# not return one, but we cannot be 100% sure) and
# release the connection back to the pool.
response.read(decode_content=False)
response.release_conn()
response = cached_response
# We always cache the 301 responses
elif response.status == 301:
self.controller.cache_response(request, response)
else:
# Wrap the response file with a wrapper that will cache the
# response when the stream has been consumed.
response._fp = CallbackFileWrapper(
response._fp,
functools.partial(
self.controller.cache_response, request, response
),
)
if response.chunked:
super_update_chunk_length = response._update_chunk_length
def _update_chunk_length(self):
super_update_chunk_length()
if self.chunk_left == 0:
self._fp._close()
response._update_chunk_length = types.MethodType(
_update_chunk_length, response
)
resp = super(CacheControlAdapter, self).build_response(request, response)
# See if we should invalidate the cache.
if request.method in self.invalidating_methods and resp.ok:
cache_url = self.controller.cache_url(request.url)
self.cache.delete(cache_url)
# Give the request a from_cache attr to let people use it
resp.from_cache = from_cache
return resp
def close(self):
self.cache.close()
super(CacheControlAdapter, self).close()

View file

@@ -0,0 +1,39 @@
"""
The cache object API for implementing caches. The default is a thread
safe in-memory dictionary.
"""
from threading import Lock
class BaseCache(object):
def get(self, key):
raise NotImplementedError()
def set(self, key, value):
raise NotImplementedError()
def delete(self, key):
raise NotImplementedError()
def close(self):
pass
class DictCache(BaseCache):
def __init__(self, init_dict=None):
self.lock = Lock()
self.data = init_dict or {}
def get(self, key):
return self.data.get(key, None)
def set(self, key, value):
with self.lock:
self.data.update({key: value})
def delete(self, key):
with self.lock:
if key in self.data:
self.data.pop(key)
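# Usage sketch (illustrative, not part of the module): DictCache is the
# default thread-safe in-memory backend used when no cache is supplied.
#
#   from cachecontrol.cache import DictCache
#
#   cache = DictCache()
#   cache.set("http://example.com/", b"serialized-response")
#   assert cache.get("http://example.com/") == b"serialized-response"
#   cache.delete("http://example.com/")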

View file

@@ -0,0 +1,2 @@
from .file_cache import FileCache # noqa
from .redis_cache import RedisCache # noqa

View file

@@ -0,0 +1,146 @@
import hashlib
import os
from textwrap import dedent
from ..cache import BaseCache
from ..controller import CacheController
try:
FileNotFoundError
except NameError:
# py2.X
FileNotFoundError = (IOError, OSError)
def _secure_open_write(filename, fmode):
# We only want to write to this file, so open it in write only mode
flags = os.O_WRONLY
# os.O_CREAT | os.O_EXCL will fail if the file already exists, so we only
# will open *new* files.
# We specify this because we want to ensure that the mode we pass is the
# mode of the file.
flags |= os.O_CREAT | os.O_EXCL
# Do not follow symlinks to prevent someone from making a symlink that
# we follow and insecurely open a cache file.
if hasattr(os, "O_NOFOLLOW"):
flags |= os.O_NOFOLLOW
# On Windows we'll mark this file as binary
if hasattr(os, "O_BINARY"):
flags |= os.O_BINARY
# Before we open our file, we want to delete any existing file that is
# there
try:
os.remove(filename)
except (IOError, OSError):
# The file must not exist already, so we can just skip ahead to opening
pass
# Open our file, the use of os.O_CREAT | os.O_EXCL will ensure that if a
# race condition happens between the os.remove and this line, that an
# error will be raised. Because we utilize a lockfile this should only
# happen if someone is attempting to attack us.
fd = os.open(filename, flags, fmode)
try:
return os.fdopen(fd, "wb")
except:
# An error occurred wrapping our FD in a file object
os.close(fd)
raise
class FileCache(BaseCache):
def __init__(
self,
directory,
forever=False,
filemode=0o0600,
dirmode=0o0700,
use_dir_lock=None,
lock_class=None,
):
if use_dir_lock is not None and lock_class is not None:
raise ValueError("Cannot use use_dir_lock and lock_class together")
try:
from lockfile import LockFile
from lockfile.mkdirlockfile import MkdirLockFile
except ImportError:
notice = dedent(
"""
NOTE: In order to use the FileCache you must have
lockfile installed. You can install it via pip:
pip install lockfile
"""
)
raise ImportError(notice)
else:
if use_dir_lock:
lock_class = MkdirLockFile
elif lock_class is None:
lock_class = LockFile
self.directory = directory
self.forever = forever
self.filemode = filemode
self.dirmode = dirmode
self.lock_class = lock_class
@staticmethod
def encode(x):
return hashlib.sha224(x.encode()).hexdigest()
def _fn(self, name):
# NOTE: This method should not change as some may depend on it.
# See: https://github.com/ionrock/cachecontrol/issues/63
hashed = self.encode(name)
parts = list(hashed[:5]) + [hashed]
return os.path.join(self.directory, *parts)
def get(self, key):
name = self._fn(key)
try:
with open(name, "rb") as fh:
return fh.read()
except FileNotFoundError:
return None
def set(self, key, value):
name = self._fn(key)
# Make sure the directory exists
try:
os.makedirs(os.path.dirname(name), self.dirmode)
except (IOError, OSError):
pass
with self.lock_class(name) as lock:
# Write our actual file
with _secure_open_write(lock.path, self.filemode) as fh:
fh.write(value)
def delete(self, key):
name = self._fn(key)
if not self.forever:
try:
os.remove(name)
except FileNotFoundError:
pass
def url_to_file_path(url, filecache):
"""Return the file cache path based on the URL.
This does not ensure the file exists!
"""
key = CacheController.cache_url(url)
return filecache._fn(key)
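# Usage sketch (the directory name is an assumption): map a URL to the
# on-disk path where a FileCache would store its entry.
#
#   from cachecontrol.caches import FileCache
#   from cachecontrol.caches.file_cache import url_to_file_path
#
#   fc = FileCache(".web_cache")
#   path = url_to_file_path("http://example.com/", fc)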

View file

@@ -0,0 +1,33 @@
from __future__ import division
from datetime import datetime
from cachecontrol.cache import BaseCache
class RedisCache(BaseCache):
def __init__(self, conn):
self.conn = conn
def get(self, key):
return self.conn.get(key)
def set(self, key, value, expires=None):
if not expires:
self.conn.set(key, value)
else:
expires = expires - datetime.utcnow()
self.conn.setex(key, int(expires.total_seconds()), value)
def delete(self, key):
self.conn.delete(key)
def clear(self):
"""Helper for clearing all the keys in a database. Use with
caution!"""
for key in self.conn.keys():
self.conn.delete(key)
def close(self):
"""Redis uses connection pooling, no need to close the connection."""
pass
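# Usage sketch (assumes a reachable Redis server; the redis client package
# is the optional dependency behind the 'redis' extra):
#
#   import redis
#   from cachecontrol.caches.redis_cache import RedisCache
#
#   cache = RedisCache(redis.Redis(host="localhost", port=6379))
#   cache.set("key", b"value")
#   cache.get("key")   # -> b"value"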

View file

@@ -0,0 +1,29 @@
try:
from urllib.parse import urljoin
except ImportError:
from urlparse import urljoin
try:
import cPickle as pickle
except ImportError:
import pickle
# Handle the case where the requests module has been patched to not have
# urllib3 bundled as part of its source.
try:
from requests.packages.urllib3.response import HTTPResponse
except ImportError:
from urllib3.response import HTTPResponse
try:
from requests.packages.urllib3.util import is_fp_closed
except ImportError:
from urllib3.util import is_fp_closed
# Replicate some six behaviour
try:
text_type = unicode
except NameError:
text_type = str

View file

@@ -0,0 +1,376 @@
"""
The httplib2 algorithms ported for use with requests.
"""
import logging
import re
import calendar
import time
from email.utils import parsedate_tz
from requests.structures import CaseInsensitiveDict
from .cache import DictCache
from .serialize import Serializer
logger = logging.getLogger(__name__)
URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")
def parse_uri(uri):
"""Parses a URI using the regex given in Appendix B of RFC 3986.
(scheme, authority, path, query, fragment) = parse_uri(uri)
"""
groups = URI.match(uri).groups()
return (groups[1], groups[3], groups[4], groups[6], groups[8])
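# Illustration (not part of the module): parse_uri splits an absolute URI
# into its RFC 3986 components, e.g.
#   parse_uri("http://example.com/path?q=1")
#   == ("http", "example.com", "/path", "q=1", None)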
class CacheController(object):
"""An interface to see if request should cached or not.
"""
def __init__(
self, cache=None, cache_etags=True, serializer=None, status_codes=None
):
self.cache = DictCache() if cache is None else cache
self.cache_etags = cache_etags
self.serializer = serializer or Serializer()
self.cacheable_status_codes = status_codes or (200, 203, 300, 301)
@classmethod
def _urlnorm(cls, uri):
"""Normalize the URL to create a safe key for the cache"""
(scheme, authority, path, query, fragment) = parse_uri(uri)
if not scheme or not authority:
raise Exception("Only absolute URIs are allowed. uri = %s" % uri)
scheme = scheme.lower()
authority = authority.lower()
if not path:
path = "/"
# Could do syntax based normalization of the URI before
# computing the digest. See Section 6.2.2 of Std 66.
request_uri = query and "?".join([path, query]) or path
defrag_uri = scheme + "://" + authority + request_uri
return defrag_uri
@classmethod
def cache_url(cls, uri):
return cls._urlnorm(uri)
def parse_cache_control(self, headers):
known_directives = {
# https://tools.ietf.org/html/rfc7234#section-5.2
"max-age": (int, True),
"max-stale": (int, False),
"min-fresh": (int, True),
"no-cache": (None, False),
"no-store": (None, False),
"no-transform": (None, False),
"only-if-cached": (None, False),
"must-revalidate": (None, False),
"public": (None, False),
"private": (None, False),
"proxy-revalidate": (None, False),
"s-maxage": (int, True),
}
cc_headers = headers.get("cache-control", headers.get("Cache-Control", ""))
retval = {}
for cc_directive in cc_headers.split(","):
if not cc_directive.strip():
continue
parts = cc_directive.split("=", 1)
directive = parts[0].strip()
try:
typ, required = known_directives[directive]
except KeyError:
logger.debug("Ignoring unknown cache-control directive: %s", directive)
continue
if not typ or not required:
retval[directive] = None
if typ:
try:
retval[directive] = typ(parts[1].strip())
except IndexError:
if required:
logger.debug(
"Missing value for cache-control " "directive: %s",
directive,
)
except ValueError:
logger.debug(
"Invalid value for cache-control directive " "%s, must be %s",
directive,
typ.__name__,
)
return retval
def cached_request(self, request):
"""
Return a cached response if it exists in the cache, otherwise
return False.
"""
cache_url = self.cache_url(request.url)
logger.debug('Looking up "%s" in the cache', cache_url)
cc = self.parse_cache_control(request.headers)
# Bail out if the request insists on fresh data
if "no-cache" in cc:
logger.debug('Request header has "no-cache", cache bypassed')
return False
if "max-age" in cc and cc["max-age"] == 0:
logger.debug('Request header has "max_age" as 0, cache bypassed')
return False
# Request allows serving from the cache, let's see if we find something
cache_data = self.cache.get(cache_url)
if cache_data is None:
logger.debug("No cache entry available")
return False
# Check whether it can be deserialized
resp = self.serializer.loads(request, cache_data)
if not resp:
logger.warning("Cache entry deserialization failed, entry ignored")
return False
# If we have a cached 301, return it immediately. We don't
# need to test our response for other headers b/c it is
# intrinsically "cacheable" as it is Permanent.
# See:
# https://tools.ietf.org/html/rfc7231#section-6.4.2
#
# Client can try to refresh the value by repeating the request
# with cache busting headers as usual (ie no-cache).
if resp.status == 301:
msg = (
'Returning cached "301 Moved Permanently" response '
"(ignoring date and etag information)"
)
logger.debug(msg)
return resp
headers = CaseInsensitiveDict(resp.headers)
if not headers or "date" not in headers:
if "etag" not in headers:
# Without date or etag, the cached response can never be used
# and should be deleted.
logger.debug("Purging cached response: no date or etag")
self.cache.delete(cache_url)
logger.debug("Ignoring cached response: no date")
return False
now = time.time()
date = calendar.timegm(parsedate_tz(headers["date"]))
current_age = max(0, now - date)
logger.debug("Current age based on date: %i", current_age)
# TODO: There is an assumption that the result will be a
# urllib3 response object. This may not be best since we
# could probably avoid instantiating or constructing the
# response until we know we need it.
resp_cc = self.parse_cache_control(headers)
# determine freshness
freshness_lifetime = 0
# Check the max-age pragma in the cache control header
if "max-age" in resp_cc:
freshness_lifetime = resp_cc["max-age"]
logger.debug("Freshness lifetime from max-age: %i", freshness_lifetime)
# If there isn't a max-age, check for an expires header
elif "expires" in headers:
expires = parsedate_tz(headers["expires"])
if expires is not None:
expire_time = calendar.timegm(expires) - date
freshness_lifetime = max(0, expire_time)
logger.debug("Freshness lifetime from expires: %i", freshness_lifetime)
# Determine if we are setting freshness limit in the
# request. Note, this overrides what was in the response.
if "max-age" in cc:
freshness_lifetime = cc["max-age"]
logger.debug(
"Freshness lifetime from request max-age: %i", freshness_lifetime
)
if "min-fresh" in cc:
min_fresh = cc["min-fresh"]
# adjust our current age by our min fresh
current_age += min_fresh
logger.debug("Adjusted current age from min-fresh: %i", current_age)
# Return entry if it is fresh enough
if freshness_lifetime > current_age:
logger.debug('The response is "fresh", returning cached response')
logger.debug("%i > %i", freshness_lifetime, current_age)
return resp
# we're not fresh. If we don't have an Etag, clear it out
if "etag" not in headers:
logger.debug('The cached response is "stale" with no etag, purging')
self.cache.delete(cache_url)
# return the original handler
return False
def conditional_headers(self, request):
cache_url = self.cache_url(request.url)
resp = self.serializer.loads(request, self.cache.get(cache_url))
new_headers = {}
if resp:
headers = CaseInsensitiveDict(resp.headers)
if "etag" in headers:
new_headers["If-None-Match"] = headers["ETag"]
if "last-modified" in headers:
new_headers["If-Modified-Since"] = headers["Last-Modified"]
return new_headers
def cache_response(self, request, response, body=None, status_codes=None):
"""
Algorithm for caching requests.
This assumes a requests Response object.
"""
# From httplib2: Don't cache 206's since we aren't going to
# handle byte range requests
cacheable_status_codes = status_codes or self.cacheable_status_codes
if response.status not in cacheable_status_codes:
logger.debug(
"Status code %s not in %s", response.status, cacheable_status_codes
)
return
response_headers = CaseInsensitiveDict(response.headers)
# If we've been given a body, our response has a Content-Length, and
# that Content-Length is valid, then we can check whether the body
# we've been given matches the expected size; if it doesn't, we just
# skip trying to cache it.
if (
body is not None
and "content-length" in response_headers
and response_headers["content-length"].isdigit()
and int(response_headers["content-length"]) != len(body)
):
return
cc_req = self.parse_cache_control(request.headers)
cc = self.parse_cache_control(response_headers)
cache_url = self.cache_url(request.url)
logger.debug('Updating cache with response from "%s"', cache_url)
# Delete it from the cache if we happen to have it stored there
no_store = False
if "no-store" in cc:
no_store = True
logger.debug('Response header has "no-store"')
if "no-store" in cc_req:
no_store = True
logger.debug('Request header has "no-store"')
if no_store and self.cache.get(cache_url):
logger.debug('Purging existing cache entry to honor "no-store"')
self.cache.delete(cache_url)
if no_store:
return
# https://tools.ietf.org/html/rfc7234#section-4.1:
# A Vary header field-value of "*" always fails to match.
# Storing such a response leads to a deserialization warning
# during cache lookup and is not allowed to ever be served,
# so storing it can be avoided.
if "*" in response_headers.get("vary", ""):
logger.debug('Response header has "Vary: *"')
return
# If we've been given an etag, then keep the response
if self.cache_etags and "etag" in response_headers:
logger.debug("Caching due to etag")
self.cache.set(
cache_url, self.serializer.dumps(request, response, body=body)
)
# Add to the cache any 301s. We do this before looking at
# the Date headers.
elif response.status == 301:
logger.debug("Caching permanant redirect")
self.cache.set(cache_url, self.serializer.dumps(request, response))
# Add to the cache if the response headers demand it. If there
# is no date header then we can't do anything about expiring
# the cache.
elif "date" in response_headers:
# cache when there is a max-age > 0
if "max-age" in cc and cc["max-age"] > 0:
logger.debug("Caching b/c date exists and max-age > 0")
self.cache.set(
cache_url, self.serializer.dumps(request, response, body=body)
)
# If the request can expire, it means we should cache it
# in the meantime.
elif "expires" in response_headers:
if response_headers["expires"]:
logger.debug("Caching b/c of expires header")
self.cache.set(
cache_url, self.serializer.dumps(request, response, body=body)
)
def update_cached_response(self, request, response):
"""On a 304 we will get a new set of headers that we want to
update our cached value with, assuming we have one.
This should only ever be called when we've sent an ETag and
gotten a 304 as the response.
"""
cache_url = self.cache_url(request.url)
cached_response = self.serializer.loads(request, self.cache.get(cache_url))
if not cached_response:
# we didn't have a cached response
return response
# Let's update our headers with the headers from the new request:
# http://tools.ietf.org/html/draft-ietf-httpbis-p4-conditional-26#section-4.1
#
# The server isn't supposed to send headers that would make
# the cached body invalid. But... just in case, we'll be sure
# to strip out ones we know that might be problematic due to
# typical assumptions.
excluded_headers = ["content-length"]
cached_response.headers.update(
dict(
(k, v)
for k, v in response.headers.items()
if k.lower() not in excluded_headers
)
)
# we want a 200 b/c we have content via the cache
cached_response.status = 200
# update our cache
self.cache.set(cache_url, self.serializer.dumps(request, cached_response))
return cached_response

View file

@@ -0,0 +1,80 @@
from io import BytesIO
class CallbackFileWrapper(object):
"""
Small wrapper around a fp object which will tee everything read into a
buffer, and when that file is closed it will execute a callback with the
contents of that buffer.
All attributes are proxied to the underlying file object.
This class uses members with a double underscore (__) leading prefix so as
not to accidentally shadow an attribute.
"""
def __init__(self, fp, callback):
self.__buf = BytesIO()
self.__fp = fp
self.__callback = callback
def __getattr__(self, name):
# The vagaries of garbage collection mean that self.__fp is
# not always set. Using __getattribute__ with the mangled
# private name [0] lets us look up the attribute value and
# raise an AttributeError when it doesn't exist. This stops
# things from infinitely recursing into getattr when
# self.__fp hasn't been set.
#
# [0] https://docs.python.org/2/reference/expressions.html#atom-identifiers
fp = self.__getattribute__("_CallbackFileWrapper__fp")
return getattr(fp, name)
def __is_fp_closed(self):
try:
return self.__fp.fp is None
except AttributeError:
pass
try:
return self.__fp.closed
except AttributeError:
pass
# We just don't cache it then.
# TODO: Add some logging here...
return False
def _close(self):
if self.__callback:
self.__callback(self.__buf.getvalue())
# We assign this to None here, because otherwise we can get into
# really tricky problems where the CPython interpreter deadlocks
# because the callback is holding a reference to something which
# has a __del__ method. Setting this to None breaks the cycle
# and allows the garbage collector to do its thing normally.
self.__callback = None
def read(self, amt=None):
data = self.__fp.read(amt)
self.__buf.write(data)
if self.__is_fp_closed():
self._close()
return data
def _safe_read(self, amt):
data = self.__fp._safe_read(amt)
if amt == 2 and data == b"\r\n":
# urllib executes this read to toss the CRLF at the end
# of the chunk.
return data
self.__buf.write(data)
if self.__is_fp_closed():
self._close()
return data

View file

@@ -0,0 +1,135 @@
import calendar
import time
from email.utils import formatdate, parsedate, parsedate_tz
from datetime import datetime, timedelta
TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT"
def expire_after(delta, date=None):
date = date or datetime.utcnow()
return date + delta
def datetime_to_header(dt):
return formatdate(calendar.timegm(dt.timetuple()))
class BaseHeuristic(object):
def warning(self, response):
"""
Return a valid 1xx warning header value describing the cache
adjustments.
The response is provided to allow warnings like 113
http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need
to explicitly say the response is over 24 hours old.
"""
return '110 - "Response is Stale"'
def update_headers(self, response):
"""Update the response headers with any new headers.
NOTE: This SHOULD always include some Warning header to
signify that the response was cached by the client, not
by way of the provided headers.
"""
return {}
def apply(self, response):
updated_headers = self.update_headers(response)
if updated_headers:
response.headers.update(updated_headers)
warning_header_value = self.warning(response)
if warning_header_value is not None:
response.headers.update({"Warning": warning_header_value})
return response
class OneDayCache(BaseHeuristic):
"""
Cache the response by providing an Expires header 1 day in the
future.
"""
def update_headers(self, response):
headers = {}
if "expires" not in response.headers:
date = parsedate(response.headers["date"])
expires = expire_after(timedelta(days=1), date=datetime(*date[:6]))
headers["expires"] = datetime_to_header(expires)
headers["cache-control"] = "public"
return headers
class ExpiresAfter(BaseHeuristic):
"""
Cache **all** requests for a defined time period.
"""
def __init__(self, **kw):
self.delta = timedelta(**kw)
def update_headers(self, response):
expires = expire_after(self.delta)
return {"expires": datetime_to_header(expires), "cache-control": "public"}
def warning(self, response):
tmpl = "110 - Automatically cached for %s. Response might be stale"
return tmpl % self.delta
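# Usage sketch (illustrative): attach a heuristic when wrapping a session,
# mirroring the wrapper module elsewhere in this commit.
#
#   import requests
#   from cachecontrol import CacheControl
#   from cachecontrol.heuristics import ExpiresAfter
#
#   sess = CacheControl(requests.Session(), heuristic=ExpiresAfter(days=1))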
class LastModified(BaseHeuristic):
"""
If there is no Expires header already, fall back on Last-Modified
using the heuristic from
http://tools.ietf.org/html/rfc7234#section-4.2.2
to calculate a reasonable value.
Firefox also does something like this per
https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ
http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397
Unlike Mozilla, we limit this to 24 hours.
"""
cacheable_by_default_statuses = {
200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501
}
def update_headers(self, resp):
headers = resp.headers
if "expires" in headers:
return {}
if "cache-control" in headers and headers["cache-control"] != "public":
return {}
if resp.status not in self.cacheable_by_default_statuses:
return {}
if "date" not in headers or "last-modified" not in headers:
return {}
date = calendar.timegm(parsedate_tz(headers["date"]))
last_modified = parsedate(headers["last-modified"])
if date is None or last_modified is None:
return {}
now = time.time()
current_age = max(0, now - date)
delta = date - calendar.timegm(last_modified)
freshness_lifetime = max(0, min(delta / 10, 24 * 3600))
if freshness_lifetime <= current_age:
return {}
expires = date + freshness_lifetime
return {"expires": time.strftime(TIME_FMT, time.gmtime(expires))}
def warning(self, resp):
return None

View file

@@ -0,0 +1,188 @@
import base64
import io
import json
import zlib
import msgpack
from requests.structures import CaseInsensitiveDict
from .compat import HTTPResponse, pickle, text_type
def _b64_decode_bytes(b):
return base64.b64decode(b.encode("ascii"))
def _b64_decode_str(s):
return _b64_decode_bytes(s).decode("utf8")
class Serializer(object):
def dumps(self, request, response, body=None):
response_headers = CaseInsensitiveDict(response.headers)
if body is None:
body = response.read(decode_content=False)
# NOTE: 99% sure this is dead code. I'm only leaving it
# here b/c I don't have a test yet to prove
# it. Basically, before using
# `cachecontrol.filewrapper.CallbackFileWrapper`,
# this made an effort to reset the file handle. The
# `CallbackFileWrapper` short circuits this code by
# setting the body as the content is consumed, the
# result being a `body` argument is *always* passed
# into cache_response, and in turn,
# `Serializer.dump`.
response._fp = io.BytesIO(body)
# NOTE: This is all a bit weird, but it's really important that on
# Python 2.x these objects are unicode and not str, even when
# they contain only ascii. The problem here is that msgpack
# understands the difference between unicode and bytes and we
# have it set to differentiate between them, however Python 2
# doesn't know the difference. Forcing these to unicode will be
# enough to have msgpack know the difference.
data = {
u"response": {
u"body": body,
u"headers": dict(
(text_type(k), text_type(v)) for k, v in response.headers.items()
),
u"status": response.status,
u"version": response.version,
u"reason": text_type(response.reason),
u"strict": response.strict,
u"decode_content": response.decode_content,
}
}
# Construct our vary headers
data[u"vary"] = {}
if u"vary" in response_headers:
varied_headers = response_headers[u"vary"].split(",")
for header in varied_headers:
header = text_type(header).strip()
header_value = request.headers.get(header, None)
if header_value is not None:
header_value = text_type(header_value)
data[u"vary"][header] = header_value
return b",".join([b"cc=4", msgpack.dumps(data, use_bin_type=True)])
def loads(self, request, data):
# Short circuit if we've been given an empty set of data
if not data:
return
# Determine what version of the serializer the data was serialized
# with
try:
ver, data = data.split(b",", 1)
except ValueError:
ver = b"cc=0"
# Make sure that our "ver" is actually a version and isn't a false
# positive from a , being in the data stream.
if ver[:3] != b"cc=":
data = ver + data
ver = b"cc=0"
# Get the version number out of the cc=N
ver = ver.split(b"=", 1)[-1].decode("ascii")
# Dispatch to the actual load method for the given version
try:
return getattr(self, "_loads_v{}".format(ver))(request, data)
except AttributeError:
# This is a version we don't have a loads function for, so we'll
# just treat it as a miss and return None
return
def prepare_response(self, request, cached):
"""Verify our vary headers match and construct a real urllib3
HTTPResponse object.
"""
# Special case the '*' Vary value as it means we cannot actually
# determine if the cached response is suitable for this request.
# This case is also handled in the controller code when creating
# a cache entry, but is left here for backwards compatibility.
if "*" in cached.get("vary", {}):
return
# Ensure that the Vary headers for the cached response match our
# request
for header, value in cached.get("vary", {}).items():
if request.headers.get(header, None) != value:
return
body_raw = cached["response"].pop("body")
headers = CaseInsensitiveDict(data=cached["response"]["headers"])
if headers.get("transfer-encoding", "") == "chunked":
headers.pop("transfer-encoding")
cached["response"]["headers"] = headers
try:
body = io.BytesIO(body_raw)
except TypeError:
# This can happen if cachecontrol serialized to v1 format (pickle)
# using Python 2. A Python 2 str(byte string) will be unpickled as
# a Python 3 str (unicode string), which will cause the above to
# fail with:
#
# TypeError: 'str' does not support the buffer interface
body = io.BytesIO(body_raw.encode("utf8"))
return HTTPResponse(body=body, preload_content=False, **cached["response"])
def _loads_v0(self, request, data):
# The original legacy cache data. This doesn't contain enough
# information to construct everything we need, so we'll treat this as
# a miss.
return
def _loads_v1(self, request, data):
try:
cached = pickle.loads(data)
except ValueError:
return
return self.prepare_response(request, cached)
def _loads_v2(self, request, data):
try:
cached = json.loads(zlib.decompress(data).decode("utf8"))
except (ValueError, zlib.error):
return
# We need to decode the items that we've base64 encoded
cached["response"]["body"] = _b64_decode_bytes(cached["response"]["body"])
cached["response"]["headers"] = dict(
(_b64_decode_str(k), _b64_decode_str(v))
for k, v in cached["response"]["headers"].items()
)
cached["response"]["reason"] = _b64_decode_str(cached["response"]["reason"])
cached["vary"] = dict(
(_b64_decode_str(k), _b64_decode_str(v) if v is not None else v)
for k, v in cached["vary"].items()
)
return self.prepare_response(request, cached)
def _loads_v3(self, request, data):
# Due to Python 2 encoding issues, it's impossible to know for sure
# exactly how to load v3 entries, thus we'll treat these as a miss so
# that they get rewritten out as v4 entries.
return
def _loads_v4(self, request, data):
try:
cached = msgpack.loads(data, raw=False)
except ValueError:
return
return self.prepare_response(request, cached)

View file

@@ -0,0 +1,29 @@
from .adapter import CacheControlAdapter
from .cache import DictCache
def CacheControl(
sess,
cache=None,
cache_etags=True,
serializer=None,
heuristic=None,
controller_class=None,
adapter_class=None,
cacheable_methods=None,
):
cache = DictCache() if cache is None else cache
adapter_class = adapter_class or CacheControlAdapter
adapter = adapter_class(
cache,
cache_etags=cache_etags,
serializer=serializer,
heuristic=heuristic,
controller_class=controller_class,
cacheable_methods=cacheable_methods,
)
sess.mount("http://", adapter)
sess.mount("https://", adapter)
return sess

View file

@@ -0,0 +1 @@
pip

View file

@@ -0,0 +1,20 @@
The MIT License (MIT)
Copyright (c) 2014-2020 Thomas Kemmer
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View file

@@ -0,0 +1,124 @@
Metadata-Version: 2.1
Name: cachetools
Version: 4.1.1
Summary: Extensible memoizing collections and decorators
Home-page: https://github.com/tkem/cachetools/
Author: Thomas Kemmer
Author-email: tkemmer@computer.org
License: MIT
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Environment :: Other Environment
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Requires-Python: ~=3.5
cachetools
========================================================================
.. image:: http://img.shields.io/pypi/v/cachetools
:target: https://pypi.org/project/cachetools/
:alt: Latest PyPI version
.. image:: https://img.shields.io/readthedocs/cachetools
:target: http://cachetools.readthedocs.io/
:alt: Documentation build status
.. image:: http://img.shields.io/travis/tkem/cachetools
:target: https://travis-ci.org/tkem/cachetools/
:alt: Travis CI build status
.. image:: http://img.shields.io/coveralls/tkem/cachetools
:target: https://coveralls.io/r/tkem/cachetools
:alt: Test coverage
.. image:: https://img.shields.io/github/license/tkem/cachetools
:target: http://raw.github.com/tkem/cachetools/master/LICENSE
:alt: License
This module provides various memoizing collections and decorators,
including variants of the Python Standard Library's `@lru_cache`_
function decorator.
.. code-block:: python
from cachetools import cached, LRUCache, TTLCache
# speed up calculating Fibonacci numbers with dynamic programming
@cached(cache={})
def fib(n):
return n if n < 2 else fib(n - 1) + fib(n - 2)
# cache least recently used Python Enhancement Proposals
@cached(cache=LRUCache(maxsize=32))
def get_pep(num):
url = 'http://www.python.org/dev/peps/pep-%04d/' % num
with urllib.request.urlopen(url) as s:
return s.read()
# cache weather data for no longer than ten minutes
@cached(cache=TTLCache(maxsize=1024, ttl=600))
def get_weather(place):
return owm.weather_at_place(place).get_weather()
For the purpose of this module, a *cache* is a mutable_ mapping_ of a
fixed maximum size. When the cache is full, i.e. when adding another
item would exceed its maximum size, the cache must choose
which item(s) to discard based on a suitable `cache algorithm`_. In
general, a cache's size is the total size of its items, and an item's
size is a property or function of its value, e.g. the result of
``sys.getsizeof(value)``. For the trivial but common case that each
item counts as ``1``, a cache's size is equal to the number of its
items, or ``len(cache)``.
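For illustration, a sketch (not from the original README) of a size-aware
cache where each item's size is the length of its string value:

.. code-block:: python

    from cachetools import LRUCache

    # Total size is capped at 100 characters across all cached values.
    cache = LRUCache(maxsize=100, getsizeof=len)
    cache['greeting'] = 'hello'   # cache.currsize == 5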
Multiple cache classes based on different caching algorithms are
implemented, and decorators for easily memoizing function and method
calls are provided, too.
Installation
------------------------------------------------------------------------
cachetools is available from PyPI_ and can be installed by running::
pip install cachetools
Project Resources
------------------------------------------------------------------------
- `Documentation`_
- `Issue tracker`_
- `Source code`_
- `Change log`_
License
------------------------------------------------------------------------
Copyright (c) 2014-2020 Thomas Kemmer.
Licensed under the `MIT License`_.
.. _@lru_cache: http://docs.python.org/3/library/functools.html#functools.lru_cache
.. _mutable: http://docs.python.org/dev/glossary.html#term-mutable
.. _mapping: http://docs.python.org/dev/glossary.html#term-mapping
.. _cache algorithm: http://en.wikipedia.org/wiki/Cache_algorithms
.. _PyPI: https://pypi.org/project/cachetools/
.. _Documentation: https://cachetools.readthedocs.io/
.. _Issue tracker: https://github.com/tkem/cachetools/issues/
.. _Source code: https://github.com/tkem/cachetools/
.. _Change log: https://github.com/tkem/cachetools/blob/master/CHANGELOG.rst
.. _MIT License: http://raw.github.com/tkem/cachetools/master/LICENSE

View file

@@ -0,0 +1,26 @@
cachetools-4.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
cachetools-4.1.1.dist-info/LICENSE,sha256=WjqbFSk9D0xU0ftRzw9RpxHwz1gvgKDnMwR4ZwwX9ns,1085
cachetools-4.1.1.dist-info/METADATA,sha256=UCFBVawzngdeCUWD5P33LTAx5AShjKmQ29q3kcc696A,4383
cachetools-4.1.1.dist-info/RECORD,,
cachetools-4.1.1.dist-info/WHEEL,sha256=g4nMs7d-Xl9-xC9XovUrsDHGXt-FT0E17Yqo92DEfvY,92
cachetools-4.1.1.dist-info/top_level.txt,sha256=ai2FH78TGwoBcCgVfoqbzk5IQCtnDukdSs4zKuVPvDs,11
cachetools/__init__.py,sha256=65iD423Ll5taTrDqqSQH2oxmUBHfLP48oWTcOQGGS6M,375
cachetools/__pycache__/__init__.cpython-36.pyc,,
cachetools/__pycache__/abc.cpython-36.pyc,,
cachetools/__pycache__/cache.cpython-36.pyc,,
cachetools/__pycache__/decorators.cpython-36.pyc,,
cachetools/__pycache__/func.cpython-36.pyc,,
cachetools/__pycache__/keys.cpython-36.pyc,,
cachetools/__pycache__/lfu.cpython-36.pyc,,
cachetools/__pycache__/lru.cpython-36.pyc,,
cachetools/__pycache__/rr.cpython-36.pyc,,
cachetools/__pycache__/ttl.cpython-36.pyc,,
cachetools/abc.py,sha256=KdAOSBVp5jb_MUYdaoiWqbfXsiO9epC-KWVEXXD2TXc,1076
cachetools/cache.py,sha256=JQPstpjP-TgdpLdQbrGN3gU8F9yk1IQdkFtaK0_CJEo,2272
cachetools/decorators.py,sha256=Z8XaWDAnlq50Qf3FVrKSPbwr15dDkGRITMcHsVdy2AQ,2829
cachetools/func.py,sha256=XXIllKSnfzt_Z8NcALeT5gz-tc1uU2V91502Z2QFTYQ,4009
cachetools/keys.py,sha256=bKwFwU15s-vKWM1lnNdcJWfyQxu7uqIcRRJNg9hUfFg,1466
cachetools/lfu.py,sha256=xAkYTpx8-7Gg1IOw08UVxncQys8tn7sPg09lr9IvTyQ,1065
cachetools/lru.py,sha256=0XNTY7VzYEdV9yCdOMwnhkBeQox_N6VscVzNFm-VwRo,1188
cachetools/rr.py,sha256=uoIxqj9xFYcA2sfKwoOQYd8JE6wzMXPrHLlUsuscILA,974
cachetools/ttl.py,sha256=VI1Dci_sozLA8m15-l5OfNFfJ1GUhuWm39ISjvxrMg4,5830

View file

@@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.34.2)
Root-Is-Purelib: true
Tag: py3-none-any

View file

@@ -0,0 +1 @@
cachetools

View file

@@ -0,0 +1,20 @@
"""Extensible memoizing collections and decorators."""
from .cache import Cache
from .decorators import cached, cachedmethod
from .lfu import LFUCache
from .lru import LRUCache
from .rr import RRCache
from .ttl import TTLCache
__all__ = (
'Cache',
'LFUCache',
'LRUCache',
'RRCache',
'TTLCache',
'cached',
'cachedmethod'
)
__version__ = '4.1.1'

View file

@@ -0,0 +1,46 @@
from abc import abstractmethod
from collections.abc import MutableMapping
class DefaultMapping(MutableMapping):
__slots__ = ()
@abstractmethod
def __contains__(self, key): # pragma: nocover
return False
@abstractmethod
def __getitem__(self, key): # pragma: nocover
if hasattr(self.__class__, '__missing__'):
return self.__class__.__missing__(self, key)
else:
raise KeyError(key)
def get(self, key, default=None):
if key in self:
return self[key]
else:
return default
__marker = object()
def pop(self, key, default=__marker):
if key in self:
value = self[key]
del self[key]
elif default is self.__marker:
raise KeyError(key)
else:
value = default
return value
def setdefault(self, key, default=None):
if key in self:
value = self[key]
else:
self[key] = value = default
return value
DefaultMapping.register(dict)

View file

@@ -0,0 +1,89 @@
from .abc import DefaultMapping


class _DefaultSize(object):
    def __getitem__(self, _):
        return 1

    def __setitem__(self, _, value):
        assert value == 1

    def pop(self, _):
        return 1


class Cache(DefaultMapping):
    """Mutable mapping to serve as a simple cache or cache base class."""

    __size = _DefaultSize()

    def __init__(self, maxsize, getsizeof=None):
        if getsizeof:
            self.getsizeof = getsizeof
        if self.getsizeof is not Cache.getsizeof:
            self.__size = dict()
        self.__data = dict()
        self.__currsize = 0
        self.__maxsize = maxsize

    def __repr__(self):
        return '%s(%r, maxsize=%r, currsize=%r)' % (
            self.__class__.__name__,
            list(self.__data.items()),
            self.__maxsize,
            self.__currsize,
        )

    def __getitem__(self, key):
        try:
            return self.__data[key]
        except KeyError:
            return self.__missing__(key)

    def __setitem__(self, key, value):
        maxsize = self.__maxsize
        size = self.getsizeof(value)
        if size > maxsize:
            raise ValueError('value too large')
        if key not in self.__data or self.__size[key] < size:
            while self.__currsize + size > maxsize:
                self.popitem()
        if key in self.__data:
            diffsize = size - self.__size[key]
        else:
            diffsize = size
        self.__data[key] = value
        self.__size[key] = size
        self.__currsize += diffsize

    def __delitem__(self, key):
        size = self.__size.pop(key)
        del self.__data[key]
        self.__currsize -= size

    def __contains__(self, key):
        return key in self.__data

    def __missing__(self, key):
        raise KeyError(key)

    def __iter__(self):
        return iter(self.__data)

    def __len__(self):
        return len(self.__data)

    @property
    def maxsize(self):
        """The maximum size of the cache."""
        return self.__maxsize

    @property
    def currsize(self):
        """The current size of the cache."""
        return self.__currsize

    @staticmethod
    def getsizeof(value):
        """Return the size of a cache element's value."""
        return 1
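
A short sketch of the size accounting implemented above, using len as a hypothetical getsizeof so that currsize tracks total string length; the keys and values are illustrative.

from cachetools import Cache

c = Cache(maxsize=10, getsizeof=len)
c['a'] = 'xxxx'      # size 4
c['b'] = 'yyyyy'     # size 5; currsize is now 9
assert c.currsize == 9
c['c'] = 'zz'        # 9 + 2 > 10, so popitem() runs until the new value fits
assert c.currsize <= 10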

View file

@ -0,0 +1,88 @@
import functools

from .keys import hashkey


def cached(cache, key=hashkey, lock=None):
    """Decorator to wrap a function with a memoizing callable that saves
    results in a cache.

    """
    def decorator(func):
        if cache is None:
            def wrapper(*args, **kwargs):
                return func(*args, **kwargs)
        elif lock is None:
            def wrapper(*args, **kwargs):
                k = key(*args, **kwargs)
                try:
                    return cache[k]
                except KeyError:
                    pass  # key not found
                v = func(*args, **kwargs)
                try:
                    cache[k] = v
                except ValueError:
                    pass  # value too large
                return v
        else:
            def wrapper(*args, **kwargs):
                k = key(*args, **kwargs)
                try:
                    with lock:
                        return cache[k]
                except KeyError:
                    pass  # key not found
                v = func(*args, **kwargs)
                try:
                    with lock:
                        cache[k] = v
                except ValueError:
                    pass  # value too large
                return v
        return functools.update_wrapper(wrapper, func)
    return decorator


def cachedmethod(cache, key=hashkey, lock=None):
    """Decorator to wrap a class or instance method with a memoizing
    callable that saves results in a cache.

    """
    def decorator(method):
        if lock is None:
            def wrapper(self, *args, **kwargs):
                c = cache(self)
                if c is None:
                    return method(self, *args, **kwargs)
                k = key(*args, **kwargs)
                try:
                    return c[k]
                except KeyError:
                    pass  # key not found
                v = method(self, *args, **kwargs)
                try:
                    c[k] = v
                except ValueError:
                    pass  # value too large
                return v
        else:
            def wrapper(self, *args, **kwargs):
                c = cache(self)
                if c is None:
                    return method(self, *args, **kwargs)
                k = key(*args, **kwargs)
                try:
                    with lock(self):
                        return c[k]
                except KeyError:
                    pass  # key not found
                v = method(self, *args, **kwargs)
                try:
                    with lock(self):
                        c[k] = v
                except ValueError:
                    pass  # value too large
                return v
        return functools.update_wrapper(wrapper, method)
    return decorator
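
A brief sketch of both decorators above; the lock, the fib function, and the Resolver class are illustrative. cachedmethod receives a callable that extracts the per-instance cache.

import operator
import threading

from cachetools import LRUCache, cached, cachedmethod

@cached(cache=LRUCache(maxsize=128), lock=threading.RLock())
def fib(n):
    return n if n < 2 else fib(n - 1) + fib(n - 2)

assert fib(30) == 832040

class Resolver:
    def __init__(self):
        self.cache = LRUCache(maxsize=32)

    @cachedmethod(operator.attrgetter('cache'))
    def resolve(self, name):
        return name.upper()

assert Resolver().resolve('abc') == 'ABC'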

View file

@ -0,0 +1,147 @@
"""`functools.lru_cache` compatible memoizing function decorators."""

import collections
import functools
import math
import random
import time

try:
    from threading import RLock
except ImportError:  # pragma: no cover
    from dummy_threading import RLock

from . import keys
from .lfu import LFUCache
from .lru import LRUCache
from .rr import RRCache
from .ttl import TTLCache

__all__ = ('lfu_cache', 'lru_cache', 'rr_cache', 'ttl_cache')


_CacheInfo = collections.namedtuple('CacheInfo', [
    'hits', 'misses', 'maxsize', 'currsize'
])


class _UnboundCache(dict):

    @property
    def maxsize(self):
        return None

    @property
    def currsize(self):
        return len(self)


class _UnboundTTLCache(TTLCache):
    def __init__(self, ttl, timer):
        TTLCache.__init__(self, math.inf, ttl, timer)

    @property
    def maxsize(self):
        return None


def _cache(cache, typed):
    maxsize = cache.maxsize

    def decorator(func):
        key = keys.typedkey if typed else keys.hashkey
        lock = RLock()
        stats = [0, 0]

        def wrapper(*args, **kwargs):
            k = key(*args, **kwargs)
            with lock:
                try:
                    v = cache[k]
                    stats[0] += 1
                    return v
                except KeyError:
                    stats[1] += 1
            v = func(*args, **kwargs)
            try:
                with lock:
                    cache[k] = v
            except ValueError:
                pass  # value too large
            return v

        def cache_info():
            with lock:
                hits, misses = stats
                maxsize = cache.maxsize
                currsize = cache.currsize
            return _CacheInfo(hits, misses, maxsize, currsize)

        def cache_clear():
            with lock:
                try:
                    cache.clear()
                finally:
                    stats[:] = [0, 0]

        wrapper.cache_info = cache_info
        wrapper.cache_clear = cache_clear
        wrapper.cache_parameters = lambda: {'maxsize': maxsize, 'typed': typed}
        functools.update_wrapper(wrapper, func)
        return wrapper
    return decorator


def lfu_cache(maxsize=128, typed=False):
    """Decorator to wrap a function with a memoizing callable that saves
    up to `maxsize` results based on a Least Frequently Used (LFU)
    algorithm.

    """
    if maxsize is None:
        return _cache(_UnboundCache(), typed)
    elif callable(maxsize):
        return _cache(LFUCache(128), typed)(maxsize)
    else:
        return _cache(LFUCache(maxsize), typed)


def lru_cache(maxsize=128, typed=False):
    """Decorator to wrap a function with a memoizing callable that saves
    up to `maxsize` results based on a Least Recently Used (LRU)
    algorithm.

    """
    if maxsize is None:
        return _cache(_UnboundCache(), typed)
    elif callable(maxsize):
        return _cache(LRUCache(128), typed)(maxsize)
    else:
        return _cache(LRUCache(maxsize), typed)


def rr_cache(maxsize=128, choice=random.choice, typed=False):
    """Decorator to wrap a function with a memoizing callable that saves
    up to `maxsize` results based on a Random Replacement (RR)
    algorithm.

    """
    if maxsize is None:
        return _cache(_UnboundCache(), typed)
    elif callable(maxsize):
        return _cache(RRCache(128, choice), typed)(maxsize)
    else:
        return _cache(RRCache(maxsize, choice), typed)


def ttl_cache(maxsize=128, ttl=600, timer=time.monotonic, typed=False):
    """Decorator to wrap a function with a memoizing callable that saves
    up to `maxsize` results based on a Least Recently Used (LRU)
    algorithm with a per-item time-to-live (TTL) value.

    """
    if maxsize is None:
        return _cache(_UnboundTTLCache(ttl, timer), typed)
    elif callable(maxsize):
        return _cache(TTLCache(128, ttl, timer), typed)(maxsize)
    else:
        return _cache(TTLCache(maxsize, ttl, timer), typed)
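
A minimal sketch of the functools-style decorators defined above; the function and its arguments are illustrative.

from cachetools.func import ttl_cache

@ttl_cache(maxsize=32, ttl=300)
def get_config(name):
    return {'name': name}

get_config('db')
get_config('db')                  # second call is served from the cache
info = get_config.cache_info()    # CacheInfo(hits=1, misses=1, maxsize=32, currsize=1)
assert info.hits == 1 and info.misses == 1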

View file

@ -0,0 +1,52 @@
"""Key functions for memoizing decorators."""

__all__ = ('hashkey', 'typedkey')


class _HashedTuple(tuple):
    """A tuple that ensures that hash() will be called no more than once
    per element, since cache decorators will hash the key multiple
    times on a cache miss.  See also _HashedSeq in the standard
    library functools implementation.

    """

    __hashvalue = None

    def __hash__(self, hash=tuple.__hash__):
        hashvalue = self.__hashvalue
        if hashvalue is None:
            self.__hashvalue = hashvalue = hash(self)
        return hashvalue

    def __add__(self, other, add=tuple.__add__):
        return _HashedTuple(add(self, other))

    def __radd__(self, other, add=tuple.__add__):
        return _HashedTuple(add(other, self))

    def __getstate__(self):
        return {}


# used for separating keyword arguments; we do not use an object
# instance here so identity is preserved when pickling/unpickling
_kwmark = (_HashedTuple,)


def hashkey(*args, **kwargs):
    """Return a cache key for the specified hashable arguments."""
    if kwargs:
        return _HashedTuple(args + sum(sorted(kwargs.items()), _kwmark))
    else:
        return _HashedTuple(args)


def typedkey(*args, **kwargs):
    """Return a typed cache key for the specified hashable arguments."""
    key = hashkey(*args, **kwargs)
    key += tuple(type(v) for v in args)
    key += tuple(type(v) for _, v in sorted(kwargs.items()))
    return key
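
A quick sketch of the difference between the two key functions above; the argument values are illustrative.

from cachetools.keys import hashkey, typedkey

assert hashkey(1, 2) == hashkey(1.0, 2)      # equal values produce equal keys
assert typedkey(1, 2) != typedkey(1.0, 2)    # typedkey also folds in the types
assert hashkey(1, x=2) != hashkey(1, y=2)    # keyword names are part of the key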

View file

@ -0,0 +1,34 @@
import collections

from .cache import Cache


class LFUCache(Cache):
    """Least Frequently Used (LFU) cache implementation."""

    def __init__(self, maxsize, getsizeof=None):
        Cache.__init__(self, maxsize, getsizeof)
        self.__counter = collections.Counter()

    def __getitem__(self, key, cache_getitem=Cache.__getitem__):
        value = cache_getitem(self, key)
        self.__counter[key] -= 1
        return value

    def __setitem__(self, key, value, cache_setitem=Cache.__setitem__):
        cache_setitem(self, key, value)
        self.__counter[key] -= 1

    def __delitem__(self, key, cache_delitem=Cache.__delitem__):
        cache_delitem(self, key)
        del self.__counter[key]

    def popitem(self):
        """Remove and return the `(key, value)` pair least frequently used."""
        try:
            (key, _), = self.__counter.most_common(1)
        except ValueError:
            msg = '%s is empty' % self.__class__.__name__
            raise KeyError(msg) from None
        else:
            return (key, self.pop(key))
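
A short sketch of the eviction behaviour above. The counter counts down on every use, so most_common(1) yields the least frequently used key; the keys here are illustrative.

from cachetools import LFUCache

c = LFUCache(maxsize=2)
c['a'] = 1
c['b'] = 2
_ = c['a']      # 'a' is now used more often than 'b'
c['c'] = 3      # inserting over capacity evicts 'b', the least frequently used
assert 'b' not in c and 'a' in c and 'c' in c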

View file

@ -0,0 +1,40 @@
import collections

from .cache import Cache


class LRUCache(Cache):
    """Least Recently Used (LRU) cache implementation."""

    def __init__(self, maxsize, getsizeof=None):
        Cache.__init__(self, maxsize, getsizeof)
        self.__order = collections.OrderedDict()

    def __getitem__(self, key, cache_getitem=Cache.__getitem__):
        value = cache_getitem(self, key)
        self.__update(key)
        return value

    def __setitem__(self, key, value, cache_setitem=Cache.__setitem__):
        cache_setitem(self, key, value)
        self.__update(key)

    def __delitem__(self, key, cache_delitem=Cache.__delitem__):
        cache_delitem(self, key)
        del self.__order[key]

    def popitem(self):
        """Remove and return the `(key, value)` pair least recently used."""
        try:
            key = next(iter(self.__order))
        except StopIteration:
            msg = '%s is empty' % self.__class__.__name__
            raise KeyError(msg) from None
        else:
            return (key, self.pop(key))

    def __update(self, key):
        try:
            self.__order.move_to_end(key)
        except KeyError:
            self.__order[key] = None
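
A matching sketch for the LRU policy above; accessing a key moves it to the back of the order and protects it from eviction. The keys are illustrative.

from cachetools import LRUCache

c = LRUCache(maxsize=2)
c['a'] = 1
c['b'] = 2
_ = c['a']      # 'a' becomes the most recently used entry
c['c'] = 3      # evicts 'b', the least recently used
assert 'b' not in c and 'a' in c and 'c' in c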

View file

@ -0,0 +1,35 @@
import random

from .cache import Cache


# random.choice cannot be pickled in Python 2.7
def _choice(seq):
    return random.choice(seq)


class RRCache(Cache):
    """Random Replacement (RR) cache implementation."""

    def __init__(self, maxsize, choice=random.choice, getsizeof=None):
        Cache.__init__(self, maxsize, getsizeof)
        # TODO: use None as default, assign to self.choice directly?
        if choice is random.choice:
            self.__choice = _choice
        else:
            self.__choice = choice

    @property
    def choice(self):
        """The `choice` function used by the cache."""
        return self.__choice

    def popitem(self):
        """Remove and return a random `(key, value)` pair."""
        try:
            key = self.__choice(list(self))
        except IndexError:
            msg = '%s is empty' % self.__class__.__name__
            raise KeyError(msg) from None
        else:
            return (key, self.pop(key))
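
A brief sketch of the choice hook above; passing the built-in min as the choice function makes eviction deterministic for illustration.

from cachetools import RRCache

c = RRCache(maxsize=2, choice=min)   # always evict the smallest key
c['a'] = 1
c['b'] = 2
c['c'] = 3                           # min(['a', 'b']) == 'a' is evicted
assert 'a' not in c and c.choice is min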

View file

@ -0,0 +1,209 @@
import collections
import time

from .cache import Cache


class _Link(object):

    __slots__ = ('key', 'expire', 'next', 'prev')

    def __init__(self, key=None, expire=None):
        self.key = key
        self.expire = expire

    def __reduce__(self):
        return _Link, (self.key, self.expire)

    def unlink(self):
        next = self.next
        prev = self.prev
        prev.next = next
        next.prev = prev


class _Timer(object):

    def __init__(self, timer):
        self.__timer = timer
        self.__nesting = 0

    def __call__(self):
        if self.__nesting == 0:
            return self.__timer()
        else:
            return self.__time

    def __enter__(self):
        if self.__nesting == 0:
            self.__time = time = self.__timer()
        else:
            time = self.__time
        self.__nesting += 1
        return time

    def __exit__(self, *exc):
        self.__nesting -= 1

    def __reduce__(self):
        return _Timer, (self.__timer,)

    def __getattr__(self, name):
        return getattr(self.__timer, name)


class TTLCache(Cache):
    """LRU Cache implementation with per-item time-to-live (TTL) value."""

    def __init__(self, maxsize, ttl, timer=time.monotonic, getsizeof=None):
        Cache.__init__(self, maxsize, getsizeof)
        self.__root = root = _Link()
        root.prev = root.next = root
        self.__links = collections.OrderedDict()
        self.__timer = _Timer(timer)
        self.__ttl = ttl

    def __contains__(self, key):
        try:
            link = self.__links[key]  # no reordering
        except KeyError:
            return False
        else:
            return not (link.expire < self.__timer())

    def __getitem__(self, key, cache_getitem=Cache.__getitem__):
        try:
            link = self.__getlink(key)
        except KeyError:
            expired = False
        else:
            expired = link.expire < self.__timer()
        if expired:
            return self.__missing__(key)
        else:
            return cache_getitem(self, key)

    def __setitem__(self, key, value, cache_setitem=Cache.__setitem__):
        with self.__timer as time:
            self.expire(time)
            cache_setitem(self, key, value)
        try:
            link = self.__getlink(key)
        except KeyError:
            self.__links[key] = link = _Link(key)
        else:
            link.unlink()
        link.expire = time + self.__ttl
        link.next = root = self.__root
        link.prev = prev = root.prev
        prev.next = root.prev = link

    def __delitem__(self, key, cache_delitem=Cache.__delitem__):
        cache_delitem(self, key)
        link = self.__links.pop(key)
        link.unlink()
        if link.expire < self.__timer():
            raise KeyError(key)

    def __iter__(self):
        root = self.__root
        curr = root.next
        while curr is not root:
            # "freeze" time for iterator access
            with self.__timer as time:
                if not (curr.expire < time):
                    yield curr.key
            curr = curr.next

    def __len__(self):
        root = self.__root
        curr = root.next
        time = self.__timer()
        count = len(self.__links)
        while curr is not root and curr.expire < time:
            count -= 1
            curr = curr.next
        return count

    def __setstate__(self, state):
        self.__dict__.update(state)
        root = self.__root
        root.prev = root.next = root
        for link in sorted(self.__links.values(), key=lambda obj: obj.expire):
            link.next = root
            link.prev = prev = root.prev
            prev.next = root.prev = link
        self.expire(self.__timer())

    def __repr__(self, cache_repr=Cache.__repr__):
        with self.__timer as time:
            self.expire(time)
            return cache_repr(self)

    @property
    def currsize(self):
        with self.__timer as time:
            self.expire(time)
            return super(TTLCache, self).currsize

    @property
    def timer(self):
        """The timer function used by the cache."""
        return self.__timer

    @property
    def ttl(self):
        """The time-to-live value of the cache's items."""
        return self.__ttl

    def expire(self, time=None):
        """Remove expired items from the cache."""
        if time is None:
            time = self.__timer()
        root = self.__root
        curr = root.next
        links = self.__links
        cache_delitem = Cache.__delitem__
        while curr is not root and curr.expire < time:
            cache_delitem(self, curr.key)
            del links[curr.key]
            next = curr.next
            curr.unlink()
            curr = next

    def clear(self):
        with self.__timer as time:
            self.expire(time)
            Cache.clear(self)

    def get(self, *args, **kwargs):
        with self.__timer:
            return Cache.get(self, *args, **kwargs)

    def pop(self, *args, **kwargs):
        with self.__timer:
            return Cache.pop(self, *args, **kwargs)

    def setdefault(self, *args, **kwargs):
        with self.__timer:
            return Cache.setdefault(self, *args, **kwargs)

    def popitem(self):
        """Remove and return the `(key, value)` pair least recently used that
        has not already expired.

        """
        with self.__timer as time:
            self.expire(time)
            try:
                key = next(iter(self.__links))
            except StopIteration:
                msg = '%s is empty' % self.__class__.__name__
                raise KeyError(msg) from None
            else:
                return (key, self.pop(key))

    def __getlink(self, key):
        value = self.__links[key]
        self.__links.move_to_end(key)
        return value
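
A deterministic sketch of the TTL behaviour above, using a counter-based timer in place of time.monotonic; the key and TTL values are illustrative.

from cachetools import TTLCache

clock = [0]
c = TTLCache(maxsize=10, ttl=5, timer=lambda: clock[0])
c['token'] = 'abc123'
clock[0] += 4
assert 'token' in c        # still within the 5-unit time-to-live
clock[0] += 2
assert 'token' not in c    # expired; purged on the next mutating operation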

View file

@ -0,0 +1 @@
pip

View file

@ -0,0 +1,21 @@
This package contains a modified version of ca-bundle.crt:

ca-bundle.crt -- Bundle of CA Root Certificates

Certificate data from Mozilla as of: Thu Nov 3 19:04:19 2011

This is a bundle of X.509 certificates of public Certificate Authorities
(CA). These were automatically extracted from Mozilla's root certificates
file (certdata.txt). This file can be found in the mozilla source tree:
http://mxr.mozilla.org/mozilla/source/security/nss/lib/ckfw/builtins/certdata.txt?raw=1

It contains the certificates in PEM format and therefore
can be directly used with curl / libcurl / php_curl, or with
an Apache+mod_ssl webserver for SSL client authentication.
Just configure this file as the SSLCACertificateFile.

***** BEGIN LICENSE BLOCK *****
This Source Code Form is subject to the terms of the Mozilla Public License,
v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain
one at http://mozilla.org/MPL/2.0/.
***** END LICENSE BLOCK *****

@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $

View file

@ -0,0 +1,82 @@
Metadata-Version: 2.1
Name: certifi
Version: 2020.6.20
Summary: Python package for providing Mozilla's CA Bundle.
Home-page: https://certifiio.readthedocs.io/en/latest/
Author: Kenneth Reitz
Author-email: me@kennethreitz.com
License: MPL-2.0
Project-URL: Documentation, https://certifiio.readthedocs.io/en/latest/
Project-URL: Source, https://github.com/certifi/python-certifi
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)
Classifier: Natural Language :: English
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.3
Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Certifi: Python SSL Certificates
================================
`Certifi`_ provides Mozilla's carefully curated collection of Root Certificates for
validating the trustworthiness of SSL certificates while verifying the identity
of TLS hosts. It has been extracted from the `Requests`_ project.
Installation
------------

``certifi`` is available on PyPI. Simply install it with ``pip``::

    $ pip install certifi

Usage
-----

To reference the installed certificate authority (CA) bundle, you can use the
built-in function::

    >>> import certifi

    >>> certifi.where()
    '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem'

Or from the command line::

    $ python -m certifi
    /usr/local/lib/python3.7/site-packages/certifi/cacert.pem

Enjoy!
1024-bit Root Certificates
~~~~~~~~~~~~~~~~~~~~~~~~~~
Browsers and certificate authorities have concluded that 1024-bit keys are
unacceptably weak for certificates, particularly root certificates. For this
reason, Mozilla has removed any weak (i.e. 1024-bit key) certificate from its
bundle, replacing it with an equivalent strong (i.e. 2048-bit or greater key)
certificate from the same CA. Because Mozilla removed these certificates from
its bundle, ``certifi`` removed them as well.
In previous versions, ``certifi`` provided the ``certifi.old_where()`` function
to intentionally re-add the 1024-bit roots back into your bundle. This was not
recommended in production and therefore was removed at the end of 2018.
.. _`Certifi`: https://certifiio.readthedocs.io/en/latest/
.. _`Requests`: https://requests.readthedocs.io/en/master/
Addition/Removal of Certificates
--------------------------------
Certifi does not support any addition/removal or other modification of the
CA trust store content. This project is intended to provide a reliable and
highly portable root of trust to python deployments. Look to upstream projects
for methods to use alternate trust.

View file

@ -0,0 +1,13 @@
certifi-2020.6.20.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
certifi-2020.6.20.dist-info/LICENSE,sha256=anCkv2sBABbVmmS4rkrY3H9e8W8ftFPMLs13HFo0ETE,1048
certifi-2020.6.20.dist-info/METADATA,sha256=_0lH4pmUKzXqjJAq6fIlE4JB2g1CFLPpjpwwOsqNqwk,2944
certifi-2020.6.20.dist-info/RECORD,,
certifi-2020.6.20.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110
certifi-2020.6.20.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8
certifi/__init__.py,sha256=u1E_DrSGj_nnEkK5VglvEqP8D80KpghLVWL0A_pq41A,62
certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243
certifi/__pycache__/__init__.cpython-36.pyc,,
certifi/__pycache__/__main__.cpython-36.pyc,,
certifi/__pycache__/core.cpython-36.pyc,,
certifi/cacert.pem,sha256=GhT24f0R7_9y4YY_hkXwkO7BthZhRGDCEMO348E9S14,282394
certifi/core.py,sha256=V0uyxKOYdz6ulDSusclrLmjbPgOXsD0BnEf0SQ7OnoE,2303

View file

@ -0,0 +1,6 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.34.2)
Root-Is-Purelib: true
Tag: py2-none-any
Tag: py3-none-any

View file

@ -0,0 +1 @@
certifi

View file

@ -0,0 +1,3 @@
from .core import contents, where

__version__ = "2020.06.20"

View file

@ -0,0 +1,12 @@
import argparse

from certifi import contents, where

parser = argparse.ArgumentParser()
parser.add_argument("-c", "--contents", action="store_true")
args = parser.parse_args()

if args.contents:
    print(contents())
else:
    print(where())

File diff suppressed because it is too large

View file

@ -0,0 +1,60 @@
# -*- coding: utf-8 -*-

"""
certifi.py
~~~~~~~~~~

This module returns the installation location of cacert.pem or its contents.
"""
import os

try:
    from importlib.resources import path as get_path, read_text

    _CACERT_CTX = None
    _CACERT_PATH = None

    def where():
        # This is slightly terrible, but we want to delay extracting the file
        # in cases where we're inside of a zipimport situation until someone
        # actually calls where(), but we don't want to re-extract the file
        # on every call of where(), so we'll do it once then store it in a
        # global variable.
        global _CACERT_CTX
        global _CACERT_PATH
        if _CACERT_PATH is None:
            # This is slightly janky, the importlib.resources API wants you to
            # manage the cleanup of this file, so it doesn't actually return a
            # path, it returns a context manager that will give you the path
            # when you enter it and will do any cleanup when you leave it. In
            # the common case of not needing a temporary file, it will just
            # return the file system location and the __exit__() is a no-op.
            #
            # We also have to hold onto the actual context manager, because
            # it will do the cleanup whenever it gets garbage collected, so
            # we will also store that at the global level as well.
            _CACERT_CTX = get_path("certifi", "cacert.pem")
            _CACERT_PATH = str(_CACERT_CTX.__enter__())

        return _CACERT_PATH

except ImportError:
    # This fallback will work for Python versions prior to 3.7 that lack the
    # importlib.resources module but relies on the existing `where` function
    # so won't address issues with environments like PyOxidizer that don't set
    # __file__ on modules.
    def read_text(_module, _path, encoding="ascii"):
        with open(where(), "r", encoding=encoding) as data:
            return data.read()

    # If we don't have importlib.resources, then we will just do the old logic
    # of assuming we're on the filesystem and munge the path directly.
    def where():
        f = os.path.dirname(__file__)

        return os.path.join(f, "cacert.pem")


def contents():
    return read_text("certifi", "cacert.pem", encoding="ascii")
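
A small sketch of the two public helpers above; wiring the bundle into the standard library's ssl module is the typical use.

import ssl

import certifi

ctx = ssl.create_default_context(cafile=certifi.where())
pem_text = certifi.contents()             # the whole bundle as one string
assert 'BEGIN CERTIFICATE' in pem_text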

View file

@ -0,0 +1 @@
pip

View file

@ -0,0 +1,26 @@
Except when otherwise stated (look for LICENSE files in directories or
information at the beginning of each file) all software and
documentation is licensed as follows:
The MIT License
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

View file

@ -0,0 +1,37 @@
Metadata-Version: 2.1
Name: cffi
Version: 1.14.3
Summary: Foreign Function Interface for Python calling C code.
Home-page: http://cffi.readthedocs.org
Author: Armin Rigo, Maciej Fijalkowski
Author-email: python-cffi@googlegroups.com
License: MIT
Platform: UNKNOWN
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.6
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.2
Classifier: Programming Language :: Python :: 3.3
Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: License :: OSI Approved :: MIT License
Requires-Dist: pycparser
CFFI
====
Foreign Function Interface for Python calling C code.
Please see the `Documentation <http://cffi.readthedocs.org/>`_.
Contact
-------
`Mailing list <https://groups.google.com/forum/#!forum/python-cffi>`_

View file

@ -0,0 +1,44 @@
_cffi_backend.cp36-win32.pyd,sha256=G6qu3i9zJLMfZ_PT7JiYLHWlQ4oeg6Xdtt0U2fz452g,146432
cffi-1.14.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
cffi-1.14.3.dist-info/LICENSE,sha256=esEZUOct9bRcUXFqeyLnuzSzJNZ_Bl4pOBUt1HLEgV8,1320
cffi-1.14.3.dist-info/METADATA,sha256=SOxoTo-W2jbz8qj7XHL2Ry3Shfpbeg6IHx7crTt4NRs,1191
cffi-1.14.3.dist-info/RECORD,,
cffi-1.14.3.dist-info/WHEEL,sha256=AV2Nvbg-pC-zIZdxGMBky9Ya05hYOPClxAaFNPEQVgE,102
cffi-1.14.3.dist-info/entry_points.txt,sha256=Q9f5C9IpjYxo0d2PK9eUcnkgxHc9pHWwjEMaANPKNCI,76
cffi-1.14.3.dist-info/top_level.txt,sha256=rE7WR3rZfNKxWI9-jn6hsHCAl7MDkB-FmuQbxWjFehQ,19
cffi/__init__.py,sha256=r5h8errFupyJOsTuicf5dYchgmL9Hwsy-GzAH5AAMOw,527
cffi/__pycache__/__init__.cpython-36.pyc,,
cffi/__pycache__/api.cpython-36.pyc,,
cffi/__pycache__/backend_ctypes.cpython-36.pyc,,
cffi/__pycache__/cffi_opcode.cpython-36.pyc,,
cffi/__pycache__/commontypes.cpython-36.pyc,,
cffi/__pycache__/cparser.cpython-36.pyc,,
cffi/__pycache__/error.cpython-36.pyc,,
cffi/__pycache__/ffiplatform.cpython-36.pyc,,
cffi/__pycache__/lock.cpython-36.pyc,,
cffi/__pycache__/model.cpython-36.pyc,,
cffi/__pycache__/pkgconfig.cpython-36.pyc,,
cffi/__pycache__/recompiler.cpython-36.pyc,,
cffi/__pycache__/setuptools_ext.cpython-36.pyc,,
cffi/__pycache__/vengine_cpy.cpython-36.pyc,,
cffi/__pycache__/vengine_gen.cpython-36.pyc,,
cffi/__pycache__/verifier.cpython-36.pyc,,
cffi/_cffi_errors.h,sha256=INd0GxZQna8TTRYNOOr9_iFy0FZa84I_KH1qlmPgulQ,4003
cffi/_cffi_include.h,sha256=H7cgdZR-POwmUFrIup4jOGzmje8YoQHhN99gVFg7w08,15185
cffi/_embedding.h,sha256=4vlPtC1Zof0KjwnohsyLcWSDv-Kxa_bLmkobilrHbx0,18108
cffi/api.py,sha256=Xs_dAN5x1ehfnn_F9ZTdA3Ce0bmPrqeIOkO4Ya1tfbQ,43029
cffi/backend_ctypes.py,sha256=BHN3q2giL2_Y8wMDST2CIcc_qoMrs65qV9Ob5JvxBZ4,43575
cffi/cffi_opcode.py,sha256=57P2NHLZkuTWueZybu5iosWljb6ocQmUXzGrCplrnyE,5911
cffi/commontypes.py,sha256=mEZD4g0qtadnv6O6CEXvMQaJ1K6SRbG5S1h4YvVZHOU,2769
cffi/cparser.py,sha256=CwVk2V3ATYlCoywG6zN35w6UQ7zj2EWX68KjoJp2Mzk,45237
cffi/error.py,sha256=Bka7fSV22aIglTQDPIDfpnxTc1aWZLMQdQOJY-h_PUA,908
cffi/ffiplatform.py,sha256=qioydJeC63dEvrQ3ht5_BPmSs7wzzzuWnZAJtfhic7I,4173
cffi/lock.py,sha256=vnbsel7392Ib8gGBifIfAfc7MHteSwd3nP725pvc25Q,777
cffi/model.py,sha256=HRD0WEYHF2Vr6RjS-4wyncElrZxU2256zY0fbMkSKec,22385
cffi/parse_c_type.h,sha256=fKYNqWNX5f9kZNNhbXcRLTOlpRGRhh8eCLyHmTXIZnQ,6157
cffi/pkgconfig.py,sha256=9zDcDf0XKIJaxFHLg7e-W8-Xb8Yq5hdhqH7kLg-ugRo,4495
cffi/recompiler.py,sha256=_Hti-7dC_XumeGfj8tnodXwg1KplG_Iv-7P_5Xl41pA,65632
cffi/setuptools_ext.py,sha256=8y14TOlRAkgdczmwtPOahyFXJHNyIqhLjUHMYQmjOHs,9150
cffi/vengine_cpy.py,sha256=ukugKCIsURxJzHxlxS265tGjQfPTFDbThwsqBrwKh-A,44396
cffi/vengine_gen.py,sha256=mykUhLFJIcV6AyQ5cMJ3n_7dbqw0a9WEjXW0E-WfgiI,27359
cffi/verifier.py,sha256=La8rdbEkvdvbqAHDzTk5lsNUvdkqB_GcFnO7wXI6Mgk,11513

View file

@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.35.1)
Root-Is-Purelib: false
Tag: cp36-cp36m-win32

View file

@ -0,0 +1,3 @@
[distutils.setup_keywords]
cffi_modules = cffi.setuptools_ext:cffi_modules

View file

@ -0,0 +1,2 @@
_cffi_backend
cffi

View file

@ -0,0 +1,14 @@
__all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError',
           'FFIError']

from .api import FFI
from .error import CDefError, FFIError, VerificationError, VerificationMissing
from .error import PkgConfigError

__version__ = "1.14.3"
__version_info__ = (1, 14, 3)

# The verifier module file names are based on the CRC32 of a string that
# contains the following version number. It may be older than __version__
# if nothing is clearly incompatible.
__version_verifier_modules__ = "0.8.6"
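
A minimal sketch of the FFI entry point exported above, declaring one function from the C standard library; dlopen(None) loads the already-linked C runtime on POSIX systems and is illustrative (on Windows a DLL name would be needed instead).

from cffi import FFI

ffi = FFI()
ffi.cdef("size_t strlen(const char *s);")
libc = ffi.dlopen(None)        # POSIX only: open the C runtime itself
assert libc.strlen(b"hello") == 5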

Some files were not shown because too many files have changed in this diff