Updated DB_Helper by adding firebase methods.
This commit is contained in:
parent
485cc3bbba
commit
c82121d036
1810 changed files with 537281 additions and 1 deletions
49
venv/Lib/site-packages/gcloud/storage/__init__.py
Normal file
49
venv/Lib/site-packages/gcloud/storage/__init__.py
Normal file
|
@ -0,0 +1,49 @@
|
|||
# Copyright 2014 Google Inc. All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Shortcut methods for getting set up with Google Cloud Storage.
|
||||
|
||||
You'll typically use these to get started with the API:
|
||||
|
||||
>>> from gcloud import storage
|
||||
>>> client = storage.Client()
|
||||
>>> bucket = client.get_bucket('bucket-id-here')
|
||||
>>> # Then do other things...
|
||||
>>> blob = bucket.get_blob('/remote/path/to/file.txt')
|
||||
>>> print blob.download_as_string()
|
||||
>>> blob.upload_from_string('New contents!')
|
||||
>>> blob2 = bucket.blob('/remote/path/storage.txt')
|
||||
>>> blob2.upload_from_filename(filename='/local/path.txt')
|
||||
|
||||
The main concepts with this API are:
|
||||
|
||||
- :class:`gcloud.storage.connection.Connection` which represents a
|
||||
connection between your machine and the Cloud Storage API.
|
||||
|
||||
- :class:`gcloud.storage.bucket.Bucket` which represents a particular
|
||||
bucket (akin to a mounted disk on a computer).
|
||||
|
||||
- :class:`gcloud.storage.blob.Blob` which represents a pointer to a
|
||||
particular entity in Cloud Storage (akin to a file path on a remote
|
||||
machine).
|
||||
"""
|
||||
|
||||
from gcloud.storage.batch import Batch
|
||||
from gcloud.storage.blob import Blob
|
||||
from gcloud.storage.bucket import Bucket
|
||||
from gcloud.storage.client import Client
|
||||
from gcloud.storage.connection import Connection
|
||||
|
||||
|
||||
SCOPE = Connection.SCOPE
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
173
venv/Lib/site-packages/gcloud/storage/_helpers.py
Normal file
173
venv/Lib/site-packages/gcloud/storage/_helpers.py
Normal file
|
@ -0,0 +1,173 @@
|
|||
# Copyright 2014 Google Inc. All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Helper functions for Cloud Storage utility classes.
|
||||
|
||||
These are *not* part of the API.
|
||||
"""
|
||||
|
||||
import base64
|
||||
from hashlib import md5
|
||||
|
||||
|
||||
class _PropertyMixin(object):
|
||||
"""Abstract mixin for cloud storage classes with associated propertties.
|
||||
|
||||
Non-abstract subclasses should implement:
|
||||
- client
|
||||
- path
|
||||
|
||||
:type name: string
|
||||
:param name: The name of the object.
|
||||
"""
|
||||
|
||||
def __init__(self, name=None):
|
||||
self.name = name
|
||||
self._properties = {}
|
||||
self._changes = set()
|
||||
|
||||
@property
|
||||
def path(self):
|
||||
"""Abstract getter for the object path."""
|
||||
raise NotImplementedError
|
||||
|
||||
@property
|
||||
def client(self):
|
||||
"""Abstract getter for the object client."""
|
||||
raise NotImplementedError
|
||||
|
||||
def _require_client(self, client):
|
||||
"""Check client or verify over-ride.
|
||||
|
||||
:type client: :class:`gcloud.storage.client.Client` or ``NoneType``
|
||||
:param client: the client to use. If not passed, falls back to the
|
||||
``client`` stored on the current object.
|
||||
|
||||
:rtype: :class:`gcloud.storage.client.Client`
|
||||
:returns: The client passed in or the currently bound client.
|
||||
"""
|
||||
if client is None:
|
||||
client = self.client
|
||||
return client
|
||||
|
||||
def reload(self, client=None):
|
||||
"""Reload properties from Cloud Storage.
|
||||
|
||||
:type client: :class:`gcloud.storage.client.Client` or ``NoneType``
|
||||
:param client: the client to use. If not passed, falls back to the
|
||||
``client`` stored on the current object.
|
||||
"""
|
||||
client = self._require_client(client)
|
||||
# Pass only '?projection=noAcl' here because 'acl' and related
|
||||
# are handled via custom endpoints.
|
||||
query_params = {'projection': 'noAcl'}
|
||||
api_response = client.connection.api_request(
|
||||
method='GET', path=self.path, query_params=query_params,
|
||||
_target_object=self)
|
||||
self._set_properties(api_response)
|
||||
|
||||
def _patch_property(self, name, value):
|
||||
"""Update field of this object's properties.
|
||||
|
||||
This method will only update the field provided and will not
|
||||
touch the other fields.
|
||||
|
||||
It **will not** reload the properties from the server. The behavior is
|
||||
local only and syncing occurs via :meth:`patch`.
|
||||
|
||||
:type name: string
|
||||
:param name: The field name to update.
|
||||
|
||||
:type value: object
|
||||
:param value: The value being updated.
|
||||
"""
|
||||
self._changes.add(name)
|
||||
self._properties[name] = value
|
||||
|
||||
def _set_properties(self, value):
|
||||
"""Set the properties for the current object.
|
||||
|
||||
:type value: dict or :class:`gcloud.storage.batch._FutureDict`
|
||||
:param value: The properties to be set.
|
||||
"""
|
||||
self._properties = value
|
||||
# If the values are reset, the changes must as well.
|
||||
self._changes = set()
|
||||
|
||||
def patch(self, client=None):
|
||||
"""Sends all changed properties in a PATCH request.
|
||||
|
||||
Updates the ``_properties`` with the response from the backend.
|
||||
|
||||
:type client: :class:`gcloud.storage.client.Client` or ``NoneType``
|
||||
:param client: the client to use. If not passed, falls back to the
|
||||
``client`` stored on the current object.
|
||||
"""
|
||||
client = self._require_client(client)
|
||||
# Pass '?projection=full' here because 'PATCH' documented not
|
||||
# to work properly w/ 'noAcl'.
|
||||
update_properties = dict((key, self._properties[key])
|
||||
for key in self._changes)
|
||||
api_response = client.connection.api_request(
|
||||
method='PATCH', path=self.path, data=update_properties,
|
||||
query_params={'projection': 'full'}, _target_object=self)
|
||||
self._set_properties(api_response)
|
||||
|
||||
|
||||
def _scalar_property(fieldname):
|
||||
"""Create a property descriptor around the :class:`_PropertyMixin` helpers.
|
||||
"""
|
||||
def _getter(self):
|
||||
"""Scalar property getter."""
|
||||
return self._properties.get(fieldname)
|
||||
|
||||
def _setter(self, value):
|
||||
"""Scalar property setter."""
|
||||
self._patch_property(fieldname, value)
|
||||
|
||||
return property(_getter, _setter)
|
||||
|
||||
|
||||
def _write_buffer_to_hash(buffer_object, hash_obj, digest_block_size=8192):
|
||||
"""Read blocks from a buffer and update a hash with them.
|
||||
|
||||
:type buffer_object: bytes buffer
|
||||
:param buffer_object: Buffer containing bytes used to update a hash object.
|
||||
|
||||
:type hash_obj: object that implements update
|
||||
:param hash_obj: A hash object (MD5 or CRC32-C).
|
||||
|
||||
:type digest_block_size: integer
|
||||
:param digest_block_size: The block size to write to the hash.
|
||||
Defaults to 8192.
|
||||
"""
|
||||
block = buffer_object.read(digest_block_size)
|
||||
|
||||
while len(block) > 0:
|
||||
hash_obj.update(block)
|
||||
# Update the block for the next iteration.
|
||||
block = buffer_object.read(digest_block_size)
|
||||
|
||||
|
||||
def _base64_md5hash(buffer_object):
|
||||
"""Get MD5 hash of bytes (as base64).
|
||||
|
||||
:type buffer_object: bytes buffer
|
||||
:param buffer_object: Buffer containing bytes used to compute an MD5
|
||||
hash (as base64).
|
||||
"""
|
||||
hash_obj = md5()
|
||||
_write_buffer_to_hash(buffer_object, hash_obj)
|
||||
digest_bytes = hash_obj.digest()
|
||||
return base64.b64encode(digest_bytes)
|
560
venv/Lib/site-packages/gcloud/storage/acl.py
Normal file
560
venv/Lib/site-packages/gcloud/storage/acl.py
Normal file
|
@ -0,0 +1,560 @@
|
|||
# Copyright 2014 Google Inc. All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Manipulate access control lists that Cloud Storage provides.
|
||||
|
||||
:class:`gcloud.storage.bucket.Bucket` has a getting method that creates
|
||||
an ACL object under the hood, and you can interact with that using
|
||||
:func:`gcloud.storage.bucket.Bucket.acl`::
|
||||
|
||||
>>> from gcloud import storage
|
||||
>>> client = storage.Client()
|
||||
>>> bucket = client.get_bucket(bucket_name)
|
||||
>>> acl = bucket.acl
|
||||
|
||||
Adding and removing permissions can be done with the following methods
|
||||
(in increasing order of granularity):
|
||||
|
||||
- :func:`ACL.all`
|
||||
corresponds to access for all users.
|
||||
- :func:`ACL.all_authenticated` corresponds
|
||||
to access for all users that are signed into a Google account.
|
||||
- :func:`ACL.domain` corresponds to access on a
|
||||
per Google Apps domain (ie, ``example.com``).
|
||||
- :func:`ACL.group` corresponds to access on a
|
||||
per group basis (either by ID or e-mail address).
|
||||
- :func:`ACL.user` corresponds to access on a
|
||||
per user basis (either by ID or e-mail address).
|
||||
|
||||
And you are able to ``grant`` and ``revoke`` the following roles:
|
||||
|
||||
- **Reading**:
|
||||
:func:`_ACLEntity.grant_read` and :func:`_ACLEntity.revoke_read`
|
||||
- **Writing**:
|
||||
:func:`_ACLEntity.grant_write` and :func:`_ACLEntity.revoke_write`
|
||||
- **Owning**:
|
||||
:func:`_ACLEntity.grant_owner` and :func:`_ACLEntity.revoke_owner`
|
||||
|
||||
You can use any of these like any other factory method (these happen to
|
||||
be :class:`_ACLEntity` factories)::
|
||||
|
||||
>>> acl.user('me@example.org').grant_read()
|
||||
>>> acl.all_authenticated().grant_write()
|
||||
|
||||
You can also chain these ``grant_*`` and ``revoke_*`` methods together
|
||||
for brevity::
|
||||
|
||||
>>> acl.all().grant_read().revoke_write()
|
||||
|
||||
After that, you can save any changes you make with the
|
||||
:func:`gcloud.storage.acl.ACL.save` method::
|
||||
|
||||
>>> acl.save()
|
||||
|
||||
You can alternatively save any existing :class:`gcloud.storage.acl.ACL`
|
||||
object (whether it was created by a factory method or not) from a
|
||||
:class:`gcloud.storage.bucket.Bucket`::
|
||||
|
||||
>>> bucket.acl.save(acl=acl)
|
||||
|
||||
To get the list of ``entity`` and ``role`` for each unique pair, the
|
||||
:class:`ACL` class is iterable::
|
||||
|
||||
>>> print list(ACL)
|
||||
[{'role': 'OWNER', 'entity': 'allUsers'}, ...]
|
||||
|
||||
This list of tuples can be used as the ``entity`` and ``role`` fields
|
||||
when sending metadata for ACLs to the API.
|
||||
"""
|
||||
|
||||
|
||||
class _ACLEntity(object):
|
||||
"""Class representing a set of roles for an entity.
|
||||
|
||||
This is a helper class that you likely won't ever construct
|
||||
outside of using the factor methods on the :class:`ACL` object.
|
||||
|
||||
:type entity_type: string
|
||||
:param entity_type: The type of entity (ie, 'group' or 'user').
|
||||
|
||||
:type identifier: string
|
||||
:param identifier: The ID or e-mail of the entity. For the special
|
||||
entity types (like 'allUsers') this is optional.
|
||||
"""
|
||||
|
||||
READER_ROLE = 'READER'
|
||||
WRITER_ROLE = 'WRITER'
|
||||
OWNER_ROLE = 'OWNER'
|
||||
|
||||
def __init__(self, entity_type, identifier=None):
|
||||
self.identifier = identifier
|
||||
self.roles = set([])
|
||||
self.type = entity_type
|
||||
|
||||
def __str__(self):
|
||||
if not self.identifier:
|
||||
return str(self.type)
|
||||
else:
|
||||
return '{acl.type}-{acl.identifier}'.format(acl=self)
|
||||
|
||||
def __repr__(self):
|
||||
return '<ACL Entity: {acl} ({roles})>'.format(
|
||||
acl=self, roles=', '.join(self.roles))
|
||||
|
||||
def get_roles(self):
|
||||
"""Get the list of roles permitted by this entity.
|
||||
|
||||
:rtype: list of strings
|
||||
:returns: The list of roles associated with this entity.
|
||||
"""
|
||||
return self.roles
|
||||
|
||||
def grant(self, role):
|
||||
"""Add a role to the entity.
|
||||
|
||||
:type role: string
|
||||
:param role: The role to add to the entity.
|
||||
"""
|
||||
self.roles.add(role)
|
||||
|
||||
def revoke(self, role):
|
||||
"""Remove a role from the entity.
|
||||
|
||||
:type role: string
|
||||
:param role: The role to remove from the entity.
|
||||
"""
|
||||
if role in self.roles:
|
||||
self.roles.remove(role)
|
||||
|
||||
def grant_read(self):
|
||||
"""Grant read access to the current entity."""
|
||||
self.grant(_ACLEntity.READER_ROLE)
|
||||
|
||||
def grant_write(self):
|
||||
"""Grant write access to the current entity."""
|
||||
self.grant(_ACLEntity.WRITER_ROLE)
|
||||
|
||||
def grant_owner(self):
|
||||
"""Grant owner access to the current entity."""
|
||||
self.grant(_ACLEntity.OWNER_ROLE)
|
||||
|
||||
def revoke_read(self):
|
||||
"""Revoke read access from the current entity."""
|
||||
self.revoke(_ACLEntity.READER_ROLE)
|
||||
|
||||
def revoke_write(self):
|
||||
"""Revoke write access from the current entity."""
|
||||
self.revoke(_ACLEntity.WRITER_ROLE)
|
||||
|
||||
def revoke_owner(self):
|
||||
"""Revoke owner access from the current entity."""
|
||||
self.revoke(_ACLEntity.OWNER_ROLE)
|
||||
|
||||
|
||||
class ACL(object):
|
||||
"""Container class representing a list of access controls."""
|
||||
|
||||
_URL_PATH_ELEM = 'acl'
|
||||
_PREDEFINED_QUERY_PARAM = 'predefinedAcl'
|
||||
|
||||
PREDEFINED_XML_ACLS = {
|
||||
# XML API name -> JSON API name
|
||||
'project-private': 'projectPrivate',
|
||||
'public-read': 'publicRead',
|
||||
'public-read-write': 'publicReadWrite',
|
||||
'authenticated-read': 'authenticatedRead',
|
||||
'bucket-owner-read': 'bucketOwnerRead',
|
||||
'bucket-owner-full-control': 'bucketOwnerFullControl',
|
||||
}
|
||||
|
||||
PREDEFINED_JSON_ACLS = frozenset([
|
||||
'private',
|
||||
'projectPrivate',
|
||||
'publicRead',
|
||||
'publicReadWrite',
|
||||
'authenticatedRead',
|
||||
'bucketOwnerRead',
|
||||
'bucketOwnerFullControl',
|
||||
])
|
||||
"""See:
|
||||
https://cloud.google.com/storage/docs/access-control#predefined-acl
|
||||
"""
|
||||
|
||||
loaded = False
|
||||
|
||||
# Subclasses must override to provide these attributes (typically,
|
||||
# as properties).
|
||||
reload_path = None
|
||||
save_path = None
|
||||
|
||||
def __init__(self):
|
||||
self.entities = {}
|
||||
|
||||
def _ensure_loaded(self):
|
||||
"""Load if not already loaded."""
|
||||
if not self.loaded:
|
||||
self.reload()
|
||||
|
||||
def reset(self):
|
||||
"""Remove all entities from the ACL, and clear the ``loaded`` flag."""
|
||||
self.entities.clear()
|
||||
self.loaded = False
|
||||
|
||||
def __iter__(self):
|
||||
self._ensure_loaded()
|
||||
|
||||
for entity in self.entities.values():
|
||||
for role in entity.get_roles():
|
||||
if role:
|
||||
yield {'entity': str(entity), 'role': role}
|
||||
|
||||
def entity_from_dict(self, entity_dict):
|
||||
"""Build an _ACLEntity object from a dictionary of data.
|
||||
|
||||
An entity is a mutable object that represents a list of roles
|
||||
belonging to either a user or group or the special types for all
|
||||
users and all authenticated users.
|
||||
|
||||
:type entity_dict: dict
|
||||
:param entity_dict: Dictionary full of data from an ACL lookup.
|
||||
|
||||
:rtype: :class:`_ACLEntity`
|
||||
:returns: An Entity constructed from the dictionary.
|
||||
"""
|
||||
entity = entity_dict['entity']
|
||||
role = entity_dict['role']
|
||||
|
||||
if entity == 'allUsers':
|
||||
entity = self.all()
|
||||
|
||||
elif entity == 'allAuthenticatedUsers':
|
||||
entity = self.all_authenticated()
|
||||
|
||||
elif '-' in entity:
|
||||
entity_type, identifier = entity.split('-', 1)
|
||||
entity = self.entity(entity_type=entity_type,
|
||||
identifier=identifier)
|
||||
|
||||
if not isinstance(entity, _ACLEntity):
|
||||
raise ValueError('Invalid dictionary: %s' % entity_dict)
|
||||
|
||||
entity.grant(role)
|
||||
return entity
|
||||
|
||||
def has_entity(self, entity):
|
||||
"""Returns whether or not this ACL has any entries for an entity.
|
||||
|
||||
:type entity: :class:`_ACLEntity`
|
||||
:param entity: The entity to check for existence in this ACL.
|
||||
|
||||
:rtype: boolean
|
||||
:returns: True of the entity exists in the ACL.
|
||||
"""
|
||||
self._ensure_loaded()
|
||||
return str(entity) in self.entities
|
||||
|
||||
def get_entity(self, entity, default=None):
|
||||
"""Gets an entity object from the ACL.
|
||||
|
||||
:type entity: :class:`_ACLEntity` or string
|
||||
:param entity: The entity to get lookup in the ACL.
|
||||
|
||||
:type default: anything
|
||||
:param default: This value will be returned if the entity
|
||||
doesn't exist.
|
||||
|
||||
:rtype: :class:`_ACLEntity`
|
||||
:returns: The corresponding entity or the value provided
|
||||
to ``default``.
|
||||
"""
|
||||
self._ensure_loaded()
|
||||
return self.entities.get(str(entity), default)
|
||||
|
||||
def add_entity(self, entity):
|
||||
"""Add an entity to the ACL.
|
||||
|
||||
:type entity: :class:`_ACLEntity`
|
||||
:param entity: The entity to add to this ACL.
|
||||
"""
|
||||
self._ensure_loaded()
|
||||
self.entities[str(entity)] = entity
|
||||
|
||||
def entity(self, entity_type, identifier=None):
|
||||
"""Factory method for creating an Entity.
|
||||
|
||||
If an entity with the same type and identifier already exists,
|
||||
this will return a reference to that entity. If not, it will
|
||||
create a new one and add it to the list of known entities for
|
||||
this ACL.
|
||||
|
||||
:type entity_type: string
|
||||
:param entity_type: The type of entity to create
|
||||
(ie, ``user``, ``group``, etc)
|
||||
|
||||
:type identifier: string
|
||||
:param identifier: The ID of the entity (if applicable).
|
||||
This can be either an ID or an e-mail address.
|
||||
|
||||
:rtype: :class:`_ACLEntity`
|
||||
:returns: A new Entity or a reference to an existing identical entity.
|
||||
"""
|
||||
entity = _ACLEntity(entity_type=entity_type, identifier=identifier)
|
||||
if self.has_entity(entity):
|
||||
entity = self.get_entity(entity)
|
||||
else:
|
||||
self.add_entity(entity)
|
||||
return entity
|
||||
|
||||
def user(self, identifier):
|
||||
"""Factory method for a user Entity.
|
||||
|
||||
:type identifier: string
|
||||
:param identifier: An id or e-mail for this particular user.
|
||||
|
||||
:rtype: :class:`_ACLEntity`
|
||||
:returns: An Entity corresponding to this user.
|
||||
"""
|
||||
return self.entity('user', identifier=identifier)
|
||||
|
||||
def group(self, identifier):
|
||||
"""Factory method for a group Entity.
|
||||
|
||||
:type identifier: string
|
||||
:param identifier: An id or e-mail for this particular group.
|
||||
|
||||
:rtype: :class:`_ACLEntity`
|
||||
:returns: An Entity corresponding to this group.
|
||||
"""
|
||||
return self.entity('group', identifier=identifier)
|
||||
|
||||
def domain(self, domain):
|
||||
"""Factory method for a domain Entity.
|
||||
|
||||
:type domain: string
|
||||
:param domain: The domain for this entity.
|
||||
|
||||
:rtype: :class:`_ACLEntity`
|
||||
:returns: An entity corresponding to this domain.
|
||||
"""
|
||||
return self.entity('domain', identifier=domain)
|
||||
|
||||
def all(self):
|
||||
"""Factory method for an Entity representing all users.
|
||||
|
||||
:rtype: :class:`_ACLEntity`
|
||||
:returns: An entity representing all users.
|
||||
"""
|
||||
return self.entity('allUsers')
|
||||
|
||||
def all_authenticated(self):
|
||||
"""Factory method for an Entity representing all authenticated users.
|
||||
|
||||
:rtype: :class:`_ACLEntity`
|
||||
:returns: An entity representing all authenticated users.
|
||||
"""
|
||||
return self.entity('allAuthenticatedUsers')
|
||||
|
||||
def get_entities(self):
|
||||
"""Get a list of all Entity objects.
|
||||
|
||||
:rtype: list of :class:`_ACLEntity` objects
|
||||
:returns: A list of all Entity objects.
|
||||
"""
|
||||
self._ensure_loaded()
|
||||
return list(self.entities.values())
|
||||
|
||||
@property
|
||||
def client(self):
|
||||
"""Abstract getter for the object client."""
|
||||
raise NotImplementedError
|
||||
|
||||
def _require_client(self, client):
|
||||
"""Check client or verify over-ride.
|
||||
|
||||
:type client: :class:`gcloud.storage.client.Client` or ``NoneType``
|
||||
:param client: the client to use. If not passed, falls back to the
|
||||
``client`` stored on the current ACL.
|
||||
|
||||
:rtype: :class:`gcloud.storage.client.Client`
|
||||
:returns: The client passed in or the currently bound client.
|
||||
"""
|
||||
if client is None:
|
||||
client = self.client
|
||||
return client
|
||||
|
||||
def reload(self, client=None):
|
||||
"""Reload the ACL data from Cloud Storage.
|
||||
|
||||
:type client: :class:`gcloud.storage.client.Client` or ``NoneType``
|
||||
:param client: Optional. The client to use. If not passed, falls back
|
||||
to the ``client`` stored on the ACL's parent.
|
||||
"""
|
||||
path = self.reload_path
|
||||
client = self._require_client(client)
|
||||
|
||||
self.entities.clear()
|
||||
|
||||
found = client.connection.api_request(method='GET', path=path)
|
||||
self.loaded = True
|
||||
for entry in found.get('items', ()):
|
||||
self.add_entity(self.entity_from_dict(entry))
|
||||
|
||||
def _save(self, acl, predefined, client):
|
||||
"""Helper for :meth:`save` and :meth:`save_predefined`.
|
||||
|
||||
:type acl: :class:`gcloud.storage.acl.ACL`, or a compatible list.
|
||||
:param acl: The ACL object to save. If left blank, this will save
|
||||
current entries.
|
||||
|
||||
:type predefined: string or None
|
||||
:param predefined: An identifier for a predefined ACL. Must be one
|
||||
of the keys in :attr:`PREDEFINED_JSON_ACLS`
|
||||
If passed, `acl` must be None.
|
||||
|
||||
:type client: :class:`gcloud.storage.client.Client` or ``NoneType``
|
||||
:param client: Optional. The client to use. If not passed, falls back
|
||||
to the ``client`` stored on the ACL's parent.
|
||||
"""
|
||||
query_params = {'projection': 'full'}
|
||||
if predefined is not None:
|
||||
acl = []
|
||||
query_params[self._PREDEFINED_QUERY_PARAM] = predefined
|
||||
|
||||
path = self.save_path
|
||||
client = self._require_client(client)
|
||||
result = client.connection.api_request(
|
||||
method='PATCH',
|
||||
path=path,
|
||||
data={self._URL_PATH_ELEM: list(acl)},
|
||||
query_params=query_params)
|
||||
self.entities.clear()
|
||||
for entry in result.get(self._URL_PATH_ELEM, ()):
|
||||
self.add_entity(self.entity_from_dict(entry))
|
||||
self.loaded = True
|
||||
|
||||
def save(self, acl=None, client=None):
|
||||
"""Save this ACL for the current bucket.
|
||||
|
||||
:type acl: :class:`gcloud.storage.acl.ACL`, or a compatible list.
|
||||
:param acl: The ACL object to save. If left blank, this will save
|
||||
current entries.
|
||||
|
||||
:type client: :class:`gcloud.storage.client.Client` or ``NoneType``
|
||||
:param client: Optional. The client to use. If not passed, falls back
|
||||
to the ``client`` stored on the ACL's parent.
|
||||
"""
|
||||
if acl is None:
|
||||
acl = self
|
||||
save_to_backend = acl.loaded
|
||||
else:
|
||||
save_to_backend = True
|
||||
|
||||
if save_to_backend:
|
||||
self._save(acl, None, client)
|
||||
|
||||
def save_predefined(self, predefined, client=None):
|
||||
"""Save this ACL for the current bucket using a predefined ACL.
|
||||
|
||||
:type predefined: string
|
||||
:param predefined: An identifier for a predefined ACL. Must be one
|
||||
of the keys in :attr:`PREDEFINED_JSON_ACLS`
|
||||
or :attr:`PREDEFINED_XML_ACLS` (which will be
|
||||
aliased to the corresponding JSON name).
|
||||
If passed, `acl` must be None.
|
||||
|
||||
:type client: :class:`gcloud.storage.client.Client` or ``NoneType``
|
||||
:param client: Optional. The client to use. If not passed, falls back
|
||||
to the ``client`` stored on the ACL's parent.
|
||||
"""
|
||||
predefined = self.PREDEFINED_XML_ACLS.get(predefined, predefined)
|
||||
|
||||
if predefined not in self.PREDEFINED_JSON_ACLS:
|
||||
raise ValueError("Invalid predefined ACL: %s" % (predefined,))
|
||||
|
||||
self._save(None, predefined, client)
|
||||
|
||||
def clear(self, client=None):
|
||||
"""Remove all ACL entries.
|
||||
|
||||
Note that this won't actually remove *ALL* the rules, but it
|
||||
will remove all the non-default rules. In short, you'll still
|
||||
have access to a bucket that you created even after you clear
|
||||
ACL rules with this method.
|
||||
|
||||
:type client: :class:`gcloud.storage.client.Client` or ``NoneType``
|
||||
:param client: Optional. The client to use. If not passed, falls back
|
||||
to the ``client`` stored on the ACL's parent.
|
||||
"""
|
||||
self.save([], client=client)
|
||||
|
||||
|
||||
class BucketACL(ACL):
|
||||
"""An ACL specifically for a bucket.
|
||||
|
||||
:type bucket: :class:`gcloud.storage.bucket.Bucket`
|
||||
:param bucket: The bucket to which this ACL relates.
|
||||
"""
|
||||
|
||||
def __init__(self, bucket):
|
||||
super(BucketACL, self).__init__()
|
||||
self.bucket = bucket
|
||||
|
||||
@property
|
||||
def client(self):
|
||||
"""The client bound to this ACL's bucket."""
|
||||
return self.bucket.client
|
||||
|
||||
@property
|
||||
def reload_path(self):
|
||||
"""Compute the path for GET API requests for this ACL."""
|
||||
return '%s/%s' % (self.bucket.path, self._URL_PATH_ELEM)
|
||||
|
||||
@property
|
||||
def save_path(self):
|
||||
"""Compute the path for PATCH API requests for this ACL."""
|
||||
return self.bucket.path
|
||||
|
||||
|
||||
class DefaultObjectACL(BucketACL):
|
||||
"""A class representing the default object ACL for a bucket."""
|
||||
|
||||
_URL_PATH_ELEM = 'defaultObjectAcl'
|
||||
_PREDEFINED_QUERY_PARAM = 'predefinedDefaultObjectAcl'
|
||||
|
||||
|
||||
class ObjectACL(ACL):
|
||||
"""An ACL specifically for a Cloud Storage object / blob.
|
||||
|
||||
:type blob: :class:`gcloud.storage.blob.Blob`
|
||||
:param blob: The blob that this ACL corresponds to.
|
||||
"""
|
||||
|
||||
def __init__(self, blob):
|
||||
super(ObjectACL, self).__init__()
|
||||
self.blob = blob
|
||||
|
||||
@property
|
||||
def client(self):
|
||||
"""The client bound to this ACL's blob."""
|
||||
return self.blob.client
|
||||
|
||||
@property
|
||||
def reload_path(self):
|
||||
"""Compute the path for GET API requests for this ACL."""
|
||||
return '%s/acl' % self.blob.path
|
||||
|
||||
@property
|
||||
def save_path(self):
|
||||
"""Compute the path for PATCH API requests for this ACL."""
|
||||
return self.blob.path
|
327
venv/Lib/site-packages/gcloud/storage/batch.py
Normal file
327
venv/Lib/site-packages/gcloud/storage/batch.py
Normal file
|
@ -0,0 +1,327 @@
|
|||
# Copyright 2014 Google Inc. All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Batch updates / deletes of storage buckets / blobs.
|
||||
|
||||
See: https://cloud.google.com/storage/docs/json_api/v1/how-tos/batch
|
||||
"""
|
||||
from email.encoders import encode_noop
|
||||
from email.generator import Generator
|
||||
from email.mime.application import MIMEApplication
|
||||
from email.mime.multipart import MIMEMultipart
|
||||
from email.parser import Parser
|
||||
import io
|
||||
import json
|
||||
|
||||
import httplib2
|
||||
import six
|
||||
|
||||
from gcloud.exceptions import make_exception
|
||||
from gcloud.storage.connection import Connection
|
||||
|
||||
|
||||
class MIMEApplicationHTTP(MIMEApplication):
|
||||
"""MIME type for ``application/http``.
|
||||
|
||||
Constructs payload from headers and body
|
||||
|
||||
:type method: str
|
||||
:param method: HTTP method
|
||||
|
||||
:type uri: str
|
||||
:param uri: URI for HTTP request
|
||||
|
||||
:type headers: dict
|
||||
:param headers: HTTP headers
|
||||
|
||||
:type body: str or None
|
||||
:param body: HTTP payload
|
||||
|
||||
"""
|
||||
def __init__(self, method, uri, headers, body):
|
||||
if isinstance(body, dict):
|
||||
body = json.dumps(body)
|
||||
headers['Content-Type'] = 'application/json'
|
||||
headers['Content-Length'] = len(body)
|
||||
if body is None:
|
||||
body = ''
|
||||
lines = ['%s %s HTTP/1.1' % (method, uri)]
|
||||
lines.extend(['%s: %s' % (key, value)
|
||||
for key, value in sorted(headers.items())])
|
||||
lines.append('')
|
||||
lines.append(body)
|
||||
payload = '\r\n'.join(lines)
|
||||
if six.PY2:
|
||||
# email.message.Message is an old-style class, so we
|
||||
# cannot use 'super()'.
|
||||
MIMEApplication.__init__(self, payload, 'http', encode_noop)
|
||||
else: # pragma: NO COVER Python3
|
||||
super_init = super(MIMEApplicationHTTP, self).__init__
|
||||
super_init(payload, 'http', encode_noop)
|
||||
|
||||
|
||||
class NoContent(object):
|
||||
"""Emulate an HTTP '204 No Content' response."""
|
||||
status = 204
|
||||
|
||||
|
||||
class _FutureDict(object):
|
||||
"""Class to hold a future value for a deferred request.
|
||||
|
||||
Used by for requests that get sent in a :class:`Batch`.
|
||||
"""
|
||||
|
||||
@staticmethod
|
||||
def get(key, default=None):
|
||||
"""Stand-in for dict.get.
|
||||
|
||||
:type key: object
|
||||
:param key: Hashable dictionary key.
|
||||
|
||||
:type default: object
|
||||
:param default: Fallback value to dict.get.
|
||||
|
||||
:raises: :class:`KeyError` always since the future is intended to fail
|
||||
as a dictionary.
|
||||
"""
|
||||
raise KeyError('Cannot get(%r, default=%r) on a future' % (
|
||||
key, default))
|
||||
|
||||
def __getitem__(self, key):
|
||||
"""Stand-in for dict[key].
|
||||
|
||||
:type key: object
|
||||
:param key: Hashable dictionary key.
|
||||
|
||||
:raises: :class:`KeyError` always since the future is intended to fail
|
||||
as a dictionary.
|
||||
"""
|
||||
raise KeyError('Cannot get item %r from a future' % (key,))
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
"""Stand-in for dict[key] = value.
|
||||
|
||||
:type key: object
|
||||
:param key: Hashable dictionary key.
|
||||
|
||||
:type value: object
|
||||
:param value: Dictionary value.
|
||||
|
||||
:raises: :class:`KeyError` always since the future is intended to fail
|
||||
as a dictionary.
|
||||
"""
|
||||
raise KeyError('Cannot set %r -> %r on a future' % (key, value))
|
||||
|
||||
|
||||
class Batch(Connection):
    """Proxy an underlying connection, batching up change operations.

    Deferred requests accumulate until :meth:`finish` is called (or the
    batch is used as a context manager); they are then submitted to the
    backend as a single ``multipart/mixed`` request.

    :type client: :class:`gcloud.storage.client.Client`
    :param client: The client to use for making connections.
    """
    # Maximum number of sub-requests the backend accepts per batch call.
    _MAX_BATCH_SIZE = 1000

    def __init__(self, client):
        super(Batch, self).__init__()
        self._client = client
        # Deferred (method, url, headers, data) tuples, in call order.
        self._requests = []
        # Parallel list: the object (or None) whose properties will be
        # populated from the matching sub-response once the batch finishes.
        self._target_objects = []

    def _do_request(self, method, url, headers, data, target_object):
        """Override Connection:  defer actual HTTP request.

        Only allow up to ``_MAX_BATCH_SIZE`` requests to be deferred.

        :type method: str
        :param method: The HTTP method to use in the request.

        :type url: str
        :param url: The URL to send the request to.

        :type headers: dict
        :param headers: A dictionary of HTTP headers to send with the request.

        :type data: str
        :param data: The data to send as the body of the request.

        :type target_object: object or :class:`NoneType`
        :param target_object: This allows us to enable custom behavior in our
                              batch connection. Here we defer an HTTP request
                              and complete initialization of the object at a
                              later time.

        :rtype: tuple of ``response`` (a dictionary of sorts)
                and ``content`` (a string).
        :returns: The HTTP response object and the content of the response.
        """
        if len(self._requests) >= self._MAX_BATCH_SIZE:
            raise ValueError("Too many deferred requests (max %d)" %
                             self._MAX_BATCH_SIZE)
        self._requests.append((method, url, headers, data))
        # Hand back a placeholder; it is swapped for the real response
        # mapping in _finish_futures().
        result = _FutureDict()
        self._target_objects.append(target_object)
        if target_object is not None:
            target_object._properties = result
        return NoContent(), result

    def _prepare_batch_request(self):
        """Prepares headers and body for a batch request.

        :rtype: tuple (dict, str)
        :returns: The pair of headers and body of the batch request to be sent.
        :raises: :class:`ValueError` if no requests have been deferred.
        """
        if len(self._requests) == 0:
            raise ValueError("No deferred requests")

        multi = MIMEMultipart()

        # Each deferred call becomes one application/http sub-part.
        for method, uri, headers, body in self._requests:
            subrequest = MIMEApplicationHTTP(method, uri, headers, body)
            multi.attach(subrequest)

        # The `email` package expects to deal with "native" strings
        if six.PY3:  # pragma: NO COVER  Python3
            buf = io.StringIO()
        else:
            buf = io.BytesIO()
        # maxheaderlen=0 disables header wrapping, keeping sub-requests
        # byte-exact.
        generator = Generator(buf, False, 0)
        generator.flatten(multi)
        payload = buf.getvalue()

        # Strip off redundant header text
        _, body = payload.split('\n\n', 1)
        return dict(multi._headers), body

    def _finish_futures(self, responses):
        """Apply all the batch responses to the futures created.

        :type responses: list of (headers, payload) tuples.
        :param responses: List of headers and payloads from each response in
                          the batch.

        :raises: :class:`ValueError` if no requests have been deferred.
        """
        # If a bad status occurs, we track it, but don't raise an exception
        # until all futures have been populated.
        exception_args = None

        if len(self._target_objects) != len(responses):
            raise ValueError('Expected a response for every request.')

        for target_object, sub_response in zip(self._target_objects,
                                               responses):
            resp_headers, sub_payload = sub_response
            if not 200 <= resp_headers.status < 300:
                # Remember only the FIRST failure; later targets still get
                # their payloads applied below.
                exception_args = exception_args or (resp_headers,
                                                    sub_payload)
            elif target_object is not None:
                target_object._properties = sub_payload

        if exception_args is not None:
            raise make_exception(*exception_args)

    def finish(self):
        """Submit a single `multipart/mixed` request w/ deferred requests.

        :rtype: list of tuples
        :returns: one ``(headers, payload)`` tuple per deferred request.
        """
        headers, body = self._prepare_batch_request()

        url = '%s/batch' % self.API_BASE_URL

        # Use the private ``_connection`` rather than the public
        # ``.connection``, since the public connection may be this
        # current batch.
        response, content = self._client._connection._make_request(
            'POST', url, data=body, headers=headers)
        responses = list(_unpack_batch_response(response, content))
        self._finish_futures(responses)
        return responses

    def current(self):
        """Return the topmost batch, or None."""
        return self._client.current_batch

    def __enter__(self):
        self._client._push_batch(self)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Submit only on a clean exit, but ALWAYS pop this batch off the
        # client's stack, even if finish() raises.
        try:
            if exc_type is None:
                self.finish()
        finally:
            self._client._pop_batch()
|
||||
|
||||
|
||||
def _generate_faux_mime_message(parser, response, content):
    """Convert response, content -> (multipart) email.message.

    Helper for _unpack_batch_response.
    """
    # We coerce to bytes to get consitent concat across
    # Py2 and Py3. Percent formatting is insufficient since
    # it includes the b in Py3.
    if not isinstance(content, six.binary_type):
        content = content.encode('utf-8')
    content_type = response['content-type']
    if not isinstance(content_type, six.binary_type):
        content_type = content_type.encode('utf-8')
    faux_message = (b'Content-Type: ' +
                    content_type +
                    b'\nMIME-Version: 1.0\n\n' +
                    content)

    if six.PY2:
        return parser.parsestr(faux_message)
    # pragma: NO COVER Python3
    return parser.parsestr(faux_message.decode('utf-8'))
|
||||
|
||||
|
||||
def _unpack_batch_response(response, content):
    """Convert response, content -> [(headers, payload)].

    Creates a generator of tuples of emulating the responses to
    :meth:`httplib2.Http.request` (a pair of headers and payload).

    :type response: :class:`httplib2.Response`
    :param response: HTTP response / headers from a request.

    :type content: str
    :param content: Response payload with a batch response.

    :rtype: generator
    :returns: A generator of header, payload pairs.
    """
    parser = Parser()
    message = _generate_faux_mime_message(parser, response, content)

    # A batch response must be multipart; a non-list payload means the
    # server returned something other than a batch payload.
    if not isinstance(message._payload, list):
        raise ValueError('Bad response: not multi-part')

    for subrequest in message._payload:
        # Each sub-part payload is a raw HTTP response: status line,
        # headers, blank line, body.
        status_line, rest = subrequest._payload.split('\n', 1)
        _, status, _ = status_line.split(' ', 2)
        sub_message = parser.parsestr(rest)
        payload = sub_message._payload
        ctype = sub_message['Content-Type']
        msg_headers = dict(sub_message._headers)
        msg_headers['status'] = status
        headers = httplib2.Response(msg_headers)
        # Decode JSON bodies so callers receive a mapping, matching what
        # a direct (non-batched) api_request would have produced.
        if ctype and ctype.startswith('application/json'):
            payload = json.loads(payload)
        yield headers, payload
|
937
venv/Lib/site-packages/gcloud/storage/blob.py
Normal file
937
venv/Lib/site-packages/gcloud/storage/blob.py
Normal file
|
@ -0,0 +1,937 @@
|
|||
# Copyright 2014 Google Inc. All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Create / interact with Google Cloud Storage blobs."""
|
||||
|
||||
import base64
|
||||
import copy
|
||||
import hashlib
|
||||
from io import BytesIO
|
||||
import json
|
||||
import mimetypes
|
||||
import os
|
||||
import time
|
||||
|
||||
import httplib2
|
||||
import six
|
||||
from six.moves.urllib.parse import quote
|
||||
|
||||
from gcloud._helpers import _rfc3339_to_datetime
|
||||
from gcloud._helpers import _to_bytes
|
||||
from gcloud._helpers import _bytes_to_unicode
|
||||
from gcloud.credentials import generate_signed_url
|
||||
from gcloud.exceptions import NotFound
|
||||
from gcloud.exceptions import make_exception
|
||||
from gcloud.storage._helpers import _PropertyMixin
|
||||
from gcloud.storage._helpers import _scalar_property
|
||||
from gcloud.storage.acl import ObjectACL
|
||||
from gcloud.streaming.http_wrapper import Request
|
||||
from gcloud.streaming.http_wrapper import make_api_request
|
||||
from gcloud.streaming.transfer import Download
|
||||
from gcloud.streaming.transfer import RESUMABLE_UPLOAD
|
||||
from gcloud.streaming.transfer import Upload
|
||||
|
||||
|
||||
# Endpoint used when generating signed URLs for direct object access.
_API_ACCESS_ENDPOINT = 'https://storage.googleapis.com'
|
||||
|
||||
|
||||
class Blob(_PropertyMixin):
|
||||
"""A wrapper around Cloud Storage's concept of an ``Object``.
|
||||
|
||||
:type name: string
|
||||
:param name: The name of the blob. This corresponds to the
|
||||
unique path of the object in the bucket.
|
||||
|
||||
:type bucket: :class:`gcloud.storage.bucket.Bucket`
|
||||
:param bucket: The bucket to which this blob belongs.
|
||||
|
||||
:type chunk_size: integer
|
||||
:param chunk_size: The size of a chunk of data whenever iterating (1 MB).
|
||||
This must be a multiple of 256 KB per the API
|
||||
specification.
|
||||
"""
|
||||
|
||||
_chunk_size = None # Default value for each instance.
|
||||
|
||||
_CHUNK_SIZE_MULTIPLE = 256 * 1024
|
||||
"""Number (256 KB, in bytes) that must divide the chunk size."""
|
||||
|
||||
    def __init__(self, name, bucket, chunk_size=None):
        super(Blob, self).__init__(name=name)

        self.chunk_size = chunk_size  # Check that setter accepts value.
        self.bucket = bucket
        # Per-object ACL; exposed (read-only) via the :attr:`acl` property.
        self._acl = ObjectACL(self)
|
||||
|
||||
    @property
    def chunk_size(self):
        """Get the blob's default chunk size.

        :rtype: integer or ``NoneType``
        :returns: The current blob's chunk size, if it is set.
                  ``None`` means the transfer machinery picks its own size.
        """
        return self._chunk_size
|
||||
|
||||
@chunk_size.setter
|
||||
def chunk_size(self, value):
|
||||
"""Set the blob's default chunk size.
|
||||
|
||||
:type value: integer or ``NoneType``
|
||||
:param value: The current blob's chunk size, if it is set.
|
||||
|
||||
:raises: :class:`ValueError` if ``value`` is not ``None`` and is not a
|
||||
multiple of 256 KB.
|
||||
"""
|
||||
if value is not None and value % self._CHUNK_SIZE_MULTIPLE != 0:
|
||||
raise ValueError('Chunk size must be a multiple of %d.' % (
|
||||
self._CHUNK_SIZE_MULTIPLE,))
|
||||
self._chunk_size = value
|
||||
|
||||
@staticmethod
|
||||
def path_helper(bucket_path, blob_name):
|
||||
"""Relative URL path for a blob.
|
||||
|
||||
:type bucket_path: string
|
||||
:param bucket_path: The URL path for a bucket.
|
||||
|
||||
:type blob_name: string
|
||||
:param blob_name: The name of the blob.
|
||||
|
||||
:rtype: string
|
||||
:returns: The relative URL path for ``blob_name``.
|
||||
"""
|
||||
return bucket_path + '/o/' + quote(blob_name, safe='')
|
||||
|
||||
    @property
    def acl(self):
        """Create our ACL on demand."""
        # The ObjectACL instance is created eagerly in __init__; this
        # property simply exposes it read-only.
        return self._acl
|
||||
|
||||
def __repr__(self):
|
||||
if self.bucket:
|
||||
bucket_name = self.bucket.name
|
||||
else:
|
||||
bucket_name = None
|
||||
|
||||
return '<Blob: %s, %s>' % (bucket_name, self.name)
|
||||
|
||||
    @property
    def path(self):
        """Getter property for the URL path to this Blob.

        :rtype: string
        :returns: The URL path to this Blob.

        :raises: :class:`ValueError` if the blob has an empty name.
        """
        if not self.name:
            raise ValueError('Cannot determine path without a blob name.')

        return self.path_helper(self.bucket.path, self.name)
|
||||
|
||||
    @property
    def client(self):
        """The client bound to this blob."""
        # Blobs have no client of their own; they borrow the bucket's.
        return self.bucket.client
|
||||
|
||||
@property
|
||||
def public_url(self):
|
||||
"""The public URL for this blob's object.
|
||||
|
||||
:rtype: `string`
|
||||
:returns: The public URL for this blob.
|
||||
"""
|
||||
return '{storage_base_url}/{bucket_name}/{quoted_name}'.format(
|
||||
storage_base_url='https://storage.googleapis.com',
|
||||
bucket_name=self.bucket.name,
|
||||
quoted_name=quote(self.name, safe=''))
|
||||
|
||||
    def generate_signed_url(self, expiration, method='GET',
                            content_type=None,
                            generation=None, response_disposition=None,
                            response_type=None, client=None, credentials=None):
        """Generates a signed URL for this blob.

        .. note::

            If you are on Google Compute Engine, you can't generate a signed
            URL. Follow `Issue 922`_ for updates on this. If you'd like to
            be able to generate a signed URL from GCE, you can use a standard
            service account from a JSON file rather than a GCE service account.

        .. _Issue 922: https://github.com/GoogleCloudPlatform/\
                       gcloud-python/issues/922

        If you have a blob that you want to allow access to for a set
        amount of time, you can use this method to generate a URL that
        is only valid within a certain time period.

        This is particularly useful if you don't want publicly
        accessible blobs, but don't want to require users to explicitly
        log in.

        :type expiration: int, long, datetime.datetime, datetime.timedelta
        :param expiration: When the signed URL should expire.

        :type method: str
        :param method: The HTTP verb that will be used when requesting the URL.

        :type content_type: str
        :param content_type: (Optional) The content type of the object
                             referenced by ``resource``.

        :type generation: str
        :param generation: (Optional) A value that indicates which generation
                           of the resource to fetch.

        :type response_disposition: str
        :param response_disposition: (Optional) Content disposition of
                                     responses to requests for the signed URL.
                                     For example, to enable the signed URL
                                     to initiate a file of ``blog.png``, use
                                     the value
                                     ``'attachment; filename=blob.png'``.

        :type response_type: str
        :param response_type: (Optional) Content type of responses to requests
                              for the signed URL. Used to over-ride the content
                              type of the underlying blob/object.

        :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
        :param client: (Optional) The client to use.  If not passed, falls back
                       to the ``client`` stored on the blob's bucket.


        :type credentials: :class:`oauth2client.client.OAuth2Credentials` or
                           :class:`NoneType`
        :param credentials: (Optional) The OAuth2 credentials to use to sign
                            the URL. Defaults to the credentials stored on the
                            client used.

        :rtype: str
        :returns: A signed URL you can use to access the resource
                  until expiration.
        """
        resource = '/{bucket_name}/{quoted_name}'.format(
            bucket_name=self.bucket.name,
            quoted_name=quote(self.name, safe=''))

        # Explicit credentials take precedence; otherwise pull them off the
        # (possibly defaulted) client's connection.
        if credentials is None:
            client = self._require_client(client)
            credentials = client._connection.credentials

        return generate_signed_url(
            credentials, resource=resource,
            api_access_endpoint=_API_ACCESS_ENDPOINT,
            expiration=expiration, method=method,
            content_type=content_type,
            response_type=response_type,
            response_disposition=response_disposition,
            generation=generation)
|
||||
|
||||
    def exists(self, client=None):
        """Determines whether or not this blob exists.

        :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the blob's bucket.

        :rtype: boolean
        :returns: True if the blob exists in Cloud Storage.
        """
        client = self._require_client(client)
        try:
            # We only need the status code (200 or not) so we seek to
            # minimize the returned payload.
            query_params = {'fields': 'name'}
            # We intentionally pass `_target_object=None` since fields=name
            # would limit the local properties.
            client.connection.api_request(method='GET', path=self.path,
                                          query_params=query_params,
                                          _target_object=None)
            # NOTE: This will not fail immediately in a batch. However, when
            # Batch.finish() is called, the resulting `NotFound` will be
            # raised.
            return True
        except NotFound:
            return False
|
||||
|
||||
    def delete(self, client=None):
        """Deletes a blob from Cloud Storage.

        :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the blob's bucket.

        :rtype: :class:`Blob`
        :returns: The blob that was just deleted.
        :raises: :class:`gcloud.exceptions.NotFound`
                 (propagated from
                 :meth:`gcloud.storage.bucket.Bucket.delete_blob`).
        """
        # Delegate to the bucket so bucket-level deletion semantics apply.
        return self.bucket.delete_blob(self.name, client=client)
|
||||
|
||||
    def download_to_file(self, file_obj, encryption_key=None, client=None):
        """Download the contents of this blob into a file-like object.

        .. note::

           If the server-set property, :attr:`media_link`, is not yet
           initialized, makes an additional API request to load it.

        Downloading a file that has been encrypted with a `customer-supplied`_
        encryption key::

            >>> from gcloud import storage
            >>> from gcloud.storage import Blob

            >>> client = storage.Client(project='my-project')
            >>> bucket = client.get_bucket('my-bucket')
            >>> encryption_key = 'aa426195405adee2c8081bb9e7e74b19'
            >>> blob = Blob('secure-data', bucket)
            >>> with open('/tmp/my-secure-file', 'wb') as file_obj:
            >>>     blob.download_to_file(file_obj,
            ...                           encryption_key=encryption_key)

        The ``encryption_key`` should be a str or bytes with a length of at
        least 32.

        .. _customer-supplied: https://cloud.google.com/storage/docs/\
                               encryption#customer-supplied

        :type file_obj: file
        :param file_obj: A file handle to which to write the blob's data.

        :type encryption_key: str or bytes
        :param encryption_key: Optional 32 byte encryption key for
                               customer-supplied encryption.

        :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the blob's bucket.

        :raises: :class:`gcloud.exceptions.NotFound`
        """
        client = self._require_client(client)
        if self.media_link is None:  # not yet loaded
            self.reload()

        download_url = self.media_link

        # Use apitools 'Download' facility.
        download = Download.from_stream(file_obj)

        if self.chunk_size is not None:
            download.chunksize = self.chunk_size

        headers = {}
        if encryption_key:
            # Customer-supplied key headers must accompany every request
            # touching the encrypted object.
            _set_encryption_headers(encryption_key, headers)

        request = Request(download_url, 'GET', headers)

        # Use the private ``_connection`` rather than the public
        # ``.connection``, since the public connection may be a batch. A
        # batch wraps a client's connection, but does not store the `http`
        # object. The rest (API_BASE_URL and build_api_url) are also defined
        # on the Batch class, but we just use the wrapped connection since
        # it has all three (http, API_BASE_URL and build_api_url).
        download.initialize_download(request, client._connection.http)
|
||||
|
||||
def download_to_filename(self, filename, encryption_key=None, client=None):
|
||||
"""Download the contents of this blob into a named file.
|
||||
|
||||
:type filename: string
|
||||
:param filename: A filename to be passed to ``open``.
|
||||
|
||||
:type encryption_key: str or bytes
|
||||
:param encryption_key: Optional 32 byte encryption key for
|
||||
customer-supplied encryption.
|
||||
|
||||
:type client: :class:`gcloud.storage.client.Client` or ``NoneType``
|
||||
:param client: Optional. The client to use. If not passed, falls back
|
||||
to the ``client`` stored on the blob's bucket.
|
||||
|
||||
:raises: :class:`gcloud.exceptions.NotFound`
|
||||
"""
|
||||
with open(filename, 'wb') as file_obj:
|
||||
self.download_to_file(file_obj, encryption_key=encryption_key,
|
||||
client=client)
|
||||
|
||||
mtime = time.mktime(self.updated.timetuple())
|
||||
os.utime(file_obj.name, (mtime, mtime))
|
||||
|
||||
def download_as_string(self, encryption_key=None, client=None):
|
||||
"""Download the contents of this blob as a string.
|
||||
|
||||
:type encryption_key: str or bytes
|
||||
:param encryption_key: Optional 32 byte encryption key for
|
||||
customer-supplied encryption.
|
||||
|
||||
:type client: :class:`gcloud.storage.client.Client` or ``NoneType``
|
||||
:param client: Optional. The client to use. If not passed, falls back
|
||||
to the ``client`` stored on the blob's bucket.
|
||||
|
||||
:rtype: bytes
|
||||
:returns: The data stored in this blob.
|
||||
:raises: :class:`gcloud.exceptions.NotFound`
|
||||
"""
|
||||
string_buffer = BytesIO()
|
||||
self.download_to_file(string_buffer, encryption_key=encryption_key,
|
||||
client=client)
|
||||
return string_buffer.getvalue()
|
||||
|
||||
@staticmethod
|
||||
def _check_response_error(request, http_response):
|
||||
"""Helper for :meth:`upload_from_file`."""
|
||||
info = http_response.info
|
||||
status = int(info['status'])
|
||||
if not 200 <= status < 300:
|
||||
faux_response = httplib2.Response({'status': status})
|
||||
raise make_exception(faux_response, http_response.content,
|
||||
error_info=request.url)
|
||||
|
||||
    # pylint: disable=too-many-locals
    def upload_from_file(self, file_obj, rewind=False, size=None,
                         encryption_key=None, content_type=None, num_retries=6,
                         client=None):
        """Upload the contents of this blob from a file-like object.

        The content type of the upload will either be
        - The value passed in to the function (if any)
        - The value stored on the current blob
        - The default value of 'application/octet-stream'

        .. note::
           The effect of uploading to an existing blob depends on the
           "versioning" and "lifecycle" policies defined on the blob's
           bucket.  In the absence of those policies, upload will
           overwrite any existing contents.

           See the `object versioning
           <https://cloud.google.com/storage/docs/object-versioning>`_ and
           `lifecycle <https://cloud.google.com/storage/docs/lifecycle>`_
           API documents for details.

        Uploading a file with a `customer-supplied`_ encryption key::

            >>> from gcloud import storage
            >>> from gcloud.storage import Blob

            >>> client = storage.Client(project='my-project')
            >>> bucket = client.get_bucket('my-bucket')
            >>> encryption_key = 'aa426195405adee2c8081bb9e7e74b19'
            >>> blob = Blob('secure-data', bucket)
            >>> with open('my-file', 'rb') as my_file:
            >>>     blob.upload_from_file(my_file,
            ...                           encryption_key=encryption_key)

        The ``encryption_key`` should be a str or bytes with a length of at
        least 32.

        .. _customer-supplied: https://cloud.google.com/storage/docs/\
                               encryption#customer-supplied

        :type file_obj: file
        :param file_obj: A file handle open for reading.

        :type rewind: boolean
        :param rewind: If True, seek to the beginning of the file handle before
                       writing the file to Cloud Storage.

        :type size: int
        :param size: The number of bytes to read from the file handle.
                     If not provided, we'll try to guess the size using
                     :func:`os.fstat`. (If the file handle is not from the
                     filesystem this won't be possible.)

        :type encryption_key: str or bytes
        :param encryption_key: Optional 32 byte encryption key for
                               customer-supplied encryption.

        :type content_type: string or ``NoneType``
        :param content_type: Optional type of content being uploaded.

        :type num_retries: integer
        :param num_retries: Number of upload retries. Defaults to 6.

        :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the blob's bucket.

        :raises: :class:`ValueError` if size is not passed in and can not be
                 determined; :class:`gcloud.exceptions.GCloudError` if the
                 upload response returns an error status.
        """
        client = self._require_client(client)
        # Use the private ``_connection`` rather than the public
        # ``.connection``, since the public connection may be a batch. A
        # batch wraps a client's connection, but does not store the `http`
        # object. The rest (API_BASE_URL and build_api_url) are also defined
        # on the Batch class, but we just use the wrapped connection since
        # it has all three (http, API_BASE_URL and build_api_url).
        connection = client._connection
        content_type = (content_type or self._properties.get('contentType') or
                        'application/octet-stream')

        # Rewind the file if desired.
        if rewind:
            file_obj.seek(0, os.SEEK_SET)

        # Get the basic stats about the file.
        total_bytes = size
        if total_bytes is None:
            if hasattr(file_obj, 'fileno'):
                total_bytes = os.fstat(file_obj.fileno()).st_size
            else:
                raise ValueError('total bytes could not be determined. Please '
                                 'pass an explicit size.')
        headers = {
            'Accept': 'application/json',
            'Accept-Encoding': 'gzip, deflate',
            'User-Agent': connection.USER_AGENT,
        }

        if encryption_key:
            _set_encryption_headers(encryption_key, headers)

        upload = Upload(file_obj, content_type, total_bytes,
                        auto_transfer=False)

        if self.chunk_size is not None:
            upload.chunksize = self.chunk_size

        url_builder = _UrlBuilder(bucket_name=self.bucket.name,
                                  object_name=self.name)
        upload_config = _UploadConfig()

        # Temporary URL, until we know simple vs. resumable.
        base_url = connection.API_BASE_URL + '/upload'
        upload_url = connection.build_api_url(api_base_url=base_url,
                                              path=self.bucket.path + '/o')

        # Use apitools 'Upload' facility.
        request = Request(upload_url, 'POST', headers)

        upload.configure_request(upload_config, request, url_builder)
        query_params = url_builder.query_params
        base_url = connection.API_BASE_URL + '/upload'
        # Rebuild the URL now that configure_request has chosen the upload
        # type and populated the query parameters.
        request.url = connection.build_api_url(api_base_url=base_url,
                                               path=self.bucket.path + '/o',
                                               query_params=query_params)
        upload.initialize_upload(request, connection.http)

        # Resumable uploads stream in chunks; simple uploads go out as a
        # single (retryable) request.
        if upload.strategy == RESUMABLE_UPLOAD:
            http_response = upload.stream_file(use_chunks=True)
        else:
            http_response = make_api_request(connection.http, request,
                                             retries=num_retries)

        self._check_response_error(request, http_response)
        response_content = http_response.content

        if not isinstance(response_content,
                          six.string_types):  # pragma: NO COVER  Python3
            response_content = response_content.decode('utf-8')
        # Refresh local metadata from the object resource in the response.
        self._set_properties(json.loads(response_content))
    # pylint: enable=too-many-locals
|
||||
|
||||
    def upload_from_filename(self, filename, content_type=None,
                             encryption_key=None, client=None):
        """Upload this blob's contents from the content of a named file.

        The content type of the upload will either be
        - The value passed in to the function (if any)
        - The value stored on the current blob
        - The value given by mimetypes.guess_type

        .. note::
           The effect of uploading to an existing blob depends on the
           "versioning" and "lifecycle" policies defined on the blob's
           bucket.  In the absence of those policies, upload will
           overwrite any existing contents.

           See the `object versioning
           <https://cloud.google.com/storage/docs/object-versioning>`_ and
           `lifecycle <https://cloud.google.com/storage/docs/lifecycle>`_
           API documents for details.

        :type filename: string
        :param filename: The path to the file.

        :type content_type: string or ``NoneType``
        :param content_type: Optional type of content being uploaded.

        :type encryption_key: str or bytes
        :param encryption_key: Optional 32 byte encryption key for
                               customer-supplied encryption.

        :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the blob's bucket.
        """
        # Prefer the explicit argument, then any type already stored on the
        # blob, and finally a guess based on the file extension.
        content_type = content_type or self._properties.get('contentType')
        if content_type is None:
            content_type, _ = mimetypes.guess_type(filename)

        with open(filename, 'rb') as file_obj:
            self.upload_from_file(file_obj, content_type=content_type,
                                  encryption_key=encryption_key, client=client)
|
||||
|
||||
def upload_from_string(self, data, content_type='text/plain',
                       encryption_key=None, client=None):
    """Upload contents of this blob from the provided string.

    Text input is encoded as UTF-8 before upload.

    .. note::
       Uploading to an existing blob is subject to the bucket's
       "versioning" and "lifecycle" policies; absent those, the upload
       overwrites any existing contents.  See the `object versioning
       <https://cloud.google.com/storage/docs/object-versioning>`_ and
       `lifecycle <https://cloud.google.com/storage/docs/lifecycle>`_
       API documents for details.

    :type data: bytes or text
    :param data: The data to store in this blob. If the value is
                 text, it will be encoded as UTF-8.

    :type content_type: string
    :param content_type: Optional type of content being uploaded. Defaults
                         to ``'text/plain'``.

    :type encryption_key: str or bytes
    :param encryption_key: Optional 32 byte encryption key for
                           customer-supplied encryption.

    :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
    :param client: Optional. The client to use. If not passed, falls back
                   to the ``client`` stored on the blob's bucket.
    """
    payload = data
    if isinstance(payload, six.text_type):
        payload = payload.encode('utf-8')
    buffer_ = BytesIO()
    buffer_.write(payload)
    # rewind=True makes upload_from_file seek back to offset 0.
    self.upload_from_file(file_obj=buffer_, rewind=True,
                          size=len(payload), content_type=content_type,
                          encryption_key=encryption_key, client=client)
|
||||
|
||||
def make_public(self, client=None):
    """Grant all users read access to this blob and persist the ACL.

    :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
    :param client: Optional. The client to use. If not passed, falls back
                   to the ``client`` stored on the blob's bucket.
    """
    everyone = self.acl.all()
    everyone.grant_read()
    self.acl.save(client=client)
|
||||
|
||||
cache_control = _scalar_property('cacheControl')
"""HTTP 'Cache-Control' header for this object.

See: https://tools.ietf.org/html/rfc7234#section-5.2 and
https://cloud.google.com/storage/docs/json_api/v1/objects

If the property is not set locally, returns ``None``.

:rtype: string or ``NoneType``
"""

content_disposition = _scalar_property('contentDisposition')
"""HTTP 'Content-Disposition' header for this object.

See: https://tools.ietf.org/html/rfc6266 and
https://cloud.google.com/storage/docs/json_api/v1/objects

If the property is not set locally, returns ``None``.

:rtype: string or ``NoneType``
"""

content_encoding = _scalar_property('contentEncoding')
"""HTTP 'Content-Encoding' header for this object.

See: https://tools.ietf.org/html/rfc7231#section-3.1.2.2 and
https://cloud.google.com/storage/docs/json_api/v1/objects

If the property is not set locally, returns ``None``.

:rtype: string or ``NoneType``
"""

content_language = _scalar_property('contentLanguage')
"""HTTP 'Content-Language' header for this object.

See: http://tools.ietf.org/html/bcp47 and
https://cloud.google.com/storage/docs/json_api/v1/objects

If the property is not set locally, returns ``None``.

:rtype: string or ``NoneType``
"""

content_type = _scalar_property('contentType')
"""HTTP 'Content-Type' header for this object.

See: https://tools.ietf.org/html/rfc2616#section-14.17 and
https://cloud.google.com/storage/docs/json_api/v1/objects

If the property is not set locally, returns ``None``.

:rtype: string or ``NoneType``
"""

crc32c = _scalar_property('crc32c')
"""CRC32C checksum for this object.

See: http://tools.ietf.org/html/rfc4960#appendix-B and
https://cloud.google.com/storage/docs/json_api/v1/objects

If the property is not set locally, returns ``None``.

:rtype: string or ``NoneType``
"""
|
||||
|
||||
@property
def component_count(self):
    """Number of underlying components that make up this object.

    See: https://cloud.google.com/storage/docs/json_api/v1/objects

    :rtype: integer or ``NoneType``
    :returns: The component count (in case of a composed object) or
              ``None`` if the property is not set locally. This property
              will not be set on objects not created via ``compose``.
    """
    raw = self._properties.get('componentCount')
    return None if raw is None else int(raw)
|
||||
|
||||
@property
def etag(self):
    """Retrieve the ETag for the object.

    See: http://tools.ietf.org/html/rfc2616#section-3.11 and
    https://cloud.google.com/storage/docs/json_api/v1/objects

    :rtype: string or ``NoneType``
    :returns: The blob etag or ``None`` if the property is not set locally.
    """
    return self._properties.get('etag')

@property
def generation(self):
    """Retrieve the generation for the object.

    See: https://cloud.google.com/storage/docs/json_api/v1/objects

    :rtype: integer or ``NoneType``
    :returns: The generation of the blob or ``None`` if the property
              is not set locally.
    """
    raw = self._properties.get('generation')
    return None if raw is None else int(raw)

@property
def id(self):
    """Retrieve the ID for the object.

    See: https://cloud.google.com/storage/docs/json_api/v1/objects

    :rtype: string or ``NoneType``
    :returns: The ID of the blob or ``None`` if the property is not
              set locally.
    """
    return self._properties.get('id')
|
||||
|
||||
md5_hash = _scalar_property('md5Hash')
"""MD5 hash for this object.

See: http://tools.ietf.org/html/rfc4960#appendix-B and
https://cloud.google.com/storage/docs/json_api/v1/objects

If the property is not set locally, returns ``None``.

:rtype: string or ``NoneType``
"""
|
||||
|
||||
@property
def media_link(self):
    """Retrieve the media download URI for the object.

    See: https://cloud.google.com/storage/docs/json_api/v1/objects

    :rtype: string or ``NoneType``
    :returns: The media link for the blob or ``None`` if the property is
              not set locally.
    """
    return self._properties.get('mediaLink')

@property
def metadata(self):
    """Retrieve arbitrary/application specific metadata for the object.

    See: https://cloud.google.com/storage/docs/json_api/v1/objects

    :rtype: dict or ``NoneType``
    :returns: The metadata associated with the blob or ``None`` if the
              property is not set locally.
    """
    # Deep copy so callers cannot mutate the cached properties.
    return copy.deepcopy(self._properties.get('metadata'))

@metadata.setter
def metadata(self, value):
    """Update arbitrary/application specific metadata for the object.

    See: https://cloud.google.com/storage/docs/json_api/v1/objects

    :type value: dict or ``NoneType``
    :param value: The blob metadata to set.
    """
    self._patch_property('metadata', value)
|
||||
|
||||
@property
def metageneration(self):
    """Retrieve the metageneration for the object.

    See: https://cloud.google.com/storage/docs/json_api/v1/objects

    :rtype: integer or ``NoneType``
    :returns: The metageneration of the blob or ``None`` if the property
              is not set locally.
    """
    raw = self._properties.get('metageneration')
    return None if raw is None else int(raw)

@property
def owner(self):
    """Retrieve info about the owner of the object.

    See: https://cloud.google.com/storage/docs/json_api/v1/objects

    :rtype: dict or ``NoneType``
    :returns: Mapping of owner's role/ID. If the property is not set
              locally, returns ``None``.
    """
    # Deep copy so callers cannot mutate the cached properties.
    return copy.deepcopy(self._properties.get('owner'))
|
||||
|
||||
@property
def self_link(self):
    """Retrieve the URI for the object.

    See: https://cloud.google.com/storage/docs/json_api/v1/objects

    :rtype: string or ``NoneType``
    :returns: The self link for the blob or ``None`` if the property is
              not set locally.
    """
    return self._properties.get('selfLink')

@property
def size(self):
    """Size of the object, in bytes.

    See: https://cloud.google.com/storage/docs/json_api/v1/objects

    :rtype: integer or ``NoneType``
    :returns: The size of the blob or ``None`` if the property
              is not set locally.
    """
    raw = self._properties.get('size')
    return None if raw is None else int(raw)
|
||||
|
||||
@property
def storage_class(self):
    """Retrieve the storage class for the object.

    See: https://cloud.google.com/storage/docs/storage-classes
    https://cloud.google.com/storage/docs/nearline-storage
    https://cloud.google.com/storage/docs/durable-reduced-availability

    :rtype: string or ``NoneType``
    :returns: If set, one of "STANDARD", "NEARLINE", or
              "DURABLE_REDUCED_AVAILABILITY", else ``None``.
    """
    return self._properties.get('storageClass')
|
||||
|
||||
@property
def time_deleted(self):
    """Retrieve the timestamp at which the object was deleted.

    See: https://cloud.google.com/storage/docs/json_api/v1/objects

    :rtype: :class:`datetime.datetime` or ``NoneType``
    :returns: Datetime object parsed from RFC3339 valid timestamp, or
              ``None`` if the property is not set locally. If the blob has
              not been deleted, this will never be set.
    """
    raw = self._properties.get('timeDeleted')
    if raw is not None:
        return _rfc3339_to_datetime(raw)

@property
def updated(self):
    """Retrieve the timestamp at which the object was updated.

    See: https://cloud.google.com/storage/docs/json_api/v1/objects

    :rtype: :class:`datetime.datetime` or ``NoneType``
    :returns: Datetime object parsed from RFC3339 valid timestamp, or
              ``None`` if the property is not set locally.
    """
    raw = self._properties.get('updated')
    if raw is not None:
        return _rfc3339_to_datetime(raw)
|
||||
|
||||
|
||||
class _UploadConfig(object):
    """Faux message FBO apitools' 'configure_request'.

    Values extracted from apitools
    'samples/storage_sample/storage/storage_v1_client.py'
    """
    # Accepted response content types for uploads.
    accept = ['*/*']
    # No size cap on uploads.
    max_size = None
    resumable_multipart = True
    resumable_path = u'/resumable/upload/storage/v1/b/{bucket}/o'
    simple_multipart = True
    simple_path = u'/upload/storage/v1/b/{bucket}/o'
|
||||
|
||||
|
||||
class _UrlBuilder(object):
    """Faux builder FBO apitools' 'configure_request'"""

    def __init__(self, bucket_name, object_name):
        # apitools reads/updates these attributes while building the URL.
        self._bucket_name = bucket_name
        self._relative_path = ''
        self.query_params = {'name': object_name}
|
||||
|
||||
|
||||
def _set_encryption_headers(key, headers):
    """Build customer-supplied encryption key headers.

    Adds the AES256 algorithm header plus the base64-encoded key and its
    base64-encoded SHA-256 digest, as required by the Cloud Storage
    customer-supplied-encryption protocol.

    :type key: str or bytes
    :param key: 32 byte key to build request key and hash.

    :type headers: dict
    :param headers: dict of HTTP headers being sent in request.
    """
    raw_key = _to_bytes(key)
    digest = hashlib.sha256(raw_key).digest()
    encoded_key = base64.b64encode(raw_key).rstrip()
    encoded_digest = base64.b64encode(digest).rstrip()
    headers['X-Goog-Encryption-Algorithm'] = 'AES256'
    headers['X-Goog-Encryption-Key'] = _bytes_to_unicode(encoded_key)
    headers['X-Goog-Encryption-Key-Sha256'] = _bytes_to_unicode(encoded_digest)
|
829
venv/Lib/site-packages/gcloud/storage/bucket.py
Normal file
829
venv/Lib/site-packages/gcloud/storage/bucket.py
Normal file
|
@ -0,0 +1,829 @@
|
|||
# Copyright 2014 Google Inc. All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Create / interact with gcloud storage buckets."""
|
||||
|
||||
import copy
|
||||
|
||||
import six
|
||||
|
||||
from gcloud._helpers import _rfc3339_to_datetime
|
||||
from gcloud.exceptions import NotFound
|
||||
from gcloud.iterator import Iterator
|
||||
from gcloud.storage._helpers import _PropertyMixin
|
||||
from gcloud.storage._helpers import _scalar_property
|
||||
from gcloud.storage.acl import BucketACL
|
||||
from gcloud.storage.acl import DefaultObjectACL
|
||||
from gcloud.storage.blob import Blob
|
||||
|
||||
|
||||
class _BlobIterator(Iterator):
    """An iterator listing blobs in a bucket

    You shouldn't have to use this directly, but instead should use the
    :class:`gcloud.storage.blob.Bucket.list_blobs` method.

    :type bucket: :class:`gcloud.storage.bucket.Bucket`
    :param bucket: The bucket from which to list blobs.

    :type extra_params: dict or None
    :param extra_params: Extra query string parameters for the API call.

    :type client: :class:`gcloud.storage.client.Client`
    :param client: Optional. The client to use for making connections.
                   Defaults to the bucket's client.
    """

    def __init__(self, bucket, extra_params=None, client=None):
        if client is None:
            client = bucket.client
        self.bucket = bucket
        # Accumulates every prefix seen across all pages.
        self.prefixes = set()
        self._current_prefixes = None
        super(_BlobIterator, self).__init__(
            client=client, path=bucket.path + '/o',
            extra_params=extra_params)

    def get_items_from_response(self, response):
        """Yield :class:`.storage.blob.Blob` items from response.

        :type response: dict
        :param response: The JSON API response for a page of blobs.
        """
        self._current_prefixes = tuple(response.get('prefixes', ()))
        self.prefixes.update(self._current_prefixes)
        for item in response.get('items', []):
            blob = Blob(item.get('name'), bucket=self.bucket)
            blob._set_properties(item)
            yield blob
|
||||
|
||||
|
||||
class Bucket(_PropertyMixin):
|
||||
"""A class representing a Bucket on Cloud Storage.
|
||||
|
||||
:type client: :class:`gcloud.storage.client.Client`
|
||||
:param client: A client which holds credentials and project configuration
|
||||
for the bucket (which requires a project).
|
||||
|
||||
:type name: string
|
||||
:param name: The name of the bucket.
|
||||
"""
|
||||
# Iterator class used by list_blobs(); overridable for testing.
_iterator_class = _BlobIterator

_MAX_OBJECTS_FOR_ITERATION = 256
"""Maximum number of existing objects allowed in iteration.

This is used in Bucket.delete() and Bucket.make_public().
"""

# Storage classes accepted by the JSON API for buckets.
_STORAGE_CLASSES = ('STANDARD', 'NEARLINE', 'DURABLE_REDUCED_AVAILABILITY')
|
||||
|
||||
def __init__(self, client, name=None):
    # ``name`` is handled by the _PropertyMixin base class.
    super(Bucket, self).__init__(name=name)
    # Client carries the credentials / project used for API requests.
    self._client = client
    # ACL helpers are bound to this bucket instance.
    self._acl = BucketACL(self)
    self._default_object_acl = DefaultObjectACL(self)
|
||||
|
||||
def __repr__(self):
    """Debug representation showing the bucket name."""
    return '<Bucket: {}>'.format(self.name)
|
||||
|
||||
@property
def client(self):
    """The client bound to this bucket."""
    return self._client
|
||||
|
||||
def blob(self, blob_name, chunk_size=None):
    """Factory constructor for blob object.

    .. note::
      This will not make an HTTP request; it simply instantiates
      a blob object owned by this bucket.

    :type blob_name: string
    :param blob_name: The name of the blob to be instantiated.

    :type chunk_size: integer
    :param chunk_size: The size of a chunk of data whenever iterating
                       (1 MB). This must be a multiple of 256 KB per the
                       API specification.

    :rtype: :class:`gcloud.storage.blob.Blob`
    :returns: The blob object created.
    """
    return Blob(name=blob_name, bucket=self, chunk_size=chunk_size)
|
||||
|
||||
def exists(self, client=None):
    """Determines whether or not this bucket exists.

    :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
    :param client: Optional. The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :rtype: boolean
    :returns: True if the bucket exists in Cloud Storage.
    """
    client = self._require_client(client)
    try:
        # Only the status code matters, so request the minimal payload
        # (just the name) and pass `_target_object=None` so the limited
        # fields don't clobber the local properties.
        client.connection.api_request(method='GET', path=self.path,
                                      query_params={'fields': 'name'},
                                      _target_object=None)
        # NOTE: Inside a batch this does not fail immediately; the
        # `NotFound` surfaces when Batch.finish() is called.
        return True
    except NotFound:
        return False
|
||||
|
||||
def create(self, client=None):
    """Creates current bucket.

    If the bucket already exists, will raise
    :class:`gcloud.exceptions.Conflict`.

    This implements "storage.buckets.insert".

    :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
    :param client: Optional. The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :rtype: :class:`gcloud.storage.bucket.Bucket`
    :returns: The newly created bucket.
    """
    client = self._require_client(client)
    # Only locally-changed properties are sent, plus the bucket name.
    data = {key: self._properties[key] for key in self._changes}
    data['name'] = self.name
    api_response = client.connection.api_request(
        method='POST', path='/b',
        query_params={'project': client.project},
        data=data, _target_object=self)
    self._set_properties(api_response)
|
||||
|
||||
@property
def acl(self):
    """Create our ACL on demand."""
    return self._acl

@property
def default_object_acl(self):
    """Create our defaultObjectACL on demand."""
    return self._default_object_acl
|
||||
|
||||
@staticmethod
|
||||
def path_helper(bucket_name):
|
||||
"""Relative URL path for a bucket.
|
||||
|
||||
:type bucket_name: string
|
||||
:param bucket_name: The bucket name in the path.
|
||||
|
||||
:rtype: string
|
||||
:returns: The relative URL path for ``bucket_name``.
|
||||
"""
|
||||
return '/b/' + bucket_name
|
||||
|
||||
@property
def path(self):
    """The URL path to this bucket.

    :raises: :class:`ValueError` if the bucket has no name.
    """
    if not self.name:
        raise ValueError('Cannot determine path without bucket name.')
    return self.path_helper(self.name)
|
||||
|
||||
def get_blob(self, blob_name, client=None):
    """Get a blob object by name.

    This will return None if the blob doesn't exist::

      >>> from gcloud import storage
      >>> client = storage.Client()
      >>> bucket = client.get_bucket('my-bucket')
      >>> print bucket.get_blob('/path/to/blob.txt')
      <Blob: my-bucket, /path/to/blob.txt>
      >>> print bucket.get_blob('/does-not-exist.txt')
      None

    :type blob_name: string
    :param blob_name: The name of the blob to retrieve.

    :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
    :param client: Optional. The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :rtype: :class:`gcloud.storage.blob.Blob` or None
    :returns: The blob object if it exists, otherwise None.
    """
    client = self._require_client(client)
    blob = Blob(bucket=self, name=blob_name)
    try:
        response = client.connection.api_request(
            method='GET', path=blob.path, _target_object=blob)
        # Assumes response.get('name') matches `blob_name`.
        blob._set_properties(response)
        # NOTE: Inside a batch this does not fail immediately; the
        # `NotFound` surfaces when Batch.finish() is called.
        return blob
    except NotFound:
        return None
|
||||
|
||||
def list_blobs(self, max_results=None, page_token=None, prefix=None,
               delimiter=None, versions=None,
               projection='noAcl', fields=None, client=None):
    """Return an iterator used to find blobs in the bucket.

    :type max_results: integer or ``NoneType``
    :param max_results: maximum number of blobs to return.

    :type page_token: string
    :param page_token: opaque marker for the next "page" of blobs. If not
                       passed, will return the first page of blobs.

    :type prefix: string or ``NoneType``
    :param prefix: optional prefix used to filter blobs.

    :type delimiter: string or ``NoneType``
    :param delimiter: optional delimter, used with ``prefix`` to
                      emulate hierarchy.

    :type versions: boolean or ``NoneType``
    :param versions: whether object versions should be returned as
                     separate blobs.

    :type projection: string or ``NoneType``
    :param projection: If used, must be 'full' or 'noAcl'. Defaults to
                       'noAcl'. Specifies the set of properties to return.

    :type fields: string or ``NoneType``
    :param fields: Selector specifying which fields to include in a
                   partial response. Must be a list of fields. For example
                   to get a partial response with just the next page token
                   and the language of each blob returned:
                   'items/contentLanguage,nextPageToken'

    :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
    :param client: Optional. The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :rtype: :class:`_BlobIterator`.
    :returns: An iterator of blobs.
    """
    params = {}
    if max_results is not None:
        params['maxResults'] = max_results
    if prefix is not None:
        params['prefix'] = prefix
    if delimiter is not None:
        params['delimiter'] = delimiter
    if versions is not None:
        params['versions'] = versions
    params['projection'] = projection
    if fields is not None:
        params['fields'] = fields

    iterator = self._iterator_class(
        self, extra_params=params, client=client)
    # Page token is a reserved property on the base `Iterator`, so it
    # has to be assigned after construction.
    if page_token is not None:
        iterator.next_page_token = page_token
    return iterator
|
||||
|
||||
def delete(self, force=False, client=None):
    """Delete this bucket.

    The bucket **must** be empty in order to submit a delete request. If
    ``force=True`` is passed, this will first attempt to delete all the
    objects / blobs in the bucket (i.e. try to empty the bucket).

    If the bucket doesn't exist, this will raise
    :class:`gcloud.exceptions.NotFound`. If the bucket is not empty
    (and ``force=False``), will raise :class:`gcloud.exceptions.Conflict`.

    If ``force=True`` and the bucket contains more than 256 objects / blobs
    this will cowardly refuse to delete the objects (or the bucket). This
    is to prevent accidental bucket deletion and to prevent extremely long
    runtime of this method.

    :type force: boolean
    :param force: If True, empties the bucket's objects then deletes it.

    :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
    :param client: Optional. The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :raises: :class:`ValueError` if ``force`` is ``True`` and the bucket
             contains more than 256 objects / blobs.
    """
    client = self._require_client(client)
    if force:
        # Fetch one more than the limit so overflow is detectable.
        blobs = list(self.list_blobs(
            max_results=self._MAX_OBJECTS_FOR_ITERATION + 1,
            client=client))
        if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION:
            message = (
                'Refusing to delete bucket with more than '
                '%d objects. If you actually want to delete '
                'this bucket, please delete the objects '
                'yourself before calling Bucket.delete().'
            ) % (self._MAX_OBJECTS_FOR_ITERATION,)
            raise ValueError(message)

        # Ignore 404 errors on delete.
        self.delete_blobs(blobs, on_error=lambda blob: None,
                          client=client)

    # `_target_object=None` is intentional: DELETE has no response value,
    # whether in a standard request or in a batch request.
    client.connection.api_request(method='DELETE', path=self.path,
                                  _target_object=None)
|
||||
|
||||
def delete_blob(self, blob_name, client=None):
    """Deletes a blob from the current bucket.

    If the blob isn't found (backend 404), raises a
    :class:`gcloud.exceptions.NotFound`.

    For example::

      >>> from gcloud.exceptions import NotFound
      >>> from gcloud import storage
      >>> client = storage.Client()
      >>> bucket = client.get_bucket('my-bucket')
      >>> print bucket.list_blobs()
      [<Blob: my-bucket, my-file.txt>]
      >>> bucket.delete_blob('my-file.txt')
      >>> try:
      ...   bucket.delete_blob('doesnt-exist')
      ... except NotFound:
      ...   pass

    :type blob_name: string
    :param blob_name: A blob name to delete.

    :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
    :param client: Optional. The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :raises: :class:`gcloud.exceptions.NotFound` (to suppress
             the exception, call ``delete_blobs``, passing a no-op
             ``on_error`` callback, e.g.::

             >>> bucket.delete_blobs([blob], on_error=lambda blob: None)
    """
    client = self._require_client(client)
    # `_target_object=None` is intentional: DELETE has no response value,
    # whether in a standard request or in a batch request.
    client.connection.api_request(
        method='DELETE',
        path=Blob.path_helper(self.path, blob_name),
        _target_object=None)
|
||||
|
||||
def delete_blobs(self, blobs, on_error=None, client=None):
    """Deletes a list of blobs from the current bucket.

    Uses :func:`Bucket.delete_blob` to delete each individual blob.

    :type blobs: list of string or :class:`gcloud.storage.blob.Blob`
    :param blobs: A list of blob names or Blob objects to delete.

    :type on_error: a callable taking (blob)
    :param on_error: If not ``None``, called once for each blob raising
                     :class:`gcloud.exceptions.NotFound`;
                     otherwise, the exception is propagated.

    :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
    :param client: Optional. The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :raises: :class:`gcloud.exceptions.NotFound` (if
             `on_error` is not passed).
    """
    for blob in blobs:
        try:
            # Accept either plain names or Blob instances.
            name = blob
            if not isinstance(name, six.string_types):
                name = blob.name
            self.delete_blob(name, client=client)
        except NotFound:
            if on_error is None:
                raise
            on_error(blob)
|
||||
|
||||
def copy_blob(self, blob, destination_bucket, new_name=None,
              client=None):
    """Copy the given blob to the given bucket, optionally with a new name.

    :type blob: :class:`gcloud.storage.blob.Blob`
    :param blob: The blob to be copied.

    :type destination_bucket: :class:`gcloud.storage.bucket.Bucket`
    :param destination_bucket: The bucket into which the blob should be
                               copied.

    :type new_name: string
    :param new_name: (optional) the new name for the copied file.

    :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
    :param client: Optional. The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :rtype: :class:`gcloud.storage.blob.Blob`
    :returns: The new Blob.
    """
    client = self._require_client(client)
    if new_name is None:
        # Default to keeping the source blob's name.
        new_name = blob.name
    new_blob = Blob(bucket=destination_bucket, name=new_name)
    copy_result = client.connection.api_request(
        method='POST',
        path=blob.path + '/copyTo' + new_blob.path,
        _target_object=new_blob)
    new_blob._set_properties(copy_result)
    return new_blob
|
||||
|
||||
def rename_blob(self, blob, new_name, client=None):
    """Rename the given blob using copy and delete operations.

    Effectively, copies blob to the same bucket with a new name, then
    deletes the blob.

    .. warning::

      This method will first duplicate the data and then delete the
      old blob. This means that with very large objects renaming
      could be a very (temporarily) costly or a very slow operation.

    :type blob: :class:`gcloud.storage.blob.Blob`
    :param blob: The blob to be renamed.

    :type new_name: string
    :param new_name: The new name for this blob.

    :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
    :param client: Optional. The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :rtype: :class:`Blob`
    :returns: The newly-renamed blob.
    """
    renamed = self.copy_blob(blob, self, new_name, client=client)
    blob.delete(client=client)
    return renamed
|
||||
|
||||
@property
def cors(self):
    """Retrieve CORS policies configured for this bucket.

    See: http://www.w3.org/TR/cors/ and
    https://cloud.google.com/storage/docs/json_api/v1/buckets

    :rtype: list of dictionaries
    :returns: A sequence of mappings describing each CORS policy.
    """
    # Deep copy each policy so callers cannot mutate the cached list.
    return [copy.deepcopy(policy)
            for policy in self._properties.get('cors', ())]

@cors.setter
def cors(self, entries):
    """Set CORS policies configured for this bucket.

    See: http://www.w3.org/TR/cors/ and
    https://cloud.google.com/storage/docs/json_api/v1/buckets

    :type entries: list of dictionaries
    :param entries: A sequence of mappings describing each CORS policy.
    """
    self._patch_property('cors', entries)
|
||||
|
||||
@property
|
||||
def etag(self):
|
||||
"""Retrieve the ETag for the bucket.
|
||||
|
||||
See: http://tools.ietf.org/html/rfc2616#section-3.11 and
|
||||
https://cloud.google.com/storage/docs/json_api/v1/buckets
|
||||
|
||||
:rtype: string or ``NoneType``
|
||||
:returns: The bucket etag or ``None`` if the property is not
|
||||
set locally.
|
||||
"""
|
||||
return self._properties.get('etag')
|
||||
|
||||
@property
|
||||
def id(self):
|
||||
"""Retrieve the ID for the bucket.
|
||||
|
||||
See: https://cloud.google.com/storage/docs/json_api/v1/buckets
|
||||
|
||||
:rtype: string or ``NoneType``
|
||||
:returns: The ID of the bucket or ``None`` if the property is not
|
||||
set locally.
|
||||
"""
|
||||
return self._properties.get('id')
|
||||
|
||||
@property
|
||||
def lifecycle_rules(self):
|
||||
"""Lifecycle rules configured for this bucket.
|
||||
|
||||
See: https://cloud.google.com/storage/docs/lifecycle and
|
||||
https://cloud.google.com/storage/docs/json_api/v1/buckets
|
||||
|
||||
:rtype: list(dict)
|
||||
:returns: A sequence of mappings describing each lifecycle rule.
|
||||
"""
|
||||
info = self._properties.get('lifecycle', {})
|
||||
return [copy.deepcopy(rule) for rule in info.get('rule', ())]
|
||||
|
||||
@lifecycle_rules.setter
|
||||
def lifecycle_rules(self, rules):
|
||||
"""Update the lifecycle rules configured for this bucket.
|
||||
|
||||
See: https://cloud.google.com/storage/docs/lifecycle and
|
||||
https://cloud.google.com/storage/docs/json_api/v1/buckets
|
||||
|
||||
:rtype: list(dict)
|
||||
:returns: A sequence of mappings describing each lifecycle rule.
|
||||
"""
|
||||
self._patch_property('lifecycle', {'rule': rules})
|
||||
|
||||
    # NOTE(review): plain scalar passthrough -- reads 'location' from the
    # cached properties and writes it via _patch_property.
    location = _scalar_property('location')
    """Retrieve location configured for this bucket.

    See: https://cloud.google.com/storage/docs/json_api/v1/buckets and
    https://cloud.google.com/storage/docs/concepts-techniques#specifyinglocations

    If the property is not set locally, returns ``None``.

    :rtype: string or ``NoneType``
    """
|
||||
|
||||
def get_logging(self):
|
||||
"""Return info about access logging for this bucket.
|
||||
|
||||
See: https://cloud.google.com/storage/docs/accesslogs#status
|
||||
|
||||
:rtype: dict or None
|
||||
:returns: a dict w/ keys, ``logBucket`` and ``logObjectPrefix``
|
||||
(if logging is enabled), or None (if not).
|
||||
"""
|
||||
info = self._properties.get('logging')
|
||||
return copy.deepcopy(info)
|
||||
|
||||
def enable_logging(self, bucket_name, object_prefix=''):
|
||||
"""Enable access logging for this bucket.
|
||||
|
||||
See: https://cloud.google.com/storage/docs/accesslogs#delivery
|
||||
|
||||
:type bucket_name: string
|
||||
:param bucket_name: name of bucket in which to store access logs
|
||||
|
||||
:type object_prefix: string
|
||||
:param object_prefix: prefix for access log filenames
|
||||
"""
|
||||
info = {'logBucket': bucket_name, 'logObjectPrefix': object_prefix}
|
||||
self._patch_property('logging', info)
|
||||
|
||||
def disable_logging(self):
|
||||
"""Disable access logging for this bucket.
|
||||
|
||||
See: https://cloud.google.com/storage/docs/accesslogs#disabling
|
||||
"""
|
||||
self._patch_property('logging', None)
|
||||
|
||||
@property
|
||||
def metageneration(self):
|
||||
"""Retrieve the metageneration for the bucket.
|
||||
|
||||
See: https://cloud.google.com/storage/docs/json_api/v1/buckets
|
||||
|
||||
:rtype: integer or ``NoneType``
|
||||
:returns: The metageneration of the bucket or ``None`` if the property
|
||||
is not set locally.
|
||||
"""
|
||||
metageneration = self._properties.get('metageneration')
|
||||
if metageneration is not None:
|
||||
return int(metageneration)
|
||||
|
||||
@property
|
||||
def owner(self):
|
||||
"""Retrieve info about the owner of the bucket.
|
||||
|
||||
See: https://cloud.google.com/storage/docs/json_api/v1/buckets
|
||||
|
||||
:rtype: dict or ``NoneType``
|
||||
:returns: Mapping of owner's role/ID. If the property is not set
|
||||
locally, returns ``None``.
|
||||
"""
|
||||
return copy.deepcopy(self._properties.get('owner'))
|
||||
|
||||
@property
|
||||
def project_number(self):
|
||||
"""Retrieve the number of the project to which the bucket is assigned.
|
||||
|
||||
See: https://cloud.google.com/storage/docs/json_api/v1/buckets
|
||||
|
||||
:rtype: integer or ``NoneType``
|
||||
:returns: The project number that owns the bucket or ``None`` if the
|
||||
property is not set locally.
|
||||
"""
|
||||
project_number = self._properties.get('projectNumber')
|
||||
if project_number is not None:
|
||||
return int(project_number)
|
||||
|
||||
@property
|
||||
def self_link(self):
|
||||
"""Retrieve the URI for the bucket.
|
||||
|
||||
See: https://cloud.google.com/storage/docs/json_api/v1/buckets
|
||||
|
||||
:rtype: string or ``NoneType``
|
||||
:returns: The self link for the bucket or ``None`` if the property is
|
||||
not set locally.
|
||||
"""
|
||||
return self._properties.get('selfLink')
|
||||
|
||||
@property
|
||||
def storage_class(self):
|
||||
"""Retrieve the storage class for the bucket.
|
||||
|
||||
See: https://cloud.google.com/storage/docs/storage-classes
|
||||
https://cloud.google.com/storage/docs/nearline-storage
|
||||
https://cloud.google.com/storage/docs/durable-reduced-availability
|
||||
|
||||
:rtype: string or ``NoneType``
|
||||
:returns: If set, one of "STANDARD", "NEARLINE", or
|
||||
"DURABLE_REDUCED_AVAILABILITY", else ``None``.
|
||||
"""
|
||||
return self._properties.get('storageClass')
|
||||
|
||||
@storage_class.setter
|
||||
def storage_class(self, value):
|
||||
"""Set the storage class for the bucket.
|
||||
|
||||
See: https://cloud.google.com/storage/docs/storage-classes
|
||||
https://cloud.google.com/storage/docs/nearline-storage
|
||||
https://cloud.google.com/storage/docs/durable-reduced-availability
|
||||
|
||||
:type value: string
|
||||
:param value: one of "STANDARD", "NEARLINE", or
|
||||
"DURABLE_REDUCED_AVAILABILITY"
|
||||
"""
|
||||
if value not in self._STORAGE_CLASSES:
|
||||
raise ValueError('Invalid storage class: %s' % (value,))
|
||||
self._patch_property('storageClass', value)
|
||||
|
||||
@property
|
||||
def time_created(self):
|
||||
"""Retrieve the timestamp at which the bucket was created.
|
||||
|
||||
See: https://cloud.google.com/storage/docs/json_api/v1/buckets
|
||||
|
||||
:rtype: :class:`datetime.datetime` or ``NoneType``
|
||||
:returns: Datetime object parsed from RFC3339 valid timestamp, or
|
||||
``None`` if the property is not set locally.
|
||||
"""
|
||||
value = self._properties.get('timeCreated')
|
||||
if value is not None:
|
||||
return _rfc3339_to_datetime(value)
|
||||
|
||||
@property
|
||||
def versioning_enabled(self):
|
||||
"""Is versioning enabled for this bucket?
|
||||
|
||||
See: https://cloud.google.com/storage/docs/object-versioning for
|
||||
details.
|
||||
|
||||
:rtype: boolean
|
||||
:returns: True if enabled, else False.
|
||||
"""
|
||||
versioning = self._properties.get('versioning', {})
|
||||
return versioning.get('enabled', False)
|
||||
|
||||
@versioning_enabled.setter
|
||||
def versioning_enabled(self, value):
|
||||
"""Enable versioning for this bucket.
|
||||
|
||||
See: https://cloud.google.com/storage/docs/object-versioning for
|
||||
details.
|
||||
|
||||
:type value: convertible to boolean
|
||||
:param value: should versioning be anabled for the bucket?
|
||||
"""
|
||||
self._patch_property('versioning', {'enabled': bool(value)})
|
||||
|
||||
def configure_website(self, main_page_suffix=None, not_found_page=None):
|
||||
"""Configure website-related properties.
|
||||
|
||||
See: https://developers.google.com/storage/docs/website-configuration
|
||||
|
||||
.. note::
|
||||
This (apparently) only works
|
||||
if your bucket name is a domain name
|
||||
(and to do that, you need to get approved somehow...).
|
||||
|
||||
If you want this bucket to host a website, just provide the name
|
||||
of an index page and a page to use when a blob isn't found::
|
||||
|
||||
>>> from gcloud import storage
|
||||
>>> client = storage.Client()
|
||||
>>> bucket = client.get_bucket(bucket_name)
|
||||
>>> bucket.configure_website('index.html', '404.html')
|
||||
|
||||
You probably should also make the whole bucket public::
|
||||
|
||||
>>> bucket.make_public(recursive=True, future=True)
|
||||
|
||||
This says: "Make the bucket public, and all the stuff already in
|
||||
the bucket, and anything else I add to the bucket. Just make it
|
||||
all public."
|
||||
|
||||
:type main_page_suffix: string
|
||||
:param main_page_suffix: The page to use as the main page
|
||||
of a directory.
|
||||
Typically something like index.html.
|
||||
|
||||
:type not_found_page: string
|
||||
:param not_found_page: The file to use when a page isn't found.
|
||||
"""
|
||||
data = {
|
||||
'mainPageSuffix': main_page_suffix,
|
||||
'notFoundPage': not_found_page,
|
||||
}
|
||||
self._patch_property('website', data)
|
||||
|
||||
def disable_website(self):
|
||||
"""Disable the website configuration for this bucket.
|
||||
|
||||
This is really just a shortcut for setting the website-related
|
||||
attributes to ``None``.
|
||||
"""
|
||||
return self.configure_website(None, None)
|
||||
|
||||
def make_public(self, recursive=False, future=False, client=None):
|
||||
"""Make a bucket public.
|
||||
|
||||
If ``recursive=True`` and the bucket contains more than 256
|
||||
objects / blobs this will cowardly refuse to make the objects public.
|
||||
This is to prevent extremely long runtime of this method.
|
||||
|
||||
:type recursive: boolean
|
||||
:param recursive: If True, this will make all blobs inside the bucket
|
||||
public as well.
|
||||
|
||||
:type future: boolean
|
||||
:param future: If True, this will make all objects created in the
|
||||
future public as well.
|
||||
|
||||
:type client: :class:`gcloud.storage.client.Client` or ``NoneType``
|
||||
:param client: Optional. The client to use. If not passed, falls back
|
||||
to the ``client`` stored on the current bucket.
|
||||
"""
|
||||
self.acl.all().grant_read()
|
||||
self.acl.save(client=client)
|
||||
|
||||
if future:
|
||||
doa = self.default_object_acl
|
||||
if not doa.loaded:
|
||||
doa.reload(client=client)
|
||||
doa.all().grant_read()
|
||||
doa.save(client=client)
|
||||
|
||||
if recursive:
|
||||
blobs = list(self.list_blobs(
|
||||
projection='full',
|
||||
max_results=self._MAX_OBJECTS_FOR_ITERATION + 1,
|
||||
client=client))
|
||||
if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION:
|
||||
message = (
|
||||
'Refusing to make public recursively with more than '
|
||||
'%d objects. If you actually want to make every object '
|
||||
'in this bucket public, please do it on the objects '
|
||||
'yourself.'
|
||||
) % (self._MAX_OBJECTS_FOR_ITERATION,)
|
||||
raise ValueError(message)
|
||||
|
||||
for blob in blobs:
|
||||
blob.acl.all().grant_read()
|
||||
blob.acl.save(client=client)
|
306
venv/Lib/site-packages/gcloud/storage/client.py
Normal file
306
venv/Lib/site-packages/gcloud/storage/client.py
Normal file
|
@ -0,0 +1,306 @@
|
|||
# Copyright 2015 Google Inc. All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Client for interacting with the Google Cloud Storage API."""
|
||||
|
||||
|
||||
from gcloud._helpers import _LocalStack
|
||||
from gcloud.client import JSONClient
|
||||
from gcloud.exceptions import NotFound
|
||||
from gcloud.iterator import Iterator
|
||||
from gcloud.storage.batch import Batch
|
||||
from gcloud.storage.bucket import Bucket
|
||||
from gcloud.storage.connection import Connection
|
||||
|
||||
|
||||
class Client(JSONClient):
    """Client to bundle configuration needed for API requests.

    :type project: string
    :param project: the project which the client acts on behalf of. Will be
                    passed when creating a bucket.  If not passed,
                    falls back to the default inferred from the environment.

    :type credentials: :class:`oauth2client.client.OAuth2Credentials` or
                       :class:`NoneType`
    :param credentials: The OAuth2 Credentials to use for the connection
                        owned by this client. If not passed (and if no ``http``
                        object is passed), falls back to the default inferred
                        from the environment.

    :type http: :class:`httplib2.Http` or class that defines ``request()``.
    :param http: An optional HTTP object to make requests. If not passed, an
                 ``http`` object is created that is bound to the
                 ``credentials`` for the current object.
    """

    _connection_class = Connection

    def __init__(self, project=None, credentials=None, http=None):
        # ``_connection`` must exist before the base-class constructor
        # runs, because it assigns through the ``connection`` setter
        # below, which reads ``self._connection``.
        self._connection = None
        super(Client, self).__init__(project=project, credentials=credentials,
                                     http=http)
        self._batch_stack = _LocalStack()

    @property
    def connection(self):
        """Get connection or batch on the client.

        :rtype: :class:`gcloud.storage.connection.Connection`
        :returns: The connection set on the client, or the batch
                  if one is set.
        """
        # An active batch intercepts API requests, so it shadows the
        # real connection while it is on the stack.
        if self.current_batch is not None:
            return self.current_batch
        else:
            return self._connection

    @connection.setter
    def connection(self, value):
        """Set connection on the client.

        Intended to be used by constructor (since the base class calls)
            self.connection = connection
        Will raise if the connection is set more than once.

        :type value: :class:`gcloud.storage.connection.Connection`
        :param value: The connection set on the client.

        :raises: :class:`ValueError` if connection has already been set.
        """
        if self._connection is not None:
            raise ValueError('Connection already set on client')
        self._connection = value

    def _push_batch(self, batch):
        """Push a batch onto our stack.

        "Protected", intended for use by batch context mgrs.

        :type batch: :class:`gcloud.storage.batch.Batch`
        :param batch: newly-active batch
        """
        self._batch_stack.push(batch)

    def _pop_batch(self):
        """Pop a batch from our stack.

        "Protected", intended for use by batch context mgrs.

        :raises: IndexError if the stack is empty.
        :rtype: :class:`gcloud.storage.batch.Batch`
        :returns: the top-most batch/transaction, after removing it.
        """
        return self._batch_stack.pop()

    @property
    def current_batch(self):
        """Currently-active batch.

        :rtype: :class:`gcloud.storage.batch.Batch` or ``NoneType`` (if
                no batch is active).
        :returns: The batch at the top of the batch stack.
        """
        return self._batch_stack.top

    def bucket(self, bucket_name):
        """Factory constructor for bucket object.

        .. note::
          This will not make an HTTP request; it simply instantiates
          a bucket object owned by this client.

        :type bucket_name: string
        :param bucket_name: The name of the bucket to be instantiated.

        :rtype: :class:`gcloud.storage.bucket.Bucket`
        :returns: The bucket object created.
        """
        return Bucket(client=self, name=bucket_name)

    def batch(self):
        """Factory constructor for batch object.

        .. note::
          This will not make an HTTP request; it simply instantiates
          a batch object owned by this client.

        :rtype: :class:`gcloud.storage.batch.Batch`
        :returns: The batch object created.
        """
        return Batch(client=self)

    def get_bucket(self, bucket_name):
        """Get a bucket by name.

        If the bucket isn't found, this will raise a
        :class:`gcloud.exceptions.NotFound`.

        For example::

          >>> try:
          >>>   bucket = client.get_bucket('my-bucket')
          >>> except gcloud.exceptions.NotFound:
          >>>   print 'Sorry, that bucket does not exist!'

        This implements "storage.buckets.get".

        :type bucket_name: string
        :param bucket_name: The name of the bucket to get.

        :rtype: :class:`gcloud.storage.bucket.Bucket`
        :returns: The bucket matching the name provided.
        :raises: :class:`gcloud.exceptions.NotFound`
        """
        bucket = Bucket(self, name=bucket_name)
        # reload() issues the GET; a missing bucket surfaces as NotFound.
        bucket.reload(client=self)
        return bucket

    def lookup_bucket(self, bucket_name):
        """Get a bucket by name, returning None if not found.

        You can use this if you would rather check for a None value
        than catching an exception::

          >>> bucket = client.lookup_bucket('doesnt-exist')
          >>> print bucket
          None
          >>> bucket = client.lookup_bucket('my-bucket')
          >>> print bucket
          <Bucket: my-bucket>

        :type bucket_name: string
        :param bucket_name: The name of the bucket to get.

        :rtype: :class:`gcloud.storage.bucket.Bucket`
        :returns: The bucket matching the name provided or None if not found.
        """
        try:
            return self.get_bucket(bucket_name)
        except NotFound:
            return None

    def create_bucket(self, bucket_name):
        """Create a new bucket.

        For example::

          >>> bucket = client.create_bucket('my-bucket')
          >>> print bucket
          <Bucket: my-bucket>

        This implements "storage.buckets.insert".

        If the bucket already exists, will raise
        :class:`gcloud.exceptions.Conflict`.

        :type bucket_name: string
        :param bucket_name: The bucket name to create.

        :rtype: :class:`gcloud.storage.bucket.Bucket`
        :returns: The newly created bucket.
        """
        bucket = Bucket(self, name=bucket_name)
        bucket.create(client=self)
        return bucket

    def list_buckets(self, max_results=None, page_token=None, prefix=None,
                     projection='noAcl', fields=None):
        """Get all buckets in the project associated to the client.

        This will not populate the list of blobs available in each
        bucket.

          >>> for bucket in client.list_buckets():
          >>>   print bucket

        This implements "storage.buckets.list".

        :type max_results: integer or ``NoneType``
        :param max_results: Optional. Maximum number of buckets to return.

        :type page_token: string or ``NoneType``
        :param page_token: Optional. Opaque marker for the next "page" of
                           buckets. If not passed, will return the first page
                           of buckets.

        :type prefix: string or ``NoneType``
        :param prefix: Optional. Filter results to buckets whose names begin
                       with this prefix.

        :type projection: string or ``NoneType``
        :param projection: If used, must be 'full' or 'noAcl'. Defaults to
                           'noAcl'. Specifies the set of properties to return.

        :type fields: string or ``NoneType``
        :param fields: Selector specifying which fields to include in a
                       partial response. Must be a list of fields. For example
                       to get a partial response with just the next page token
                       and the language of each bucket returned:
                       'items/id,nextPageToken'

        :rtype: iterable of :class:`gcloud.storage.bucket.Bucket` objects.
        :returns: All buckets belonging to this project.
        """
        extra_params = {'project': self.project}

        if max_results is not None:
            extra_params['maxResults'] = max_results

        if prefix is not None:
            extra_params['prefix'] = prefix

        extra_params['projection'] = projection

        if fields is not None:
            extra_params['fields'] = fields

        result = _BucketIterator(client=self,
                                 extra_params=extra_params)
        # Page token must be handled specially since the base `Iterator`
        # class has it as a reserved property.
        if page_token is not None:
            result.next_page_token = page_token
        return result
|
||||
|
||||
|
||||
class _BucketIterator(Iterator):
    """An iterator listing all buckets.

    You shouldn't have to use this directly, but instead should use the
    helper methods on :class:`gcloud.storage.connection.Connection`
    objects.

    :type client: :class:`gcloud.storage.client.Client`
    :param client: The client to use for making connections.

    :type extra_params: dict or ``NoneType``
    :param extra_params: Extra query string parameters for the API call.
    """

    def __init__(self, client, extra_params=None):
        # '/b' is the buckets collection path in the JSON API.
        super(_BucketIterator, self).__init__(client=client, path='/b',
                                              extra_params=extra_params)

    def get_items_from_response(self, response):
        """Factory method which yields :class:`.Bucket` items from a response.

        :type response: dict
        :param response: The JSON API response for a page of buckets.
        """
        for item in response.get('items', []):
            bucket = Bucket(self.client, item.get('name'))
            bucket._set_properties(item)
            yield bucket
|
43
venv/Lib/site-packages/gcloud/storage/connection.py
Normal file
43
venv/Lib/site-packages/gcloud/storage/connection.py
Normal file
|
@ -0,0 +1,43 @@
|
|||
# Copyright 2014 Google Inc. All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Create / interact with gcloud storage connections."""
|
||||
|
||||
from gcloud import connection as base_connection
|
||||
|
||||
|
||||
class Connection(base_connection.JSONConnection):
    """A connection to Google Cloud Storage via the JSON REST API.

    Differs from the base :class:`JSONConnection` only in the
    class-level constants below (URL pieces and OAuth2 scopes).

    :type credentials: :class:`oauth2client.client.OAuth2Credentials`
    :param credentials: (Optional) The OAuth2 Credentials to use for this
                        connection.

    :type http: :class:`httplib2.Http` or class that defines ``request()``.
    :param http: (Optional) HTTP object to make requests.
    """

    API_BASE_URL = base_connection.API_BASE_URL
    """The base of the API call URL."""

    API_VERSION = 'v1'
    """The version of the API, used in building the API call's URL."""

    API_URL_TEMPLATE = '{api_base_url}/storage/{api_version}{path}'
    """A template for the URL of a particular API call."""

    SCOPE = ('https://www.googleapis.com/auth/devstorage.full_control',
             'https://www.googleapis.com/auth/devstorage.read_only',
             'https://www.googleapis.com/auth/devstorage.read_write')
    """The scopes required for authenticating as a Cloud Storage consumer."""
|
223
venv/Lib/site-packages/gcloud/storage/test__helpers.py
Normal file
223
venv/Lib/site-packages/gcloud/storage/test__helpers.py
Normal file
|
@ -0,0 +1,223 @@
|
|||
# Copyright 2014 Google Inc. All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import unittest2
|
||||
|
||||
|
||||
class Test_PropertyMixin(unittest2.TestCase):
    """Unit tests for ``gcloud.storage._helpers._PropertyMixin``."""

    def _getTargetClass(self):
        from gcloud.storage._helpers import _PropertyMixin
        return _PropertyMixin

    def _makeOne(self, *args, **kw):
        return self._getTargetClass()(*args, **kw)

    def _derivedClass(self, path=None):
        # Build a concrete subclass whose abstract ``path`` property
        # returns the closed-over ``path`` argument.

        class Derived(self._getTargetClass()):

            client = None

            @property
            def path(self):
                return path

        return Derived

    def test_path_is_abstract(self):
        mixin = self._makeOne()
        self.assertRaises(NotImplementedError, lambda: mixin.path)

    def test_client_is_abstract(self):
        mixin = self._makeOne()
        self.assertRaises(NotImplementedError, lambda: mixin.client)

    def test_reload(self):
        connection = _Connection({'foo': 'Foo'})
        client = _Client(connection)
        derived = self._derivedClass('/path')()
        # Make sure changes is not a set, so we can observe a change.
        derived._changes = object()
        derived.reload(client=client)
        self.assertEqual(derived._properties, {'foo': 'Foo'})
        kw = connection._requested
        self.assertEqual(len(kw), 1)
        self.assertEqual(kw[0]['method'], 'GET')
        self.assertEqual(kw[0]['path'], '/path')
        self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'})
        # Make sure changes get reset by reload.
        self.assertEqual(derived._changes, set())

    def test__set_properties(self):
        mixin = self._makeOne()
        self.assertEqual(mixin._properties, {})
        VALUE = object()
        mixin._set_properties(VALUE)
        self.assertEqual(mixin._properties, VALUE)

    def test__patch_property(self):
        derived = self._derivedClass()()
        derived._patch_property('foo', 'Foo')
        self.assertEqual(derived._properties, {'foo': 'Foo'})

    def test_patch(self):
        connection = _Connection({'foo': 'Foo'})
        client = _Client(connection)
        derived = self._derivedClass('/path')()
        # Make sure changes is non-empty, so we can observe a change.
        BAR = object()
        BAZ = object()
        derived._properties = {'bar': BAR, 'baz': BAZ}
        derived._changes = set(['bar'])  # Ignore baz.
        derived.patch(client=client)
        self.assertEqual(derived._properties, {'foo': 'Foo'})
        kw = connection._requested
        self.assertEqual(len(kw), 1)
        self.assertEqual(kw[0]['method'], 'PATCH')
        self.assertEqual(kw[0]['path'], '/path')
        self.assertEqual(kw[0]['query_params'], {'projection': 'full'})
        # Since changes does not include `baz`, we don't see it sent.
        self.assertEqual(kw[0]['data'], {'bar': BAR})
        # Make sure changes get reset by patch().
        self.assertEqual(derived._changes, set())
|
||||
|
||||
|
||||
class Test__scalar_property(unittest2.TestCase):
    """Unit tests for ``gcloud.storage._helpers._scalar_property``."""

    def _callFUT(self, fieldName):
        from gcloud.storage._helpers import _scalar_property
        return _scalar_property(fieldName)

    def test_getter(self):

        # NOTE: inside the class body, ``self`` is the enclosing test
        # case, so ``self._callFUT`` builds the property at class
        # definition time.
        class Test(object):
            def __init__(self, **kw):
                self._properties = kw.copy()
            do_re_mi = self._callFUT('solfege')

        test = Test(solfege='Latido')
        self.assertEqual(test.do_re_mi, 'Latido')

    def test_setter(self):

        class Test(object):
            def _patch_property(self, name, value):
                self._patched = (name, value)
            do_re_mi = self._callFUT('solfege')

        test = Test()
        test.do_re_mi = 'Latido'
        self.assertEqual(test._patched, ('solfege', 'Latido'))
|
||||
|
||||
|
||||
class Test__base64_md5hash(unittest2.TestCase):
    """Unit tests for ``gcloud.storage._helpers._base64_md5hash``."""

    def _callFUT(self, bytes_to_sign):
        from gcloud.storage._helpers import _base64_md5hash
        return _base64_md5hash(bytes_to_sign)

    def test_it(self):
        from io import BytesIO
        BYTES_TO_SIGN = b'FOO'
        BUFFER = BytesIO()
        BUFFER.write(BYTES_TO_SIGN)
        BUFFER.seek(0)

        SIGNED_CONTENT = self._callFUT(BUFFER)
        self.assertEqual(SIGNED_CONTENT, b'kBiQqOnIz21aGlQrIp/r/w==')

    def test_it_with_stubs(self):
        from gcloud._testing import _Monkey
        from gcloud.storage import _helpers as MUT

        # Fake file-like object: returns queued values from read(),
        # recording the requested block sizes.
        class _Buffer(object):

            def __init__(self, return_vals):
                self.return_vals = return_vals
                self._block_sizes = []

            def read(self, block_size):
                self._block_sizes.append(block_size)
                return self.return_vals.pop()

        BASE64 = _Base64()
        DIGEST_VAL = object()
        BYTES_TO_SIGN = b'BYTES_TO_SIGN'
        # Second read returns b'' to terminate the read loop.
        BUFFER = _Buffer([b'', BYTES_TO_SIGN])
        MD5 = _MD5(DIGEST_VAL)

        with _Monkey(MUT, base64=BASE64, md5=MD5):
            SIGNED_CONTENT = self._callFUT(BUFFER)

        self.assertEqual(BUFFER._block_sizes, [8192, 8192])
        self.assertTrue(SIGNED_CONTENT is DIGEST_VAL)
        self.assertEqual(BASE64._called_b64encode, [DIGEST_VAL])
        self.assertEqual(MD5._called, [None])
        self.assertEqual(MD5.hash_obj.num_digest_calls, 1)
        self.assertEqual(MD5.hash_obj._blocks, [BYTES_TO_SIGN])
|
||||
|
||||
|
||||
class _Connection(object):
|
||||
|
||||
def __init__(self, *responses):
|
||||
self._responses = responses
|
||||
self._requested = []
|
||||
|
||||
def api_request(self, **kw):
|
||||
self._requested.append(kw)
|
||||
response, self._responses = self._responses[0], self._responses[1:]
|
||||
return response
|
||||
|
||||
|
||||
class _MD5Hash(object):
|
||||
|
||||
def __init__(self, digest_val):
|
||||
self.digest_val = digest_val
|
||||
self.num_digest_calls = 0
|
||||
self._blocks = []
|
||||
|
||||
def update(self, block):
|
||||
self._blocks.append(block)
|
||||
|
||||
def digest(self):
|
||||
self.num_digest_calls += 1
|
||||
return self.digest_val
|
||||
|
||||
|
||||
class _MD5(object):
    """Fake ``md5`` factory: every call records its arg, returns one hash."""

    def __init__(self, digest_val):
        self.hash_obj = _MD5Hash(digest_val)
        self._called = []

    def __call__(self, data=None):
        self._called.append(data)
        return self.hash_obj
|
||||
|
||||
|
||||
class _Base64(object):
|
||||
|
||||
def __init__(self):
|
||||
self._called_b64encode = []
|
||||
|
||||
def b64encode(self, value):
|
||||
self._called_b64encode.append(value)
|
||||
return value
|
||||
|
||||
|
||||
class _Client(object):
|
||||
|
||||
def __init__(self, connection):
|
||||
self.connection = connection
|
812
venv/Lib/site-packages/gcloud/storage/test_acl.py
Normal file
812
venv/Lib/site-packages/gcloud/storage/test_acl.py
Normal file
|
@ -0,0 +1,812 @@
|
|||
# Copyright 2014 Google Inc. All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import unittest2
|
||||
|
||||
|
||||
class Test_ACLEntity(unittest2.TestCase):
    """Unit tests for ``gcloud.storage.acl._ACLEntity``.

    Covers construction, string form, role grant/revoke, and the
    role-specific convenience helpers (read/write/owner).
    """

    def _getTargetClass(self):
        # Deferred import so a broken module fails the test, not collection.
        from gcloud.storage.acl import _ACLEntity
        return _ACLEntity

    def _makeOne(self, *args, **kw):
        # Instantiate the class under test.
        return self._getTargetClass()(*args, **kw)

    def test_ctor_default_identifier(self):
        # Identifier is optional and defaults to None; no roles granted.
        TYPE = 'type'
        entity = self._makeOne(TYPE)
        self.assertEqual(entity.type, TYPE)
        self.assertEqual(entity.identifier, None)
        self.assertEqual(entity.get_roles(), set())

    def test_ctor_w_identifier(self):
        TYPE = 'type'
        ID = 'id'
        entity = self._makeOne(TYPE, ID)
        self.assertEqual(entity.type, TYPE)
        self.assertEqual(entity.identifier, ID)
        self.assertEqual(entity.get_roles(), set())

    def test___str__no_identifier(self):
        # Without an identifier, str() is just the type.
        TYPE = 'type'
        entity = self._makeOne(TYPE)
        self.assertEqual(str(entity), TYPE)

    def test___str__w_identifier(self):
        # With an identifier, str() is "type-identifier".
        TYPE = 'type'
        ID = 'id'
        entity = self._makeOne(TYPE, ID)
        self.assertEqual(str(entity), '%s-%s' % (TYPE, ID))

    def test_grant_simple(self):
        TYPE = 'type'
        ROLE = 'role'
        entity = self._makeOne(TYPE)
        entity.grant(ROLE)
        self.assertEqual(entity.get_roles(), set([ROLE]))

    def test_grant_duplicate(self):
        # Granting the same role twice must not produce duplicates.
        TYPE = 'type'
        ROLE1 = 'role1'
        ROLE2 = 'role2'
        entity = self._makeOne(TYPE)
        entity.grant(ROLE1)
        entity.grant(ROLE2)
        entity.grant(ROLE1)
        self.assertEqual(entity.get_roles(), set([ROLE1, ROLE2]))

    def test_revoke_miss(self):
        # Revoking a role never granted is a silent no-op.
        TYPE = 'type'
        ROLE = 'nonesuch'
        entity = self._makeOne(TYPE)
        entity.revoke(ROLE)
        self.assertEqual(entity.get_roles(), set())

    def test_revoke_hit(self):
        TYPE = 'type'
        ROLE1 = 'role1'
        ROLE2 = 'role2'
        entity = self._makeOne(TYPE)
        entity.grant(ROLE1)
        entity.grant(ROLE2)
        entity.revoke(ROLE1)
        self.assertEqual(entity.get_roles(), set([ROLE2]))

    def test_grant_read(self):
        # Convenience wrapper grants the class-level READER_ROLE.
        TYPE = 'type'
        entity = self._makeOne(TYPE)
        entity.grant_read()
        self.assertEqual(entity.get_roles(), set([entity.READER_ROLE]))

    def test_grant_write(self):
        TYPE = 'type'
        entity = self._makeOne(TYPE)
        entity.grant_write()
        self.assertEqual(entity.get_roles(), set([entity.WRITER_ROLE]))

    def test_grant_owner(self):
        TYPE = 'type'
        entity = self._makeOne(TYPE)
        entity.grant_owner()
        self.assertEqual(entity.get_roles(), set([entity.OWNER_ROLE]))

    def test_revoke_read(self):
        TYPE = 'type'
        entity = self._makeOne(TYPE)
        entity.grant(entity.READER_ROLE)
        entity.revoke_read()
        self.assertEqual(entity.get_roles(), set())

    def test_revoke_write(self):
        TYPE = 'type'
        entity = self._makeOne(TYPE)
        entity.grant(entity.WRITER_ROLE)
        entity.revoke_write()
        self.assertEqual(entity.get_roles(), set())

    def test_revoke_owner(self):
        TYPE = 'type'
        entity = self._makeOne(TYPE)
        entity.grant(entity.OWNER_ROLE)
        entity.revoke_owner()
        self.assertEqual(entity.get_roles(), set())
|
||||
|
||||
|
||||
class Test_ACL(unittest2.TestCase):
    """Unit tests for ``gcloud.storage.acl.ACL``.

    Fixes over the previous revision:

    * ``test_get_entity_hit_str`` / ``test_get_entity_hit_entity`` now
      actually exercise ``get_entity`` (they previously asserted
      ``has_entity``, duplicating the ``has_entity`` tests).
    * ``test_save_prefefined_invalid`` renamed to
      ``test_save_predefined_invalid`` (typo; still matches the
      ``test_`` discovery prefix, so no runner-visible change).
    """

    def _getTargetClass(self):
        from gcloud.storage.acl import ACL
        return ACL

    def _makeOne(self, *args, **kw):
        return self._getTargetClass()(*args, **kw)

    def test_ctor(self):
        acl = self._makeOne()
        self.assertEqual(acl.entities, {})
        self.assertFalse(acl.loaded)

    def test__ensure_loaded(self):
        # _ensure_loaded must call reload() when the ACL is not loaded.
        acl = self._makeOne()

        def _reload():
            acl._really_loaded = True

        acl.reload = _reload
        acl._ensure_loaded()
        self.assertTrue(acl._really_loaded)

    def test_client_is_abstract(self):
        # The base class does not know its client; subclasses provide it.
        acl = self._makeOne()
        self.assertRaises(NotImplementedError, lambda: acl.client)

    def test_reset(self):
        TYPE = 'type'
        ID = 'id'
        acl = self._makeOne()
        acl.loaded = True
        acl.entity(TYPE, ID)
        acl.reset()
        self.assertEqual(acl.entities, {})
        self.assertFalse(acl.loaded)

    def test___iter___empty_eager(self):
        acl = self._makeOne()
        acl.loaded = True
        self.assertEqual(list(acl), [])

    def test___iter___empty_lazy(self):
        # Iterating an unloaded ACL triggers a reload first.
        acl = self._makeOne()

        def _reload():
            acl.loaded = True

        acl.reload = _reload
        self.assertEqual(list(acl), [])
        self.assertTrue(acl.loaded)

    def test___iter___non_empty_no_roles(self):
        # Entities without roles are omitted from iteration.
        TYPE = 'type'
        ID = 'id'
        acl = self._makeOne()
        acl.loaded = True
        acl.entity(TYPE, ID)
        self.assertEqual(list(acl), [])

    def test___iter___non_empty_w_roles(self):
        TYPE = 'type'
        ID = 'id'
        ROLE = 'role'
        acl = self._makeOne()
        acl.loaded = True
        entity = acl.entity(TYPE, ID)
        entity.grant(ROLE)
        self.assertEqual(list(acl),
                         [{'entity': '%s-%s' % (TYPE, ID), 'role': ROLE}])

    def test___iter___non_empty_w_empty_role(self):
        # An empty-string role is treated as "no role granted".
        TYPE = 'type'
        ID = 'id'
        acl = self._makeOne()
        acl.loaded = True
        entity = acl.entity(TYPE, ID)
        entity.grant('')
        self.assertEqual(list(acl), [])

    def test_entity_from_dict_allUsers_eager(self):
        ROLE = 'role'
        acl = self._makeOne()
        acl.loaded = True
        entity = acl.entity_from_dict({'entity': 'allUsers', 'role': ROLE})
        self.assertEqual(entity.type, 'allUsers')
        self.assertEqual(entity.identifier, None)
        self.assertEqual(entity.get_roles(), set([ROLE]))
        self.assertEqual(list(acl),
                         [{'entity': 'allUsers', 'role': ROLE}])
        self.assertEqual(list(acl.get_entities()), [entity])

    def test_entity_from_dict_allAuthenticatedUsers(self):
        ROLE = 'role'
        acl = self._makeOne()
        acl.loaded = True
        entity = acl.entity_from_dict({'entity': 'allAuthenticatedUsers',
                                       'role': ROLE})
        self.assertEqual(entity.type, 'allAuthenticatedUsers')
        self.assertEqual(entity.identifier, None)
        self.assertEqual(entity.get_roles(), set([ROLE]))
        self.assertEqual(list(acl),
                         [{'entity': 'allAuthenticatedUsers', 'role': ROLE}])
        self.assertEqual(list(acl.get_entities()), [entity])

    def test_entity_from_dict_string_w_hyphen(self):
        # "type-id" strings split into (type, identifier).
        ROLE = 'role'
        acl = self._makeOne()
        acl.loaded = True
        entity = acl.entity_from_dict({'entity': 'type-id', 'role': ROLE})
        self.assertEqual(entity.type, 'type')
        self.assertEqual(entity.identifier, 'id')
        self.assertEqual(entity.get_roles(), set([ROLE]))
        self.assertEqual(list(acl),
                         [{'entity': 'type-id', 'role': ROLE}])
        self.assertEqual(list(acl.get_entities()), [entity])

    def test_entity_from_dict_string_wo_hyphen(self):
        # Unrecognized entity strings without a hyphen are rejected.
        ROLE = 'role'
        acl = self._makeOne()
        acl.loaded = True
        self.assertRaises(ValueError,
                          acl.entity_from_dict,
                          {'entity': 'bogus', 'role': ROLE})
        self.assertEqual(list(acl.get_entities()), [])

    def test_has_entity_miss_str_eager(self):
        acl = self._makeOne()
        acl.loaded = True
        self.assertFalse(acl.has_entity('nonesuch'))

    def test_has_entity_miss_str_lazy(self):
        acl = self._makeOne()

        def _reload():
            acl.loaded = True

        acl.reload = _reload
        self.assertFalse(acl.has_entity('nonesuch'))
        self.assertTrue(acl.loaded)

    def test_has_entity_miss_entity(self):
        from gcloud.storage.acl import _ACLEntity
        TYPE = 'type'
        ID = 'id'
        entity = _ACLEntity(TYPE, ID)
        acl = self._makeOne()
        acl.loaded = True
        self.assertFalse(acl.has_entity(entity))

    def test_has_entity_hit_str(self):
        TYPE = 'type'
        ID = 'id'
        acl = self._makeOne()
        acl.loaded = True
        acl.entity(TYPE, ID)
        self.assertTrue(acl.has_entity('%s-%s' % (TYPE, ID)))

    def test_has_entity_hit_entity(self):
        TYPE = 'type'
        ID = 'id'
        acl = self._makeOne()
        acl.loaded = True
        entity = acl.entity(TYPE, ID)
        self.assertTrue(acl.has_entity(entity))

    def test_get_entity_miss_str_no_default_eager(self):
        acl = self._makeOne()
        acl.loaded = True
        self.assertEqual(acl.get_entity('nonesuch'), None)

    def test_get_entity_miss_str_no_default_lazy(self):
        acl = self._makeOne()

        def _reload():
            acl.loaded = True

        acl.reload = _reload
        self.assertEqual(acl.get_entity('nonesuch'), None)
        self.assertTrue(acl.loaded)

    def test_get_entity_miss_entity_no_default(self):
        from gcloud.storage.acl import _ACLEntity
        TYPE = 'type'
        ID = 'id'
        entity = _ACLEntity(TYPE, ID)
        acl = self._makeOne()
        acl.loaded = True
        self.assertEqual(acl.get_entity(entity), None)

    def test_get_entity_miss_str_w_default(self):
        DEFAULT = object()
        acl = self._makeOne()
        acl.loaded = True
        self.assertTrue(acl.get_entity('nonesuch', DEFAULT) is DEFAULT)

    def test_get_entity_miss_entity_w_default(self):
        from gcloud.storage.acl import _ACLEntity
        DEFAULT = object()
        TYPE = 'type'
        ID = 'id'
        entity = _ACLEntity(TYPE, ID)
        acl = self._makeOne()
        acl.loaded = True
        self.assertTrue(acl.get_entity(entity, DEFAULT) is DEFAULT)

    def test_get_entity_hit_str(self):
        # Fixed: previously asserted ``has_entity``, never exercising
        # ``get_entity`` at all.
        TYPE = 'type'
        ID = 'id'
        acl = self._makeOne()
        acl.loaded = True
        entity = acl.entity(TYPE, ID)
        self.assertTrue(acl.get_entity('%s-%s' % (TYPE, ID)) is entity)

    def test_get_entity_hit_entity(self):
        # Fixed: previously asserted ``has_entity``, never exercising
        # ``get_entity`` at all.
        TYPE = 'type'
        ID = 'id'
        acl = self._makeOne()
        acl.loaded = True
        entity = acl.entity(TYPE, ID)
        self.assertTrue(acl.get_entity(entity) is entity)

    def test_add_entity_miss_eager(self):
        from gcloud.storage.acl import _ACLEntity
        TYPE = 'type'
        ID = 'id'
        ROLE = 'role'
        entity = _ACLEntity(TYPE, ID)
        entity.grant(ROLE)
        acl = self._makeOne()
        acl.loaded = True
        acl.add_entity(entity)
        self.assertTrue(acl.loaded)
        self.assertEqual(list(acl),
                         [{'entity': 'type-id', 'role': ROLE}])
        self.assertEqual(list(acl.get_entities()), [entity])

    def test_add_entity_miss_lazy(self):
        from gcloud.storage.acl import _ACLEntity
        TYPE = 'type'
        ID = 'id'
        ROLE = 'role'
        entity = _ACLEntity(TYPE, ID)
        entity.grant(ROLE)
        acl = self._makeOne()

        def _reload():
            acl.loaded = True

        acl.reload = _reload
        acl.add_entity(entity)
        self.assertTrue(acl.loaded)
        self.assertEqual(list(acl),
                         [{'entity': 'type-id', 'role': ROLE}])
        self.assertEqual(list(acl.get_entities()), [entity])
        self.assertTrue(acl.loaded)

    def test_add_entity_hit(self):
        # Adding an entity with an existing key replaces the old one.
        from gcloud.storage.acl import _ACLEntity
        TYPE = 'type'
        ID = 'id'
        ENTITY_VAL = '%s-%s' % (TYPE, ID)
        ROLE = 'role'
        entity = _ACLEntity(TYPE, ID)
        entity.grant(ROLE)
        acl = self._makeOne()
        acl.loaded = True
        before = acl.entity(TYPE, ID)
        acl.add_entity(entity)
        self.assertTrue(acl.loaded)
        self.assertFalse(acl.get_entity(ENTITY_VAL) is before)
        self.assertTrue(acl.get_entity(ENTITY_VAL) is entity)
        self.assertEqual(list(acl),
                         [{'entity': 'type-id', 'role': ROLE}])
        self.assertEqual(list(acl.get_entities()), [entity])

    def test_entity_miss(self):
        # entity() creates and stores a fresh entity on a miss.
        TYPE = 'type'
        ID = 'id'
        ROLE = 'role'
        acl = self._makeOne()
        acl.loaded = True
        entity = acl.entity(TYPE, ID)
        self.assertTrue(acl.loaded)
        entity.grant(ROLE)
        self.assertEqual(list(acl),
                         [{'entity': 'type-id', 'role': ROLE}])
        self.assertEqual(list(acl.get_entities()), [entity])

    def test_entity_hit(self):
        # entity() returns the already-stored entity on a hit.
        TYPE = 'type'
        ID = 'id'
        ROLE = 'role'
        acl = self._makeOne()
        acl.loaded = True
        before = acl.entity(TYPE, ID)
        before.grant(ROLE)
        entity = acl.entity(TYPE, ID)
        self.assertTrue(entity is before)
        self.assertEqual(list(acl),
                         [{'entity': 'type-id', 'role': ROLE}])
        self.assertEqual(list(acl.get_entities()), [entity])

    def test_user(self):
        ID = 'id'
        ROLE = 'role'
        acl = self._makeOne()
        acl.loaded = True
        entity = acl.user(ID)
        entity.grant(ROLE)
        self.assertEqual(entity.type, 'user')
        self.assertEqual(entity.identifier, ID)
        self.assertEqual(list(acl),
                         [{'entity': 'user-%s' % ID, 'role': ROLE}])

    def test_group(self):
        ID = 'id'
        ROLE = 'role'
        acl = self._makeOne()
        acl.loaded = True
        entity = acl.group(ID)
        entity.grant(ROLE)
        self.assertEqual(entity.type, 'group')
        self.assertEqual(entity.identifier, ID)
        self.assertEqual(list(acl),
                         [{'entity': 'group-%s' % ID, 'role': ROLE}])

    def test_domain(self):
        ID = 'id'
        ROLE = 'role'
        acl = self._makeOne()
        acl.loaded = True
        entity = acl.domain(ID)
        entity.grant(ROLE)
        self.assertEqual(entity.type, 'domain')
        self.assertEqual(entity.identifier, ID)
        self.assertEqual(list(acl),
                         [{'entity': 'domain-%s' % ID, 'role': ROLE}])

    def test_all(self):
        ROLE = 'role'
        acl = self._makeOne()
        acl.loaded = True
        entity = acl.all()
        entity.grant(ROLE)
        self.assertEqual(entity.type, 'allUsers')
        self.assertEqual(entity.identifier, None)
        self.assertEqual(list(acl),
                         [{'entity': 'allUsers', 'role': ROLE}])

    def test_all_authenticated(self):
        ROLE = 'role'
        acl = self._makeOne()
        acl.loaded = True
        entity = acl.all_authenticated()
        entity.grant(ROLE)
        self.assertEqual(entity.type, 'allAuthenticatedUsers')
        self.assertEqual(entity.identifier, None)
        self.assertEqual(list(acl),
                         [{'entity': 'allAuthenticatedUsers', 'role': ROLE}])

    def test_get_entities_empty_eager(self):
        acl = self._makeOne()
        acl.loaded = True
        self.assertEqual(acl.get_entities(), [])

    def test_get_entities_empty_lazy(self):
        acl = self._makeOne()

        def _reload():
            acl.loaded = True

        acl.reload = _reload
        self.assertEqual(acl.get_entities(), [])
        self.assertTrue(acl.loaded)

    def test_get_entities_nonempty(self):
        TYPE = 'type'
        ID = 'id'
        acl = self._makeOne()
        acl.loaded = True
        entity = acl.entity(TYPE, ID)
        self.assertEqual(acl.get_entities(), [entity])

    def test_reload_missing(self):
        # https://github.com/GoogleCloudPlatform/gcloud-python/issues/652
        # A response with no 'items' key must still clear local entries.
        ROLE = 'role'
        connection = _Connection({})
        client = _Client(connection)
        acl = self._makeOne()
        acl.reload_path = '/testing/acl'
        acl.loaded = True
        acl.entity('allUsers', ROLE)
        acl.reload(client=client)
        self.assertEqual(list(acl), [])
        kw = connection._requested
        self.assertEqual(len(kw), 1)
        self.assertEqual(kw[0]['method'], 'GET')
        self.assertEqual(kw[0]['path'], '/testing/acl')

    def test_reload_empty_result_clears_local(self):
        ROLE = 'role'
        connection = _Connection({'items': []})
        client = _Client(connection)
        acl = self._makeOne()
        acl.reload_path = '/testing/acl'
        acl.loaded = True
        acl.entity('allUsers', ROLE)
        acl.reload(client=client)
        self.assertTrue(acl.loaded)
        self.assertEqual(list(acl), [])
        kw = connection._requested
        self.assertEqual(len(kw), 1)
        self.assertEqual(kw[0]['method'], 'GET')
        self.assertEqual(kw[0]['path'], '/testing/acl')

    def test_reload_nonempty_result(self):
        ROLE = 'role'
        connection = _Connection(
            {'items': [{'entity': 'allUsers', 'role': ROLE}]})
        client = _Client(connection)
        acl = self._makeOne()
        acl.reload_path = '/testing/acl'
        acl.loaded = True
        acl.reload(client=client)
        self.assertTrue(acl.loaded)
        self.assertEqual(list(acl), [{'entity': 'allUsers', 'role': ROLE}])
        kw = connection._requested
        self.assertEqual(len(kw), 1)
        self.assertEqual(kw[0]['method'], 'GET')
        self.assertEqual(kw[0]['path'], '/testing/acl')

    def test_save_none_set_none_passed(self):
        # Nothing loaded, nothing passed: no API request is made.
        connection = _Connection()
        client = _Client(connection)
        acl = self._makeOne()
        acl.save_path = '/testing'
        acl.save(client=client)
        kw = connection._requested
        self.assertEqual(len(kw), 0)

    def test_save_existing_missing_none_passed(self):
        connection = _Connection({})
        client = _Client(connection)
        acl = self._makeOne()
        acl.save_path = '/testing'
        acl.loaded = True
        acl.save(client=client)
        self.assertEqual(list(acl), [])
        kw = connection._requested
        self.assertEqual(len(kw), 1)
        self.assertEqual(kw[0]['method'], 'PATCH')
        self.assertEqual(kw[0]['path'], '/testing')
        self.assertEqual(kw[0]['data'], {'acl': []})
        self.assertEqual(kw[0]['query_params'], {'projection': 'full'})

    def test_save_no_acl(self):
        ROLE = 'role'
        AFTER = [{'entity': 'allUsers', 'role': ROLE}]
        connection = _Connection({'acl': AFTER})
        client = _Client(connection)
        acl = self._makeOne()
        acl.save_path = '/testing'
        acl.loaded = True
        acl.entity('allUsers').grant(ROLE)
        acl.save(client=client)
        self.assertEqual(list(acl), AFTER)
        kw = connection._requested
        self.assertEqual(len(kw), 1)
        self.assertEqual(kw[0]['method'], 'PATCH')
        self.assertEqual(kw[0]['path'], '/testing')
        self.assertEqual(kw[0]['data'], {'acl': AFTER})
        self.assertEqual(kw[0]['query_params'], {'projection': 'full'})

    def test_save_w_acl(self):
        # Server-side "sticky" entries survive the save and are merged in.
        ROLE1 = 'role1'
        ROLE2 = 'role2'
        STICKY = {'entity': 'allUsers', 'role': ROLE2}
        new_acl = [{'entity': 'allUsers', 'role': ROLE1}]
        connection = _Connection({'acl': [STICKY] + new_acl})
        client = _Client(connection)
        acl = self._makeOne()
        acl.save_path = '/testing'
        acl.loaded = True
        acl.save(new_acl, client=client)
        entries = list(acl)
        self.assertEqual(len(entries), 2)
        self.assertTrue(STICKY in entries)
        self.assertTrue(new_acl[0] in entries)
        kw = connection._requested
        self.assertEqual(len(kw), 1)
        self.assertEqual(kw[0]['method'], 'PATCH')
        self.assertEqual(kw[0]['path'], '/testing')
        self.assertEqual(kw[0]['data'], {'acl': new_acl})
        self.assertEqual(kw[0]['query_params'], {'projection': 'full'})

    def test_save_predefined_invalid(self):
        # Renamed from ``test_save_prefefined_invalid`` (typo).
        connection = _Connection()
        client = _Client(connection)
        acl = self._makeOne()
        acl.save_path = '/testing'
        acl.loaded = True
        with self.assertRaises(ValueError):
            acl.save_predefined('bogus', client=client)

    def test_save_predefined_valid(self):
        PREDEFINED = 'private'
        connection = _Connection({'acl': []})
        client = _Client(connection)
        acl = self._makeOne()
        acl.save_path = '/testing'
        acl.loaded = True
        acl.save_predefined(PREDEFINED, client=client)
        entries = list(acl)
        self.assertEqual(len(entries), 0)
        kw = connection._requested
        self.assertEqual(len(kw), 1)
        self.assertEqual(kw[0]['method'], 'PATCH')
        self.assertEqual(kw[0]['path'], '/testing')
        self.assertEqual(kw[0]['data'], {'acl': []})
        self.assertEqual(kw[0]['query_params'],
                         {'projection': 'full', 'predefinedAcl': PREDEFINED})

    def test_save_predefined_w_XML_alias(self):
        # XML-style names are translated to their JSON equivalents.
        PREDEFINED_XML = 'project-private'
        PREDEFINED_JSON = 'projectPrivate'
        connection = _Connection({'acl': []})
        client = _Client(connection)
        acl = self._makeOne()
        acl.save_path = '/testing'
        acl.loaded = True
        acl.save_predefined(PREDEFINED_XML, client=client)
        entries = list(acl)
        self.assertEqual(len(entries), 0)
        kw = connection._requested
        self.assertEqual(len(kw), 1)
        self.assertEqual(kw[0]['method'], 'PATCH')
        self.assertEqual(kw[0]['path'], '/testing')
        self.assertEqual(kw[0]['data'], {'acl': []})
        self.assertEqual(kw[0]['query_params'],
                         {'projection': 'full',
                          'predefinedAcl': PREDEFINED_JSON})

    def test_save_predefined_valid_w_alternate_query_param(self):
        # Cover case where subclass overrides _PREDEFINED_QUERY_PARAM
        PREDEFINED = 'publicRead'
        connection = _Connection({'acl': []})
        client = _Client(connection)
        acl = self._makeOne()
        acl.save_path = '/testing'
        acl.loaded = True
        acl._PREDEFINED_QUERY_PARAM = 'alternate'
        acl.save_predefined(PREDEFINED, client=client)
        entries = list(acl)
        self.assertEqual(len(entries), 0)
        kw = connection._requested
        self.assertEqual(len(kw), 1)
        self.assertEqual(kw[0]['method'], 'PATCH')
        self.assertEqual(kw[0]['path'], '/testing')
        self.assertEqual(kw[0]['data'], {'acl': []})
        self.assertEqual(kw[0]['query_params'],
                         {'projection': 'full', 'alternate': PREDEFINED})

    def test_clear(self):
        ROLE1 = 'role1'
        ROLE2 = 'role2'
        STICKY = {'entity': 'allUsers', 'role': ROLE2}
        connection = _Connection({'acl': [STICKY]})
        client = _Client(connection)
        acl = self._makeOne()
        acl.save_path = '/testing'
        acl.loaded = True
        acl.entity('allUsers', ROLE1)
        acl.clear(client=client)
        self.assertEqual(list(acl), [STICKY])
        kw = connection._requested
        self.assertEqual(len(kw), 1)
        self.assertEqual(kw[0]['method'], 'PATCH')
        self.assertEqual(kw[0]['path'], '/testing')
        self.assertEqual(kw[0]['data'], {'acl': []})
        self.assertEqual(kw[0]['query_params'], {'projection': 'full'})
|
||||
|
||||
|
||||
class Test_BucketACL(unittest2.TestCase):
    """Unit tests for ``gcloud.storage.acl.BucketACL``."""

    def _getTargetClass(self):
        from gcloud.storage.acl import BucketACL
        return BucketACL

    def _makeOne(self, *args, **kw):
        return self._getTargetClass()(*args, **kw)

    def test_ctor(self):
        BUCKET_NAME = 'name'
        bucket = _Bucket(BUCKET_NAME)
        acl = self._makeOne(bucket)
        # Starts empty and unloaded, bound to the owning bucket.
        self.assertFalse(acl.loaded)
        self.assertEqual(acl.entities, {})
        self.assertTrue(acl.bucket is bucket)
        # Reload reads the '/acl' sub-resource; save patches the bucket.
        self.assertEqual(acl.reload_path, '/b/%s/acl' % BUCKET_NAME)
        self.assertEqual(acl.save_path, '/b/%s' % BUCKET_NAME)
|
||||
|
||||
|
||||
class Test_DefaultObjectACL(unittest2.TestCase):
    """Unit tests for ``gcloud.storage.acl.DefaultObjectACL``."""

    def _getTargetClass(self):
        from gcloud.storage.acl import DefaultObjectACL
        return DefaultObjectACL

    def _makeOne(self, *args, **kw):
        return self._getTargetClass()(*args, **kw)

    def test_ctor(self):
        BUCKET_NAME = 'name'
        bucket = _Bucket(BUCKET_NAME)
        acl = self._makeOne(bucket)
        # Starts empty and unloaded, bound to the owning bucket.
        self.assertFalse(acl.loaded)
        self.assertEqual(acl.entities, {})
        self.assertTrue(acl.bucket is bucket)
        # Reload reads the '/defaultObjectAcl' sub-resource.
        self.assertEqual(acl.reload_path,
                         '/b/%s/defaultObjectAcl' % BUCKET_NAME)
        self.assertEqual(acl.save_path, '/b/%s' % BUCKET_NAME)
|
||||
|
||||
|
||||
class Test_ObjectACL(unittest2.TestCase):
    """Unit tests for ``gcloud.storage.acl.ObjectACL``."""

    def _getTargetClass(self):
        from gcloud.storage.acl import ObjectACL
        return ObjectACL

    def _makeOne(self, *args, **kw):
        return self._getTargetClass()(*args, **kw)

    def test_ctor(self):
        BUCKET_NAME = 'name'
        BLOB_NAME = 'blob-name'
        bucket = _Bucket(BUCKET_NAME)
        blob = _Blob(bucket, BLOB_NAME)
        acl = self._makeOne(blob)
        # Starts empty and unloaded, bound to the owning blob.
        self.assertFalse(acl.loaded)
        self.assertEqual(acl.entities, {})
        self.assertTrue(acl.blob is blob)
        # Paths are derived from the blob's bucket/object pair.
        self.assertEqual(acl.reload_path,
                         '/b/%s/o/%s/acl' % (BUCKET_NAME, BLOB_NAME))
        self.assertEqual(acl.save_path,
                         '/b/%s/o/%s' % (BUCKET_NAME, BLOB_NAME))
|
||||
|
||||
|
||||
class _Blob(object):
|
||||
|
||||
def __init__(self, bucket, blob):
|
||||
self.bucket = bucket
|
||||
self.blob = blob
|
||||
|
||||
@property
|
||||
def path(self):
|
||||
return '%s/o/%s' % (self.bucket.path, self.blob)
|
||||
|
||||
|
||||
class _Bucket(object):
|
||||
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
|
||||
@property
|
||||
def path(self):
|
||||
return '/b/%s' % self.name
|
||||
|
||||
|
||||
class _Connection(object):
|
||||
_delete_ok = False
|
||||
|
||||
def __init__(self, *responses):
|
||||
self._responses = responses
|
||||
self._requested = []
|
||||
self._deleted = []
|
||||
|
||||
def api_request(self, **kw):
|
||||
from gcloud.exceptions import NotFound
|
||||
self._requested.append(kw)
|
||||
response, self._responses = self._responses[0], self._responses[1:]
|
||||
return response
|
||||
|
||||
|
||||
class _Client(object):
|
||||
|
||||
def __init__(self, connection):
|
||||
self.connection = connection
|
612
venv/Lib/site-packages/gcloud/storage/test_batch.py
Normal file
612
venv/Lib/site-packages/gcloud/storage/test_batch.py
Normal file
|
@ -0,0 +1,612 @@
|
|||
# Copyright 2014 Google Inc. All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import unittest2
|
||||
|
||||
|
||||
class TestMIMEApplicationHTTP(unittest2.TestCase):
    """Unit tests for ``gcloud.storage.batch.MIMEApplicationHTTP``.

    Each test checks the generated ``application/http`` payload line by
    line, so the expected literals are deliberately exact.
    """

    def _getTargetClass(self):
        # Deferred import so a broken module fails the test, not collection.
        from gcloud.storage.batch import MIMEApplicationHTTP
        return MIMEApplicationHTTP

    def _makeOne(self, *args, **kw):
        # Instantiate the class under test.
        return self._getTargetClass()(*args, **kw)

    def test_ctor_body_None(self):
        # No body: payload is just the request line plus a blank line.
        METHOD = 'DELETE'
        PATH = '/path/to/api'
        LINES = [
            "DELETE /path/to/api HTTP/1.1",
            "",
        ]
        mah = self._makeOne(METHOD, PATH, {}, None)
        self.assertEqual(mah.get_content_type(), 'application/http')
        self.assertEqual(mah.get_payload().splitlines(), LINES)

    def test_ctor_body_str(self):
        # String body: headers passed through, body appended verbatim.
        METHOD = 'GET'
        PATH = '/path/to/api'
        BODY = 'ABC'
        HEADERS = {'Content-Length': len(BODY), 'Content-Type': 'text/plain'}
        LINES = [
            "GET /path/to/api HTTP/1.1",
            "Content-Length: 3",
            "Content-Type: text/plain",
            "",
            "ABC",
        ]
        mah = self._makeOne(METHOD, PATH, HEADERS, BODY)
        self.assertEqual(mah.get_payload().splitlines(), LINES)

    def test_ctor_body_dict(self):
        # Dict body: JSON-serialized, with Content-Type/-Length derived.
        METHOD = 'GET'
        PATH = '/path/to/api'
        BODY = {'foo': 'bar'}
        HEADERS = {}
        LINES = [
            'GET /path/to/api HTTP/1.1',
            'Content-Length: 14',
            'Content-Type: application/json',
            '',
            '{"foo": "bar"}',
        ]
        mah = self._makeOne(METHOD, PATH, HEADERS, BODY)
        self.assertEqual(mah.get_payload().splitlines(), LINES)
|
||||
|
||||
|
||||
class TestBatch(unittest2.TestCase):
|
||||
|
||||
def _getTargetClass(self):
|
||||
from gcloud.storage.batch import Batch
|
||||
return Batch
|
||||
|
||||
def _makeOne(self, *args, **kw):
|
||||
return self._getTargetClass()(*args, **kw)
|
||||
|
||||
def test_ctor(self):
|
||||
http = _HTTP()
|
||||
connection = _Connection(http=http)
|
||||
client = _Client(connection)
|
||||
batch = self._makeOne(client)
|
||||
self.assertTrue(batch._client is client)
|
||||
self.assertEqual(len(batch._requests), 0)
|
||||
self.assertEqual(len(batch._target_objects), 0)
|
||||
|
||||
def test_current(self):
|
||||
from gcloud.storage.client import Client
|
||||
project = 'PROJECT'
|
||||
credentials = _Credentials()
|
||||
client = Client(project=project, credentials=credentials)
|
||||
batch1 = self._makeOne(client)
|
||||
self.assertTrue(batch1.current() is None)
|
||||
|
||||
client._push_batch(batch1)
|
||||
self.assertTrue(batch1.current() is batch1)
|
||||
|
||||
batch2 = self._makeOne(client)
|
||||
client._push_batch(batch2)
|
||||
self.assertTrue(batch1.current() is batch2)
|
||||
|
||||
def test__make_request_GET_normal(self):
|
||||
from gcloud.storage.batch import _FutureDict
|
||||
URL = 'http://example.com/api'
|
||||
expected = _Response()
|
||||
http = _HTTP((expected, ''))
|
||||
connection = _Connection(http=http)
|
||||
batch = self._makeOne(connection)
|
||||
target = _MockObject()
|
||||
response, content = batch._make_request('GET', URL,
|
||||
target_object=target)
|
||||
self.assertEqual(response.status, 204)
|
||||
self.assertTrue(isinstance(content, _FutureDict))
|
||||
self.assertTrue(target._properties is content)
|
||||
self.assertEqual(http._requests, [])
|
||||
EXPECTED_HEADERS = [
|
||||
('Accept-Encoding', 'gzip'),
|
||||
('Content-Length', '0'),
|
||||
]
|
||||
solo_request, = batch._requests
|
||||
self.assertEqual(solo_request[0], 'GET')
|
||||
self.assertEqual(solo_request[1], URL)
|
||||
headers = solo_request[2]
|
||||
for key, value in EXPECTED_HEADERS:
|
||||
self.assertEqual(headers[key], value)
|
||||
self.assertEqual(solo_request[3], None)
|
||||
|
||||
def test__make_request_POST_normal(self):
|
||||
from gcloud.storage.batch import _FutureDict
|
||||
URL = 'http://example.com/api'
|
||||
http = _HTTP() # no requests expected
|
||||
connection = _Connection(http=http)
|
||||
batch = self._makeOne(connection)
|
||||
target = _MockObject()
|
||||
response, content = batch._make_request('POST', URL, data={'foo': 1},
|
||||
target_object=target)
|
||||
self.assertEqual(response.status, 204)
|
||||
self.assertTrue(isinstance(content, _FutureDict))
|
||||
self.assertTrue(target._properties is content)
|
||||
self.assertEqual(http._requests, [])
|
||||
EXPECTED_HEADERS = [
|
||||
('Accept-Encoding', 'gzip'),
|
||||
('Content-Length', '10'),
|
||||
]
|
||||
solo_request, = batch._requests
|
||||
self.assertEqual(solo_request[0], 'POST')
|
||||
self.assertEqual(solo_request[1], URL)
|
||||
headers = solo_request[2]
|
||||
for key, value in EXPECTED_HEADERS:
|
||||
self.assertEqual(headers[key], value)
|
||||
self.assertEqual(solo_request[3], {'foo': 1})
|
||||
|
||||
def test__make_request_PATCH_normal(self):
    """A deferred PATCH: dummy 204 + _FutureDict now, request queued for later."""
    from gcloud.storage.batch import _FutureDict
    url = 'http://example.com/api'
    transport = _HTTP()  # the real transport must never be hit
    batch = self._makeOne(_Connection(http=transport))
    target = _MockObject()
    response, content = batch._make_request('PATCH', url, data={'foo': 1},
                                            target_object=target)
    # The batch answers immediately with a placeholder 204 / future dict.
    self.assertEqual(response.status, 204)
    self.assertTrue(isinstance(content, _FutureDict))
    self.assertTrue(target._properties is content)
    # Nothing went over the wire; the call was queued instead.
    self.assertEqual(transport._requests, [])
    queued = batch._requests
    self.assertEqual(len(queued), 1)
    method, uri, headers, body = queued[0]
    self.assertEqual(method, 'PATCH')
    self.assertEqual(uri, url)
    for key, value in [('Accept-Encoding', 'gzip'),
                       ('Content-Length', '10')]:
        self.assertEqual(headers[key], value)
    self.assertEqual(body, {'foo': 1})
||||
|
||||
def test__make_request_DELETE_normal(self):
    """A deferred DELETE: dummy 204 + _FutureDict now, bodiless request queued."""
    from gcloud.storage.batch import _FutureDict
    url = 'http://example.com/api'
    transport = _HTTP()  # the real transport must never be hit
    batch = self._makeOne(_Connection(http=transport))
    target = _MockObject()
    response, content = batch._make_request('DELETE', url,
                                            target_object=target)
    # The batch answers immediately with a placeholder 204 / future dict.
    self.assertEqual(response.status, 204)
    self.assertTrue(isinstance(content, _FutureDict))
    self.assertTrue(target._properties is content)
    # Nothing went over the wire; the call was queued instead.
    self.assertEqual(transport._requests, [])
    queued = batch._requests
    self.assertEqual(len(queued), 1)
    method, uri, headers, body = queued[0]
    self.assertEqual(method, 'DELETE')
    self.assertEqual(uri, url)
    for key, value in [('Accept-Encoding', 'gzip'),
                       ('Content-Length', '0')]:
        self.assertEqual(headers[key], value)
    self.assertEqual(body, None)
||||
|
||||
def test__make_request_POST_too_many_requests(self):
    """Overflowing _MAX_BATCH_SIZE raises ValueError before any HTTP I/O."""
    url = 'http://example.com/api'
    transport = _HTTP()  # must never be used
    connection = _Connection(http=transport)
    batch = self._makeOne(connection)
    batch._MAX_BATCH_SIZE = 1
    # Pre-fill the batch to its (shrunken) capacity.
    batch._requests.append(('POST', url, {}, {'bar': 2}))
    self.assertRaises(ValueError,
                      batch._make_request, 'POST', url, data={'foo': 1})
    # The connection's transport is untouched.
    self.assertTrue(connection.http is transport)
||||
|
||||
def test_finish_empty(self):
    """Finishing a batch with nothing queued raises ValueError."""
    transport = _HTTP()  # must stay untouched
    connection = _Connection(http=transport)
    batch = self._makeOne(connection)
    self.assertRaises(ValueError, batch.finish)
    self.assertTrue(connection.http is transport)
||||
self.assertTrue(connection.http is http)
|
||||
|
||||
def _check_subrequest_no_payload(self, chunk, method, url):
    """Assert *chunk* is a bodiless application/http subrequest for *method* *url*."""
    # blank + 2 headers + blank + request line + blank + blank
    expected = [
        '',
        'Content-Type: application/http',
        'MIME-Version: 1.0',
        '',
        '%s %s HTTP/1.1' % (method, url),
        '',
        '',
    ]
    self.assertEqual(chunk.splitlines(), expected)
||||
|
||||
def _check_subrequest_payload(self, chunk, method, url, payload):
    """Assert *chunk* is a MIME subrequest carrying *payload* as JSON.

    GET subrequests carry no body; every other verb gets JSON content
    headers plus the serialized payload as the last line.
    """
    import json
    lines = chunk.splitlines()
    # blank + 2 MIME headers + blank + request line, common to all verbs.
    prefix = [
        '',
        'Content-Type: application/http',
        'MIME-Version: 1.0',
        '',
        '%s %s HTTP/1.1' % (method, url),
    ]
    self.assertEqual(lines[:5], prefix)
    if method == 'GET':
        # No entity body: just the two trailing blank lines.
        self.assertEqual(lines[5:], ['', ''])
    else:
        payload_str = json.dumps(payload)
        self.assertEqual(len(lines), 9)
        self.assertEqual(lines[5], 'Content-Length: %d' % len(payload_str))
        self.assertEqual(lines[6], 'Content-Type: application/json')
        self.assertEqual(lines[7], '')
        # Compare parsed JSON, not text, to dodge key-ordering differences.
        self.assertEqual(json.loads(lines[8]), payload)
||||
|
||||
def test_finish_nonempty(self):
    """finish() issues one multipart POST and unpacks each sub-response."""
    import httplib2
    url = 'http://api.example.com/other_api'
    top_response = _Response()
    top_response['content-type'] = 'multipart/mixed; boundary="DEADBEEF="'
    transport = _HTTP((top_response, _THREE_PART_MIME_RESPONSE))
    client = _Client(_Connection(http=transport))
    batch = self._makeOne(client)
    batch.API_BASE_URL = 'http://api.example.com'
    batch._do_request('POST', url, {}, {'foo': 1, 'bar': 2}, None)
    batch._do_request('PATCH', url, {}, {'bar': 3}, None)
    batch._do_request('DELETE', url, {}, None, None)

    result = batch.finish()

    # One unpacked (response, payload) pair per queued request.
    self.assertEqual(len(result), len(batch._requests))
    json_200 = httplib2.Response({
        'content-length': '20',
        'content-type': 'application/json; charset=UTF-8',
        'status': '200',
    })
    empty_204 = httplib2.Response({
        'content-length': '0',
        'status': '204',
    })
    self.assertEqual(result[0], (json_200, {'foo': 1, 'bar': 2}))
    self.assertEqual(result[1], (json_200, {u'foo': 1, u'bar': 3}))
    self.assertEqual(result[2], (empty_204, ''))

    # Exactly one real HTTP call: the batched POST to the /batch endpoint.
    self.assertEqual(len(transport._requests), 1)
    method, uri, headers, body = transport._requests[0]
    self.assertEqual(method, 'POST')
    self.assertEqual(uri, 'http://api.example.com/batch')
    self.assertEqual(len(headers), 2)
    self.assertEqual(headers['MIME-Version'], '1.0')
    ctype, boundary = [part.strip()
                       for part in headers['Content-Type'].split(';')]
    self.assertEqual(ctype, 'multipart/mixed')
    self.assertTrue(boundary.startswith('boundary="=='))
    self.assertTrue(boundary.endswith('=="'))

    divider = '--' + boundary[len('boundary="'):-1]
    chunks = body.split(divider)[1:-1]  # discard prolog / epilog
    self.assertEqual(len(chunks), 3)
    self._check_subrequest_payload(chunks[0], 'POST', url,
                                   {'foo': 1, 'bar': 2})
    self._check_subrequest_payload(chunks[1], 'PATCH', url, {'bar': 3})
    self._check_subrequest_no_payload(chunks[2], 'DELETE', url)
||||
|
||||
def test_finish_responses_mismatch(self):
    """A reply with fewer MIME parts than queued requests raises ValueError."""
    url = 'http://api.example.com/other_api'
    top_response = _Response()
    top_response['content-type'] = 'multipart/mixed; boundary="DEADBEEF="'
    # Two-part canned reply, but only one request will be queued below.
    transport = _HTTP((top_response, _TWO_PART_MIME_RESPONSE_WITH_FAIL))
    client = _Client(_Connection(http=transport))
    batch = self._makeOne(client)
    batch.API_BASE_URL = 'http://api.example.com'
    batch._requests.append(('GET', url, {}, None))
    self.assertRaises(ValueError, batch.finish)
||||
|
||||
def test_finish_nonempty_with_status_failure(self):
    """A failed sub-response raises NotFound; earlier futures are still filled."""
    from gcloud.exceptions import NotFound
    url = 'http://api.example.com/other_api'
    top_response = _Response()
    top_response['content-type'] = 'multipart/mixed; boundary="DEADBEEF="'
    transport = _HTTP((top_response, _TWO_PART_MIME_RESPONSE_WITH_FAIL))
    client = _Client(_Connection(http=transport))
    batch = self._makeOne(client)
    batch.API_BASE_URL = 'http://api.example.com'
    target1 = _MockObject()
    target2 = _MockObject()
    batch._do_request('GET', url, {}, None, target1)
    batch._do_request('GET', url, {}, None, target2)
    # Queuing alone must not populate any future.
    self.assertEqual(list(batch._target_objects), [target1, target2])
    untouched_future = target2._properties

    self.assertRaises(NotFound, batch.finish)

    # The first (successful) sub-response was applied before the failure;
    # the second target keeps its unresolved future.
    self.assertEqual(target1._properties, {'foo': 1, 'bar': 2})
    self.assertTrue(target2._properties is untouched_future)

    # Exactly one real HTTP call: the batched POST to the /batch endpoint.
    self.assertEqual(len(transport._requests), 1)
    method, uri, headers, body = transport._requests[0]
    self.assertEqual(method, 'POST')
    self.assertEqual(uri, 'http://api.example.com/batch')
    self.assertEqual(len(headers), 2)
    self.assertEqual(headers['MIME-Version'], '1.0')
    ctype, boundary = [part.strip()
                       for part in headers['Content-Type'].split(';')]
    self.assertEqual(ctype, 'multipart/mixed')
    self.assertTrue(boundary.startswith('boundary="=='))
    self.assertTrue(boundary.endswith('=="'))

    divider = '--' + boundary[len('boundary="'):-1]
    chunks = body.split(divider)[1:-1]  # discard prolog / epilog
    self.assertEqual(len(chunks), 2)
    self._check_subrequest_payload(chunks[0], 'GET', url, {})
    self._check_subrequest_payload(chunks[1], 'GET', url, {})
||||
|
||||
def test_finish_nonempty_non_multipart_response(self):
    """A non-multipart reply to the batch POST raises ValueError."""
    url = 'http://api.example.com/other_api'
    top_response = _Response()
    top_response['content-type'] = 'text/plain'
    transport = _HTTP((top_response, 'NOT A MIME_RESPONSE'))
    client = _Client(_Connection(http=transport))
    batch = self._makeOne(client)
    for queued in [('POST', url, {}, {'foo': 1, 'bar': 2}),
                   ('PATCH', url, {}, {'bar': 3}),
                   ('DELETE', url, {}, None)]:
        batch._requests.append(queued)
    self.assertRaises(ValueError, batch.finish)
||||
|
||||
def test_as_context_mgr_wo_error(self):
    """Exiting the with-block cleanly pops the batch and fires finish()."""
    from gcloud.storage.client import Client
    url = 'http://example.com/api'
    top_response = _Response()
    top_response['content-type'] = 'multipart/mixed; boundary="DEADBEEF="'
    transport = _HTTP((top_response, _THREE_PART_MIME_RESPONSE))
    client = Client(project='PROJECT', credentials=_Credentials())
    client._connection._http = transport

    self.assertEqual(list(client._batch_stack), [])

    target1 = _MockObject()
    target2 = _MockObject()
    target3 = _MockObject()
    with self._makeOne(client) as batch:
        # While active, the batch sits on the client's stack.
        self.assertEqual(list(client._batch_stack), [batch])
        batch._make_request('POST', url, {'foo': 1, 'bar': 2},
                            target_object=target1)
        batch._make_request('PATCH', url, {'bar': 3},
                            target_object=target2)
        batch._make_request('DELETE', url, target_object=target3)

    # On exit: stack popped, all three queued requests were sent/resolved.
    self.assertEqual(list(client._batch_stack), [])
    self.assertEqual([request[0] for request in batch._requests],
                     ['POST', 'PATCH', 'DELETE'])
    self.assertEqual(batch._target_objects, [target1, target2, target3])
    self.assertEqual(target1._properties, {'foo': 1, 'bar': 2})
    self.assertEqual(target2._properties, {'foo': 1, 'bar': 3})
    self.assertEqual(target3._properties, '')
||||
|
||||
def test_as_context_mgr_w_error(self):
    """An exception inside the with-block aborts: nothing sent, futures unset."""
    from gcloud.storage.batch import _FutureDict
    from gcloud.storage.client import Client
    url = 'http://example.com/api'
    transport = _HTTP()
    connection = _Connection(http=transport)
    client = Client(project='PROJECT', credentials=_Credentials())
    client._connection = connection

    self.assertEqual(list(client._batch_stack), [])

    target1 = _MockObject()
    target2 = _MockObject()
    target3 = _MockObject()
    try:
        with self._makeOne(client) as batch:
            self.assertEqual(list(client._batch_stack), [batch])
            batch._make_request('POST', url, {'foo': 1, 'bar': 2},
                                target_object=target1)
            batch._make_request('PATCH', url, {'bar': 3},
                                target_object=target2)
            batch._make_request('DELETE', url, target_object=target3)
            raise ValueError()
    except ValueError:
        pass

    # The batch was popped, but nothing was ever sent.
    self.assertEqual(list(client._batch_stack), [])
    self.assertEqual(len(transport._requests), 0)
    self.assertEqual(len(batch._requests), 3)
    self.assertEqual(batch._target_objects, [target1, target2, target3])
    # finish() never ran, so every target still holds an unresolved future.
    for target in (target1, target2, target3):
        self.assertTrue(isinstance(target._properties, _FutureDict))
||||
|
||||
|
||||
class Test__unpack_batch_response(unittest2.TestCase):
    """Tests for gcloud.storage.batch._unpack_batch_response."""

    def _callFUT(self, response, content):
        from gcloud.storage.batch import _unpack_batch_response
        return _unpack_batch_response(response, content)

    def _unpack_helper(self, response, content):
        # Shared body for the bytes / unicode variants below.
        import httplib2
        unpacked = list(self._callFUT(response, content))
        self.assertEqual(len(unpacked), 3)
        json_200 = httplib2.Response({
            'content-length': '20',
            'content-type': 'application/json; charset=UTF-8',
            'status': '200',
        })
        empty_204 = httplib2.Response({
            'content-length': '0',
            'status': '204',
        })
        self.assertEqual(unpacked[0], (json_200, {u'bar': 2, u'foo': 1}))
        self.assertEqual(unpacked[1], (json_200, {u'foo': 1, u'bar': 3}))
        self.assertEqual(unpacked[2], (empty_204, ''))

    def test_bytes(self):
        # Content arriving as bytes (raw wire format).
        headers = {'content-type': b'multipart/mixed; boundary="DEADBEEF="'}
        self._unpack_helper(headers, _THREE_PART_MIME_RESPONSE)

    def test_unicode(self):
        # Content arriving as text (already decoded).
        headers = {'content-type': u'multipart/mixed; boundary="DEADBEEF="'}
        self._unpack_helper(headers, _THREE_PART_MIME_RESPONSE.decode('utf-8'))
||||
|
||||
|
||||
# Canned multipart/mixed batch reply with boundary "DEADBEEF=": two
# application/http sub-responses, the second of which is a 404 failure.
# Used to exercise error propagation and response-count mismatches.
_TWO_PART_MIME_RESPONSE_WITH_FAIL = b"""\
--DEADBEEF=
Content-Type: application/http
Content-ID: <response-8a09ca85-8d1d-4f45-9eb0-da8e8b07ec83+1>

HTTP/1.1 200 OK
Content-Type: application/json; charset=UTF-8
Content-Length: 20

{"foo": 1, "bar": 2}

--DEADBEEF=
Content-Type: application/http
Content-ID: <response-8a09ca85-8d1d-4f45-9eb0-da8e8b07ec83+2>

HTTP/1.1 404 Not Found
Content-Type: application/json; charset=UTF-8
Content-Length: 35

{"error": {"message": "Not Found"}}

--DEADBEEF=--
"""
||||
|
||||
# Canned multipart/mixed batch reply with boundary "DEADBEEF=": three
# application/http sub-responses (two 200s with JSON payloads, then an
# empty 204), matching a POST/PATCH/DELETE batch in the tests above.
_THREE_PART_MIME_RESPONSE = b"""\
--DEADBEEF=
Content-Type: application/http
Content-ID: <response-8a09ca85-8d1d-4f45-9eb0-da8e8b07ec83+1>

HTTP/1.1 200 OK
Content-Type: application/json; charset=UTF-8
Content-Length: 20

{"foo": 1, "bar": 2}

--DEADBEEF=
Content-Type: application/http
Content-ID: <response-8a09ca85-8d1d-4f45-9eb0-da8e8b07ec83+2>

HTTP/1.1 200 OK
Content-Type: application/json; charset=UTF-8
Content-Length: 20

{"foo": 1, "bar": 3}

--DEADBEEF=
Content-Type: application/http
Content-ID: <response-8a09ca85-8d1d-4f45-9eb0-da8e8b07ec83+3>

HTTP/1.1 204 No Content
Content-Length: 0

--DEADBEEF=--
"""
||||
|
||||
|
||||
class Test__FutureDict(unittest2.TestCase):
    """Tests for the placeholder mapping returned by deferred batch calls."""

    def _makeOne(self, *args, **kw):
        from gcloud.storage.batch import _FutureDict
        return _FutureDict(*args, **kw)

    def test_get(self):
        # A future has no values yet, so .get must refuse.
        placeholder = self._makeOne()
        self.assertRaises(KeyError, placeholder.get, None)

    def test___getitem__(self):
        placeholder = self._makeOne()
        sentinel = value = object()
        with self.assertRaises(KeyError):
            value = placeholder[None]
        # The failed lookup must not have rebound the local.
        self.assertTrue(value is sentinel)

    def test___setitem__(self):
        placeholder = self._makeOne()
        with self.assertRaises(KeyError):
            placeholder[None] = None
||||
|
||||
|
||||
class _Connection(object):
    """Connection double: forwards ``_make_request`` straight to its http stub."""

    project = 'TESTING'

    def __init__(self, **kw):
        # Accept arbitrary attributes (e.g. ``http=...``) as keywords.
        for name, value in kw.items():
            setattr(self, name, value)

    def _make_request(self, method, url, data=None, headers=None):
        return self.http.request(
            uri=url, method=method, headers=headers, body=data)
||||
|
||||
|
||||
class _Response(dict):
    """Headers-dict double for httplib2 responses, with a ``status`` attribute."""

    def __init__(self, status=200, **kw):
        # Header entries live in the dict; status rides along as an attribute.
        super(_Response, self).__init__(**kw)
        self.status = status
||||
|
||||
|
||||
class _HTTP(object):
    """Scripted http double: records every request, replays canned responses."""

    def __init__(self, *responses):
        self._requests = []
        self._responses = list(responses)

    def request(self, uri, method, headers, body):
        # Record the call, then hand back the next canned response FIFO-style.
        self._requests.append((method, uri, headers, body))
        head, tail = self._responses[0], self._responses[1:]
        self._responses = tail
        return head
||||
|
||||
|
||||
class _MockObject(object):
    """Attribute sink standing in for an API object that receives ``_properties``."""
||||
|
||||
|
||||
class _Client(object):
    """Minimal client double exposing only a private ``_connection`` attribute."""

    def __init__(self, connection):
        self._connection = connection
||||
|
||||
|
||||
class _Credentials(object):
    """Credentials double: records the scopes applied via ``create_scoped``."""

    _scopes = None  # replaced per-instance by create_scoped()

    @staticmethod
    def create_scoped_required():
        # Always claim scoping is required so the client exercises that path.
        return True

    def create_scoped(self, scope):
        # Remember the requested scope and behave fluently.
        self._scopes = scope
        return self
1398
venv/Lib/site-packages/gcloud/storage/test_blob.py
Normal file
1398
venv/Lib/site-packages/gcloud/storage/test_blob.py
Normal file
File diff suppressed because it is too large
Load diff
1039
venv/Lib/site-packages/gcloud/storage/test_bucket.py
Normal file
1039
venv/Lib/site-packages/gcloud/storage/test_bucket.py
Normal file
File diff suppressed because it is too large
Load diff
441
venv/Lib/site-packages/gcloud/storage/test_client.py
Normal file
441
venv/Lib/site-packages/gcloud/storage/test_client.py
Normal file
|
@ -0,0 +1,441 @@
|
|||
# Copyright 2015 Google Inc. All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import unittest2
|
||||
|
||||
|
||||
class TestClient(unittest2.TestCase):
    """Tests for gcloud.storage.client.Client.

    HTTP-touching tests swap the connection's transport for a one-shot
    ``_Http`` double and then assert on the method/URI it captured.
    """

    def _getTargetClass(self):
        # Deferred import keeps collection errors out of module import time.
        from gcloud.storage.client import Client
        return Client

    def _makeOne(self, *args, **kw):
        return self._getTargetClass()(*args, **kw)

    def test_ctor_connection_type(self):
        # A fresh client owns a real Connection wrapping our credentials,
        # with no batch active.
        from gcloud.storage.connection import Connection

        PROJECT = 'PROJECT'
        CREDENTIALS = _Credentials()

        client = self._makeOne(project=PROJECT, credentials=CREDENTIALS)
        self.assertEqual(client.project, PROJECT)
        self.assertTrue(isinstance(client.connection, Connection))
        self.assertTrue(client.connection.credentials is CREDENTIALS)
        self.assertTrue(client.current_batch is None)
        self.assertEqual(list(client._batch_stack), [])

    def test__push_batch_and__pop_batch(self):
        # The batch stack behaves LIFO: push/pop track current_batch.
        from gcloud.storage.batch import Batch

        PROJECT = 'PROJECT'
        CREDENTIALS = _Credentials()

        client = self._makeOne(project=PROJECT, credentials=CREDENTIALS)
        batch1 = Batch(client)
        batch2 = Batch(client)
        client._push_batch(batch1)
        self.assertEqual(list(client._batch_stack), [batch1])
        self.assertTrue(client.current_batch is batch1)
        client._push_batch(batch2)
        self.assertTrue(client.current_batch is batch2)
        # list(_LocalStack) returns in reverse order.
        self.assertEqual(list(client._batch_stack), [batch2, batch1])
        self.assertTrue(client._pop_batch() is batch2)
        self.assertEqual(list(client._batch_stack), [batch1])
        self.assertTrue(client._pop_batch() is batch1)
        self.assertEqual(list(client._batch_stack), [])

    def test_connection_setter(self):
        # The setter only works while _connection is unset.
        PROJECT = 'PROJECT'
        CREDENTIALS = _Credentials()
        client = self._makeOne(project=PROJECT, credentials=CREDENTIALS)
        client._connection = None  # Unset the value from the constructor
        client.connection = connection = object()
        self.assertTrue(client._connection is connection)

    def test_connection_setter_when_set(self):
        # Re-assigning an already-set connection is rejected.
        PROJECT = 'PROJECT'
        CREDENTIALS = _Credentials()
        client = self._makeOne(project=PROJECT, credentials=CREDENTIALS)
        self.assertRaises(ValueError, setattr, client, 'connection', None)

    def test_connection_getter_no_batch(self):
        # Without an active batch, the getter exposes the raw connection.
        PROJECT = 'PROJECT'
        CREDENTIALS = _Credentials()
        client = self._makeOne(project=PROJECT, credentials=CREDENTIALS)
        self.assertTrue(client.connection is client._connection)
        self.assertTrue(client.current_batch is None)

    def test_connection_getter_with_batch(self):
        # With an active batch, the getter exposes the batch instead.
        from gcloud.storage.batch import Batch
        PROJECT = 'PROJECT'
        CREDENTIALS = _Credentials()
        client = self._makeOne(project=PROJECT, credentials=CREDENTIALS)
        batch = Batch(client)
        client._push_batch(batch)
        self.assertTrue(client.connection is not client._connection)
        self.assertTrue(client.connection is batch)
        self.assertTrue(client.current_batch is batch)

    def test_bucket(self):
        # bucket() is a local factory: no HTTP, just a bound Bucket.
        from gcloud.storage.bucket import Bucket

        PROJECT = 'PROJECT'
        CREDENTIALS = _Credentials()
        BUCKET_NAME = 'BUCKET_NAME'

        client = self._makeOne(project=PROJECT, credentials=CREDENTIALS)
        bucket = client.bucket(BUCKET_NAME)
        self.assertTrue(isinstance(bucket, Bucket))
        self.assertTrue(bucket.client is client)
        self.assertEqual(bucket.name, BUCKET_NAME)

    def test_batch(self):
        # batch() is a local factory: a Batch bound to this client.
        from gcloud.storage.batch import Batch

        PROJECT = 'PROJECT'
        CREDENTIALS = _Credentials()

        client = self._makeOne(project=PROJECT, credentials=CREDENTIALS)
        batch = client.batch()
        self.assertTrue(isinstance(batch, Batch))
        self.assertTrue(batch._client is client)

    def test_get_bucket_miss(self):
        # A 404 from the API surfaces as gcloud.exceptions.NotFound.
        from gcloud.exceptions import NotFound

        PROJECT = 'PROJECT'
        CREDENTIALS = _Credentials()
        client = self._makeOne(project=PROJECT, credentials=CREDENTIALS)

        NONESUCH = 'nonesuch'
        URI = '/'.join([
            client.connection.API_BASE_URL,
            'storage',
            client.connection.API_VERSION,
            'b',
            'nonesuch?projection=noAcl',
        ])
        http = client.connection._http = _Http(
            {'status': '404', 'content-type': 'application/json'},
            b'{}',
        )
        self.assertRaises(NotFound, client.get_bucket, NONESUCH)
        self.assertEqual(http._called_with['method'], 'GET')
        self.assertEqual(http._called_with['uri'], URI)

    def test_get_bucket_hit(self):
        # A 200 with a bucket resource yields a populated Bucket.
        from gcloud.storage.bucket import Bucket

        PROJECT = 'PROJECT'
        CREDENTIALS = _Credentials()
        client = self._makeOne(project=PROJECT, credentials=CREDENTIALS)

        BLOB_NAME = 'blob-name'
        URI = '/'.join([
            client.connection.API_BASE_URL,
            'storage',
            client.connection.API_VERSION,
            'b',
            '%s?projection=noAcl' % (BLOB_NAME,),
        ])
        http = client.connection._http = _Http(
            {'status': '200', 'content-type': 'application/json'},
            '{{"name": "{0}"}}'.format(BLOB_NAME).encode('utf-8'),
        )

        bucket = client.get_bucket(BLOB_NAME)
        self.assertTrue(isinstance(bucket, Bucket))
        self.assertEqual(bucket.name, BLOB_NAME)
        self.assertEqual(http._called_with['method'], 'GET')
        self.assertEqual(http._called_with['uri'], URI)

    def test_lookup_bucket_miss(self):
        # lookup_bucket() swallows the 404 and returns None.
        PROJECT = 'PROJECT'
        CREDENTIALS = _Credentials()
        client = self._makeOne(project=PROJECT, credentials=CREDENTIALS)

        NONESUCH = 'nonesuch'
        URI = '/'.join([
            client.connection.API_BASE_URL,
            'storage',
            client.connection.API_VERSION,
            'b',
            'nonesuch?projection=noAcl',
        ])
        http = client.connection._http = _Http(
            {'status': '404', 'content-type': 'application/json'},
            b'{}',
        )
        bucket = client.lookup_bucket(NONESUCH)
        self.assertEqual(bucket, None)
        self.assertEqual(http._called_with['method'], 'GET')
        self.assertEqual(http._called_with['uri'], URI)

    def test_lookup_bucket_hit(self):
        # lookup_bucket() on success behaves like get_bucket().
        from gcloud.storage.bucket import Bucket

        PROJECT = 'PROJECT'
        CREDENTIALS = _Credentials()
        client = self._makeOne(project=PROJECT, credentials=CREDENTIALS)

        BLOB_NAME = 'blob-name'
        URI = '/'.join([
            client.connection.API_BASE_URL,
            'storage',
            client.connection.API_VERSION,
            'b',
            '%s?projection=noAcl' % (BLOB_NAME,),
        ])
        http = client.connection._http = _Http(
            {'status': '200', 'content-type': 'application/json'},
            '{{"name": "{0}"}}'.format(BLOB_NAME).encode('utf-8'),
        )

        bucket = client.lookup_bucket(BLOB_NAME)
        self.assertTrue(isinstance(bucket, Bucket))
        self.assertEqual(bucket.name, BLOB_NAME)
        self.assertEqual(http._called_with['method'], 'GET')
        self.assertEqual(http._called_with['uri'], URI)

    def test_create_bucket_conflict(self):
        # A 409 from the API surfaces as gcloud.exceptions.Conflict.
        from gcloud.exceptions import Conflict

        PROJECT = 'PROJECT'
        CREDENTIALS = _Credentials()
        client = self._makeOne(project=PROJECT, credentials=CREDENTIALS)

        BLOB_NAME = 'blob-name'
        URI = '/'.join([
            client.connection.API_BASE_URL,
            'storage',
            client.connection.API_VERSION,
            'b?project=%s' % (PROJECT,),
        ])
        http = client.connection._http = _Http(
            {'status': '409', 'content-type': 'application/json'},
            '{"error": {"message": "Conflict"}}',
        )

        self.assertRaises(Conflict, client.create_bucket, BLOB_NAME)
        self.assertEqual(http._called_with['method'], 'POST')
        self.assertEqual(http._called_with['uri'], URI)

    def test_create_bucket_success(self):
        # A 200 with the new resource yields a populated Bucket.
        from gcloud.storage.bucket import Bucket

        PROJECT = 'PROJECT'
        CREDENTIALS = _Credentials()
        client = self._makeOne(project=PROJECT, credentials=CREDENTIALS)

        BLOB_NAME = 'blob-name'
        URI = '/'.join([
            client.connection.API_BASE_URL,
            'storage',
            client.connection.API_VERSION,
            'b?project=%s' % (PROJECT,),
        ])
        http = client.connection._http = _Http(
            {'status': '200', 'content-type': 'application/json'},
            '{{"name": "{0}"}}'.format(BLOB_NAME).encode('utf-8'),
        )

        bucket = client.create_bucket(BLOB_NAME)
        self.assertTrue(isinstance(bucket, Bucket))
        self.assertEqual(bucket.name, BLOB_NAME)
        self.assertEqual(http._called_with['method'], 'POST')
        self.assertEqual(http._called_with['uri'], URI)

    def test_list_buckets_empty(self):
        # An empty listing still issues a GET with project/projection params.
        from six.moves.urllib.parse import parse_qs
        from six.moves.urllib.parse import urlparse

        PROJECT = 'PROJECT'
        CREDENTIALS = _Credentials()
        client = self._makeOne(project=PROJECT, credentials=CREDENTIALS)

        EXPECTED_QUERY = {
            'project': [PROJECT],
            'projection': ['noAcl'],
        }
        http = client.connection._http = _Http(
            {'status': '200', 'content-type': 'application/json'},
            b'{}',
        )
        buckets = list(client.list_buckets())
        self.assertEqual(len(buckets), 0)
        self.assertEqual(http._called_with['method'], 'GET')
        self.assertEqual(http._called_with['body'], None)

        BASE_URI = '/'.join([
            client.connection.API_BASE_URL,
            'storage',
            client.connection.API_VERSION,
            'b',
        ])
        URI = http._called_with['uri']
        self.assertTrue(URI.startswith(BASE_URI))
        # Compare parsed query params so ordering is irrelevant.
        uri_parts = urlparse(URI)
        self.assertEqual(parse_qs(uri_parts.query), EXPECTED_QUERY)

    def test_list_buckets_non_empty(self):
        # A listing with one item yields one Bucket with the right name.
        from six.moves.urllib.parse import parse_qs
        from six.moves.urllib.parse import urlencode
        from six.moves.urllib.parse import urlparse
        PROJECT = 'PROJECT'
        CREDENTIALS = _Credentials()
        client = self._makeOne(project=PROJECT, credentials=CREDENTIALS)

        BUCKET_NAME = 'bucket-name'
        query_params = urlencode({'project': PROJECT, 'projection': 'noAcl'})
        BASE_URI = '/'.join([
            client.connection.API_BASE_URL,
            'storage',
            client.connection.API_VERSION,
        ])
        URI = '/'.join([BASE_URI, 'b?%s' % (query_params,)])
        http = client.connection._http = _Http(
            {'status': '200', 'content-type': 'application/json'},
            '{{"items": [{{"name": "{0}"}}]}}'.format(BUCKET_NAME)
            .encode('utf-8'),
        )
        buckets = list(client.list_buckets())
        self.assertEqual(len(buckets), 1)
        self.assertEqual(buckets[0].name, BUCKET_NAME)
        self.assertEqual(http._called_with['method'], 'GET')
        self.assertTrue(http._called_with['uri'].startswith(BASE_URI))
        # Compare parsed query params so ordering is irrelevant.
        self.assertEqual(parse_qs(urlparse(http._called_with['uri']).query),
                         parse_qs(urlparse(URI).query))

    def test_list_buckets_all_arguments(self):
        # Every optional argument is forwarded as its API query parameter.
        from six.moves.urllib.parse import parse_qs
        from six.moves.urllib.parse import urlparse

        PROJECT = 'foo-bar'
        CREDENTIALS = _Credentials()
        client = self._makeOne(project=PROJECT, credentials=CREDENTIALS)

        MAX_RESULTS = 10
        PAGE_TOKEN = 'ABCD'
        PREFIX = 'subfolder'
        PROJECTION = 'full'
        FIELDS = 'items/id,nextPageToken'
        EXPECTED_QUERY = {
            'project': [PROJECT],
            'maxResults': [str(MAX_RESULTS)],
            'pageToken': [PAGE_TOKEN],
            'prefix': [PREFIX],
            'projection': [PROJECTION],
            'fields': [FIELDS],
        }

        http = client.connection._http = _Http(
            {'status': '200', 'content-type': 'application/json'},
            '{"items": []}',
        )
        iterator = client.list_buckets(
            max_results=MAX_RESULTS,
            page_token=PAGE_TOKEN,
            prefix=PREFIX,
            projection=PROJECTION,
            fields=FIELDS,
        )
        buckets = list(iterator)
        self.assertEqual(buckets, [])
        self.assertEqual(http._called_with['method'], 'GET')
        self.assertEqual(http._called_with['body'], None)

        BASE_URI = '/'.join([
            client.connection.API_BASE_URL,
            'storage',
            client.connection.API_VERSION,
            'b'
        ])
        URI = http._called_with['uri']
        self.assertTrue(URI.startswith(BASE_URI))
        # Compare parsed query params so ordering is irrelevant.
        uri_parts = urlparse(URI)
        self.assertEqual(parse_qs(uri_parts.query), EXPECTED_QUERY)
||||
|
||||
|
||||
class Test__BucketIterator(unittest2.TestCase):
    """Tests for gcloud.storage.client._BucketIterator."""

    def _getTargetClass(self):
        from gcloud.storage.client import _BucketIterator
        return _BucketIterator

    def _makeOne(self, *args, **kw):
        return self._getTargetClass()(*args, **kw)

    def _make_iterator(self):
        # Fresh iterator over a throwaway client / opaque connection.
        return self._makeOne(_Client(object()))

    def test_ctor(self):
        client = _Client(object())
        iterator = self._makeOne(client)
        self.assertEqual(iterator.path, '/b')
        self.assertEqual(iterator.page_number, 0)
        self.assertEqual(iterator.next_page_token, None)
        self.assertTrue(iterator.client is client)

    def test_get_items_from_response_empty(self):
        iterator = self._make_iterator()
        self.assertEqual(list(iterator.get_items_from_response({})), [])

    def test_get_items_from_response_non_empty(self):
        from gcloud.storage.bucket import Bucket
        name = 'blob-name'
        iterator = self._make_iterator()
        found = list(iterator.get_items_from_response(
            {'items': [{'name': name}]}))
        self.assertEqual(len(found), 1)
        self.assertTrue(isinstance(found[0], Bucket))
        self.assertEqual(found[0].name, name)
||||
|
||||
|
||||
class _Credentials(object):
|
||||
|
||||
_scopes = None
|
||||
|
||||
@staticmethod
|
||||
def create_scoped_required():
|
||||
return True
|
||||
|
||||
def create_scoped(self, scope):
|
||||
self._scopes = scope
|
||||
return self
|
||||
|
||||
|
||||
class _Http(object):
|
||||
|
||||
_called_with = None
|
||||
|
||||
def __init__(self, headers, content):
|
||||
from httplib2 import Response
|
||||
self._response = Response(headers)
|
||||
self._content = content
|
||||
|
||||
def request(self, **kw):
|
||||
self._called_with = kw
|
||||
return self._response, self._content
|
||||
|
||||
|
||||
class _Client(object):
|
||||
|
||||
def __init__(self, connection):
|
||||
self.connection = connection
|
47
venv/Lib/site-packages/gcloud/storage/test_connection.py
Normal file
47
venv/Lib/site-packages/gcloud/storage/test_connection.py
Normal file
|
@ -0,0 +1,47 @@
|
|||
# Copyright 2014 Google Inc. All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import unittest2
|
||||
|
||||
|
||||
class TestConnection(unittest2.TestCase):
    """Tests for ``gcloud.storage.connection.Connection`` URL building."""

    def _getTargetClass(self):
        # Deferred import keeps module-level collection working even if
        # the connection module itself is broken.
        from gcloud.storage.connection import Connection
        return Connection

    def _makeOne(self, *args, **kw):
        return self._getTargetClass()(*args, **kw)

    def test_build_api_url_no_extra_query_params(self):
        connection = self._makeOne()
        expected = '/'.join([
            connection.API_BASE_URL,
            'storage',
            connection.API_VERSION,
            'foo',
        ])
        self.assertEqual(connection.build_api_url('/foo'), expected)

    def test_build_api_url_w_extra_query_params(self):
        from six.moves.urllib.parse import parse_qsl
        from six.moves.urllib.parse import urlsplit
        connection = self._makeOne()
        built = connection.build_api_url('/foo', {'bar': 'baz'})
        # Split the URL and verify scheme/host, path, and query string
        # independently of query-parameter ordering.
        scheme, netloc, path, query, _ = urlsplit(built)
        self.assertEqual('%s://%s' % (scheme, netloc),
                         connection.API_BASE_URL)
        expected_path = '/'.join(
            ['', 'storage', connection.API_VERSION, 'foo'])
        self.assertEqual(path, expected_path)
        params = dict(parse_qsl(query))
        self.assertEqual(params['bar'], 'baz')
|
Loading…
Add table
Add a link
Reference in a new issue