Updated DB_Helper by adding firebase methods.

This commit is contained in:
Batuhan Berk Başoğlu 2020-10-05 16:53:40 -04:00
parent 485cc3bbba
commit c82121d036
1810 changed files with 537281 additions and 1 deletion


@@ -0,0 +1,32 @@
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Cloud BigQuery API wrapper.
The main concepts with this API are:
- :class:`gcloud.bigquery.dataset.Dataset` represents a collection of tables.
- :class:`gcloud.bigquery.table.Table` represents a single "relation".
"""
from gcloud.bigquery.client import Client
from gcloud.bigquery.connection import Connection
from gcloud.bigquery.dataset import AccessGrant
from gcloud.bigquery.dataset import Dataset
from gcloud.bigquery.table import SchemaField
from gcloud.bigquery.table import Table
SCOPE = Connection.SCOPE
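
Taken together, these imports define the package's public surface. A minimal usage sketch (the project, dataset, and table names here are placeholders, and credentials are assumed to fall back to the environment defaults):

# Illustrative only: 'my-project', 'my_dataset', and 'my_table' are
# hypothetical names, not part of this package.
from gcloud import bigquery

client = bigquery.Client(project='my-project')
dataset = client.dataset('my_dataset')    # gcloud.bigquery.dataset.Dataset
table = dataset.table('my_table')         # gcloud.bigquery.table.Table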


@@ -0,0 +1,166 @@
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Shared elper functions for BigQuery API classes."""
from gcloud._helpers import _datetime_from_microseconds
def _not_null(value, field):
"""Check whether 'value' should be coerced to 'field' type."""
return value is not None or field.mode != 'NULLABLE'
def _int_from_json(value, field):
"""Coerce 'value' to an int, if set or not nullable."""
if _not_null(value, field):
return int(value)
def _float_from_json(value, field):
"""Coerce 'value' to a float, if set or not nullable."""
if _not_null(value, field):
return float(value)
def _bool_from_json(value, field):
"""Coerce 'value' to a bool, if set or not nullable."""
if _not_null(value, field):
return value.lower() in ['t', 'true', '1']
def _datetime_from_json(value, field):
"""Coerce 'value' to a datetime, if set or not nullable."""
if _not_null(value, field):
# value will be a float in seconds, to microsecond precision, in UTC.
return _datetime_from_microseconds(1e6 * float(value))
def _record_from_json(value, field):
"""Coerce 'value' to a mapping, if set or not nullable."""
if _not_null(value, field):
record = {}
for subfield, cell in zip(field.fields, value['f']):
converter = _CELLDATA_FROM_JSON[subfield.field_type]
# Use the subfield's own mode and definition when converting its cells.
if subfield.mode == 'REPEATED':
value = [converter(item, subfield) for item in cell['v']]
else:
value = converter(cell['v'], subfield)
record[subfield.name] = value
return record
def _string_from_json(value, _):
"""NOOP string -> string coercion"""
return value
_CELLDATA_FROM_JSON = {
'INTEGER': _int_from_json,
'FLOAT': _float_from_json,
'BOOLEAN': _bool_from_json,
'TIMESTAMP': _datetime_from_json,
'RECORD': _record_from_json,
'STRING': _string_from_json,
}
def _rows_from_json(rows, schema):
"""Convert JSON row data to rows w/ appropriate types."""
rows_data = []
for row in rows:
row_data = []
for field, cell in zip(schema, row['f']):
converter = _CELLDATA_FROM_JSON[field.field_type]
if field.mode == 'REPEATED':
row_data.append([converter(item, field)
for item in cell['v']])
else:
row_data.append(converter(cell['v'], field))
rows_data.append(tuple(row_data))
return rows_data
class _ConfigurationProperty(object):
"""Base property implementation.
Values will be stored on a `_configuration` helper attribute of the
property's job instance.
:type name: string
:param name: name of the property
"""
def __init__(self, name):
self.name = name
self._backing_name = '_%s' % (self.name,)
def __get__(self, instance, owner):
"""Descriptor protocal: accesstor"""
if instance is None:
return self
return getattr(instance._configuration, self._backing_name)
def _validate(self, value):
"""Subclasses override to impose validation policy."""
pass
def __set__(self, instance, value):
"""Descriptor protocal: mutator"""
self._validate(value)
setattr(instance._configuration, self._backing_name, value)
def __delete__(self, instance):
"""Descriptor protocal: deleter"""
delattr(instance._configuration, self._backing_name)
class _TypedProperty(_ConfigurationProperty):
"""Property implementation: validates based on value type.
:type name: string
:param name: name of the property
:type property_type: type or sequence of types
:param property_type: type to be validated
"""
def __init__(self, name, property_type):
super(_TypedProperty, self).__init__(name)
self.property_type = property_type
def _validate(self, value):
"""Ensure that 'value' is of the appropriate type.
:raises: ValueError on a type mismatch.
"""
if not isinstance(value, self.property_type):
raise ValueError('Required type: %s' % (self.property_type,))
class _EnumProperty(_ConfigurationProperty):
"""Psedo-enumeration class.
Subclasses must define ``ALLOWED`` as a class-level constant: it must
be a sequence of strings.
:type name: string
:param name: name of the property
"""
def _validate(self, value):
"""Check that ``value`` is one of the allowed values.
:raises: ValueError if value is not allowed.
"""
if value not in self.ALLOWED:
raise ValueError('Pass one of: %s' % ', '.join(self.ALLOWED))
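
The converters above mirror BigQuery's JSON wire format: each row is a mapping with an 'f' list of cells, and each cell is a {'v': ...} mapping. A small sketch of `_rows_from_json` consuming that format (the schema and payload are made up for illustration):

from gcloud.bigquery._helpers import _rows_from_json
from gcloud.bigquery.table import SchemaField

# Hypothetical two-column schema and a matching JSON payload.
schema = [SchemaField('name', 'STRING', mode='REQUIRED'),
          SchemaField('age', 'INTEGER', mode='NULLABLE')]
rows = [{'f': [{'v': 'Phred'}, {'v': '32'}]},
        {'f': [{'v': 'Wylma'}, {'v': None}]}]

print(_rows_from_json(rows, schema))
# [('Phred', 32), ('Wylma', None)]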


@@ -0,0 +1,275 @@
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Client for interacting with the Google BigQuery API."""
from gcloud.client import JSONClient
from gcloud.bigquery.connection import Connection
from gcloud.bigquery.dataset import Dataset
from gcloud.bigquery.job import CopyJob
from gcloud.bigquery.job import ExtractTableToStorageJob
from gcloud.bigquery.job import LoadTableFromStorageJob
from gcloud.bigquery.job import QueryJob
from gcloud.bigquery.query import QueryResults
class Client(JSONClient):
"""Client to bundle configuration needed for API requests.
:type project: str
:param project: the project which the client acts on behalf of. Will be
passed when creating a dataset / job. If not passed,
falls back to the default inferred from the environment.
:type credentials: :class:`oauth2client.client.OAuth2Credentials` or
:class:`NoneType`
:param credentials: The OAuth2 Credentials to use for the connection
owned by this client. If not passed (and if no ``http``
object is passed), falls back to the default inferred
from the environment.
:type http: :class:`httplib2.Http` or class that defines ``request()``.
:param http: An optional HTTP object to make requests. If not passed, an
``http`` object is created that is bound to the
``credentials`` for the current object.
"""
_connection_class = Connection
def list_datasets(self, include_all=False, max_results=None,
page_token=None):
"""List datasets for the project associated with this client.
See:
https://cloud.google.com/bigquery/docs/reference/v2/datasets/list
:type include_all: boolean
:param include_all: True if results include hidden datasets.
:type max_results: int
:param max_results: maximum number of datasets to return. If not
passed, defaults to a value set by the API.
:type page_token: str
:param page_token: opaque marker for the next "page" of datasets. If
not passed, the API will return the first page of
datasets.
:rtype: tuple, (list, str)
:returns: list of :class:`gcloud.bigquery.dataset.Dataset`, plus a
"next page token" string: if the token is not None,
indicates that more datasets can be retrieved with another
call (pass that value as ``page_token``).
"""
params = {}
if include_all:
params['all'] = True
if max_results is not None:
params['maxResults'] = max_results
if page_token is not None:
params['pageToken'] = page_token
path = '/projects/%s/datasets' % (self.project,)
resp = self.connection.api_request(method='GET', path=path,
query_params=params)
datasets = [Dataset.from_api_repr(resource, self)
for resource in resp.get('datasets', ())]
return datasets, resp.get('nextPageToken')
def dataset(self, dataset_name):
"""Construct a dataset bound to this client.
:type dataset_name: str
:param dataset_name: Name of the dataset.
:rtype: :class:`gcloud.bigquery.dataset.Dataset`
:returns: a new ``Dataset`` instance
"""
return Dataset(dataset_name, client=self)
def job_from_resource(self, resource):
"""Detect correct job type from resource and instantiate.
:type resource: dict
:param resource: one job resource from API response
:rtype: One of:
:class:`gcloud.bigquery.job.LoadTableFromStorageJob`,
:class:`gcloud.bigquery.job.CopyJob`,
:class:`gcloud.bigquery.job.ExtractTableToStorageJob`,
:class:`gcloud.bigquery.job.QueryJob`
:returns: the job instance, constructed via the resource
"""
config = resource['configuration']
if 'load' in config:
return LoadTableFromStorageJob.from_api_repr(resource, self)
elif 'copy' in config:
return CopyJob.from_api_repr(resource, self)
elif 'extract' in config:
return ExtractTableToStorageJob.from_api_repr(resource, self)
elif 'query' in config:
return QueryJob.from_api_repr(resource, self)
raise ValueError('Cannot parse job resource')
def list_jobs(self, max_results=None, page_token=None, all_users=None,
state_filter=None):
"""List jobs for the project associated with this client.
See:
https://cloud.google.com/bigquery/docs/reference/v2/jobs/list
:type max_results: int
:param max_results: maximum number of jobs to return. If not
passed, defaults to a value set by the API.
:type page_token: str
:param page_token: opaque marker for the next "page" of jobs. If
not passed, the API will return the first page of
jobs.
:type all_users: boolean
:param all_users: if true, include jobs owned by all users in the
project.
:type state_filter: str
:param state_filter: if passed, include only jobs matching the given
state. One of
* ``"done"``
* ``"pending"``
* ``"running"``
:rtype: tuple, (list, str)
:returns: list of job instances, plus a "next page token" string:
if the token is not ``None``, indicates that more jobs can be
retrieved with another call (pass that value as
``page_token``).
"""
params = {'projection': 'full'}
if max_results is not None:
params['maxResults'] = max_results
if page_token is not None:
params['pageToken'] = page_token
if all_users is not None:
params['allUsers'] = all_users
if state_filter is not None:
params['stateFilter'] = state_filter
path = '/projects/%s/jobs' % (self.project,)
resp = self.connection.api_request(method='GET', path=path,
query_params=params)
jobs = [self.job_from_resource(resource) for resource in resp.get('jobs', ())]
return jobs, resp.get('nextPageToken')
def load_table_from_storage(self, job_name, destination, *source_uris):
"""Construct a job for loading data into a table from CloudStorage.
See:
https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load
:type job_name: str
:param job_name: Name of the job.
:type destination: :class:`gcloud.bigquery.table.Table`
:param destination: Table into which data is to be loaded.
:type source_uris: sequence of string
:param source_uris: URIs of data files to be loaded; in format
``gs://<bucket_name>/<object_name_or_glob>``.
:rtype: :class:`gcloud.bigquery.job.LoadTableFromStorageJob`
:returns: a new ``LoadTableFromStorageJob`` instance
"""
return LoadTableFromStorageJob(job_name, destination, source_uris,
client=self)
def copy_table(self, job_name, destination, *sources):
"""Construct a job for copying one or more tables into another table.
See:
https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.copy
:type job_name: str
:param job_name: Name of the job.
:type destination: :class:`gcloud.bigquery.table.Table`
:param destination: Table into which data is to be copied.
:type sources: sequence of :class:`gcloud.bigquery.table.Table`
:param sources: tables to be copied.
:rtype: :class:`gcloud.bigquery.job.CopyJob`
:returns: a new ``CopyJob`` instance
"""
return CopyJob(job_name, destination, sources, client=self)
def extract_table_to_storage(self, job_name, source, *destination_uris):
"""Construct a job for extracting a table into Cloud Storage files.
See:
https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.extract
:type job_name: str
:param job_name: Name of the job.
:type source: :class:`gcloud.bigquery.table.Table`
:param source: table to be extracted.
:type destination_uris: sequence of string
:param destination_uris: URIs of Cloud Storage file(s) into which
table data is to be extracted; in format
``gs://<bucket_name>/<object_name_or_glob>``.
:rtype: :class:`gcloud.bigquery.job.ExtractTableToStorageJob`
:returns: a new ``ExtractTableToStorageJob`` instance
"""
return ExtractTableToStorageJob(job_name, source, destination_uris,
client=self)
def run_async_query(self, job_name, query):
"""Construct a job for running a SQL query asynchronously.
See:
https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.query
:type job_name: str
:param job_name: Name of the job.
:type query: str
:param query: SQL query to be executed
:rtype: :class:`gcloud.bigquery.job.QueryJob`
:returns: a new ``QueryJob`` instance
"""
return QueryJob(job_name, query, client=self)
def run_sync_query(self, query):
"""Run a SQL query synchronously.
:type query: str
:param query: SQL query to be executed
:rtype: :class:`gcloud.bigquery.query.QueryResults`
:returns: a new ``QueryResults`` instance
"""
return QueryResults(query, client=self)
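
A short sketch tying the client methods above together (the project, job name, table, URI, and SQL are placeholders; default environment credentials are assumed):

from gcloud import bigquery

client = bigquery.Client(project='my-project')   # hypothetical project

# Page through datasets owned by the project.
datasets, token = client.list_datasets(max_results=10)

# Asynchronous load from Cloud Storage into a table in this project.
destination = client.dataset('my_dataset').table('my_table')
job = client.load_table_from_storage(
    'load-job-1', destination, 'gs://my-bucket/data-*.csv')

# Synchronous query.
query = client.run_sync_query('SELECT COUNT(*) FROM my_dataset.my_table')
query.run()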


@@ -0,0 +1,34 @@
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create / interact with gcloud bigquery connections."""
from gcloud import connection as base_connection
class Connection(base_connection.JSONConnection):
"""A connection to Google Cloud BigQuery via the JSON REST API."""
API_BASE_URL = 'https://www.googleapis.com'
"""The base of the API call URL."""
API_VERSION = 'v2'
"""The version of the API, used in building the API call's URL."""
API_URL_TEMPLATE = '{api_base_url}/bigquery/{api_version}{path}'
"""A template for the URL of a particular API call."""
SCOPE = ('https://www.googleapis.com/auth/bigquery',
'https://www.googleapis.com/auth/cloud-platform')
"""The scopes required for authenticating as a Cloud BigQuery consumer."""


@@ -0,0 +1,577 @@
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Define API Datasets."""
import six
from gcloud._helpers import _datetime_from_microseconds
from gcloud.exceptions import NotFound
from gcloud.bigquery.table import Table
class AccessGrant(object):
"""Represent grant of an access role to an entity.
Every entry in the access list will have exactly one of
``userByEmail``, ``groupByEmail``, ``domain``, ``specialGroup`` or
``view`` set. And if anything but ``view`` is set, it'll also have a
``role`` specified. ``role`` is omitted for a ``view``, since
views are always read-only.
See https://cloud.google.com/bigquery/docs/reference/v2/datasets.
:type role: string
:param role: Role granted to the entity. One of
* ``'OWNER'``
* ``'WRITER'``
* ``'READER'``
May also be ``None`` if the ``entity_type`` is ``view``.
:type entity_type: string
:param entity_type: Type of entity being granted the role. One of
:attr:`ENTITY_TYPES`.
:type entity_id: string
:param entity_id: ID of entity being granted the role.
:raises: :class:`ValueError` if the ``entity_type`` is not among
:attr:`ENTITY_TYPES`, or if a ``view`` has ``role`` set or
a non ``view`` **does not** have a ``role`` set.
"""
ENTITY_TYPES = frozenset(['userByEmail', 'groupByEmail', 'domain',
'specialGroup', 'view'])
"""Allowed entity types."""
def __init__(self, role, entity_type, entity_id):
if entity_type not in self.ENTITY_TYPES:
message = 'Entity type %r not among: %s' % (
entity_type, ', '.join(self.ENTITY_TYPES))
raise ValueError(message)
if entity_type == 'view':
if role is not None:
raise ValueError('Role must be None for a view. Received '
'role: %r' % (role,))
else:
if role is None:
raise ValueError('Role must be set for entity '
'type %r' % (entity_type,))
self.role = role
self.entity_type = entity_type
self.entity_id = entity_id
def __eq__(self, other):
return (
self.role == other.role and
self.entity_type == other.entity_type and
self.entity_id == other.entity_id)
def __repr__(self):
return '<AccessGrant: role=%s, %s=%s>' % (
self.role, self.entity_type, self.entity_id)
class Dataset(object):
"""Datasets are containers for tables.
See:
https://cloud.google.com/bigquery/docs/reference/v2/datasets
:type name: string
:param name: the name of the dataset
:type client: :class:`gcloud.bigquery.client.Client`
:param client: A client which holds credentials and project configuration
for the dataset (which requires a project).
:type access_grants: list of :class:`AccessGrant`
:param access_grants: roles granted to entities for this dataset
"""
_access_grants = None
def __init__(self, name, client, access_grants=()):
self.name = name
self._client = client
self._properties = {}
# Let the @property do validation.
self.access_grants = access_grants
@property
def project(self):
"""Project bound to the dataset.
:rtype: string
:returns: the project (derived from the client).
"""
return self._client.project
@property
def path(self):
"""URL path for the dataset's APIs.
:rtype: string
:returns: the path based on project and dataset name.
"""
return '/projects/%s/datasets/%s' % (self.project, self.name)
@property
def access_grants(self):
"""Dataset's access grants.
:rtype: list of :class:`AccessGrant`
:returns: roles granted to entities for this dataset
"""
return list(self._access_grants)
@access_grants.setter
def access_grants(self, value):
"""Update dataset's access grants
:type value: list of :class:`AccessGrant`
:param value: roles granted to entities for this dataset
:raises: TypeError if 'value' is not a sequence, or ValueError if
any item in the sequence is not an AccessGrant
"""
if not all(isinstance(field, AccessGrant) for field in value):
raise ValueError('Values must be AccessGrant instances')
self._access_grants = tuple(value)
@property
def created(self):
"""Datetime at which the dataset was created.
:rtype: ``datetime.datetime``, or ``NoneType``
:returns: the creation time (None until set from the server).
"""
creation_time = self._properties.get('creationTime')
if creation_time is not None:
# creation_time will be in milliseconds.
return _datetime_from_microseconds(1000.0 * creation_time)
@property
def dataset_id(self):
"""ID for the dataset resource.
:rtype: string, or ``NoneType``
:returns: the ID (None until set from the server).
"""
return self._properties.get('id')
@property
def etag(self):
"""ETag for the dataset resource.
:rtype: string, or ``NoneType``
:returns: the ETag (None until set from the server).
"""
return self._properties.get('etag')
@property
def modified(self):
"""Datetime at which the dataset was last modified.
:rtype: ``datetime.datetime``, or ``NoneType``
:returns: the modification time (None until set from the server).
"""
modified_time = self._properties.get('lastModifiedTime')
if modified_time is not None:
# modified_time will be in milliseconds.
return _datetime_from_microseconds(1000.0 * modified_time)
@property
def self_link(self):
"""URL for the dataset resource.
:rtype: string, or ``NoneType``
:returns: the URL (None until set from the server).
"""
return self._properties.get('selfLink')
@property
def default_table_expiration_ms(self):
"""Default expiration time for tables in the dataset.
:rtype: integer, or ``NoneType``
:returns: The time in milliseconds, or None (the default).
"""
return self._properties.get('defaultTableExpirationMs')
@default_table_expiration_ms.setter
def default_table_expiration_ms(self, value):
"""Update default expiration time for tables in the dataset.
:type value: integer, or ``NoneType``
:param value: new default time, in milliseconds
:raises: ValueError for invalid value types.
"""
if not isinstance(value, six.integer_types) and value is not None:
raise ValueError("Pass an integer, or None")
self._properties['defaultTableExpirationMs'] = value
@property
def description(self):
"""Description of the dataset.
:rtype: string, or ``NoneType``
:returns: The description as set by the user, or None (the default).
"""
return self._properties.get('description')
@description.setter
def description(self, value):
"""Update description of the dataset.
:type value: string, or ``NoneType``
:param value: new description
:raises: ValueError for invalid value types.
"""
if not isinstance(value, six.string_types) and value is not None:
raise ValueError("Pass a string, or None")
self._properties['description'] = value
@property
def friendly_name(self):
"""Title of the dataset.
:rtype: string, or ``NoneType``
:returns: The name as set by the user, or None (the default).
"""
return self._properties.get('friendlyName')
@friendly_name.setter
def friendly_name(self, value):
"""Update title of the dataset.
:type value: string, or ``NoneType``
:param value: new title
:raises: ValueError for invalid value types.
"""
if not isinstance(value, six.string_types) and value is not None:
raise ValueError("Pass a string, or None")
self._properties['friendlyName'] = value
@property
def location(self):
"""Location in which the dataset is hosted.
:rtype: string, or ``NoneType``
:returns: The location as set by the user, or None (the default).
"""
return self._properties.get('location')
@location.setter
def location(self, value):
"""Update location in which the dataset is hosted.
:type value: string, or ``NoneType``
:param value: new location
:raises: ValueError for invalid value types.
"""
if not isinstance(value, six.string_types) and value is not None:
raise ValueError("Pass a string, or None")
self._properties['location'] = value
@classmethod
def from_api_repr(cls, resource, client):
"""Factory: construct a dataset given its API representation
:type resource: dict
:param resource: dataset resource representation returned from the API
:type client: :class:`gcloud.bigquery.client.Client`
:param client: Client which holds credentials and project
configuration for the dataset.
:rtype: :class:`gcloud.bigquery.dataset.Dataset`
:returns: Dataset parsed from ``resource``.
"""
if ('datasetReference' not in resource or
'datasetId' not in resource['datasetReference']):
raise KeyError('Resource lacks required identity information:'
'["datasetReference"]["datasetId"]')
name = resource['datasetReference']['datasetId']
dataset = cls(name, client=client)
dataset._set_properties(resource)
return dataset
def _require_client(self, client):
"""Check client or verify over-ride.
:type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current dataset.
:rtype: :class:`gcloud.bigquery.client.Client`
:returns: The client passed in or the currently bound client.
"""
if client is None:
client = self._client
return client
@staticmethod
def _parse_access_grants(access):
"""Parse a resource fragment into a set of access grants.
``role`` augments the entity type and is present **unless** the entity
type is ``view``.
:type access: list of mappings
:param access: each mapping represents a single access grant
:rtype: list of :class:`AccessGrant`
:returns: a list of parsed grants
:raises: :class:`ValueError` if a grant in ``access`` has more keys
than ``role`` and one additional key.
"""
result = []
for grant in access:
grant = grant.copy()
role = grant.pop('role', None)
entity_type, entity_id = grant.popitem()
if len(grant) != 0:
raise ValueError('Grant has unexpected keys remaining.', grant)
result.append(
AccessGrant(role, entity_type, entity_id))
return result
def _set_properties(self, api_response):
"""Update properties from resource in body of ``api_response``
:type api_response: httplib2.Response
:param api_response: response returned from an API call
"""
self._properties.clear()
cleaned = api_response.copy()
access = cleaned.pop('access', ())
self.access_grants = self._parse_access_grants(access)
if 'creationTime' in cleaned:
cleaned['creationTime'] = float(cleaned['creationTime'])
if 'lastModifiedTime' in cleaned:
cleaned['lastModifiedTime'] = float(cleaned['lastModifiedTime'])
if 'defaultTableExpirationMs' in cleaned:
cleaned['defaultTableExpirationMs'] = int(
cleaned['defaultTableExpirationMs'])
self._properties.update(cleaned)
def _build_access_resource(self):
"""Generate a resource fragment for dataset's access grants."""
result = []
for grant in self.access_grants:
info = {grant.entity_type: grant.entity_id}
if grant.role is not None:
info['role'] = grant.role
result.append(info)
return result
def _build_resource(self):
"""Generate a resource for ``create`` or ``update``."""
resource = {
'datasetReference': {
'projectId': self.project, 'datasetId': self.name},
}
if self.default_table_expiration_ms is not None:
value = self.default_table_expiration_ms
resource['defaultTableExpirationMs'] = value
if self.description is not None:
resource['description'] = self.description
if self.friendly_name is not None:
resource['friendlyName'] = self.friendly_name
if self.location is not None:
resource['location'] = self.location
if len(self.access_grants) > 0:
resource['access'] = self._build_access_resource()
return resource
def create(self, client=None):
"""API call: create the dataset via a PUT request
See:
https://cloud.google.com/bigquery/docs/reference/v2/datasets/insert
:type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current dataset.
"""
client = self._require_client(client)
path = '/projects/%s/datasets' % (self.project,)
api_response = client.connection.api_request(
method='POST', path=path, data=self._build_resource())
self._set_properties(api_response)
def exists(self, client=None):
"""API call: test for the existence of the dataset via a GET request
See
https://cloud.google.com/bigquery/docs/reference/v2/datasets/get
:type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current dataset.
"""
client = self._require_client(client)
try:
client.connection.api_request(method='GET', path=self.path,
query_params={'fields': 'id'})
except NotFound:
return False
else:
return True
def reload(self, client=None):
"""API call: refresh dataset properties via a GET request
See
https://cloud.google.com/bigquery/docs/reference/v2/datasets/get
:type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current dataset.
"""
client = self._require_client(client)
api_response = client.connection.api_request(
method='GET', path=self.path)
self._set_properties(api_response)
def patch(self, client=None, **kw):
"""API call: update individual dataset properties via a PATCH request
See
https://cloud.google.com/bigquery/docs/reference/v2/datasets/patch
:type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current dataset.
:type kw: ``dict``
:param kw: properties to be patched.
:raises: ValueError for invalid value types.
"""
client = self._require_client(client)
partial = {}
if 'default_table_expiration_ms' in kw:
value = kw['default_table_expiration_ms']
if not isinstance(value, six.integer_types) and value is not None:
raise ValueError("Pass an integer, or None")
partial['defaultTableExpirationMs'] = value
if 'description' in kw:
partial['description'] = kw['description']
if 'friendly_name' in kw:
partial['friendlyName'] = kw['friendly_name']
if 'location' in kw:
partial['location'] = kw['location']
api_response = client.connection.api_request(
method='PATCH', path=self.path, data=partial)
self._set_properties(api_response)
def update(self, client=None):
"""API call: update dataset properties via a PUT request
See
https://cloud.google.com/bigquery/docs/reference/v2/datasets/update
:type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current dataset.
"""
client = self._require_client(client)
api_response = client.connection.api_request(
method='PUT', path=self.path, data=self._build_resource())
self._set_properties(api_response)
def delete(self, client=None):
"""API call: delete the dataset via a DELETE request
See:
https://cloud.google.com/bigquery/docs/reference/v2/datasets/delete
:type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current dataset.
"""
client = self._require_client(client)
client.connection.api_request(method='DELETE', path=self.path)
def list_tables(self, max_results=None, page_token=None):
"""List tables for the project associated with this client.
See:
https://cloud.google.com/bigquery/docs/reference/v2/tables/list
:type max_results: int
:param max_results: maximum number of tables to return. If not
passed, defaults to a value set by the API.
:type page_token: string
:param page_token: opaque marker for the next "page" of tables. If
not passed, the API will return the first page of
tables.
:rtype: tuple, (list, str)
:returns: list of :class:`gcloud.bigquery.table.Table`, plus a
"next page token" string: if not ``None``, indicates that
more tables can be retrieved with another call (pass that
value as ``page_token``).
"""
params = {}
if max_results is not None:
params['maxResults'] = max_results
if page_token is not None:
params['pageToken'] = page_token
path = '/projects/%s/datasets/%s/tables' % (self.project, self.name)
connection = self._client.connection
resp = connection.api_request(method='GET', path=path,
query_params=params)
tables = [Table.from_api_repr(resource, self)
for resource in resp.get('tables', ())]
return tables, resp.get('nextPageToken')
def table(self, name, schema=()):
"""Construct a table bound to this dataset.
:type name: string
:param name: Name of the table.
:type schema: list of :class:`gcloud.bigquery.table.SchemaField`
:param schema: The table's schema
:rtype: :class:`gcloud.bigquery.table.Table`
:returns: a new ``Table`` instance
"""
return Table(name, dataset=self, schema=schema)
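
A lifecycle sketch for the ``Dataset`` methods above (all names are placeholders, and the client is assumed to carry valid credentials):

from gcloud import bigquery
from gcloud.bigquery.dataset import AccessGrant

client = bigquery.Client(project='my-project')   # hypothetical project
dataset = client.dataset('my_dataset')
dataset.friendly_name = 'My dataset'
dataset.access_grants = [
    AccessGrant('OWNER', 'userByEmail', 'phred@example.com')]
dataset.create()                 # POST /projects/my-project/datasets
dataset.patch(description='nightly rollups')
tables, token = dataset.list_tables()
dataset.delete()                 # DELETE on dataset.path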

File diff suppressed because it is too large.


@@ -0,0 +1,349 @@
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Define API Queries."""
import six
from gcloud.bigquery._helpers import _TypedProperty
from gcloud.bigquery._helpers import _rows_from_json
from gcloud.bigquery.dataset import Dataset
from gcloud.bigquery.job import QueryJob
from gcloud.bigquery.table import _parse_schema_resource
class _SyncQueryConfiguration(object):
"""User-settable configuration options for synchronous query jobs.
Values which are ``None`` -> server defaults.
"""
_default_dataset = None
_dry_run = None
_max_results = None
_timeout_ms = None
_preserve_nulls = None
_use_query_cache = None
class QueryResults(object):
"""Synchronous job: query tables.
:type query: string
:param query: SQL query string
:type client: :class:`gcloud.bigquery.client.Client`
:param client: A client which holds credentials and project configuration
for the dataset (which requires a project).
"""
def __init__(self, query, client):
self._client = client
self._properties = {}
self.query = query
self._configuration = _SyncQueryConfiguration()
self._job = None
@property
def project(self):
"""Project bound to the job.
:rtype: string
:returns: the project (derived from the client).
"""
return self._client.project
def _require_client(self, client):
"""Check client or verify over-ride.
:type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current dataset.
:rtype: :class:`gcloud.bigquery.client.Client`
:returns: The client passed in or the currently bound client.
"""
if client is None:
client = self._client
return client
@property
def cache_hit(self):
"""Query results served from cache.
See:
https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#cacheHit
:rtype: boolean or ``NoneType``
:returns: True if the query results were served from cache (None
until set by the server).
"""
return self._properties.get('cacheHit')
@property
def complete(self):
"""Server completed query.
See:
https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#jobComplete
:rtype: boolean or ``NoneType``
:returns: True if the query completed on the server (None
until set by the server).
"""
return self._properties.get('jobComplete')
@property
def errors(self):
"""Errors generated by the query.
See:
https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#errors
:rtype: list of mapping, or ``NoneType``
:returns: Mappings describing errors generated on the server (None
until set by the server).
"""
return self._properties.get('errors')
@property
def name(self):
"""Job name, generated by the back-end.
See:
https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#jobReference
:rtype: string, or ``NoneType``
:returns: the job name (None until set by the server).
"""
return self._properties.get('jobReference', {}).get('jobId')
@property
def job(self):
"""Job instance used to run the query.
:rtype: :class:`gcloud.bigquery.job.QueryJob`, or ``NoneType``
:returns: Job instance used to run the query (None until
``jobReference`` property is set by the server).
"""
if self._job is None:
job_ref = self._properties.get('jobReference')
if job_ref is not None:
self._job = QueryJob(job_ref['jobId'], self.query,
self._client)
return self._job
@property
def page_token(self):
"""Token for fetching next bach of results.
See:
https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#pageToken
:rtype: string, or ``NoneType``
:returns: Token generated on the server (None until set by the server).
"""
return self._properties.get('pageToken')
@property
def total_rows(self):
"""Total number of rows returned by the query
See:
https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#totalRows
:rtype: integer, or ``NoneType``
:returns: Count generated on the server (None until set by the server).
"""
return self._properties.get('totalRows')
@property
def total_bytes_processed(self):
"""Total number of bytes processed by the query
See:
https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#totalBytesProcessed
:rtype: integer, or ``NoneType``
:returns: Count generated on the server (None until set by the server).
"""
return self._properties.get('totalBytesProcessed')
@property
def rows(self):
"""Query results.
See:
https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#rows
:rtype: list of tuples of row values
:returns: the query result rows (an empty list until set by the server).
"""
return _rows_from_json(self._properties.get('rows', ()), self.schema)
@property
def schema(self):
"""Schema for query results.
See:
https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#schema
:rtype: list of :class:`SchemaField`, or ``NoneType``
:returns: fields describing the schema (None until set by the server).
"""
return _parse_schema_resource(self._properties.get('schema', {}))
default_dataset = _TypedProperty('default_dataset', Dataset)
"""See:
https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#defaultDataset
"""
dry_run = _TypedProperty('dry_run', bool)
"""See:
https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#dryRun
"""
max_results = _TypedProperty('max_results', six.integer_types)
"""See:
https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#maxResults
"""
preserve_nulls = _TypedProperty('preserve_nulls', bool)
"""See:
https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#preserveNulls
"""
timeout_ms = _TypedProperty('timeout_ms', six.integer_types)
"""See:
https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#timeoutMs
"""
use_query_cache = _TypedProperty('use_query_cache', bool)
"""See:
https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#useQueryCache
"""
def _set_properties(self, api_response):
"""Update properties from resource in body of ``api_response``
:type api_response: httplib2.Response
:param api_response: response returned from an API call
"""
self._properties.clear()
self._properties.update(api_response)
def _build_resource(self):
"""Generate a resource for :meth:`begin`."""
resource = {'query': self.query}
if self.default_dataset is not None:
resource['defaultDataset'] = {
'projectId': self.project,
'datasetId': self.default_dataset.name,
}
if self.max_results is not None:
resource['maxResults'] = self.max_results
if self.preserve_nulls is not None:
resource['preserveNulls'] = self.preserve_nulls
if self.timeout_ms is not None:
resource['timeoutMs'] = self.timeout_ms
if self.use_query_cache is not None:
resource['useQueryCache'] = self.use_query_cache
if self.dry_run is not None:
resource['dryRun'] = self.dry_run
return resource
def run(self, client=None):
"""API call: run the query via a POST request
See:
https://cloud.google.com/bigquery/docs/reference/v2/jobs/query
:type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current dataset.
"""
client = self._require_client(client)
path = '/projects/%s/queries' % (self.project,)
api_response = client.connection.api_request(
method='POST', path=path, data=self._build_resource())
self._set_properties(api_response)
def fetch_data(self, max_results=None, page_token=None, start_index=None,
timeout_ms=None, client=None):
"""API call: fetch a page of query result data via a GET request
See:
https://cloud.google.com/bigquery/docs/reference/v2/jobs/getQueryResults
:type max_results: integer or ``NoneType``
:param max_results: maximum number of rows to return.
:type page_token: string or ``NoneType``
:param page_token: token representing a cursor into the table's rows.
:type start_index: integer or ``NoneType``
:param start_index: zero-based index of starting row
:type timeout_ms: integer or ``NoneType``
:param timeout_ms: timeout, in milliseconds, to wait for query to
complete
:type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current dataset.
:rtype: tuple
:returns: ``(row_data, total_rows, page_token)``, where ``row_data``
is a list of tuples, one per result row, containing only
the values; ``total_rows`` is a count of the total number
of rows in the table; and ``page_token`` is an opaque
string which can be used to fetch the next batch of rows
(``None`` if no further batches can be fetched).
:raises: ValueError if the query has not yet been executed.
"""
if self.name is None:
raise ValueError("Query not yet executed: call 'run()'")
client = self._require_client(client)
params = {}
if max_results is not None:
params['maxResults'] = max_results
if page_token is not None:
params['pageToken'] = page_token
if start_index is not None:
params['startIndex'] = start_index
if timeout_ms is not None:
params['timeoutMs'] = timeout_ms
path = '/projects/%s/queries/%s' % (self.project, self.name)
response = client.connection.api_request(method='GET',
path=path,
query_params=params)
self._set_properties(response)
total_rows = response.get('totalRows')
page_token = response.get('pageToken')
rows_data = _rows_from_json(response.get('rows', ()), self.schema)
return rows_data, total_rows, page_token
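
A sketch of the synchronous-query flow defined above (the project name and query text are illustrative):

from gcloud import bigquery

client = bigquery.Client(project='my-project')   # hypothetical project
query = client.run_sync_query('SELECT name, age FROM my_dataset.people')
query.timeout_ms = 10000      # validated by the _TypedProperty descriptors
query.run()                   # POST /projects/my-project/queries
if query.complete:
    rows = query.rows
else:
    # Poll jobs.getQueryResults until the server reports completion.
    rows, total_rows, token = query.fetch_data()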

File diff suppressed because it is too large.


@@ -0,0 +1,116 @@
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
class Test_ConfigurationProperty(unittest2.TestCase):
def _getTargetClass(self):
from gcloud.bigquery._helpers import _ConfigurationProperty
return _ConfigurationProperty
def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)
def test_it(self):
class Configuration(object):
_attr = None
class Wrapper(object):
attr = self._makeOne('attr')
def __init__(self):
self._configuration = Configuration()
self.assertEqual(Wrapper.attr.name, 'attr')
wrapper = Wrapper()
self.assertEqual(wrapper.attr, None)
value = object()
wrapper.attr = value
self.assertTrue(wrapper.attr is value)
self.assertTrue(wrapper._configuration._attr is value)
del wrapper.attr
self.assertEqual(wrapper.attr, None)
self.assertEqual(wrapper._configuration._attr, None)
class Test_TypedProperty(unittest2.TestCase):
def _getTargetClass(self):
from gcloud.bigquery._helpers import _TypedProperty
return _TypedProperty
def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)
def test_it(self):
class Configuration(object):
_attr = None
class Wrapper(object):
attr = self._makeOne('attr', int)
def __init__(self):
self._configuration = Configuration()
wrapper = Wrapper()
with self.assertRaises(ValueError):
wrapper.attr = 'BOGUS'
wrapper.attr = 42
self.assertEqual(wrapper.attr, 42)
self.assertEqual(wrapper._configuration._attr, 42)
del wrapper.attr
self.assertEqual(wrapper.attr, None)
self.assertEqual(wrapper._configuration._attr, None)
class Test_EnumProperty(unittest2.TestCase):
def _getTargetClass(self):
from gcloud.bigquery._helpers import _EnumProperty
return _EnumProperty
def test_it(self):
class Sub(self._getTargetClass()):
ALLOWED = ('FOO', 'BAR', 'BAZ')
class Configuration(object):
_attr = None
class Wrapper(object):
attr = Sub('attr')
def __init__(self):
self._configuration = Configuration()
wrapper = Wrapper()
with self.assertRaises(ValueError):
wrapper.attr = 'BOGUS'
wrapper.attr = 'FOO'
self.assertEqual(wrapper.attr, 'FOO')
self.assertEqual(wrapper._configuration._attr, 'FOO')
del wrapper.attr
self.assertEqual(wrapper.attr, None)
self.assertEqual(wrapper._configuration._attr, None)


@@ -0,0 +1,431 @@
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
class TestClient(unittest2.TestCase):
def _getTargetClass(self):
from gcloud.bigquery.client import Client
return Client
def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)
def test_ctor(self):
from gcloud.bigquery.connection import Connection
PROJECT = 'PROJECT'
creds = _Credentials()
http = object()
client = self._makeOne(project=PROJECT, credentials=creds, http=http)
self.assertTrue(isinstance(client.connection, Connection))
self.assertTrue(client.connection.credentials is creds)
self.assertTrue(client.connection.http is http)
def test_list_datasets_defaults(self):
from gcloud.bigquery.dataset import Dataset
PROJECT = 'PROJECT'
DATASET_1 = 'dataset_one'
DATASET_2 = 'dataset_two'
PATH = 'projects/%s/datasets' % PROJECT
TOKEN = 'TOKEN'
DATA = {
'nextPageToken': TOKEN,
'datasets': [
{'kind': 'bigquery#dataset',
'id': '%s:%s' % (PROJECT, DATASET_1),
'datasetReference': {'datasetId': DATASET_1,
'projectId': PROJECT},
'friendlyName': None},
{'kind': 'bigquery#dataset',
'id': '%s:%s' % (PROJECT, DATASET_2),
'datasetReference': {'datasetId': DATASET_2,
'projectId': PROJECT},
'friendlyName': 'Two'},
]
}
creds = _Credentials()
client = self._makeOne(PROJECT, creds)
conn = client.connection = _Connection(DATA)
datasets, token = client.list_datasets()
self.assertEqual(len(datasets), len(DATA['datasets']))
for found, expected in zip(datasets, DATA['datasets']):
self.assertTrue(isinstance(found, Dataset))
self.assertEqual(found.dataset_id, expected['id'])
self.assertEqual(found.friendly_name, expected['friendlyName'])
self.assertEqual(token, TOKEN)
self.assertEqual(len(conn._requested), 1)
req = conn._requested[0]
self.assertEqual(req['method'], 'GET')
self.assertEqual(req['path'], '/%s' % PATH)
def test_list_datasets_explicit_response_missing_datasets_key(self):
PROJECT = 'PROJECT'
PATH = 'projects/%s/datasets' % PROJECT
TOKEN = 'TOKEN'
DATA = {}
creds = _Credentials()
client = self._makeOne(PROJECT, creds)
conn = client.connection = _Connection(DATA)
datasets, token = client.list_datasets(
include_all=True, max_results=3, page_token=TOKEN)
self.assertEqual(len(datasets), 0)
self.assertEqual(token, None)
self.assertEqual(len(conn._requested), 1)
req = conn._requested[0]
self.assertEqual(req['method'], 'GET')
self.assertEqual(req['path'], '/%s' % PATH)
self.assertEqual(req['query_params'],
{'all': True, 'maxResults': 3, 'pageToken': TOKEN})
def test_dataset(self):
from gcloud.bigquery.dataset import Dataset
PROJECT = 'PROJECT'
DATASET = 'dataset_name'
creds = _Credentials()
http = object()
client = self._makeOne(project=PROJECT, credentials=creds, http=http)
dataset = client.dataset(DATASET)
self.assertTrue(isinstance(dataset, Dataset))
self.assertEqual(dataset.name, DATASET)
self.assertTrue(dataset._client is client)
def test_job_from_resource_unknown_type(self):
PROJECT = 'PROJECT'
creds = _Credentials()
client = self._makeOne(PROJECT, creds)
with self.assertRaises(ValueError):
client.job_from_resource({'configuration': {'nonesuch': {}}})
def test_list_jobs_defaults(self):
from gcloud.bigquery.job import LoadTableFromStorageJob
from gcloud.bigquery.job import CopyJob
from gcloud.bigquery.job import ExtractTableToStorageJob
from gcloud.bigquery.job import QueryJob
PROJECT = 'PROJECT'
DATASET = 'test_dataset'
SOURCE_TABLE = 'source_table'
DESTINATION_TABLE = 'destination_table'
QUERY_DESTINATION_TABLE = 'query_destination_table'
SOURCE_URI = 'gs://test_bucket/src_object*'
DESTINATION_URI = 'gs://test_bucket/dst_object*'
JOB_TYPES = {
'load_job': LoadTableFromStorageJob,
'copy_job': CopyJob,
'extract_job': ExtractTableToStorageJob,
'query_job': QueryJob,
}
PATH = 'projects/%s/jobs' % PROJECT
TOKEN = 'TOKEN'
QUERY = 'SELECT * from test_dataset:test_table'
ASYNC_QUERY_DATA = {
'id': '%s:%s' % (PROJECT, 'query_job'),
'jobReference': {
'projectId': PROJECT,
'jobId': 'query_job',
},
'state': 'DONE',
'configuration': {
'query': {
'query': QUERY,
'destinationTable': {
'projectId': PROJECT,
'datasetId': DATASET,
'tableId': QUERY_DESTINATION_TABLE,
},
'createDisposition': 'CREATE_IF_NEEDED',
'writeDisposition': 'WRITE_TRUNCATE',
}
},
}
EXTRACT_DATA = {
'id': '%s:%s' % (PROJECT, 'extract_job'),
'jobReference': {
'projectId': PROJECT,
'jobId': 'extract_job',
},
'state': 'DONE',
'configuration': {
'extract': {
'sourceTable': {
'projectId': PROJECT,
'datasetId': DATASET,
'tableId': SOURCE_TABLE,
},
'destinationUris': [DESTINATION_URI],
}
},
}
COPY_DATA = {
'id': '%s:%s' % (PROJECT, 'copy_job'),
'jobReference': {
'projectId': PROJECT,
'jobId': 'copy_job',
},
'state': 'DONE',
'configuration': {
'copy': {
'sourceTables': [{
'projectId': PROJECT,
'datasetId': DATASET,
'tableId': SOURCE_TABLE,
}],
'destinationTable': {
'projectId': PROJECT,
'datasetId': DATASET,
'tableId': DESTINATION_TABLE,
},
}
},
}
LOAD_DATA = {
'id': '%s:%s' % (PROJECT, 'load_job'),
'jobReference': {
'projectId': PROJECT,
'jobId': 'load_job',
},
'state': 'DONE',
'configuration': {
'load': {
'destinationTable': {
'projectId': PROJECT,
'datasetId': DATASET,
'tableId': SOURCE_TABLE,
},
'sourceUris': [SOURCE_URI],
}
},
}
DATA = {
'nextPageToken': TOKEN,
'jobs': [
ASYNC_QUERY_DATA,
EXTRACT_DATA,
COPY_DATA,
LOAD_DATA,
]
}
creds = _Credentials()
client = self._makeOne(PROJECT, creds)
conn = client.connection = _Connection(DATA)
jobs, token = client.list_jobs()
self.assertEqual(len(jobs), len(DATA['jobs']))
for found, expected in zip(jobs, DATA['jobs']):
name = expected['jobReference']['jobId']
self.assertTrue(isinstance(found, JOB_TYPES[name]))
self.assertEqual(found.name, name)
self.assertEqual(token, TOKEN)
self.assertEqual(len(conn._requested), 1)
req = conn._requested[0]
self.assertEqual(req['method'], 'GET')
self.assertEqual(req['path'], '/%s' % PATH)
self.assertEqual(req['query_params'], {'projection': 'full'})
def test_list_jobs_load_job_wo_sourceUris(self):
from gcloud.bigquery.job import LoadTableFromStorageJob
PROJECT = 'PROJECT'
DATASET = 'test_dataset'
SOURCE_TABLE = 'source_table'
JOB_TYPES = {
'load_job': LoadTableFromStorageJob,
}
PATH = 'projects/%s/jobs' % PROJECT
TOKEN = 'TOKEN'
LOAD_DATA = {
'id': '%s:%s' % (PROJECT, 'load_job'),
'jobReference': {
'projectId': PROJECT,
'jobId': 'load_job',
},
'state': 'DONE',
'configuration': {
'load': {
'destinationTable': {
'projectId': PROJECT,
'datasetId': DATASET,
'tableId': SOURCE_TABLE,
},
}
},
}
DATA = {
'nextPageToken': TOKEN,
'jobs': [
LOAD_DATA,
]
}
creds = _Credentials()
client = self._makeOne(PROJECT, creds)
conn = client.connection = _Connection(DATA)
jobs, token = client.list_jobs()
self.assertEqual(len(jobs), len(DATA['jobs']))
for found, expected in zip(jobs, DATA['jobs']):
name = expected['jobReference']['jobId']
self.assertTrue(isinstance(found, JOB_TYPES[name]))
self.assertEqual(found.name, name)
self.assertEqual(token, TOKEN)
self.assertEqual(len(conn._requested), 1)
req = conn._requested[0]
self.assertEqual(req['method'], 'GET')
self.assertEqual(req['path'], '/%s' % PATH)
self.assertEqual(req['query_params'], {'projection': 'full'})
def test_list_jobs_explicit_empty(self):
PROJECT = 'PROJECT'
PATH = 'projects/%s/jobs' % PROJECT
DATA = {'jobs': []}
TOKEN = 'TOKEN'
creds = _Credentials()
client = self._makeOne(PROJECT, creds)
conn = client.connection = _Connection(DATA)
jobs, token = client.list_jobs(max_results=1000, page_token=TOKEN,
all_users=True, state_filter='done')
self.assertEqual(len(jobs), 0)
self.assertEqual(token, None)
self.assertEqual(len(conn._requested), 1)
req = conn._requested[0]
self.assertEqual(req['method'], 'GET')
self.assertEqual(req['path'], '/%s' % PATH)
self.assertEqual(req['query_params'],
{'projection': 'full',
'maxResults': 1000,
'pageToken': TOKEN,
'allUsers': True,
'stateFilter': 'done'})
def test_load_table_from_storage(self):
from gcloud.bigquery.job import LoadTableFromStorageJob
PROJECT = 'PROJECT'
JOB = 'job_name'
DATASET = 'dataset_name'
DESTINATION = 'destination_table'
SOURCE_URI = 'http://example.com/source.csv'
creds = _Credentials()
http = object()
client = self._makeOne(project=PROJECT, credentials=creds, http=http)
dataset = client.dataset(DATASET)
destination = dataset.table(DESTINATION)
job = client.load_table_from_storage(JOB, destination, SOURCE_URI)
self.assertTrue(isinstance(job, LoadTableFromStorageJob))
self.assertTrue(job._client is client)
self.assertEqual(job.name, JOB)
self.assertEqual(list(job.source_uris), [SOURCE_URI])
self.assertTrue(job.destination is destination)
def test_copy_table(self):
from gcloud.bigquery.job import CopyJob
PROJECT = 'PROJECT'
JOB = 'job_name'
DATASET = 'dataset_name'
SOURCE = 'source_table'
DESTINATION = 'destination_table'
creds = _Credentials()
http = object()
client = self._makeOne(project=PROJECT, credentials=creds, http=http)
dataset = client.dataset(DATASET)
source = dataset.table(SOURCE)
destination = dataset.table(DESTINATION)
job = client.copy_table(JOB, destination, source)
self.assertTrue(isinstance(job, CopyJob))
self.assertTrue(job._client is client)
self.assertEqual(job.name, JOB)
self.assertEqual(list(job.sources), [source])
self.assertTrue(job.destination is destination)
def test_extract_table_to_storage(self):
from gcloud.bigquery.job import ExtractTableToStorageJob
PROJECT = 'PROJECT'
JOB = 'job_name'
DATASET = 'dataset_name'
SOURCE = 'source_table'
DESTINATION = 'gs://bucket_name/object_name'
creds = _Credentials()
http = object()
client = self._makeOne(project=PROJECT, credentials=creds, http=http)
dataset = client.dataset(DATASET)
source = dataset.table(SOURCE)
job = client.extract_table_to_storage(JOB, source, DESTINATION)
self.assertTrue(isinstance(job, ExtractTableToStorageJob))
self.assertTrue(job._client is client)
self.assertEqual(job.name, JOB)
self.assertEqual(job.source, source)
self.assertEqual(list(job.destination_uris), [DESTINATION])
def test_run_async_query(self):
from gcloud.bigquery.job import QueryJob
PROJECT = 'PROJECT'
JOB = 'job_name'
QUERY = 'select count(*) from persons'
creds = _Credentials()
http = object()
client = self._makeOne(project=PROJECT, credentials=creds, http=http)
job = client.run_async_query(JOB, QUERY)
self.assertTrue(isinstance(job, QueryJob))
self.assertTrue(job._client is client)
self.assertEqual(job.name, JOB)
self.assertEqual(job.query, QUERY)
def test_run_sync_query(self):
from gcloud.bigquery.query import QueryResults
PROJECT = 'PROJECT'
QUERY = 'select count(*) from persons'
creds = _Credentials()
http = object()
client = self._makeOne(project=PROJECT, credentials=creds, http=http)
job = client.run_sync_query(QUERY)
self.assertTrue(isinstance(job, QueryResults))
self.assertTrue(job._client is client)
self.assertEqual(job.name, None)
self.assertEqual(job.query, QUERY)
class _Credentials(object):
_scopes = None
@staticmethod
def create_scoped_required():
return True
def create_scoped(self, scope):
self._scopes = scope
return self
class _Connection(object):
def __init__(self, *responses):
self._responses = responses
self._requested = []
def api_request(self, **kw):
self._requested.append(kw)
response, self._responses = self._responses[0], self._responses[1:]
return response


@ -0,0 +1,47 @@
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
class TestConnection(unittest2.TestCase):
def _getTargetClass(self):
from gcloud.bigquery.connection import Connection
return Connection
def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)
def test_build_api_url_no_extra_query_params(self):
conn = self._makeOne()
URI = '/'.join([
conn.API_BASE_URL,
'bigquery',
conn.API_VERSION,
'foo',
])
self.assertEqual(conn.build_api_url('/foo'), URI)
def test_build_api_url_w_extra_query_params(self):
from six.moves.urllib.parse import parse_qsl
from six.moves.urllib.parse import urlsplit
conn = self._makeOne()
uri = conn.build_api_url('/foo', {'bar': 'baz'})
scheme, netloc, path, qs, _ = urlsplit(uri)
self.assertEqual('%s://%s' % (scheme, netloc), conn.API_BASE_URL)
self.assertEqual(path,
'/'.join(['', 'bigquery', conn.API_VERSION, 'foo']))
parms = dict(parse_qsl(qs))
self.assertEqual(parms['bar'], 'baz')

View file

@ -0,0 +1,788 @@
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
class TestAccessGrant(unittest2.TestCase):
def _getTargetClass(self):
from gcloud.bigquery.dataset import AccessGrant
return AccessGrant
def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)
def test_ctor_defaults(self):
grant = self._makeOne('OWNER', 'userByEmail', 'phred@example.com')
self.assertEqual(grant.role, 'OWNER')
self.assertEqual(grant.entity_type, 'userByEmail')
self.assertEqual(grant.entity_id, 'phred@example.com')
def test_ctor_bad_entity_type(self):
with self.assertRaises(ValueError):
self._makeOne(None, 'unknown', None)
def test_ctor_view_with_role(self):
role = 'READER'
entity_type = 'view'
with self.assertRaises(ValueError):
self._makeOne(role, entity_type, None)
def test_ctor_view_success(self):
role = None
entity_type = 'view'
entity_id = object()
grant = self._makeOne(role, entity_type, entity_id)
self.assertEqual(grant.role, role)
self.assertEqual(grant.entity_type, entity_type)
self.assertEqual(grant.entity_id, entity_id)
def test_ctor_nonview_without_role(self):
role = None
entity_type = 'userByEmail'
with self.assertRaises(ValueError):
self._makeOne(role, entity_type, None)
def test___eq___role_mismatch(self):
grant = self._makeOne('OWNER', 'userByEmail', 'phred@example.com')
other = self._makeOne('WRITER', 'userByEmail', 'phred@example.com')
self.assertNotEqual(grant, other)
def test___eq___entity_type_mismatch(self):
grant = self._makeOne('OWNER', 'userByEmail', 'phred@example.com')
other = self._makeOne('OWNER', 'groupByEmail', 'phred@example.com')
self.assertNotEqual(grant, other)
def test___eq___entity_id_mismatch(self):
grant = self._makeOne('OWNER', 'userByEmail', 'phred@example.com')
other = self._makeOne('OWNER', 'userByEmail', 'bharney@example.com')
self.assertNotEqual(grant, other)
def test___eq___hit(self):
grant = self._makeOne('OWNER', 'userByEmail', 'phred@example.com')
other = self._makeOne('OWNER', 'userByEmail', 'phred@example.com')
self.assertEqual(grant, other)
class TestDataset(unittest2.TestCase):
PROJECT = 'project'
DS_NAME = 'dataset-name'
def _getTargetClass(self):
from gcloud.bigquery.dataset import Dataset
return Dataset
def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)
def _setUpConstants(self):
import datetime
from gcloud._helpers import UTC
self.WHEN_TS = 1437767599.006
self.WHEN = datetime.datetime.utcfromtimestamp(self.WHEN_TS).replace(
tzinfo=UTC)
self.ETAG = 'ETAG'
self.DS_ID = '%s:%s' % (self.PROJECT, self.DS_NAME)
self.RESOURCE_URL = 'http://example.com/path/to/resource'
def _makeResource(self):
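        # Canned datasets.get-style resource, including a representative
        # 'access' list covering user, group, and special-group grants.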
self._setUpConstants()
USER_EMAIL = 'phred@example.com'
GROUP_EMAIL = 'group-name@lists.example.com'
return {
'creationTime': self.WHEN_TS * 1000,
'datasetReference':
{'projectId': self.PROJECT, 'datasetId': self.DS_NAME},
'etag': self.ETAG,
'id': self.DS_ID,
'lastModifiedTime': self.WHEN_TS * 1000,
'location': 'US',
'selfLink': self.RESOURCE_URL,
'access': [
{'role': 'OWNER', 'userByEmail': USER_EMAIL},
{'role': 'OWNER', 'groupByEmail': GROUP_EMAIL},
{'role': 'WRITER', 'specialGroup': 'projectWriters'},
{'role': 'READER', 'specialGroup': 'projectReaders'}],
}
def _verifyAccessGrants(self, access_grants, resource):
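        # Flatten each raw 'access' entry into {role, entity_type, entity_id}
        # dicts so grants can be compared positionally against the parsed ones.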
r_grants = []
for r_grant in resource['access']:
role = r_grant.pop('role')
for entity_type, entity_id in sorted(r_grant.items()):
r_grants.append({'role': role,
'entity_type': entity_type,
'entity_id': entity_id})
self.assertEqual(len(access_grants), len(r_grants))
for a_grant, r_grant in zip(access_grants, r_grants):
self.assertEqual(a_grant.role, r_grant['role'])
self.assertEqual(a_grant.entity_type, r_grant['entity_type'])
self.assertEqual(a_grant.entity_id, r_grant['entity_id'])
def _verifyReadonlyResourceProperties(self, dataset, resource):
self.assertEqual(dataset.dataset_id, self.DS_ID)
if 'creationTime' in resource:
self.assertEqual(dataset.created, self.WHEN)
else:
self.assertEqual(dataset.created, None)
if 'etag' in resource:
self.assertEqual(dataset.etag, self.ETAG)
else:
self.assertEqual(dataset.etag, None)
if 'lastModifiedTime' in resource:
self.assertEqual(dataset.modified, self.WHEN)
else:
self.assertEqual(dataset.modified, None)
if 'selfLink' in resource:
self.assertEqual(dataset.self_link, self.RESOURCE_URL)
else:
self.assertEqual(dataset.self_link, None)
def _verifyResourceProperties(self, dataset, resource):
self._verifyReadonlyResourceProperties(dataset, resource)
if 'defaultTableExpirationMs' in resource:
self.assertEqual(dataset.default_table_expiration_ms,
int(resource.get('defaultTableExpirationMs')))
else:
self.assertEqual(dataset.default_table_expiration_ms, None)
self.assertEqual(dataset.description, resource.get('description'))
self.assertEqual(dataset.friendly_name, resource.get('friendlyName'))
self.assertEqual(dataset.location, resource.get('location'))
if 'access' in resource:
self._verifyAccessGrants(dataset.access_grants, resource)
else:
self.assertEqual(dataset.access_grants, [])
def test_ctor(self):
client = _Client(self.PROJECT)
dataset = self._makeOne(self.DS_NAME, client)
self.assertEqual(dataset.name, self.DS_NAME)
self.assertTrue(dataset._client is client)
self.assertEqual(dataset.project, client.project)
self.assertEqual(
dataset.path,
'/projects/%s/datasets/%s' % (self.PROJECT, self.DS_NAME))
self.assertEqual(dataset.access_grants, [])
self.assertEqual(dataset.created, None)
self.assertEqual(dataset.dataset_id, None)
self.assertEqual(dataset.etag, None)
self.assertEqual(dataset.modified, None)
self.assertEqual(dataset.self_link, None)
self.assertEqual(dataset.default_table_expiration_ms, None)
self.assertEqual(dataset.description, None)
self.assertEqual(dataset.friendly_name, None)
self.assertEqual(dataset.location, None)
    def test_access_grants_setter_non_list(self):
client = _Client(self.PROJECT)
dataset = self._makeOne(self.DS_NAME, client)
with self.assertRaises(TypeError):
dataset.access_grants = object()
    def test_access_grants_setter_invalid_field(self):
from gcloud.bigquery.dataset import AccessGrant
client = _Client(self.PROJECT)
dataset = self._makeOne(self.DS_NAME, client)
phred = AccessGrant('OWNER', 'userByEmail', 'phred@example.com')
with self.assertRaises(ValueError):
dataset.access_grants = [phred, object()]
    def test_access_grants_setter(self):
from gcloud.bigquery.dataset import AccessGrant
client = _Client(self.PROJECT)
dataset = self._makeOne(self.DS_NAME, client)
phred = AccessGrant('OWNER', 'userByEmail', 'phred@example.com')
bharney = AccessGrant('OWNER', 'userByEmail', 'bharney@example.com')
dataset.access_grants = [phred, bharney]
self.assertEqual(dataset.access_grants, [phred, bharney])
def test_default_table_expiration_ms_setter_bad_value(self):
client = _Client(self.PROJECT)
dataset = self._makeOne(self.DS_NAME, client)
with self.assertRaises(ValueError):
dataset.default_table_expiration_ms = 'bogus'
def test_default_table_expiration_ms_setter(self):
client = _Client(self.PROJECT)
dataset = self._makeOne(self.DS_NAME, client)
dataset.default_table_expiration_ms = 12345
self.assertEqual(dataset.default_table_expiration_ms, 12345)
def test_description_setter_bad_value(self):
client = _Client(self.PROJECT)
dataset = self._makeOne(self.DS_NAME, client)
with self.assertRaises(ValueError):
dataset.description = 12345
def test_description_setter(self):
client = _Client(self.PROJECT)
dataset = self._makeOne(self.DS_NAME, client)
dataset.description = 'DESCRIPTION'
self.assertEqual(dataset.description, 'DESCRIPTION')
def test_friendly_name_setter_bad_value(self):
client = _Client(self.PROJECT)
dataset = self._makeOne(self.DS_NAME, client)
with self.assertRaises(ValueError):
dataset.friendly_name = 12345
def test_friendly_name_setter(self):
client = _Client(self.PROJECT)
dataset = self._makeOne(self.DS_NAME, client)
dataset.friendly_name = 'FRIENDLY'
self.assertEqual(dataset.friendly_name, 'FRIENDLY')
def test_location_setter_bad_value(self):
client = _Client(self.PROJECT)
dataset = self._makeOne(self.DS_NAME, client)
with self.assertRaises(ValueError):
dataset.location = 12345
def test_location_setter(self):
client = _Client(self.PROJECT)
dataset = self._makeOne(self.DS_NAME, client)
dataset.location = 'LOCATION'
self.assertEqual(dataset.location, 'LOCATION')
def test_from_api_repr_missing_identity(self):
self._setUpConstants()
client = _Client(self.PROJECT)
RESOURCE = {}
klass = self._getTargetClass()
with self.assertRaises(KeyError):
klass.from_api_repr(RESOURCE, client=client)
def test_from_api_repr_bare(self):
self._setUpConstants()
client = _Client(self.PROJECT)
RESOURCE = {
'id': '%s:%s' % (self.PROJECT, self.DS_NAME),
'datasetReference': {
'projectId': self.PROJECT,
'datasetId': self.DS_NAME,
}
}
klass = self._getTargetClass()
dataset = klass.from_api_repr(RESOURCE, client=client)
self.assertTrue(dataset._client is client)
self._verifyResourceProperties(dataset, RESOURCE)
def test_from_api_repr_w_properties(self):
client = _Client(self.PROJECT)
RESOURCE = self._makeResource()
klass = self._getTargetClass()
dataset = klass.from_api_repr(RESOURCE, client=client)
self.assertTrue(dataset._client is client)
self._verifyResourceProperties(dataset, RESOURCE)
def test__parse_access_grants_w_unknown_entity_type(self):
ACCESS = [
{'role': 'READER', 'unknown': 'UNKNOWN'},
]
client = _Client(self.PROJECT)
dataset = self._makeOne(self.DS_NAME, client=client)
with self.assertRaises(ValueError):
dataset._parse_access_grants(ACCESS)
def test__parse_access_grants_w_extra_keys(self):
USER_EMAIL = 'phred@example.com'
ACCESS = [
{
'role': 'READER',
'specialGroup': 'projectReaders',
'userByEmail': USER_EMAIL,
},
]
client = _Client(self.PROJECT)
dataset = self._makeOne(self.DS_NAME, client=client)
with self.assertRaises(ValueError):
dataset._parse_access_grants(ACCESS)
def test_create_w_bound_client(self):
PATH = 'projects/%s/datasets' % self.PROJECT
RESOURCE = self._makeResource()
conn = _Connection(RESOURCE)
client = _Client(project=self.PROJECT, connection=conn)
dataset = self._makeOne(self.DS_NAME, client=client)
dataset.create()
self.assertEqual(len(conn._requested), 1)
req = conn._requested[0]
self.assertEqual(req['method'], 'POST')
self.assertEqual(req['path'], '/%s' % PATH)
SENT = {
'datasetReference':
{'projectId': self.PROJECT, 'datasetId': self.DS_NAME},
}
self.assertEqual(req['data'], SENT)
self._verifyResourceProperties(dataset, RESOURCE)
def test_create_w_alternate_client(self):
from gcloud.bigquery.dataset import AccessGrant
PATH = 'projects/%s/datasets' % self.PROJECT
USER_EMAIL = 'phred@example.com'
GROUP_EMAIL = 'group-name@lists.example.com'
DESCRIPTION = 'DESCRIPTION'
TITLE = 'TITLE'
RESOURCE = self._makeResource()
RESOURCE['description'] = DESCRIPTION
RESOURCE['friendlyName'] = TITLE
conn1 = _Connection()
CLIENT1 = _Client(project=self.PROJECT, connection=conn1)
conn2 = _Connection(RESOURCE)
CLIENT2 = _Client(project=self.PROJECT, connection=conn2)
dataset = self._makeOne(self.DS_NAME, client=CLIENT1)
dataset.friendly_name = TITLE
dataset.description = DESCRIPTION
VIEW = {
'projectId': 'my-proj',
'datasetId': 'starry-skies',
'tableId': 'northern-hemisphere',
}
dataset.access_grants = [
AccessGrant('OWNER', 'userByEmail', USER_EMAIL),
AccessGrant('OWNER', 'groupByEmail', GROUP_EMAIL),
AccessGrant('READER', 'domain', 'foo.com'),
AccessGrant('READER', 'specialGroup', 'projectReaders'),
AccessGrant('WRITER', 'specialGroup', 'projectWriters'),
AccessGrant(None, 'view', VIEW),
]
dataset.create(client=CLIENT2)
self.assertEqual(len(conn1._requested), 0)
self.assertEqual(len(conn2._requested), 1)
req = conn2._requested[0]
self.assertEqual(req['method'], 'POST')
self.assertEqual(req['path'], '/%s' % PATH)
SENT = {
'datasetReference': {
'projectId': self.PROJECT,
'datasetId': self.DS_NAME,
},
'description': DESCRIPTION,
'friendlyName': TITLE,
'access': [
{'role': 'OWNER', 'userByEmail': USER_EMAIL},
{'role': 'OWNER', 'groupByEmail': GROUP_EMAIL},
{'role': 'READER', 'domain': 'foo.com'},
{'role': 'READER', 'specialGroup': 'projectReaders'},
{'role': 'WRITER', 'specialGroup': 'projectWriters'},
{'view': VIEW},
],
}
self.assertEqual(req['data'], SENT)
self._verifyResourceProperties(dataset, RESOURCE)
def test_create_w_missing_output_properties(self):
# In the wild, the resource returned from 'dataset.create' sometimes
# lacks 'creationTime' / 'lastModifiedTime'
PATH = 'projects/%s/datasets' % (self.PROJECT,)
RESOURCE = self._makeResource()
del RESOURCE['creationTime']
del RESOURCE['lastModifiedTime']
self.WHEN = None
conn = _Connection(RESOURCE)
client = _Client(project=self.PROJECT, connection=conn)
dataset = self._makeOne(self.DS_NAME, client=client)
dataset.create()
self.assertEqual(len(conn._requested), 1)
req = conn._requested[0]
self.assertEqual(req['method'], 'POST')
self.assertEqual(req['path'], '/%s' % PATH)
SENT = {
'datasetReference':
{'projectId': self.PROJECT, 'datasetId': self.DS_NAME},
}
self.assertEqual(req['data'], SENT)
self._verifyResourceProperties(dataset, RESOURCE)
def test_exists_miss_w_bound_client(self):
PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_NAME)
conn = _Connection()
client = _Client(project=self.PROJECT, connection=conn)
dataset = self._makeOne(self.DS_NAME, client=client)
self.assertFalse(dataset.exists())
self.assertEqual(len(conn._requested), 1)
req = conn._requested[0]
self.assertEqual(req['method'], 'GET')
self.assertEqual(req['path'], '/%s' % PATH)
self.assertEqual(req['query_params'], {'fields': 'id'})
def test_exists_hit_w_alternate_client(self):
PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_NAME)
conn1 = _Connection()
CLIENT1 = _Client(project=self.PROJECT, connection=conn1)
conn2 = _Connection({})
CLIENT2 = _Client(project=self.PROJECT, connection=conn2)
dataset = self._makeOne(self.DS_NAME, client=CLIENT1)
self.assertTrue(dataset.exists(client=CLIENT2))
self.assertEqual(len(conn1._requested), 0)
self.assertEqual(len(conn2._requested), 1)
req = conn2._requested[0]
self.assertEqual(req['method'], 'GET')
self.assertEqual(req['path'], '/%s' % PATH)
self.assertEqual(req['query_params'], {'fields': 'id'})
def test_reload_w_bound_client(self):
PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_NAME)
RESOURCE = self._makeResource()
conn = _Connection(RESOURCE)
client = _Client(project=self.PROJECT, connection=conn)
dataset = self._makeOne(self.DS_NAME, client=client)
dataset.reload()
self.assertEqual(len(conn._requested), 1)
req = conn._requested[0]
self.assertEqual(req['method'], 'GET')
self.assertEqual(req['path'], '/%s' % PATH)
self._verifyResourceProperties(dataset, RESOURCE)
def test_reload_w_alternate_client(self):
PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_NAME)
RESOURCE = self._makeResource()
conn1 = _Connection()
CLIENT1 = _Client(project=self.PROJECT, connection=conn1)
conn2 = _Connection(RESOURCE)
CLIENT2 = _Client(project=self.PROJECT, connection=conn2)
dataset = self._makeOne(self.DS_NAME, client=CLIENT1)
dataset.reload(client=CLIENT2)
self.assertEqual(len(conn1._requested), 0)
self.assertEqual(len(conn2._requested), 1)
req = conn2._requested[0]
self.assertEqual(req['method'], 'GET')
self.assertEqual(req['path'], '/%s' % PATH)
self._verifyResourceProperties(dataset, RESOURCE)
def test_patch_w_invalid_expiration(self):
RESOURCE = self._makeResource()
conn = _Connection(RESOURCE)
client = _Client(project=self.PROJECT, connection=conn)
dataset = self._makeOne(self.DS_NAME, client=client)
with self.assertRaises(ValueError):
dataset.patch(default_table_expiration_ms='BOGUS')
def test_patch_w_bound_client(self):
PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_NAME)
DESCRIPTION = 'DESCRIPTION'
TITLE = 'TITLE'
RESOURCE = self._makeResource()
RESOURCE['description'] = DESCRIPTION
RESOURCE['friendlyName'] = TITLE
conn = _Connection(RESOURCE)
client = _Client(project=self.PROJECT, connection=conn)
dataset = self._makeOne(self.DS_NAME, client=client)
dataset.patch(description=DESCRIPTION, friendly_name=TITLE)
self.assertEqual(len(conn._requested), 1)
req = conn._requested[0]
self.assertEqual(req['method'], 'PATCH')
SENT = {
'description': DESCRIPTION,
'friendlyName': TITLE,
}
self.assertEqual(req['data'], SENT)
self.assertEqual(req['path'], '/%s' % PATH)
self._verifyResourceProperties(dataset, RESOURCE)
def test_patch_w_alternate_client(self):
PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_NAME)
DEF_TABLE_EXP = 12345
LOCATION = 'EU'
RESOURCE = self._makeResource()
RESOURCE['defaultTableExpirationMs'] = str(DEF_TABLE_EXP)
RESOURCE['location'] = LOCATION
conn1 = _Connection()
CLIENT1 = _Client(project=self.PROJECT, connection=conn1)
conn2 = _Connection(RESOURCE)
CLIENT2 = _Client(project=self.PROJECT, connection=conn2)
dataset = self._makeOne(self.DS_NAME, client=CLIENT1)
dataset.patch(client=CLIENT2,
default_table_expiration_ms=DEF_TABLE_EXP,
location=LOCATION)
self.assertEqual(len(conn1._requested), 0)
self.assertEqual(len(conn2._requested), 1)
req = conn2._requested[0]
self.assertEqual(req['method'], 'PATCH')
self.assertEqual(req['path'], '/%s' % PATH)
SENT = {
'defaultTableExpirationMs': DEF_TABLE_EXP,
'location': LOCATION,
}
self.assertEqual(req['data'], SENT)
self._verifyResourceProperties(dataset, RESOURCE)
def test_update_w_bound_client(self):
PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_NAME)
DESCRIPTION = 'DESCRIPTION'
TITLE = 'TITLE'
RESOURCE = self._makeResource()
RESOURCE['description'] = DESCRIPTION
RESOURCE['friendlyName'] = TITLE
conn = _Connection(RESOURCE)
client = _Client(project=self.PROJECT, connection=conn)
dataset = self._makeOne(self.DS_NAME, client=client)
dataset.description = DESCRIPTION
dataset.friendly_name = TITLE
dataset.update()
self.assertEqual(len(conn._requested), 1)
req = conn._requested[0]
self.assertEqual(req['method'], 'PUT')
SENT = {
'datasetReference':
{'projectId': self.PROJECT, 'datasetId': self.DS_NAME},
'description': DESCRIPTION,
'friendlyName': TITLE,
}
self.assertEqual(req['data'], SENT)
self.assertEqual(req['path'], '/%s' % PATH)
self._verifyResourceProperties(dataset, RESOURCE)
def test_update_w_alternate_client(self):
PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_NAME)
DEF_TABLE_EXP = 12345
LOCATION = 'EU'
RESOURCE = self._makeResource()
        RESOURCE['defaultTableExpirationMs'] = str(DEF_TABLE_EXP)
RESOURCE['location'] = LOCATION
conn1 = _Connection()
CLIENT1 = _Client(project=self.PROJECT, connection=conn1)
conn2 = _Connection(RESOURCE)
CLIENT2 = _Client(project=self.PROJECT, connection=conn2)
dataset = self._makeOne(self.DS_NAME, client=CLIENT1)
dataset.default_table_expiration_ms = DEF_TABLE_EXP
dataset.location = LOCATION
dataset.update(client=CLIENT2)
self.assertEqual(len(conn1._requested), 0)
self.assertEqual(len(conn2._requested), 1)
req = conn2._requested[0]
self.assertEqual(req['method'], 'PUT')
self.assertEqual(req['path'], '/%s' % PATH)
SENT = {
'datasetReference':
{'projectId': self.PROJECT, 'datasetId': self.DS_NAME},
            'defaultTableExpirationMs': DEF_TABLE_EXP,
            'location': LOCATION,
}
self.assertEqual(req['data'], SENT)
self._verifyResourceProperties(dataset, RESOURCE)
def test_delete_w_bound_client(self):
PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_NAME)
conn = _Connection({})
client = _Client(project=self.PROJECT, connection=conn)
dataset = self._makeOne(self.DS_NAME, client=client)
dataset.delete()
self.assertEqual(len(conn._requested), 1)
req = conn._requested[0]
self.assertEqual(req['method'], 'DELETE')
self.assertEqual(req['path'], '/%s' % PATH)
def test_delete_w_alternate_client(self):
PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_NAME)
conn1 = _Connection()
CLIENT1 = _Client(project=self.PROJECT, connection=conn1)
conn2 = _Connection({})
CLIENT2 = _Client(project=self.PROJECT, connection=conn2)
dataset = self._makeOne(self.DS_NAME, client=CLIENT1)
dataset.delete(client=CLIENT2)
self.assertEqual(len(conn1._requested), 0)
self.assertEqual(len(conn2._requested), 1)
req = conn2._requested[0]
self.assertEqual(req['method'], 'DELETE')
self.assertEqual(req['path'], '/%s' % PATH)
def test_list_tables_empty(self):
conn = _Connection({})
client = _Client(project=self.PROJECT, connection=conn)
dataset = self._makeOne(self.DS_NAME, client=client)
tables, token = dataset.list_tables()
self.assertEqual(tables, [])
self.assertEqual(token, None)
self.assertEqual(len(conn._requested), 1)
req = conn._requested[0]
self.assertEqual(req['method'], 'GET')
PATH = 'projects/%s/datasets/%s/tables' % (self.PROJECT, self.DS_NAME)
self.assertEqual(req['path'], '/%s' % PATH)
def test_list_tables_defaults(self):
from gcloud.bigquery.table import Table
TABLE_1 = 'table_one'
TABLE_2 = 'table_two'
PATH = 'projects/%s/datasets/%s/tables' % (self.PROJECT, self.DS_NAME)
TOKEN = 'TOKEN'
DATA = {
'nextPageToken': TOKEN,
'tables': [
{'kind': 'bigquery#table',
'id': '%s:%s.%s' % (self.PROJECT, self.DS_NAME, TABLE_1),
'tableReference': {'tableId': TABLE_1,
'datasetId': self.DS_NAME,
'projectId': self.PROJECT},
'type': 'TABLE'},
{'kind': 'bigquery#table',
'id': '%s:%s.%s' % (self.PROJECT, self.DS_NAME, TABLE_2),
'tableReference': {'tableId': TABLE_2,
'datasetId': self.DS_NAME,
'projectId': self.PROJECT},
'type': 'TABLE'},
]
}
conn = _Connection(DATA)
client = _Client(project=self.PROJECT, connection=conn)
dataset = self._makeOne(self.DS_NAME, client=client)
tables, token = dataset.list_tables()
self.assertEqual(len(tables), len(DATA['tables']))
for found, expected in zip(tables, DATA['tables']):
self.assertTrue(isinstance(found, Table))
self.assertEqual(found.table_id, expected['id'])
self.assertEqual(found.table_type, expected['type'])
self.assertEqual(token, TOKEN)
self.assertEqual(len(conn._requested), 1)
req = conn._requested[0]
self.assertEqual(req['method'], 'GET')
self.assertEqual(req['path'], '/%s' % PATH)
def test_list_tables_explicit(self):
from gcloud.bigquery.table import Table
TABLE_1 = 'table_one'
TABLE_2 = 'table_two'
PATH = 'projects/%s/datasets/%s/tables' % (self.PROJECT, self.DS_NAME)
TOKEN = 'TOKEN'
DATA = {
'tables': [
                {'kind': 'bigquery#table',
'id': '%s:%s.%s' % (self.PROJECT, self.DS_NAME, TABLE_1),
'tableReference': {'tableId': TABLE_1,
'datasetId': self.DS_NAME,
'projectId': self.PROJECT},
'type': 'TABLE'},
                {'kind': 'bigquery#table',
'id': '%s:%s.%s' % (self.PROJECT, self.DS_NAME, TABLE_2),
'tableReference': {'tableId': TABLE_2,
'datasetId': self.DS_NAME,
'projectId': self.PROJECT},
'type': 'TABLE'},
]
}
conn = _Connection(DATA)
client = _Client(project=self.PROJECT, connection=conn)
dataset = self._makeOne(self.DS_NAME, client=client)
tables, token = dataset.list_tables(max_results=3, page_token=TOKEN)
self.assertEqual(len(tables), len(DATA['tables']))
for found, expected in zip(tables, DATA['tables']):
self.assertTrue(isinstance(found, Table))
self.assertEqual(found.table_id, expected['id'])
self.assertEqual(found.table_type, expected['type'])
self.assertEqual(token, None)
self.assertEqual(len(conn._requested), 1)
req = conn._requested[0]
self.assertEqual(req['method'], 'GET')
self.assertEqual(req['path'], '/%s' % PATH)
self.assertEqual(req['query_params'],
{'maxResults': 3, 'pageToken': TOKEN})
def test_table_wo_schema(self):
from gcloud.bigquery.table import Table
conn = _Connection({})
client = _Client(project=self.PROJECT, connection=conn)
dataset = self._makeOne(self.DS_NAME, client=client)
table = dataset.table('table_name')
self.assertTrue(isinstance(table, Table))
self.assertEqual(table.name, 'table_name')
self.assertTrue(table._dataset is dataset)
self.assertEqual(table.schema, [])
def test_table_w_schema(self):
from gcloud.bigquery.table import SchemaField
from gcloud.bigquery.table import Table
conn = _Connection({})
client = _Client(project=self.PROJECT, connection=conn)
dataset = self._makeOne(self.DS_NAME, client=client)
full_name = SchemaField('full_name', 'STRING', mode='REQUIRED')
age = SchemaField('age', 'INTEGER', mode='REQUIRED')
table = dataset.table('table_name', schema=[full_name, age])
self.assertTrue(isinstance(table, Table))
self.assertEqual(table.name, 'table_name')
self.assertTrue(table._dataset is dataset)
self.assertEqual(table.schema, [full_name, age])
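# Test doubles for the Dataset tests. _Client is a minimal stand-in exposing
# only the 'project' and 'connection' attributes that Dataset reads.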
class _Client(object):
def __init__(self, project='project', connection=None):
self.project = project
self.connection = connection
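# _Connection replays canned responses in order and raises NotFound once they
# run out, which lets the exists() miss tests run without a canned response.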
class _Connection(object):
def __init__(self, *responses):
self._responses = responses
self._requested = []
def api_request(self, **kw):
from gcloud.exceptions import NotFound
self._requested.append(kw)
try:
response, self._responses = self._responses[0], self._responses[1:]
        except IndexError:
raise NotFound('miss')
else:
return response

File diff suppressed because it is too large

View file

@ -0,0 +1,330 @@
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
class TestQueryResults(unittest2.TestCase):
PROJECT = 'project'
    JOB_NAME = 'test-synchronous-query'
JOB_TYPE = 'query'
QUERY = 'select count(*) from persons'
TOKEN = 'TOKEN'
def _getTargetClass(self):
from gcloud.bigquery.query import QueryResults
return QueryResults
def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)
def _makeResource(self, complete=False):
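        # Canned query-results resource: rows, the page token, and processing
        # stats are only present once the job reports itself complete.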
resource = {
'jobReference': {
'projectId': self.PROJECT,
'jobId': self.JOB_NAME,
},
'jobComplete': complete,
'errors': [],
'schema': {
'fields': [
                    {'name': 'full_name', 'type': 'STRING', 'mode': 'REQUIRED'},
                    {'name': 'age', 'type': 'INTEGER', 'mode': 'REQUIRED'},
],
},
}
if complete:
resource['totalRows'] = 1000
resource['rows'] = [
{'f': [
{'v': 'Phred Phlyntstone'},
{'v': 32},
]},
{'f': [
{'v': 'Bharney Rhubble'},
{'v': 33},
]},
{'f': [
{'v': 'Wylma Phlyntstone'},
{'v': 29},
]},
{'f': [
{'v': 'Bhettye Rhubble'},
{'v': 27},
]},
]
resource['pageToken'] = self.TOKEN
resource['totalBytesProcessed'] = 100000
resource['cacheHit'] = False
return resource
def _verifySchema(self, query, resource):
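        # Compare the parsed SchemaField objects against the raw 'schema'
        # mapping, field by field.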
from gcloud.bigquery.table import SchemaField
if 'schema' in resource:
fields = resource['schema']['fields']
self.assertEqual(len(query.schema), len(fields))
for found, expected in zip(query.schema, fields):
self.assertTrue(isinstance(found, SchemaField))
self.assertEqual(found.name, expected['name'])
self.assertEqual(found.field_type, expected['type'])
self.assertEqual(found.mode, expected['mode'])
self.assertEqual(found.description,
expected.get('description'))
self.assertEqual(found.fields, expected.get('fields'))
else:
self.assertTrue(query.schema is None)
def _verifyRows(self, query, resource):
expected = resource.get('rows')
if expected is None:
self.assertEqual(query.rows, [])
else:
found = query.rows
self.assertEqual(len(found), len(expected))
for f_row, e_row in zip(found, expected):
self.assertEqual(f_row,
tuple([cell['v'] for cell in e_row['f']]))
def _verifyResourceProperties(self, query, resource):
self.assertEqual(query.cache_hit, resource.get('cacheHit'))
self.assertEqual(query.complete, resource.get('jobComplete'))
self.assertEqual(query.errors, resource.get('errors'))
self.assertEqual(query.page_token, resource.get('pageToken'))
self.assertEqual(query.total_rows, resource.get('totalRows'))
self.assertEqual(query.total_bytes_processed,
resource.get('totalBytesProcessed'))
if 'jobReference' in resource:
self.assertEqual(query.name, resource['jobReference']['jobId'])
else:
self.assertTrue(query.name is None)
self._verifySchema(query, resource)
self._verifyRows(query, resource)
def test_ctor(self):
client = _Client(self.PROJECT)
query = self._makeOne(self.QUERY, client)
self.assertEqual(query.query, self.QUERY)
self.assertTrue(query._client is client)
self.assertTrue(query.cache_hit is None)
self.assertTrue(query.complete is None)
self.assertTrue(query.errors is None)
self.assertTrue(query.name is None)
self.assertTrue(query.page_token is None)
self.assertEqual(query.rows, [])
self.assertTrue(query.schema is None)
self.assertTrue(query.total_rows is None)
self.assertTrue(query.total_bytes_processed is None)
self.assertTrue(query.default_dataset is None)
self.assertTrue(query.max_results is None)
self.assertTrue(query.preserve_nulls is None)
self.assertTrue(query.use_query_cache is None)
def test_job_wo_jobid(self):
client = _Client(self.PROJECT)
query = self._makeOne(self.QUERY, client)
self.assertTrue(query.job is None)
def test_job_w_jobid(self):
from gcloud.bigquery.job import QueryJob
SERVER_GENERATED = 'SERVER_GENERATED'
client = _Client(self.PROJECT)
query = self._makeOne(self.QUERY, client)
query._properties['jobReference'] = {
'projectId': self.PROJECT,
'jobId': SERVER_GENERATED,
}
job = query.job
self.assertTrue(isinstance(job, QueryJob))
self.assertEqual(job.query, self.QUERY)
self.assertTrue(job._client is client)
self.assertEqual(job.name, SERVER_GENERATED)
fetched_later = query.job
self.assertTrue(fetched_later is job)
def test_schema(self):
client = _Client(self.PROJECT)
query = self._makeOne(self.QUERY, client)
self._verifyResourceProperties(query, {})
resource = {
'schema': {
'fields': [
                    {'name': 'full_name', 'type': 'STRING', 'mode': 'REQUIRED'},
                    {'name': 'age', 'type': 'INTEGER', 'mode': 'REQUIRED'},
],
},
}
query._set_properties(resource)
self._verifyResourceProperties(query, resource)
def test_run_w_bound_client(self):
PATH = 'projects/%s/queries' % self.PROJECT
RESOURCE = self._makeResource(complete=False)
conn = _Connection(RESOURCE)
client = _Client(project=self.PROJECT, connection=conn)
query = self._makeOne(self.QUERY, client)
query.run()
self.assertEqual(len(conn._requested), 1)
req = conn._requested[0]
self.assertEqual(req['method'], 'POST')
self.assertEqual(req['path'], '/%s' % PATH)
SENT = {'query': self.QUERY}
self.assertEqual(req['data'], SENT)
self._verifyResourceProperties(query, RESOURCE)
def test_run_w_alternate_client(self):
PATH = 'projects/%s/queries' % self.PROJECT
RESOURCE = self._makeResource(complete=True)
DATASET = 'test_dataset'
conn1 = _Connection()
client1 = _Client(project=self.PROJECT, connection=conn1)
conn2 = _Connection(RESOURCE)
client2 = _Client(project=self.PROJECT, connection=conn2)
query = self._makeOne(self.QUERY, client1)
query.default_dataset = client2.dataset(DATASET)
query.max_results = 100
query.preserve_nulls = True
query.timeout_ms = 20000
query.use_query_cache = False
query.dry_run = True
query.run(client=client2)
self.assertEqual(len(conn1._requested), 0)
self.assertEqual(len(conn2._requested), 1)
req = conn2._requested[0]
self.assertEqual(req['method'], 'POST')
self.assertEqual(req['path'], '/%s' % PATH)
SENT = {
'query': self.QUERY,
'defaultDataset': {
'projectId': self.PROJECT,
'datasetId': DATASET,
},
'dryRun': True,
'maxResults': 100,
'preserveNulls': True,
'timeoutMs': 20000,
'useQueryCache': False,
}
self.assertEqual(req['data'], SENT)
self._verifyResourceProperties(query, RESOURCE)
def test_fetch_data_query_not_yet_run(self):
conn = _Connection()
client = _Client(project=self.PROJECT, connection=conn)
query = self._makeOne(self.QUERY, client)
self.assertRaises(ValueError, query.fetch_data)
def test_fetch_data_w_bound_client(self):
PATH = 'projects/%s/queries/%s' % (self.PROJECT, self.JOB_NAME)
BEFORE = self._makeResource(complete=False)
AFTER = self._makeResource(complete=True)
conn = _Connection(AFTER)
client = _Client(project=self.PROJECT, connection=conn)
query = self._makeOne(self.QUERY, client)
query._set_properties(BEFORE)
self.assertFalse(query.complete)
rows, total_rows, page_token = query.fetch_data()
self.assertTrue(query.complete)
self.assertEqual(len(rows), 4)
self.assertEqual(rows[0], ('Phred Phlyntstone', 32))
self.assertEqual(rows[1], ('Bharney Rhubble', 33))
self.assertEqual(rows[2], ('Wylma Phlyntstone', 29))
self.assertEqual(rows[3], ('Bhettye Rhubble', 27))
self.assertEqual(total_rows, AFTER['totalRows'])
self.assertEqual(page_token, AFTER['pageToken'])
self.assertEqual(len(conn._requested), 1)
req = conn._requested[0]
self.assertEqual(req['method'], 'GET')
self.assertEqual(req['path'], '/%s' % PATH)
def test_fetch_data_w_alternate_client(self):
PATH = 'projects/%s/queries/%s' % (self.PROJECT, self.JOB_NAME)
MAX = 10
TOKEN = 'TOKEN'
START = 2257
TIMEOUT = 20000
BEFORE = self._makeResource(complete=False)
AFTER = self._makeResource(complete=True)
conn1 = _Connection()
client1 = _Client(project=self.PROJECT, connection=conn1)
conn2 = _Connection(AFTER)
client2 = _Client(project=self.PROJECT, connection=conn2)
query = self._makeOne(self.QUERY, client1)
query._set_properties(BEFORE)
self.assertFalse(query.complete)
rows, total_rows, page_token = query.fetch_data(
client=client2, max_results=MAX, page_token=TOKEN,
start_index=START, timeout_ms=TIMEOUT)
self.assertTrue(query.complete)
self.assertEqual(len(rows), 4)
self.assertEqual(rows[0], ('Phred Phlyntstone', 32))
self.assertEqual(rows[1], ('Bharney Rhubble', 33))
self.assertEqual(rows[2], ('Wylma Phlyntstone', 29))
self.assertEqual(rows[3], ('Bhettye Rhubble', 27))
self.assertEqual(total_rows, AFTER['totalRows'])
self.assertEqual(page_token, AFTER['pageToken'])
self.assertEqual(len(conn1._requested), 0)
self.assertEqual(len(conn2._requested), 1)
req = conn2._requested[0]
self.assertEqual(req['method'], 'GET')
self.assertEqual(req['path'], '/%s' % PATH)
self.assertEqual(req['query_params'],
{'maxResults': MAX,
'pageToken': TOKEN,
'startIndex': START,
'timeoutMs': TIMEOUT})
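# Test doubles for the QueryResults tests. _Client adds a dataset() factory
# so tests can attach a default dataset without a real client.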
class _Client(object):
def __init__(self, project='project', connection=None):
self.project = project
self.connection = connection
def dataset(self, name):
from gcloud.bigquery.dataset import Dataset
return Dataset(name, client=self)
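# _Connection records each api_request call and replays canned responses in
# order.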
class _Connection(object):
def __init__(self, *responses):
self._responses = responses
self._requested = []
def api_request(self, **kw):
self._requested.append(kw)
response, self._responses = self._responses[0], self._responses[1:]
return response

File diff suppressed because it is too large