Added delete option to database storage.

This commit is contained in:
Batuhan Berk Başoğlu 2020-10-12 12:10:01 -04:00
parent 308604a33c
commit 963b5bc68b
1868 changed files with 192402 additions and 13278 deletions

View file

@ -0,0 +1,636 @@
# Copyright 2014 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Shared helpers for Google Cloud packages.
This module is not part of the public API surface.
"""
from __future__ import absolute_import
import calendar
import datetime
import os
import re
from threading import local as Local
import six
from six.moves import http_client
import google.auth
import google.auth.transport.requests
from google.protobuf import duration_pb2
from google.protobuf import timestamp_pb2
try:
import grpc
import google.auth.transport.grpc
except ImportError: # pragma: NO COVER
grpc = None
# Hook for tests to freeze "now"; production code calls this indirectly.
_NOW = datetime.datetime.utcnow  # To be replaced by tests.
# RFC 3339 timestamp formats, with and without fractional seconds.
_RFC3339_MICROS = "%Y-%m-%dT%H:%M:%S.%fZ"
_RFC3339_NO_FRACTION = "%Y-%m-%dT%H:%M:%S"
# Time-of-day-only formats (no date component).
_TIMEONLY_W_MICROS = "%H:%M:%S.%f"
_TIMEONLY_NO_FRACTION = "%H:%M:%S"
# datetime.strptime cannot handle nanosecond precision: parse w/ regex
_RFC3339_NANOS = re.compile(
    r"""
    (?P<no_fraction>
        \d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}  # YYYY-MM-DDTHH:MM:SS
    )
    (                        # Optional decimal part
     \.                      # decimal point
     (?P<nanos>\d{1,9})      # nanoseconds, maybe truncated
    )?
    Z                        # Zulu
    """,
    re.VERBOSE,
)
# NOTE: Catching this ImportError is a workaround for GAE not supporting the
# "pwd" module which is imported lazily when "expanduser" is called.
try:
    _USER_ROOT = os.path.expanduser("~")
except ImportError:  # pragma: NO COVER
    _USER_ROOT = None
# Location (relative to the gcloud config dir) of the CLI's default config,
# plus the section / key within it that hold the default project ID.
_GCLOUD_CONFIG_FILE = os.path.join("gcloud", "configurations", "config_default")
_GCLOUD_CONFIG_SECTION = "core"
_GCLOUD_CONFIG_KEY = "project"
class _LocalStack(Local):
"""Manage a thread-local LIFO stack of resources.
Intended for use in :class:`google.cloud.datastore.batch.Batch.__enter__`,
:class:`google.cloud.storage.batch.Batch.__enter__`, etc.
"""
def __init__(self):
super(_LocalStack, self).__init__()
self._stack = []
def __iter__(self):
"""Iterate the stack in LIFO order.
"""
return iter(reversed(self._stack))
def push(self, resource):
"""Push a resource onto our stack.
"""
self._stack.append(resource)
def pop(self):
"""Pop a resource from our stack.
:rtype: object
:returns: the top-most resource, after removing it.
:raises IndexError: if the stack is empty.
"""
return self._stack.pop()
@property
def top(self):
"""Get the top-most resource
:rtype: object
:returns: the top-most item, or None if the stack is empty.
"""
if self._stack:
return self._stack[-1]
class _UTC(datetime.tzinfo):
"""Basic UTC implementation.
Implementing a small surface area to avoid depending on ``pytz``.
"""
_dst = datetime.timedelta(0)
_tzname = "UTC"
_utcoffset = _dst
def dst(self, dt): # pylint: disable=unused-argument
"""Daylight savings time offset."""
return self._dst
def fromutc(self, dt):
"""Convert a timestamp from (naive) UTC to this timezone."""
if dt.tzinfo is None:
return dt.replace(tzinfo=self)
return super(_UTC, self).fromutc(dt)
def tzname(self, dt): # pylint: disable=unused-argument
"""Get the name of this timezone."""
return self._tzname
def utcoffset(self, dt): # pylint: disable=unused-argument
"""UTC offset of this timezone."""
return self._utcoffset
def __repr__(self):
return "<%s>" % (self._tzname,)
def __str__(self):
return self._tzname
def _ensure_tuple_or_list(arg_name, tuple_or_list):
"""Ensures an input is a tuple or list.
This effectively reduces the iterable types allowed to a very short
whitelist: list and tuple.
:type arg_name: str
:param arg_name: Name of argument to use in error message.
:type tuple_or_list: sequence of str
:param tuple_or_list: Sequence to be verified.
:rtype: list of str
:returns: The ``tuple_or_list`` passed in cast to a ``list``.
:raises TypeError: if the ``tuple_or_list`` is not a tuple or list.
"""
if not isinstance(tuple_or_list, (tuple, list)):
raise TypeError(
"Expected %s to be a tuple or list. "
"Received %r" % (arg_name, tuple_or_list)
)
return list(tuple_or_list)
def _determine_default_project(project=None):
"""Determine default project ID explicitly or implicitly as fall-back.
See :func:`google.auth.default` for details on how the default project
is determined.
:type project: str
:param project: Optional. The project name to use as default.
:rtype: str or ``NoneType``
:returns: Default project if it can be determined.
"""
if project is None:
_, project = google.auth.default()
return project
def _millis(when):
    """Convert a zone-aware datetime to integer milliseconds since epoch.

    :type when: :class:`datetime.datetime`
    :param when: The datetime to convert.

    :rtype: int
    :returns: Milliseconds since the Unix epoch for ``when``.
    """
    return _microseconds_from_datetime(when) // 1000
def _datetime_from_microseconds(value):
    """Build a UTC datetime from a microsecond epoch offset.

    :type value: float
    :param value: Microseconds since the Unix epoch.

    :rtype: :class:`datetime.datetime`
    :returns: The corresponding (UTC) datetime.
    """
    offset = datetime.timedelta(microseconds=value)
    return _EPOCH + offset
def _microseconds_from_datetime(value):
    """Convert a (non-None) datetime to microseconds since epoch.

    Naive datetimes are interpreted as UTC.

    :type value: :class:`datetime.datetime`
    :param value: The timestamp to convert.

    :rtype: int
    :returns: The timestamp, in microseconds.
    """
    if not value.tzinfo:
        # Treat naive timestamps as UTC.
        value = value.replace(tzinfo=UTC)
    # Normalize any zone to UTC before computing the epoch offset.
    utc_value = value.astimezone(UTC)
    whole_seconds = calendar.timegm(utc_value.timetuple())
    return int(whole_seconds * 1e6) + utc_value.microsecond
def _millis_from_datetime(value):
    """Convert an optional datetime to epoch milliseconds (UTC assumed).

    :type value: :class:`datetime.datetime`
    :param value: (Optional) the timestamp.

    :rtype: int, or ``NoneType``
    :returns: The timestamp in milliseconds, or :data:`None` when ``value``
              is :data:`None`.
    """
    if value is None:
        return None
    return _millis(value)
def _date_from_iso8601_date(value):
"""Convert a ISO8601 date string to native datetime date
:type value: str
:param value: The date string to convert
:rtype: :class:`datetime.date`
:returns: A datetime date object created from the string
"""
return datetime.datetime.strptime(value, "%Y-%m-%d").date()
def _time_from_iso8601_time_naive(value):
    """Parse a zoneless ISO-8601 time string into a naive time.

    :type value: str
    :param value: The time string to parse (``HH:MM:SS`` or
                  ``HH:MM:SS.ffffff``).

    :rtype: :class:`datetime.time`
    :returns: The parsed time.

    :raises ValueError: if the value does not match a known format.
    """
    # Dispatch on string length: 8 chars means HH:MM:SS, 15 means
    # HH:MM:SS.micros.
    formats = {8: _TIMEONLY_NO_FRACTION, 15: _TIMEONLY_W_MICROS}
    fmt = formats.get(len(value))
    if fmt is None:
        raise ValueError("Unknown time format: {}".format(value))
    return datetime.datetime.strptime(value, fmt).time()
def _rfc3339_to_datetime(dt_str):
    """Parse a microsecond-precision RFC 3339 timestamp.

    :type dt_str: str
    :param dt_str: The timestamp string to parse.

    :rtype: :class:`datetime.datetime`
    :returns: The parsed, UTC-aware datetime.
    """
    parsed = datetime.datetime.strptime(dt_str, _RFC3339_MICROS)
    return parsed.replace(tzinfo=UTC)
def _rfc3339_nanos_to_datetime(dt_str):
    """Parse a nanosecond-precision RFC 3339 timestamp.

    .. note::
       Python datetimes carry at most microsecond precision, so any
       nanosecond component is truncated to microseconds.

    :type dt_str: str
    :param dt_str: The timestamp string to parse.

    :rtype: :class:`datetime.datetime`
    :returns: The parsed, UTC-aware datetime.

    :raises ValueError: if ``dt_str`` does not match the RFC 3339 pattern.
    """
    match = _RFC3339_NANOS.match(dt_str)
    if match is None:
        raise ValueError(
            "Timestamp: %r, does not match pattern: %r"
            % (dt_str, _RFC3339_NANOS.pattern)
        )
    bare_seconds = datetime.datetime.strptime(
        match.group("no_fraction"), _RFC3339_NO_FRACTION
    )
    fraction = match.group("nanos")
    micros = 0
    if fraction is not None:
        # Right-pad truncated fractions to 9 digits, then drop to micros.
        nanos = int(fraction) * 10 ** (9 - len(fraction))
        micros = nanos // 1000
    return bare_seconds.replace(microsecond=micros, tzinfo=UTC)
def _datetime_to_rfc3339(value, ignore_zone=True):
    """Format a datetime as a microsecond-precision RFC 3339 string.

    :type value: :class:`datetime.datetime`
    :param value: The datetime to format.

    :type ignore_zone: bool
    :param ignore_zone: If :data:`True`, any timezone on ``value`` is
                        disregarded rather than converted to UTC.

    :rtype: str
    :returns: The formatted timestamp string.
    """
    if value.tzinfo is not None and not ignore_zone:
        # Shift to UTC, then drop the zone so strftime emits a bare "Z".
        value = value.replace(tzinfo=None) - value.utcoffset()
    return value.strftime(_RFC3339_MICROS)
def _to_bytes(value, encoding="ascii"):
    """Converts a string value to bytes, if necessary.

    Unfortunately, ``six.b`` is insufficient for this task since in
    Python 2 it does not modify ``unicode`` objects.

    :type value: str / bytes or unicode
    :param value: The string/bytes value to be converted.

    :type encoding: str
    :param encoding: The encoding to use to convert unicode to bytes.
        Defaults to "ascii", which will not allow any characters from
        ordinals larger than 127. Other useful values are "latin-1",
        which only allows byte ordinals (up to 255), and "utf-8", which
        can encode any unicode character.

    :rtype: str / bytes
    :returns: The original value converted to bytes (if unicode) or as
        passed in if it started out as bytes.

    :raises TypeError: if the value could not be converted to bytes.
    """
    result = value.encode(encoding) if isinstance(value, six.text_type) else value
    if isinstance(result, six.binary_type):
        return result
    else:
        raise TypeError("%r could not be converted to bytes" % (value,))
def _bytes_to_unicode(value):
    """Convert bytes to unicode text, if necessary.

    :type value: bytes
    :param value: The value to (possibly) decode.

    :rtype: str
    :returns: ``value`` decoded from UTF-8 (if it was bytes), or unchanged
              if it was already unicode.

    :raises ValueError: if the value is neither bytes nor unicode.
    """
    if isinstance(value, six.binary_type):
        value = value.decode("utf-8")
    if not isinstance(value, six.text_type):
        raise ValueError("%r could not be converted to unicode" % (value,))
    return value
def _from_any_pb(pb_type, any_pb):
"""Converts an Any protobuf to the specified message type
Args:
pb_type (type): the type of the message that any_pb stores an instance
of.
any_pb (google.protobuf.any_pb2.Any): the object to be converted.
Returns:
pb_type: An instance of the pb_type message.
Raises:
TypeError: if the message could not be converted.
"""
msg = pb_type()
if not any_pb.Unpack(msg):
raise TypeError(
"Could not convert {} to {}".format(
any_pb.__class__.__name__, pb_type.__name__
)
)
return msg
def _pb_timestamp_to_datetime(timestamp_pb):
    """Convert a Timestamp protobuf to a UTC datetime.

    :type timestamp_pb: :class:`google.protobuf.timestamp_pb2.Timestamp`
    :param timestamp_pb: The protobuf timestamp to convert.

    :rtype: :class:`datetime.datetime`
    :returns: The equivalent UTC-aware datetime.
    """
    offset = datetime.timedelta(
        seconds=timestamp_pb.seconds,
        microseconds=(timestamp_pb.nanos / 1000.0),
    )
    return _EPOCH + offset
def _pb_timestamp_to_rfc3339(timestamp_pb):
    """Render a Timestamp protobuf as an RFC 3339 string.

    :type timestamp_pb: :class:`google.protobuf.timestamp_pb2.Timestamp`
    :param timestamp_pb: The protobuf timestamp to render.

    :rtype: str
    :returns: The RFC 3339 formatted timestamp string.
    """
    return _datetime_to_rfc3339(_pb_timestamp_to_datetime(timestamp_pb))
def _datetime_to_pb_timestamp(when):
    """Convert a datetime to a Timestamp protobuf.

    :type when: :class:`datetime.datetime`
    :param when: The datetime to convert.

    :rtype: :class:`google.protobuf.timestamp_pb2.Timestamp`
    :returns: The equivalent protobuf timestamp.
    """
    micros_total = _microseconds_from_datetime(when)
    seconds, micros = divmod(micros_total, 10 ** 6)
    return timestamp_pb2.Timestamp(seconds=seconds, nanos=micros * 10 ** 3)
def _timedelta_to_duration_pb(timedelta_val):
    """Convert a :class:`datetime.timedelta` to a Duration protobuf.

    .. note::
       Python timedeltas have microsecond granularity while protobuf
       durations support nanoseconds.

    :type timedelta_val: :class:`datetime.timedelta`
    :param timedelta_val: The timedelta to convert.

    :rtype: :class:`google.protobuf.duration_pb2.Duration`
    :returns: A duration protobuf equivalent to the timedelta.
    """
    result = duration_pb2.Duration()
    result.FromTimedelta(timedelta_val)
    return result
def _duration_pb_to_timedelta(duration_pb):
"""Convert a duration protobuf to a Python timedelta object.
.. note::
The Python timedelta has a granularity of microseconds while
the protobuf duration type has a duration of nanoseconds.
:type duration_pb: :class:`google.protobuf.duration_pb2.Duration`
:param duration_pb: A protobuf duration object.
:rtype: :class:`datetime.timedelta`
:returns: The converted timedelta object.
"""
return datetime.timedelta(
seconds=duration_pb.seconds, microseconds=(duration_pb.nanos / 1000.0)
)
def _name_from_project_path(path, project, template):
"""Validate a URI path and get the leaf object's name.
:type path: str
:param path: URI path containing the name.
:type project: str
:param project: (Optional) The project associated with the request. It is
included for validation purposes. If passed as None,
disables validation.
:type template: str
:param template: Template regex describing the expected form of the path.
The regex must have two named groups, 'project' and
'name'.
:rtype: str
:returns: Name parsed from ``path``.
:raises ValueError: if the ``path`` is ill-formed or if the project from
the ``path`` does not agree with the ``project``
passed in.
"""
if isinstance(template, str):
template = re.compile(template)
match = template.match(path)
if not match:
raise ValueError(
'path "%s" did not match expected pattern "%s"' % (path, template.pattern)
)
if project is not None:
found_project = match.group("project")
if found_project != project:
raise ValueError(
"Project from client (%s) should agree with "
"project from resource(%s)." % (project, found_project)
)
return match.group("name")
def make_secure_channel(credentials, user_agent, host, extra_options=()):
    """Create a credentialed, secure gRPC channel for an RPC service.

    Uses / depends on gRPC.

    :type credentials: :class:`google.auth.credentials.Credentials`
    :param credentials: OAuth2 credentials used to mint access tokens.

    :type user_agent: str
    :param user_agent: User agent to attach to API requests.

    :type host: str
    :param host: Host of the target service.

    :type extra_options: tuple
    :param extra_options: (Optional) Additional gRPC options for the
                          channel.

    :rtype: :class:`grpc._channel.Channel`
    :returns: A secure gRPC channel with the credentials attached.
    """
    target = "%s:%d" % (host, http_client.HTTPS_PORT)
    http_request = google.auth.transport.requests.Request()
    # Prepend the user-agent option so callers' extra_options can follow.
    options = (("grpc.primary_user_agent", user_agent),) + extra_options
    return google.auth.transport.grpc.secure_authorized_channel(
        credentials, http_request, target, options=options
    )
def make_secure_stub(credentials, user_agent, stub_class, host, extra_options=()):
    """Create a secure gRPC stub for an RPC service.

    Uses / depends on gRPC.

    :type credentials: :class:`google.auth.credentials.Credentials`
    :param credentials: OAuth2 credentials used to mint access tokens.

    :type user_agent: str
    :param user_agent: User agent to attach to API requests.

    :type stub_class: type
    :param stub_class: The gRPC stub type for the target service.

    :type host: str
    :param host: Host of the target service.

    :type extra_options: tuple
    :param extra_options: (Optional) Additional gRPC options for the
                          underlying channel.

    :rtype: object, instance of ``stub_class``
    :returns: A stub bound to a freshly created secure channel.
    """
    secure_channel = make_secure_channel(
        credentials, user_agent, host, extra_options=extra_options
    )
    return stub_class(secure_channel)
def make_insecure_stub(stub_class, host, port=None):
    """Create an insecure gRPC stub for an RPC service.

    Uses / depends on gRPC.

    :type stub_class: type
    :param stub_class: The gRPC stub type for the target service.

    :type host: str
    :param host: Host of the target service; may embed the port when
                 ``port`` is omitted.

    :type port: int
    :param port: (Optional) Port of the target service.

    :rtype: object, instance of ``stub_class``
    :returns: A stub bound to a freshly created insecure channel.
    """
    # NOTE: This assumes port != http_client.HTTPS_PORT:
    target = host if port is None else "%s:%d" % (host, port)
    return stub_class(grpc.insecure_channel(target))
# Prefer the ``pytz`` UTC singleton when available; otherwise fall back to
# the lightweight local ``_UTC`` implementation defined above.
try:
    from pytz import UTC  # pylint: disable=unused-import,wrong-import-order
except ImportError:  # pragma: NO COVER
    UTC = _UTC()  # Singleton instance to be used throughout.

# Need to define _EPOCH at the end of module since it relies on UTC.
_EPOCH = datetime.datetime.utcfromtimestamp(0).replace(tzinfo=UTC)

View file

@ -0,0 +1,440 @@
# Copyright 2014 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Shared implementation of connections to API servers."""
import collections
import json
import platform
import warnings
from six.moves import collections_abc
from six.moves.urllib.parse import urlencode
from google.api_core.client_info import ClientInfo
from google.cloud import exceptions
from google.cloud import version
API_BASE_URL = "https://www.googleapis.com"
"""The base of the API call URL."""

DEFAULT_USER_AGENT = "gcloud-python/{0}".format(version.__version__)
"""The user agent for google-cloud-python requests."""

# Header used to report the calling client's language / library versions.
CLIENT_INFO_HEADER = "X-Goog-API-Client"
CLIENT_INFO_TEMPLATE = "gl-python/" + platform.python_version() + " gccl/{}"

# Deprecation messages for the legacy ALL_CAPS class attributes.
_USER_AGENT_ALL_CAPS_DEPRECATED = """\
The 'USER_AGENT' class-level attribute is deprecated. Please use
'user_agent' instead.
"""

_EXTRA_HEADERS_ALL_CAPS_DEPRECATED = """\
The '_EXTRA_HEADERS' class-level attribute is deprecated. Please use
'extra_headers' instead.
"""

_DEFAULT_TIMEOUT = 60  # in seconds
class Connection(object):
    """A generic connection to Google Cloud Platform.

    :type client: :class:`~google.cloud.client.Client`
    :param client: The client that owns the current connection.

    :type client_info: :class:`~google.api_core.client_info.ClientInfo`
    :param client_info: (Optional) instance used to generate user agent.
    """

    # Class-level fallback; the effective user agent comes from client_info.
    _user_agent = DEFAULT_USER_AGENT

    def __init__(self, client, client_info=None):
        self._client = client

        if client_info is None:
            client_info = ClientInfo()

        self._client_info = client_info
        # Headers merged into every request made through this connection.
        self._extra_headers = {}

    @property
    def USER_AGENT(self):
        """Deprecated: get / set user agent sent by connection.

        :rtype: str
        :returns: user agent
        """
        warnings.warn(
            _USER_AGENT_ALL_CAPS_DEPRECATED, DeprecationWarning, stacklevel=2)
        return self.user_agent

    @USER_AGENT.setter
    def USER_AGENT(self, value):
        # Deprecated alias for the ``user_agent`` setter.
        warnings.warn(
            _USER_AGENT_ALL_CAPS_DEPRECATED, DeprecationWarning, stacklevel=2)
        self.user_agent = value

    @property
    def user_agent(self):
        """Get / set user agent sent by connection.

        :rtype: str
        :returns: user agent
        """
        return self._client_info.to_user_agent()

    @user_agent.setter
    def user_agent(self, value):
        # Delegates storage to the underlying client_info object.
        self._client_info.user_agent = value

    @property
    def _EXTRA_HEADERS(self):
        """Deprecated: get / set extra headers sent by connection.

        :rtype: dict
        :returns: header keys / values
        """
        warnings.warn(
            _EXTRA_HEADERS_ALL_CAPS_DEPRECATED, DeprecationWarning, stacklevel=2)
        return self.extra_headers

    @_EXTRA_HEADERS.setter
    def _EXTRA_HEADERS(self, value):
        # Deprecated alias for the ``extra_headers`` setter.
        warnings.warn(
            _EXTRA_HEADERS_ALL_CAPS_DEPRECATED, DeprecationWarning, stacklevel=2)
        self.extra_headers = value

    @property
    def extra_headers(self):
        """Get / set extra headers sent by connection.

        :rtype: dict
        :returns: header keys / values
        """
        return self._extra_headers

    @extra_headers.setter
    def extra_headers(self, value):
        self._extra_headers = value

    @property
    def credentials(self):
        """Getter for current credentials.

        :rtype: :class:`google.auth.credentials.Credentials` or
                :class:`NoneType`
        :returns: The credentials object associated with this connection.
        """
        return self._client._credentials

    @property
    def http(self):
        """A getter for the HTTP transport used in talking to the API.

        Returns:
            google.auth.transport.requests.AuthorizedSession:
                A :class:`requests.Session` instance.
        """
        return self._client._http
class JSONConnection(Connection):
    """A connection to a Google JSON-based API.

    These APIs are discovery based. For reference:
    https://developers.google.com/discovery/

    This defines :meth:`api_request` for making a generic JSON
    API request and API requests are created elsewhere.

    * :attr:`API_BASE_URL`
    * :attr:`API_VERSION`
    * :attr:`API_URL_TEMPLATE`

    must be updated by subclasses.
    """

    API_BASE_URL = None
    """The base of the API call URL."""

    API_VERSION = None
    """The version of the API, used in building the API call's URL."""

    API_URL_TEMPLATE = None
    """A template for the URL of a particular API call."""

    def build_api_url(
        self, path, query_params=None, api_base_url=None, api_version=None
    ):
        """Construct an API url given a few components, some optional.

        Typically, you shouldn't need to use this method.

        :type path: str
        :param path: The path to the resource (ie, ``'/b/bucket-name'``).

        :type query_params: dict or list
        :param query_params: A dictionary of keys and values (or list of
                             key-value pairs) to insert into the query
                             string of the URL.

        :type api_base_url: str
        :param api_base_url: The base URL for the API endpoint.
                             Typically you won't have to provide this.

        :type api_version: str
        :param api_version: The version of the API to call.
                            Typically you shouldn't provide this and instead
                            use the default for the library.

        :rtype: str
        :returns: The URL assembled from the pieces provided.
        """
        url = self.API_URL_TEMPLATE.format(
            api_base_url=(api_base_url or self.API_BASE_URL),
            api_version=(api_version or self.API_VERSION),
            path=path,
        )

        query_params = query_params or {}

        if isinstance(query_params, collections_abc.Mapping):
            # Copy so the caller's mapping is not mutated by setdefault.
            query_params = query_params.copy()
        else:
            # A list of (key, value) pairs may repeat keys; fold them into
            # a dict of lists so ``urlencode(..., doseq=True)`` expands them.
            query_params_dict = collections.defaultdict(list)
            for key, value in query_params:
                query_params_dict[key].append(value)
            query_params = query_params_dict

        query_params.setdefault("prettyPrint", "false")

        url += "?" + urlencode(query_params, doseq=True)

        return url

    def _make_request(
        self,
        method,
        url,
        data=None,
        content_type=None,
        headers=None,
        target_object=None,
        timeout=_DEFAULT_TIMEOUT,
    ):
        """A low level method to send a request to the API.

        Typically, you shouldn't need to use this method.

        :type method: str
        :param method: The HTTP method to use in the request.

        :type url: str
        :param url: The URL to send the request to.

        :type data: str
        :param data: The data to send as the body of the request.

        :type content_type: str
        :param content_type: The proper MIME type of the data provided.

        :type headers: dict
        :param headers: (Optional) A dictionary of HTTP headers to send with
                        the request. If passed, will be modified directly
                        here with added headers.

        :type target_object: object
        :param target_object:
            (Optional) Argument to be used by library callers. This can allow
            custom behavior, for example, to defer an HTTP request and
            complete initialization of the object at a later time.

        :type timeout: float or tuple
        :param timeout: (optional) The amount of time, in seconds, to wait
            for the server response.

            Can also be passed as a tuple (connect_timeout, read_timeout).
            See :meth:`requests.Session.request` documentation for details.

        :rtype: :class:`requests.Response`
        :returns: The HTTP response.
        """
        headers = headers or {}
        headers.update(self.extra_headers)
        headers["Accept-Encoding"] = "gzip"

        if content_type:
            headers["Content-Type"] = content_type

        # Report client library / language versions alongside the UA string.
        headers[CLIENT_INFO_HEADER] = self.user_agent
        headers["User-Agent"] = self.user_agent

        return self._do_request(
            method, url, headers, data, target_object, timeout=timeout
        )

    def _do_request(
        self, method, url, headers, data, target_object, timeout=_DEFAULT_TIMEOUT
    ):  # pylint: disable=unused-argument
        """Low-level helper: perform the actual API request over HTTP.

        Allows batch context managers to override and defer a request.

        :type method: str
        :param method: The HTTP method to use in the request.

        :type url: str
        :param url: The URL to send the request to.

        :type headers: dict
        :param headers: A dictionary of HTTP headers to send with the
                        request.

        :type data: str
        :param data: The data to send as the body of the request.

        :type target_object: object
        :param target_object:
            (Optional) Unused ``target_object`` here but may be used by a
            superclass.

        :type timeout: float or tuple
        :param timeout: (optional) The amount of time, in seconds, to wait
            for the server response.

            Can also be passed as a tuple (connect_timeout, read_timeout).
            See :meth:`requests.Session.request` documentation for details.

        :rtype: :class:`requests.Response`
        :returns: The HTTP response.
        """
        return self.http.request(
            url=url, method=method, headers=headers, data=data, timeout=timeout
        )

    def api_request(
        self,
        method,
        path,
        query_params=None,
        data=None,
        content_type=None,
        headers=None,
        api_base_url=None,
        api_version=None,
        expect_json=True,
        _target_object=None,
        timeout=_DEFAULT_TIMEOUT,
    ):
        """Make a request over the HTTP transport to the API.

        You shouldn't need to use this method, but if you plan to
        interact with the API using these primitives, this is the
        correct one to use.

        :type method: str
        :param method: The HTTP method name (ie, ``GET``, ``POST``, etc).
                       Required.

        :type path: str
        :param path: The path to the resource (ie, ``'/b/bucket-name'``).
                     Required.

        :type query_params: dict or list
        :param query_params: A dictionary of keys and values (or list of
                             key-value pairs) to insert into the query
                             string of the URL.

        :type data: str
        :param data: The data to send as the body of the request. Default is
                     the empty string.

        :type content_type: str
        :param content_type: The proper MIME type of the data provided.
                             Default is None.

        :type headers: dict
        :param headers: extra HTTP headers to be sent with the request.

        :type api_base_url: str
        :param api_base_url: The base URL for the API endpoint.
                             Typically you won't have to provide this.
                             Default is the standard API base URL.

        :type api_version: str
        :param api_version: The version of the API to call. Typically
                            you shouldn't provide this and instead use
                            the default for the library. Default is the
                            latest API version supported by
                            google-cloud-python.

        :type expect_json: bool
        :param expect_json: If True, this method will try to parse the
                            response as JSON and raise an exception if
                            that cannot be done. Default is True.

        :type _target_object: :class:`object`
        :param _target_object:
            (Optional) Protected argument to be used by library callers. This
            can allow custom behavior, for example, to defer an HTTP request
            and complete initialization of the object at a later time.

        :type timeout: float or tuple
        :param timeout: (optional) The amount of time, in seconds, to wait
            for the server response.

            Can also be passed as a tuple (connect_timeout, read_timeout).
            See :meth:`requests.Session.request` documentation for details.

        :raises ~google.cloud.exceptions.GoogleCloudError: if the response
                    code is not 200 OK.
        :raises ValueError: if the response content type is not JSON.
        :rtype: dict or str
        :returns: The API response payload, either as a raw string or
                  a dictionary if the response is valid JSON.
        """
        url = self.build_api_url(
            path=path,
            query_params=query_params,
            api_base_url=api_base_url,
            api_version=api_version,
        )

        # Making the executive decision that any dictionary
        # data will be sent properly as JSON.
        if data and isinstance(data, dict):
            data = json.dumps(data)
            content_type = "application/json"

        response = self._make_request(
            method=method,
            url=url,
            data=data,
            content_type=content_type,
            headers=headers,
            target_object=_target_object,
            timeout=timeout,
        )

        # Any non-2xx status is surfaced as a GoogleCloudError subclass.
        if not 200 <= response.status_code < 300:
            raise exceptions.from_http_response(response)

        if expect_json and response.content:
            return response.json()
        else:
            return response.content

View file

@ -0,0 +1,126 @@
# Copyright 2014 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Shared testing utilities."""
from __future__ import absolute_import
class _Monkey(object):
"""Context-manager for replacing module names in the scope of a test."""
def __init__(self, module, **kw):
self.module = module
if not kw: # pragma: NO COVER
raise ValueError("_Monkey was used with nothing to monkey-patch")
self.to_restore = {key: getattr(module, key) for key in kw}
for key, value in kw.items():
setattr(module, key, value)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
for key, value in self.to_restore.items():
setattr(self.module, key, value)
class _NamedTemporaryFile(object):
def __init__(self, suffix=""):
import os
import tempfile
filehandle, self.name = tempfile.mkstemp(suffix=suffix)
os.close(filehandle)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
import os
os.remove(self.name)
def _tempdir_maker():
import contextlib
import shutil
import tempfile
@contextlib.contextmanager
def _tempdir_mgr():
temp_dir = tempfile.mkdtemp()
yield temp_dir
shutil.rmtree(temp_dir)
return _tempdir_mgr
# pylint: disable=invalid-name
# Retain _tempdir as a constant for backwards compatibility despite
# being an invalid name.  Calling ``_tempdir()`` yields a scratch directory
# that is removed when the ``with`` block exits.
_tempdir = _tempdir_maker()
del _tempdir_maker
# pylint: enable=invalid-name
class _GAXBaseAPI(object):
    """Fake GAX API object for tests.

    Records arbitrary constructor keyword arguments as instance attributes
    and provides helpers for fabricating gRPC errors.
    """

    # Flag tests can flip to simulate a random GAX failure.
    _random_gax_error = False

    def __init__(self, **kw):
        self.__dict__.update(kw)

    @staticmethod
    def _make_grpc_error(status_code, trailing=None):
        """Build a ``GrpcRendezvous`` error carrying ``status_code``.

        NOTE(review): relies on the private ``grpc._channel._RPCState``
        constructor — fragile across grpc versions; confirm when upgrading.
        """
        from grpc._channel import _RPCState
        from google.cloud.exceptions import GrpcRendezvous

        details = "Some error details."
        exc_state = _RPCState((), None, trailing, status_code, details)
        return GrpcRendezvous(exc_state, None, None, None)

    def _make_grpc_not_found(self):
        """Return a fabricated NOT_FOUND gRPC error."""
        from grpc import StatusCode

        return self._make_grpc_error(StatusCode.NOT_FOUND)

    def _make_grpc_failed_precondition(self):
        """Return a fabricated FAILED_PRECONDITION gRPC error."""
        from grpc import StatusCode

        return self._make_grpc_error(StatusCode.FAILED_PRECONDITION)

    def _make_grpc_already_exists(self):
        """Return a fabricated ALREADY_EXISTS gRPC error."""
        from grpc import StatusCode

        return self._make_grpc_error(StatusCode.ALREADY_EXISTS)

    def _make_grpc_deadline_exceeded(self):
        """Return a fabricated DEADLINE_EXCEEDED gRPC error."""
        from grpc import StatusCode

        return self._make_grpc_error(StatusCode.DEADLINE_EXCEEDED)
class _GAXPageIterator(object):
def __init__(self, *pages, **kwargs):
self._pages = iter(pages)
self.page_token = kwargs.get("page_token")
def next(self):
"""Iterate to the next page."""
import six
return six.next(self._pages)
__next__ = next

View file

@ -0,0 +1,250 @@
# Copyright 2015 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base classes for client used to interact with Google Cloud APIs."""
import io
import json
from pickle import PicklingError
import six
import google.api_core.client_options
import google.api_core.exceptions
import google.auth
import google.auth.credentials
import google.auth.transport.requests
from google.cloud._helpers import _determine_default_project
from google.oauth2 import service_account
# Error message raised when a credentials object not based on google-auth
# is passed to a client constructor.
_GOOGLE_AUTH_CREDENTIALS_HELP = (
    "This library only supports credentials from google-auth-library-python. "
    "See https://google-auth.readthedocs.io/en/latest/ "
    "for help on authentication with this library."
)

# Default timeout for auth requests (seconds).
_CREDENTIALS_REFRESH_TIMEOUT = 300
class _ClientFactoryMixin(object):
"""Mixin to allow factories that create credentials.
.. note::
This class is virtual.
"""
_SET_PROJECT = False
@classmethod
def from_service_account_json(cls, json_credentials_path, *args, **kwargs):
"""Factory to retrieve JSON credentials while creating client.
:type json_credentials_path: str
:param json_credentials_path: The path to a private key file (this file
was given to you when you created the
service account). This file must contain
a JSON object with a private key and
other credentials information (downloaded
from the Google APIs console).
:type args: tuple
:param args: Remaining positional arguments to pass to constructor.
:param kwargs: Remaining keyword arguments to pass to constructor.
:rtype: :class:`_ClientFactoryMixin`
:returns: The client created with the retrieved JSON credentials.
:raises TypeError: if there is a conflict with the kwargs
and the credentials created by the factory.
"""
if "credentials" in kwargs:
raise TypeError("credentials must not be in keyword arguments")
with io.open(json_credentials_path, "r", encoding="utf-8") as json_fi:
credentials_info = json.load(json_fi)
credentials = service_account.Credentials.from_service_account_info(
credentials_info
)
if cls._SET_PROJECT:
if "project" not in kwargs:
kwargs["project"] = credentials_info.get("project_id")
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
class Client(_ClientFactoryMixin):
    """Client to bundle configuration needed for API requests.

    Stores ``credentials`` and an HTTP object so that subclasses
    can pass them along to a connection class.

    If no value is passed in for ``_http``, a :class:`requests.Session` object
    will be created and authorized with the ``credentials``. If not, the
    ``credentials`` and ``_http`` need not be related.

    Callers and subclasses may seek to use the private key from
    ``credentials`` to sign data.

    Args:
        credentials (google.auth.credentials.Credentials):
            (Optional) The OAuth2 Credentials to use for this client. If not
            passed (and if no ``_http`` object is passed), falls back to the
            default inferred from the environment.
        client_options (google.api_core.client_options.ClientOptions):
            (Optional) Custom options for the client.
        _http (requests.Session):
            (Optional) HTTP object to make requests. Can be any object that
            defines ``request()`` with the same interface as
            :meth:`requests.Session.request`. If not passed, an ``_http``
            object is created that is bound to the ``credentials`` for the
            current object.
            This parameter should be considered private, and could change in
            the future.

    Raises:
        google.auth.exceptions.DefaultCredentialsError:
            Raised if ``credentials`` is not specified and the library fails
            to acquire default credentials.
    """

    SCOPE = None
    """The scopes required for authenticating with a service.

    Needs to be set by subclasses.
    """

    def __init__(self, credentials=None, _http=None, client_options=None):
        # Normalize ``client_options``: accept a plain dict or None.
        if isinstance(client_options, dict):
            client_options = google.api_core.client_options.from_dict(client_options)
        if client_options is None:
            client_options = google.api_core.client_options.ClientOptions()

        # Explicit credentials and a credentials file are competing
        # sources of identity; refuse ambiguous input.
        if credentials and client_options.credentials_file:
            raise google.api_core.exceptions.DuplicateCredentialArgs(
                "'credentials' and 'client_options.credentials_file' are mutually exclusive.")

        if credentials and not isinstance(credentials, google.auth.credentials.Credentials):
            raise ValueError(_GOOGLE_AUTH_CREDENTIALS_HELP)

        scopes = client_options.scopes or self.SCOPE

        # if no http is provided, credentials must exist
        if not _http and credentials is None:
            if client_options.credentials_file:
                credentials, _ = google.auth.load_credentials_from_file(
                    client_options.credentials_file, scopes=scopes)
            else:
                credentials, _ = google.auth.default(scopes=scopes)

        self._credentials = google.auth.credentials.with_scopes_if_required(
            credentials, scopes=scopes)

        if client_options.quota_project_id:
            self._credentials = self._credentials.with_quota_project(client_options.quota_project_id)

        self._http_internal = _http

    def __getstate__(self):
        """Explicitly state that clients are not pickleable."""
        raise PicklingError(
            "\n".join(
                [
                    "Pickling client objects is explicitly not supported.",
                    "Clients have non-trivial state that is local and unpickleable.",
                ]
            )
        )

    @property
    def _http(self):
        """Getter for object used for HTTP transport.

        :rtype: :class:`~requests.Session`
        :returns: An HTTP object.
        """
        # Lazily build (and cache) an authorized session bound to the
        # client's credentials.
        if self._http_internal is None:
            self._http_internal = google.auth.transport.requests.AuthorizedSession(
                self._credentials,
                refresh_timeout=_CREDENTIALS_REFRESH_TIMEOUT,
            )
        return self._http_internal
class _ClientProjectMixin(object):
    """Mixin that resolves and stores a ``project`` on the client.

    :type project: str
    :param project: the project which the client acts on behalf of. If not
                    passed, falls back to the default inferred from the
                    environment.

    :raises: :class:`EnvironmentError` if the project is neither passed in
             nor set in the environment; :class:`ValueError` if the
             resolved project value is not a string.
    """

    def __init__(self, project=None):
        resolved = self._determine_default(project)
        if resolved is None:
            raise EnvironmentError(
                "Project was not passed and could not be "
                "determined from the environment."
            )
        # Normalize bytes to text before the string-type check below.
        if isinstance(resolved, six.binary_type):
            resolved = resolved.decode("utf-8")
        if not isinstance(resolved, six.string_types):
            raise ValueError("Project must be a string.")
        self.project = resolved

    @staticmethod
    def _determine_default(project):
        """Helper: use default project detection."""
        return _determine_default_project(project)
class ClientWithProject(Client, _ClientProjectMixin):
    """Client that also stores a project.

    :type project: str
    :param project: the project which the client acts on behalf of. If not
                    passed, falls back to the default inferred from the
                    environment.

    :type credentials: :class:`~google.auth.credentials.Credentials`
    :param credentials: (Optional) The OAuth2 Credentials to use for this
                        client. If not passed (and if no ``_http`` object
                        is passed), falls back to the default inferred
                        from the environment.

    :type _http: :class:`~requests.Session`
    :param _http: (Optional) HTTP object to make requests. Can be any
                  object that defines ``request()`` with the same
                  interface as :meth:`~requests.Session.request`. If not
                  passed, an ``_http`` object is created that is bound to
                  the ``credentials`` for the current object.
                  This parameter should be considered private, and could
                  change in the future.

    :raises: :class:`ValueError` if the project is neither passed in nor
             set in the environment.
    """

    _SET_PROJECT = True  # Used by from_service_account_json()

    def __init__(self, project=None, credentials=None, client_options=None, _http=None):
        # Resolve and validate the project first, then run the base
        # Client setup (credentials + transport).
        _ClientProjectMixin.__init__(self, project=project)
        Client.__init__(
            self, credentials=credentials, _http=_http, client_options=client_options
        )

View file

@ -0,0 +1,38 @@
# Copyright 2015 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Comprehensive list of environment variables used in google-cloud.
These enable many types of implicit behavior in both production
and tests.
"""
GCD_DATASET = "DATASTORE_DATASET"
"""Environment variable defining default dataset ID under GCD."""
GCD_HOST = "DATASTORE_EMULATOR_HOST"
"""Environment variable defining host for GCD dataset server."""
PUBSUB_EMULATOR = "PUBSUB_EMULATOR_HOST"
"""Environment variable defining host for Pub/Sub emulator."""
BIGTABLE_EMULATOR = "BIGTABLE_EMULATOR_HOST"
"""Environment variable defining host for Bigtable emulator."""
DISABLE_GRPC = "GOOGLE_CLOUD_DISABLE_GRPC"
"""Environment variable acting as flag to disable gRPC.
To be used for APIs where both an HTTP and gRPC implementation
exist.
"""

View file

@ -0,0 +1,59 @@
# Copyright 2014 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=invalid-name
# pylint recognizies all of these aliases as constants and thinks they have
# invalid names.
"""Custom exceptions for :mod:`google.cloud` package."""
# Avoid the grpc and google.cloud.grpc collision.
from __future__ import absolute_import
from google.api_core import exceptions
try:
    from grpc._channel import _Rendezvous
except ImportError:  # pragma: NO COVER
    # grpc is an optional dependency; keep the alias defined either way
    # so callers can test ``GrpcRendezvous is None``.
    _Rendezvous = None

GrpcRendezvous = _Rendezvous
"""Exception class raised by gRPC stable."""
# Aliases to moved classes.
# These names used to be defined in this package; they now point at the
# equivalents in ``google.api_core.exceptions`` for backwards compatibility.
GoogleCloudError = exceptions.GoogleAPICallError
Redirection = exceptions.Redirection
MovedPermanently = exceptions.MovedPermanently
NotModified = exceptions.NotModified
TemporaryRedirect = exceptions.TemporaryRedirect
ResumeIncomplete = exceptions.ResumeIncomplete
ClientError = exceptions.ClientError
BadRequest = exceptions.BadRequest
Unauthorized = exceptions.Unauthorized
Forbidden = exceptions.Forbidden
NotFound = exceptions.NotFound
MethodNotAllowed = exceptions.MethodNotAllowed
Conflict = exceptions.Conflict
LengthRequired = exceptions.LengthRequired
PreconditionFailed = exceptions.PreconditionFailed
RequestRangeNotSatisfiable = exceptions.RequestRangeNotSatisfiable
TooManyRequests = exceptions.TooManyRequests
ServerError = exceptions.ServerError
InternalServerError = exceptions.InternalServerError
MethodNotImplemented = exceptions.MethodNotImplemented
BadGateway = exceptions.BadGateway
ServiceUnavailable = exceptions.ServiceUnavailable
GatewayTimeout = exceptions.GatewayTimeout

# Helper factories, likewise re-exported from ``google.api_core``.
from_http_status = exceptions.from_http_status
from_http_response = exceptions.from_http_response

View file

@ -0,0 +1,69 @@
# Copyright 2017 Google LLC All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Python idiomatic client for Google Cloud Firestore."""
from google.cloud.firestore_v1 import __version__
from google.cloud.firestore_v1 import ArrayRemove
from google.cloud.firestore_v1 import ArrayUnion
from google.cloud.firestore_v1 import Client
from google.cloud.firestore_v1 import CollectionReference
from google.cloud.firestore_v1 import DELETE_FIELD
from google.cloud.firestore_v1 import DocumentReference
from google.cloud.firestore_v1 import DocumentSnapshot
from google.cloud.firestore_v1 import enums
from google.cloud.firestore_v1 import ExistsOption
from google.cloud.firestore_v1 import GeoPoint
from google.cloud.firestore_v1 import Increment
from google.cloud.firestore_v1 import LastUpdateOption
from google.cloud.firestore_v1 import Maximum
from google.cloud.firestore_v1 import Minimum
from google.cloud.firestore_v1 import Query
from google.cloud.firestore_v1 import ReadAfterWriteError
from google.cloud.firestore_v1 import SERVER_TIMESTAMP
from google.cloud.firestore_v1 import Transaction
from google.cloud.firestore_v1 import transactional
from google.cloud.firestore_v1 import types
from google.cloud.firestore_v1 import Watch
from google.cloud.firestore_v1 import WriteBatch
from google.cloud.firestore_v1 import WriteOption
# Public surface of this package, re-exported from
# ``google.cloud.firestore_v1`` by the imports above.
__all__ = [
    "__version__",
    "ArrayRemove",
    "ArrayUnion",
    "Client",
    "CollectionReference",
    "DELETE_FIELD",
    "DocumentReference",
    "DocumentSnapshot",
    "enums",
    "ExistsOption",
    "GeoPoint",
    "Increment",
    "LastUpdateOption",
    "Maximum",
    "Minimum",
    "Query",
    "ReadAfterWriteError",
    "SERVER_TIMESTAMP",
    "Transaction",
    "transactional",
    "types",
    "Watch",
    "WriteBatch",
    "WriteOption",
]

View file

@ -0,0 +1,45 @@
# -*- coding: utf-8 -*-
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import sys
import warnings
from google.cloud.firestore_admin_v1 import types
from google.cloud.firestore_admin_v1.gapic import enums
from google.cloud.firestore_admin_v1.gapic import firestore_admin_client
# Emit a one-time, import-side warning on Python 2.7: support is sunsetting.
if sys.version_info[:2] == (2, 7):
    message = (
        "A future version of this library will drop support for Python 2.7. "
        "More details about Python 2 support for Google Cloud Client Libraries "
        "can be found at https://cloud.google.com/python/docs/python2-sunset/"
    )
    warnings.warn(message, DeprecationWarning)
class FirestoreAdminClient(firestore_admin_client.FirestoreAdminClient):
    # Thin alias class re-exporting the generated GAPIC client under this
    # package, with the ``enums`` module attached for convenience.
    __doc__ = firestore_admin_client.FirestoreAdminClient.__doc__
    enums = enums


__all__ = (
    "enums",
    "types",
    "FirestoreAdminClient",
)

View file

@ -0,0 +1,142 @@
# -*- coding: utf-8 -*-
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Wrappers for protocol buffer enum types."""
import enum
class OperationState(enum.IntEnum):
    """
    Describes the state of the operation.

    Attributes:
      OPERATION_STATE_UNSPECIFIED (int): Unspecified.
      INITIALIZING (int): Request is being prepared for processing.
      PROCESSING (int): Request is actively being processed.
      CANCELLING (int): Request is in the process of being cancelled after user called
      google.longrunning.Operations.CancelOperation on the operation.
      FINALIZING (int): Request has been processed and is in its finalization stage.
      SUCCESSFUL (int): Request has completed successfully.
      FAILED (int): Request has finished being processed, but encountered an error.
      CANCELLED (int): Request has finished being cancelled after user called
      google.longrunning.Operations.CancelOperation.
    """

    # Values mirror the google.firestore.admin.v1 OperationState proto enum.
    OPERATION_STATE_UNSPECIFIED = 0
    INITIALIZING = 1
    PROCESSING = 2
    CANCELLING = 3
    FINALIZING = 4
    SUCCESSFUL = 5
    FAILED = 6
    CANCELLED = 7
class FieldOperationMetadata(object):
    """Namespace container for enums nested under FieldOperationMetadata."""

    class IndexConfigDelta(object):
        """Namespace container for the ``ChangeType`` enum."""

        class ChangeType(enum.IntEnum):
            """
            Specifies how the index is changing.

            Attributes:
              CHANGE_TYPE_UNSPECIFIED (int): The type of change is not specified or known.
              ADD (int): The single field index is being added.
              REMOVE (int): The single field index is being removed.
            """

            CHANGE_TYPE_UNSPECIFIED = 0
            ADD = 1
            REMOVE = 2
class Index(object):
    """Namespace container for enums nested under the Index proto message."""

    class QueryScope(enum.IntEnum):
        """
        Query Scope defines the scope at which a query is run. This is
        specified on a StructuredQuery's ``from`` field.

        Attributes:
          QUERY_SCOPE_UNSPECIFIED (int): The query scope is unspecified. Not a valid option.
          COLLECTION (int): Indexes with a collection query scope specified allow queries
          against a collection that is the child of a specific document, specified
          at query time, and that has the collection id specified by the index.
          COLLECTION_GROUP (int): Indexes with a collection group query scope specified allow queries
          against all collections that has the collection id specified by the
          index.
        """

        QUERY_SCOPE_UNSPECIFIED = 0
        COLLECTION = 1
        COLLECTION_GROUP = 2

    class State(enum.IntEnum):
        """
        The state of an index. During index creation, an index will be in
        the ``CREATING`` state. If the index is created successfully, it will
        transition to the ``READY`` state. If the index creation encounters a
        problem, the index will transition to the ``NEEDS_REPAIR`` state.

        Attributes:
          STATE_UNSPECIFIED (int): The state is unspecified.
          CREATING (int): The index is being created.
          There is an active long-running operation for the index.
          The index is updated when writing a document.
          Some index data may exist.
          READY (int): The index is ready to be used.
          The index is updated when writing a document.
          The index is fully populated from all stored documents it applies to.
          NEEDS_REPAIR (int): The index was being created, but something went wrong.
          There is no active long-running operation for the index,
          and the most recently finished long-running operation failed.
          The index is not updated when writing a document.
          Some index data may exist.
          Use the google.longrunning.Operations API to determine why the operation
          that last attempted to create this index failed, then re-create the
          index.
        """

        STATE_UNSPECIFIED = 0
        CREATING = 1
        READY = 2
        NEEDS_REPAIR = 3
class IndexField(object):
    """Namespace container for enums nested under the IndexField proto message."""

    class ArrayConfig(enum.IntEnum):
        """
        The supported array value configurations.

        Attributes:
          ARRAY_CONFIG_UNSPECIFIED (int): The index does not support additional array queries.
          CONTAINS (int): The index supports array containment queries.
        """

        ARRAY_CONFIG_UNSPECIFIED = 0
        CONTAINS = 1

    class Order(enum.IntEnum):
        """
        The supported orderings.

        Attributes:
          ORDER_UNSPECIFIED (int): The ordering is unspecified. Not a valid option.
          ASCENDING (int): The field is ordered by ascending field value.
          DESCENDING (int): The field is ordered by descending field value.
        """

        ORDER_UNSPECIFIED = 0
        ASCENDING = 1
        DESCENDING = 2

View file

@ -0,0 +1,69 @@
# GAPIC client configuration for FirestoreAdmin: retry classifications,
# exponential-backoff parameters, and per-method timeout/retry bindings.
config = {
    "interfaces": {
        "google.firestore.admin.v1.FirestoreAdmin": {
            # Which gRPC status codes are retried for each class of call.
            "retry_codes": {
                "idempotent": ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"],
                "non_idempotent": [],
                "idempotent2": ["DEADLINE_EXCEEDED", "UNAVAILABLE"],
            },
            # Exponential backoff settings; all values in milliseconds
            # except the unitless multipliers.
            "retry_params": {
                "default": {
                    "initial_retry_delay_millis": 100,
                    "retry_delay_multiplier": 1.3,
                    "max_retry_delay_millis": 60000,
                    "initial_rpc_timeout_millis": 60000,
                    "rpc_timeout_multiplier": 1.0,
                    "max_rpc_timeout_millis": 60000,
                    "total_timeout_millis": 600000,
                }
            },
            # Per-RPC bindings of timeout + retry class + backoff params.
            "methods": {
                "DeleteIndex": {
                    "timeout_millis": 60000,
                    "retry_codes_name": "idempotent",
                    "retry_params_name": "default",
                },
                "UpdateField": {
                    "timeout_millis": 60000,
                    "retry_codes_name": "non_idempotent",
                    "retry_params_name": "default",
                },
                "CreateIndex": {
                    "timeout_millis": 60000,
                    "retry_codes_name": "non_idempotent",
                    "retry_params_name": "default",
                },
                "ListIndexes": {
                    "timeout_millis": 60000,
                    "retry_codes_name": "idempotent2",
                    "retry_params_name": "default",
                },
                "GetIndex": {
                    "timeout_millis": 60000,
                    "retry_codes_name": "idempotent2",
                    "retry_params_name": "default",
                },
                "GetField": {
                    "timeout_millis": 60000,
                    "retry_codes_name": "idempotent2",
                    "retry_params_name": "default",
                },
                "ListFields": {
                    "timeout_millis": 60000,
                    "retry_codes_name": "idempotent2",
                    "retry_params_name": "default",
                },
                "ExportDocuments": {
                    "timeout_millis": 60000,
                    "retry_codes_name": "non_idempotent",
                    "retry_params_name": "default",
                },
                "ImportDocuments": {
                    "timeout_millis": 60000,
                    "retry_codes_name": "non_idempotent",
                    "retry_params_name": "default",
                },
            },
        }
    }
}

View file

@ -0,0 +1,269 @@
# -*- coding: utf-8 -*-
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import google.api_core.grpc_helpers
import google.api_core.operations_v1
from google.cloud.firestore_admin_v1.proto import firestore_admin_pb2_grpc
class FirestoreAdminGrpcTransport(object):
    """gRPC transport class providing stubs for
    google.firestore.admin.v1 FirestoreAdmin API.

    The transport provides access to the raw gRPC stubs,
    which can be used to take advantage of advanced
    features of gRPC.
    """

    # The scopes needed to make gRPC calls to all of the methods defined
    # in this service.
    _OAUTH_SCOPES = (
        "https://www.googleapis.com/auth/cloud-platform",
        "https://www.googleapis.com/auth/datastore",
    )

    def __init__(
        self, channel=None, credentials=None, address="firestore.googleapis.com:443"
    ):
        """Instantiate the transport class.

        Args:
            channel (grpc.Channel): A ``Channel`` instance through
                which to make calls. This argument is mutually exclusive
                with ``credentials``; providing both will raise an exception.
            credentials (google.auth.credentials.Credentials): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            address (str): The address where the service is hosted.
        """
        # If both `channel` and `credentials` are specified, raise an
        # exception (channels come with credentials baked in already).
        if channel is not None and credentials is not None:
            raise ValueError(
                "The `channel` and `credentials` arguments are mutually " "exclusive.",
            )

        # Create the channel.  Message-size limits are disabled (-1) so
        # large admin responses are not rejected by the gRPC layer.
        if channel is None:
            channel = self.create_channel(
                address=address,
                credentials=credentials,
                options={
                    "grpc.max_send_message_length": -1,
                    "grpc.max_receive_message_length": -1,
                }.items(),
            )

        self._channel = channel

        # gRPC uses objects called "stubs" that are bound to the
        # channel and provide a basic method for each RPC.
        self._stubs = {
            "firestore_admin_stub": firestore_admin_pb2_grpc.FirestoreAdminStub(
                channel
            ),
        }

        # Because this API includes a method that returns a
        # long-running operation (proto: google.longrunning.Operation),
        # instantiate an LRO client.
        self._operations_client = google.api_core.operations_v1.OperationsClient(
            channel
        )

    @classmethod
    def create_channel(
        cls, address="firestore.googleapis.com:443", credentials=None, **kwargs
    ):
        """Create and return a gRPC channel object.

        Args:
            address (str): The host for the channel to use.
            credentials (~.Credentials): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If
                none are specified, the client will attempt to ascertain
                the credentials from the environment.
            kwargs (dict): Keyword arguments, which are passed to the
                channel creation.

        Returns:
            grpc.Channel: A gRPC channel object.
        """
        return google.api_core.grpc_helpers.create_channel(
            address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs
        )

    @property
    def channel(self):
        """The gRPC channel used by the transport.

        Returns:
            grpc.Channel: A gRPC channel object.
        """
        return self._channel

    @property
    def delete_index(self):
        """Return the gRPC stub for :meth:`FirestoreAdminClient.delete_index`.

        Deletes a composite index.

        Returns:
            Callable: A callable which accepts the appropriate
                deserialized request object and returns a
                deserialized response object.
        """
        return self._stubs["firestore_admin_stub"].DeleteIndex

    @property
    def update_field(self):
        """Return the gRPC stub for :meth:`FirestoreAdminClient.update_field`.

        Updates a field configuration. Currently, field updates apply only
        to single field index configuration. However, calls to
        ``FirestoreAdmin.UpdateField`` should provide a field mask to avoid
        changing any configuration that the caller isn't aware of. The field
        mask should be specified as: ``{ paths: "index_config" }``.

        This call returns a ``google.longrunning.Operation`` which may be used
        to track the status of the field update. The metadata for the operation
        will be the type ``FieldOperationMetadata``.

        To configure the default field settings for the database, use the
        special ``Field`` with resource name:
        ``projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*``.

        Returns:
            Callable: A callable which accepts the appropriate
                deserialized request object and returns a
                deserialized response object.
        """
        return self._stubs["firestore_admin_stub"].UpdateField

    @property
    def create_index(self):
        """Return the gRPC stub for :meth:`FirestoreAdminClient.create_index`.

        Creates a composite index. This returns a
        ``google.longrunning.Operation`` which may be used to track the status
        of the creation. The metadata for the operation will be the type
        ``IndexOperationMetadata``.

        Returns:
            Callable: A callable which accepts the appropriate
                deserialized request object and returns a
                deserialized response object.
        """
        return self._stubs["firestore_admin_stub"].CreateIndex

    @property
    def list_indexes(self):
        """Return the gRPC stub for :meth:`FirestoreAdminClient.list_indexes`.

        Lists composite indexes.

        Returns:
            Callable: A callable which accepts the appropriate
                deserialized request object and returns a
                deserialized response object.
        """
        return self._stubs["firestore_admin_stub"].ListIndexes

    @property
    def get_index(self):
        """Return the gRPC stub for :meth:`FirestoreAdminClient.get_index`.

        Gets a composite index.

        Returns:
            Callable: A callable which accepts the appropriate
                deserialized request object and returns a
                deserialized response object.
        """
        return self._stubs["firestore_admin_stub"].GetIndex

    @property
    def get_field(self):
        """Return the gRPC stub for :meth:`FirestoreAdminClient.get_field`.

        Gets the metadata and configuration for a Field.

        Returns:
            Callable: A callable which accepts the appropriate
                deserialized request object and returns a
                deserialized response object.
        """
        return self._stubs["firestore_admin_stub"].GetField

    @property
    def list_fields(self):
        """Return the gRPC stub for :meth:`FirestoreAdminClient.list_fields`.

        Lists the field configuration and metadata for this database.

        Currently, ``FirestoreAdmin.ListFields`` only supports listing fields
        that have been explicitly overridden. To issue this query, call
        ``FirestoreAdmin.ListFields`` with the filter set to
        ``indexConfig.usesAncestorConfig:false``.

        Returns:
            Callable: A callable which accepts the appropriate
                deserialized request object and returns a
                deserialized response object.
        """
        return self._stubs["firestore_admin_stub"].ListFields

    @property
    def export_documents(self):
        """Return the gRPC stub for :meth:`FirestoreAdminClient.export_documents`.

        Exports a copy of all or a subset of documents from Google Cloud Firestore
        to another storage system, such as Google Cloud Storage. Recent updates to
        documents may not be reflected in the export. The export occurs in the
        background and its progress can be monitored and managed via the
        Operation resource that is created. The output of an export may only be
        used once the associated operation is done. If an export operation is
        cancelled before completion it may leave partial data behind in Google
        Cloud Storage.

        Returns:
            Callable: A callable which accepts the appropriate
                deserialized request object and returns a
                deserialized response object.
        """
        return self._stubs["firestore_admin_stub"].ExportDocuments

    @property
    def import_documents(self):
        """Return the gRPC stub for :meth:`FirestoreAdminClient.import_documents`.

        Imports documents into Google Cloud Firestore. Existing documents with the
        same name are overwritten. The import occurs in the background and its
        progress can be monitored and managed via the Operation resource that is
        created. If an ImportDocuments operation is cancelled, it is possible
        that a subset of the data has already been imported to Cloud Firestore.

        Returns:
            Callable: A callable which accepts the appropriate
                deserialized request object and returns a
                deserialized response object.
        """
        return self._stubs["firestore_admin_stub"].ImportDocuments

View file

@ -0,0 +1,100 @@
// Copyright 2019 Google LLC.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
syntax = "proto3";
package google.firestore.admin.v1;
import "google/api/resource.proto";
import "google/firestore/admin/v1/index.proto";
import "google/api/annotations.proto";
option csharp_namespace = "Google.Cloud.Firestore.Admin.V1";
option go_package = "google.golang.org/genproto/googleapis/firestore/admin/v1;admin";
option java_multiple_files = true;
option java_outer_classname = "FieldProto";
option java_package = "com.google.firestore.admin.v1";
option objc_class_prefix = "GCFS";
option php_namespace = "Google\\Cloud\\Firestore\\Admin\\V1";
option ruby_package = "Google::Cloud::Firestore::Admin::V1";
// Represents a single field in the database.
//
// Fields are grouped by their "Collection Group", which represent all
// collections in the database with the same id.
message Field {
  option (google.api.resource) = {
    type: "firestore.googleapis.com/Field"
    pattern: "projects/{project}/databases/{database}/collectionGroups/{collection}/fields/{field}"
  };
  // The index configuration for this field.
  message IndexConfig {
    // The indexes supported for this field.
    repeated Index indexes = 1;
    // Output only. When true, the `Field`'s index configuration is set from the
    // configuration specified by the `ancestor_field`.
    // When false, the `Field`'s index configuration is defined explicitly.
    bool uses_ancestor_config = 2;
    // Output only. Specifies the resource name of the `Field` from which this field's
    // index configuration is set (when `uses_ancestor_config` is true),
    // or from which it *would* be set if this field had no index configuration
    // (when `uses_ancestor_config` is false).
    string ancestor_field = 3;
    // Output only. When true, the `Field`'s index configuration is in the process
    // of being reverted. Once complete, the index config will transition to the
    // same state as the field specified by `ancestor_field`, at which point
    // `uses_ancestor_config` will be `true` and `reverting` will be `false`.
    bool reverting = 4;
  }
  // A field name of the form
  // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_path}`
  //
  // A field path may be a simple field name, e.g. `address` or a path to fields
  // within map_value , e.g. `address.city`,
  // or a special field path. The only valid special field is `*`, which
  // represents any field.
  //
  // Field paths may be quoted using ` (backtick). The only character that needs
  // to be escaped within a quoted field path is the backtick character itself,
  // escaped using a backslash. Special characters in field paths that
  // must be quoted include: `*`, `.`,
  // ``` (backtick), `[`, `]`, as well as any ascii symbolic characters.
  //
  // Examples:
  // (Note: Comments here are written in markdown syntax, so there is an
  // additional layer of backticks to represent a code block)
  // `\`address.city\`` represents a field named `address.city`, not the map key
  // `city` in the field `address`.
  // `\`*\`` represents a field named `*`, not any field.
  //
  // A special `Field` contains the default indexing settings for all fields.
  // This field's resource name is:
  // `projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*`
  // Indexes defined on this `Field` will be applied to all fields which do not
  // have their own `Field` index configuration.
  string name = 1;
  // The index configuration for this field. If unset, field indexing will
  // revert to the configuration defined by the `ancestor_field`. To
  // explicitly remove all indexes for this field, specify an index config
  // with an empty list of indexes.
  IndexConfig index_config = 2;
}

View file

@@ -0,0 +1,285 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/firestore_admin_v1/proto/field.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.api import resource_pb2 as google_dot_api_dot_resource__pb2
from google.cloud.firestore_admin_v1.proto import (
index_pb2 as google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_index__pb2,
)
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
# File descriptor for google/cloud/firestore_admin_v1/proto/field.proto.
# `serialized_pb` is the wire-encoded FileDescriptorProto emitted by protoc;
# it (and the byte offsets derived from it below) must never be edited by hand.
DESCRIPTOR = _descriptor.FileDescriptor(
    name="google/cloud/firestore_admin_v1/proto/field.proto",
    package="google.firestore.admin.v1",
    syntax="proto3",
    serialized_options=b"\n\035com.google.firestore.admin.v1B\nFieldProtoP\001Z>google.golang.org/genproto/googleapis/firestore/admin/v1;admin\242\002\004GCFS\252\002\037Google.Cloud.Firestore.Admin.V1\312\002\037Google\\Cloud\\Firestore\\Admin\\V1\352\002#Google::Cloud::Firestore::Admin::V1",
    create_key=_descriptor._internal_create_key,
    serialized_pb=b'\n1google/cloud/firestore_admin_v1/proto/field.proto\x12\x19google.firestore.admin.v1\x1a\x19google/api/resource.proto\x1a\x31google/cloud/firestore_admin_v1/proto/index.proto\x1a\x1cgoogle/api/annotations.proto"\xe0\x02\n\x05\x46ield\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x42\n\x0cindex_config\x18\x02 \x01(\x0b\x32,.google.firestore.admin.v1.Field.IndexConfig\x1a\x89\x01\n\x0bIndexConfig\x12\x31\n\x07indexes\x18\x01 \x03(\x0b\x32 .google.firestore.admin.v1.Index\x12\x1c\n\x14uses_ancestor_config\x18\x02 \x01(\x08\x12\x16\n\x0e\x61ncestor_field\x18\x03 \x01(\t\x12\x11\n\treverting\x18\x04 \x01(\x08:y\xea\x41v\n\x1e\x66irestore.googleapis.com/Field\x12Tprojects/{project}/databases/{database}/collectionGroups/{collection}/fields/{field}B\xde\x01\n\x1d\x63om.google.firestore.admin.v1B\nFieldProtoP\x01Z>google.golang.org/genproto/googleapis/firestore/admin/v1;admin\xa2\x02\x04GCFS\xaa\x02\x1fGoogle.Cloud.Firestore.Admin.V1\xca\x02\x1fGoogle\\Cloud\\Firestore\\Admin\\V1\xea\x02#Google::Cloud::Firestore::Admin::V1b\x06proto3',
    dependencies=[
        google_dot_api_dot_resource__pb2.DESCRIPTOR,
        google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_index__pb2.DESCRIPTOR,
        google_dot_api_dot_annotations__pb2.DESCRIPTOR,
    ],
)
# Descriptor for the nested message Field.IndexConfig. Generated by protoc;
# `serialized_start`/`serialized_end` are byte offsets into DESCRIPTOR's
# serialized_pb and are only valid for this exact generated file.
_FIELD_INDEXCONFIG = _descriptor.Descriptor(
    name="IndexConfig",
    full_name="google.firestore.admin.v1.Field.IndexConfig",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    create_key=_descriptor._internal_create_key,
    fields=[
        # repeated Index indexes = 1;
        _descriptor.FieldDescriptor(
            name="indexes",
            full_name="google.firestore.admin.v1.Field.IndexConfig.indexes",
            index=0,
            number=1,
            type=11,
            cpp_type=10,
            label=3,
            has_default_value=False,
            default_value=[],
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        # bool uses_ancestor_config = 2;
        _descriptor.FieldDescriptor(
            name="uses_ancestor_config",
            full_name="google.firestore.admin.v1.Field.IndexConfig.uses_ancestor_config",
            index=1,
            number=2,
            type=8,
            cpp_type=7,
            label=1,
            has_default_value=False,
            default_value=False,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        # string ancestor_field = 3;
        _descriptor.FieldDescriptor(
            name="ancestor_field",
            full_name="google.firestore.admin.v1.Field.IndexConfig.ancestor_field",
            index=2,
            number=3,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=b"".decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        # bool reverting = 4;
        _descriptor.FieldDescriptor(
            name="reverting",
            full_name="google.firestore.admin.v1.Field.IndexConfig.reverting",
            index=3,
            number=4,
            type=8,
            cpp_type=7,
            label=1,
            has_default_value=False,
            default_value=False,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=281,
    serialized_end=418,
)
# Descriptor for the top-level message Field. Generated by protoc; the
# serialized_options blob carries the (google.api.resource) annotation
# declared in field.proto.
_FIELD = _descriptor.Descriptor(
    name="Field",
    full_name="google.firestore.admin.v1.Field",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    create_key=_descriptor._internal_create_key,
    fields=[
        # string name = 1;
        _descriptor.FieldDescriptor(
            name="name",
            full_name="google.firestore.admin.v1.Field.name",
            index=0,
            number=1,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=b"".decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        # Field.IndexConfig index_config = 2; (message_type wired up below)
        _descriptor.FieldDescriptor(
            name="index_config",
            full_name="google.firestore.admin.v1.Field.index_config",
            index=1,
            number=2,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
    ],
    extensions=[],
    nested_types=[_FIELD_INDEXCONFIG,],
    enum_types=[],
    serialized_options=b"\352Av\n\036firestore.googleapis.com/Field\022Tprojects/{project}/databases/{database}/collectionGroups/{collection}/fields/{field}",
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=189,
    serialized_end=541,
)
# Resolve cross-references protoc cannot express at construction time:
# the repeated `indexes` field points at Index from index_pb2, and the
# nested IndexConfig/`index_config` links are tied back to Field. Then the
# completed file descriptor is registered with the symbol database.
_FIELD_INDEXCONFIG.fields_by_name[
    "indexes"
].message_type = (
    google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_index__pb2._INDEX
)
_FIELD_INDEXCONFIG.containing_type = _FIELD
_FIELD.fields_by_name["index_config"].message_type = _FIELD_INDEXCONFIG
DESCRIPTOR.message_types_by_name["Field"] = _FIELD
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Field = _reflection.GeneratedProtocolMessageType(
"Field",
(_message.Message,),
{
"IndexConfig": _reflection.GeneratedProtocolMessageType(
"IndexConfig",
(_message.Message,),
{
"DESCRIPTOR": _FIELD_INDEXCONFIG,
"__module__": "google.cloud.firestore_admin_v1.proto.field_pb2",
"__doc__": """The index configuration for this field.
Attributes:
indexes:
The indexes supported for this field.
uses_ancestor_config:
Output only. When true, the ``Field``\ s index configuration
is set from the configuration specified by the
``ancestor_field``. When false, the ``Field``\ s index
configuration is defined explicitly.
ancestor_field:
Output only. Specifies the resource name of the ``Field`` from
which this fields index configuration is set (when
``uses_ancestor_config`` is true), or from which it *would* be
set if this field had no index configuration (when
``uses_ancestor_config`` is false).
reverting:
Output only When true, the ``Field``\ s index configuration
is in the process of being reverted. Once complete, the index
config will transition to the same state as the field
specified by ``ancestor_field``, at which point
``uses_ancestor_config`` will be ``true`` and ``reverting``
will be ``false``.
""",
# @@protoc_insertion_point(class_scope:google.firestore.admin.v1.Field.IndexConfig)
},
),
"DESCRIPTOR": _FIELD,
"__module__": "google.cloud.firestore_admin_v1.proto.field_pb2",
"__doc__": """Represents a single field in the database. Fields are grouped by
their Collection Group, which represent all collections in the
database with the same id.
Attributes:
name:
A field name of the form ``projects/{project_id}/databases/{da
tabase_id}/collectionGroups/{collection_id}/fields/{field_path
}`` A field path may be a simple field name, e.g. ``address``
or a path to fields within map_value , e.g. ``address.city``,
or a special field path. The only valid special field is
``*``, which represents any field. Field paths may be quoted
using ``(backtick). The only character that needs to be
escaped within a quoted field path is the backtick character
itself, escaped using a backslash. Special characters in field
paths that must be quoted include:``\ \*\ ``,``.\ :literal:`,
``` (backtick),`\ [``,``]`, as well as any ascii symbolic
characters. Examples: (Note: Comments here are written in
markdown syntax, so there is an additional layer of backticks
to represent a code block) ``\``\ address.city\`\ ``represents
a field named``\ address.city\ ``, not the map key``\ city\
``in the field``\ address\ ``.``\ \`\ *\`\ ``represents a
field named``*\ \`, not any field. A special ``Field``
contains the default indexing settings for all fields. This
fields resource name is: ``projects/{project_id}/databases/{d
atabase_id}/collectionGroups/__default__/fields/*`` Indexes
defined on this ``Field`` will be applied to all fields which
do not have their own ``Field`` index configuration.
index_config:
The index configuration for this field. If unset, field
indexing will revert to the configuration defined by the
``ancestor_field``. To explicitly remove all indexes for this
field, specify an index config with an empty list of indexes.
""",
# @@protoc_insertion_point(class_scope:google.firestore.admin.v1.Field)
},
)
# Register the generated classes so SymbolDatabase lookups (and any later
# imports of the same proto) resolve to these exact types, then clear the
# cached Python-level options now that they are baked into the descriptors.
_sym_db.RegisterMessage(Field)
_sym_db.RegisterMessage(Field.IndexConfig)
DESCRIPTOR._options = None
_FIELD._options = None
# @@protoc_insertion_point(module_scope)

View file

@@ -0,0 +1,3 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc

View file

@@ -0,0 +1,355 @@
// Copyright 2019 Google LLC.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
syntax = "proto3";
package google.firestore.admin.v1;
import "google/api/annotations.proto";
import "google/api/client.proto";
import "google/api/field_behavior.proto";
import "google/api/resource.proto";
import "google/firestore/admin/v1/field.proto";
import "google/firestore/admin/v1/index.proto";
import "google/longrunning/operations.proto";
import "google/protobuf/empty.proto";
import "google/protobuf/field_mask.proto";
option csharp_namespace = "Google.Cloud.Firestore.Admin.V1";
option go_package = "google.golang.org/genproto/googleapis/firestore/admin/v1;admin";
option java_multiple_files = true;
option java_outer_classname = "FirestoreAdminProto";
option java_package = "com.google.firestore.admin.v1";
option objc_class_prefix = "GCFS";
option php_namespace = "Google\\Cloud\\Firestore\\Admin\\V1";
option ruby_package = "Google::Cloud::Firestore::Admin::V1";
option (google.api.resource_definition) = {
type: "firestore.googleapis.com/Database"
pattern: "projects/{project}/databases/{database}"
};
option (google.api.resource_definition) = {
type: "firestore.googleapis.com/CollectionGroup"
pattern: "projects/{project}/databases/{database}/collectionGroups/{collection}"
};
// Operations are created by service `FirestoreAdmin`, but are accessed via
// service `google.longrunning.Operations`.
service FirestoreAdmin {
  // Default host all RPCs of this service are sent to.
  option (google.api.default_host) = "firestore.googleapis.com";
  // OAuth scopes required to call any RPC on this service.
  option (google.api.oauth_scopes) =
      "https://www.googleapis.com/auth/cloud-platform,"
      "https://www.googleapis.com/auth/datastore";
  // Creates a composite index. This returns a [google.longrunning.Operation][google.longrunning.Operation]
  // which may be used to track the status of the creation. The metadata for
  // the operation will be the type [IndexOperationMetadata][google.firestore.admin.v1.IndexOperationMetadata].
  rpc CreateIndex(CreateIndexRequest) returns (google.longrunning.Operation) {
    option (google.api.http) = {
      post: "/v1/{parent=projects/*/databases/*/collectionGroups/*}/indexes"
      body: "index"
    };
    option (google.api.method_signature) = "parent,index";
    option (google.longrunning.operation_info) = {
      response_type: "Index"
      metadata_type: "IndexOperationMetadata"
    };
  }
  // Lists composite indexes.
  rpc ListIndexes(ListIndexesRequest) returns (ListIndexesResponse) {
    option (google.api.http) = {
      get: "/v1/{parent=projects/*/databases/*/collectionGroups/*}/indexes"
    };
    option (google.api.method_signature) = "parent";
  }
  // Gets a composite index.
  rpc GetIndex(GetIndexRequest) returns (Index) {
    option (google.api.http) = {
      get: "/v1/{name=projects/*/databases/*/collectionGroups/*/indexes/*}"
    };
    option (google.api.method_signature) = "name";
  }
  // Deletes a composite index.
  rpc DeleteIndex(DeleteIndexRequest) returns (google.protobuf.Empty) {
    option (google.api.http) = {
      delete: "/v1/{name=projects/*/databases/*/collectionGroups/*/indexes/*}"
    };
    option (google.api.method_signature) = "name";
  }
  // Gets the metadata and configuration for a Field.
  rpc GetField(GetFieldRequest) returns (Field) {
    option (google.api.http) = {
      get: "/v1/{name=projects/*/databases/*/collectionGroups/*/fields/*}"
    };
    option (google.api.method_signature) = "name";
  }
  // Updates a field configuration. Currently, field updates apply only to
  // single field index configuration. However, calls to
  // [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField] should provide a field mask to avoid
  // changing any configuration that the caller isn't aware of. The field mask
  // should be specified as: `{ paths: "index_config" }`.
  //
  // This call returns a [google.longrunning.Operation][google.longrunning.Operation] which may be used to
  // track the status of the field update. The metadata for
  // the operation will be the type [FieldOperationMetadata][google.firestore.admin.v1.FieldOperationMetadata].
  //
  // To configure the default field settings for the database, use
  // the special `Field` with resource name:
  // `projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*`.
  rpc UpdateField(UpdateFieldRequest) returns (google.longrunning.Operation) {
    option (google.api.http) = {
      patch: "/v1/{field.name=projects/*/databases/*/collectionGroups/*/fields/*}"
      body: "field"
    };
    option (google.api.method_signature) = "field";
    option (google.longrunning.operation_info) = {
      response_type: "Field"
      metadata_type: "FieldOperationMetadata"
    };
  }
  // Lists the field configuration and metadata for this database.
  //
  // Currently, [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] only supports listing fields
  // that have been explicitly overridden. To issue this query, call
  // [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] with the filter set to
  // `indexConfig.usesAncestorConfig:false`.
  rpc ListFields(ListFieldsRequest) returns (ListFieldsResponse) {
    option (google.api.http) = {
      get: "/v1/{parent=projects/*/databases/*/collectionGroups/*}/fields"
    };
    option (google.api.method_signature) = "parent";
  }
  // Exports a copy of all or a subset of documents from Google Cloud Firestore
  // to another storage system, such as Google Cloud Storage. Recent updates to
  // documents may not be reflected in the export. The export occurs in the
  // background and its progress can be monitored and managed via the
  // Operation resource that is created. The output of an export may only be
  // used once the associated operation is done. If an export operation is
  // cancelled before completion it may leave partial data behind in Google
  // Cloud Storage.
  rpc ExportDocuments(ExportDocumentsRequest) returns (google.longrunning.Operation) {
    option (google.api.http) = {
      post: "/v1/{name=projects/*/databases/*}:exportDocuments"
      body: "*"
    };
    option (google.api.method_signature) = "name";
    option (google.longrunning.operation_info) = {
      response_type: "ExportDocumentsResponse"
      metadata_type: "ExportDocumentsMetadata"
    };
  }
  // Imports documents into Google Cloud Firestore. Existing documents with the
  // same name are overwritten. The import occurs in the background and its
  // progress can be monitored and managed via the Operation resource that is
  // created. If an ImportDocuments operation is cancelled, it is possible
  // that a subset of the data has already been imported to Cloud Firestore.
  rpc ImportDocuments(ImportDocumentsRequest) returns (google.longrunning.Operation) {
    option (google.api.http) = {
      post: "/v1/{name=projects/*/databases/*}:importDocuments"
      body: "*"
    };
    option (google.api.method_signature) = "name";
    option (google.longrunning.operation_info) = {
      response_type: "google.protobuf.Empty"
      metadata_type: "ImportDocumentsMetadata"
    };
  }
}
// ---------------------------------------------------------------------------
// Request/response messages for the index-administration RPCs.
// Fields without a REQUIRED field_behavior annotation are optional.
// ---------------------------------------------------------------------------

// The request for [FirestoreAdmin.CreateIndex][google.firestore.admin.v1.FirestoreAdmin.CreateIndex].
message CreateIndexRequest {
  // Required. A parent name of the form
  // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}`
  string parent = 1 [
    (google.api.field_behavior) = REQUIRED,
    (google.api.resource_reference) = {
      type: "firestore.googleapis.com/CollectionGroup"
    }
  ];
  // Required. The composite index to create.
  Index index = 2 [(google.api.field_behavior) = REQUIRED];
}
// The request for [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes].
message ListIndexesRequest {
  // Required. A parent name of the form
  // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}`
  string parent = 1 [
    (google.api.field_behavior) = REQUIRED,
    (google.api.resource_reference) = {
      type: "firestore.googleapis.com/CollectionGroup"
    }
  ];
  // The filter to apply to list results.
  string filter = 2;
  // The number of results to return.
  int32 page_size = 3;
  // A page token, returned from a previous call to
  // [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes], that may be used to get the next
  // page of results.
  string page_token = 4;
}
// The response for [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes].
message ListIndexesResponse {
  // The requested indexes.
  repeated Index indexes = 1;
  // A page token that may be used to request another page of results. If blank,
  // this is the last page.
  string next_page_token = 2;
}
// The request for [FirestoreAdmin.GetIndex][google.firestore.admin.v1.FirestoreAdmin.GetIndex].
message GetIndexRequest {
  // Required. A name of the form
  // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}`
  string name = 1 [
    (google.api.field_behavior) = REQUIRED,
    (google.api.resource_reference) = {
      type: "firestore.googleapis.com/Index"
    }
  ];
}
// The request for [FirestoreAdmin.DeleteIndex][google.firestore.admin.v1.FirestoreAdmin.DeleteIndex].
message DeleteIndexRequest {
  // Required. A name of the form
  // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}`
  string name = 1 [
    (google.api.field_behavior) = REQUIRED,
    (google.api.resource_reference) = {
      type: "firestore.googleapis.com/Index"
    }
  ];
}
// ---------------------------------------------------------------------------
// Request/response messages for the field-configuration RPCs.
// ---------------------------------------------------------------------------

// The request for [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField].
message UpdateFieldRequest {
  // Required. The field to be updated.
  Field field = 1 [(google.api.field_behavior) = REQUIRED];
  // A mask, relative to the field. If specified, only configuration specified
  // by this field_mask will be updated in the field.
  google.protobuf.FieldMask update_mask = 2;
}
// The request for [FirestoreAdmin.GetField][google.firestore.admin.v1.FirestoreAdmin.GetField].
message GetFieldRequest {
  // Required. A name of the form
  // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_id}`
  string name = 1 [
    (google.api.field_behavior) = REQUIRED,
    (google.api.resource_reference) = {
      type: "firestore.googleapis.com/Field"
    }
  ];
}
// The request for [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields].
message ListFieldsRequest {
  // Required. A parent name of the form
  // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}`
  string parent = 1 [
    (google.api.field_behavior) = REQUIRED,
    (google.api.resource_reference) = {
      type: "firestore.googleapis.com/CollectionGroup"
    }
  ];
  // The filter to apply to list results. Currently,
  // [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] only supports listing fields
  // that have been explicitly overridden. To issue this query, call
  // [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] with the filter set to
  // `indexConfig.usesAncestorConfig:false`.
  string filter = 2;
  // The number of results to return.
  int32 page_size = 3;
  // A page token, returned from a previous call to
  // [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields], that may be used to get the next
  // page of results.
  string page_token = 4;
}
// The response for [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields].
message ListFieldsResponse {
  // The requested fields.
  repeated Field fields = 1;
  // A page token that may be used to request another page of results. If blank,
  // this is the last page.
  string next_page_token = 2;
}
// ---------------------------------------------------------------------------
// Request messages for the export/import RPCs (both are long-running
// operations on an entire database).
// ---------------------------------------------------------------------------

// The request for [FirestoreAdmin.ExportDocuments][google.firestore.admin.v1.FirestoreAdmin.ExportDocuments].
message ExportDocumentsRequest {
  // Required. Database to export. Should be of the form:
  // `projects/{project_id}/databases/{database_id}`.
  string name = 1 [
    (google.api.field_behavior) = REQUIRED,
    (google.api.resource_reference) = {
      type: "firestore.googleapis.com/Database"
    }
  ];
  // Which collection ids to export. Unspecified means all collections.
  repeated string collection_ids = 2;
  // The output URI. Currently only supports Google Cloud Storage URIs of the
  // form: `gs://BUCKET_NAME[/NAMESPACE_PATH]`, where `BUCKET_NAME` is the name
  // of the Google Cloud Storage bucket and `NAMESPACE_PATH` is an optional
  // Google Cloud Storage namespace path. When
  // choosing a name, be sure to consider Google Cloud Storage naming
  // guidelines: https://cloud.google.com/storage/docs/naming.
  // If the URI is a bucket (without a namespace path), a prefix will be
  // generated based on the start time.
  string output_uri_prefix = 3;
}
// The request for [FirestoreAdmin.ImportDocuments][google.firestore.admin.v1.FirestoreAdmin.ImportDocuments].
message ImportDocumentsRequest {
  // Required. Database to import into. Should be of the form:
  // `projects/{project_id}/databases/{database_id}`.
  string name = 1 [
    (google.api.field_behavior) = REQUIRED,
    (google.api.resource_reference) = {
      type: "firestore.googleapis.com/Database"
    }
  ];
  // Which collection ids to import. Unspecified means all collections included
  // in the import.
  repeated string collection_ids = 2;
  // Location of the exported files.
  // This must match the output_uri_prefix of an ExportDocumentsResponse from
  // an export that has completed successfully.
  // See:
  // [google.firestore.admin.v1.ExportDocumentsResponse.output_uri_prefix][google.firestore.admin.v1.ExportDocumentsResponse.output_uri_prefix].
  string input_uri_prefix = 3;
}

File diff suppressed because one or more lines are too long

View file

@@ -0,0 +1,478 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from google.cloud.firestore_admin_v1.proto import (
field_pb2 as google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_field__pb2,
)
from google.cloud.firestore_admin_v1.proto import (
firestore_admin_pb2 as google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2,
)
from google.cloud.firestore_admin_v1.proto import (
index_pb2 as google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_index__pb2,
)
from google.longrunning import (
operations_pb2 as google_dot_longrunning_dot_operations__pb2,
)
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
class FirestoreAdminStub(object):
    """Operations are created by service `FirestoreAdmin`, but are accessed via
    service `google.longrunning.Operations`.

    Generated gRPC client stub: each attribute is a unary-unary callable bound
    to one RPC of google.firestore.admin.v1.FirestoreAdmin. Serializers and
    deserializers come from the generated *_pb2 modules and must stay in sync
    with them.
    """
    def __init__(self, channel):
        """Constructor.
        Args:
            channel: A grpc.Channel.
        """
        # Long-running RPCs (CreateIndex, UpdateField, ExportDocuments,
        # ImportDocuments) deserialize into google.longrunning.Operation.
        self.CreateIndex = channel.unary_unary(
            "/google.firestore.admin.v1.FirestoreAdmin/CreateIndex",
            request_serializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.CreateIndexRequest.SerializeToString,
            response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
        )
        self.ListIndexes = channel.unary_unary(
            "/google.firestore.admin.v1.FirestoreAdmin/ListIndexes",
            request_serializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.ListIndexesRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.ListIndexesResponse.FromString,
        )
        self.GetIndex = channel.unary_unary(
            "/google.firestore.admin.v1.FirestoreAdmin/GetIndex",
            request_serializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.GetIndexRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_index__pb2.Index.FromString,
        )
        self.DeleteIndex = channel.unary_unary(
            "/google.firestore.admin.v1.FirestoreAdmin/DeleteIndex",
            request_serializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.DeleteIndexRequest.SerializeToString,
            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
        self.GetField = channel.unary_unary(
            "/google.firestore.admin.v1.FirestoreAdmin/GetField",
            request_serializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.GetFieldRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_field__pb2.Field.FromString,
        )
        self.UpdateField = channel.unary_unary(
            "/google.firestore.admin.v1.FirestoreAdmin/UpdateField",
            request_serializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.UpdateFieldRequest.SerializeToString,
            response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
        )
        self.ListFields = channel.unary_unary(
            "/google.firestore.admin.v1.FirestoreAdmin/ListFields",
            request_serializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.ListFieldsRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.ListFieldsResponse.FromString,
        )
        self.ExportDocuments = channel.unary_unary(
            "/google.firestore.admin.v1.FirestoreAdmin/ExportDocuments",
            request_serializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.ExportDocumentsRequest.SerializeToString,
            response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
        )
        self.ImportDocuments = channel.unary_unary(
            "/google.firestore.admin.v1.FirestoreAdmin/ImportDocuments",
            request_serializer=google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.ImportDocumentsRequest.SerializeToString,
            response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
        )
class FirestoreAdminServicer(object):
"""Operations are created by service `FirestoreAdmin`, but are accessed via
service `google.longrunning.Operations`.
"""
def CreateIndex(self, request, context):
"""Creates a composite index. This returns a [google.longrunning.Operation][google.longrunning.Operation]
which may be used to track the status of the creation. The metadata for
the operation will be the type [IndexOperationMetadata][google.firestore.admin.v1.IndexOperationMetadata].
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def ListIndexes(self, request, context):
"""Lists composite indexes.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def GetIndex(self, request, context):
"""Gets a composite index.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def DeleteIndex(self, request, context):
"""Deletes a composite index.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def GetField(self, request, context):
"""Gets the metadata and configuration for a Field.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def UpdateField(self, request, context):
"""Updates a field configuration. Currently, field updates apply only to
single field index configuration. However, calls to
[FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField] should provide a field mask to avoid
changing any configuration that the caller isn't aware of. The field mask
should be specified as: `{ paths: "index_config" }`.
This call returns a [google.longrunning.Operation][google.longrunning.Operation] which may be used to
track the status of the field update. The metadata for
the operation will be the type [FieldOperationMetadata][google.firestore.admin.v1.FieldOperationMetadata].
To configure the default field settings for the database, use
the special `Field` with resource name:
`projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*`.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def ListFields(self, request, context):
    """Lists the field configuration and metadata for this database.

    Currently,
    [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]
    only supports listing fields that have been explicitly overridden. To issue
    this query, call
    [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]
    with the filter set to `indexConfig.usesAncestorConfig:false`.
    """
    # Generated base-servicer stub: concrete servicers override this method.
    detail = "Method not implemented!"
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details(detail)
    raise NotImplementedError(detail)
def ExportDocuments(self, request, context):
    """Exports a copy of all or a subset of documents from Google Cloud
    Firestore to another storage system, such as Google Cloud Storage.

    Recent updates to documents may not be reflected in the export. The export
    occurs in the background and its progress can be monitored and managed via
    the Operation resource that is created. The output of an export may only
    be used once the associated operation is done. If an export operation is
    cancelled before completion it may leave partial data behind in Google
    Cloud Storage.
    """
    # Generated base-servicer stub: concrete servicers override this method.
    detail = "Method not implemented!"
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details(detail)
    raise NotImplementedError(detail)
def ImportDocuments(self, request, context):
    """Imports documents into Google Cloud Firestore.

    Existing documents with the same name are overwritten. The import occurs
    in the background and its progress can be monitored and managed via the
    Operation resource that is created. If an ImportDocuments operation is
    cancelled, it is possible that a subset of the data has already been
    imported to Cloud Firestore.
    """
    # Generated base-servicer stub: concrete servicers override this method.
    detail = "Method not implemented!"
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details(detail)
    raise NotImplementedError(detail)
def add_FirestoreAdminServicer_to_server(servicer, server):
    """Register every FirestoreAdmin RPC handler from `servicer` on `server`."""
    # Short local aliases for the protoc-generated module names.
    _admin = google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2
    _field = google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_field__pb2
    _index = google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_index__pb2
    _operations = google_dot_longrunning_dot_operations__pb2
    _empty = google_dot_protobuf_dot_empty__pb2
    # (RPC name, request deserializer, response serializer) for every
    # unary-unary method of the FirestoreAdmin service.
    method_specs = (
        ("CreateIndex", _admin.CreateIndexRequest.FromString, _operations.Operation.SerializeToString),
        ("ListIndexes", _admin.ListIndexesRequest.FromString, _admin.ListIndexesResponse.SerializeToString),
        ("GetIndex", _admin.GetIndexRequest.FromString, _index.Index.SerializeToString),
        ("DeleteIndex", _admin.DeleteIndexRequest.FromString, _empty.Empty.SerializeToString),
        ("GetField", _admin.GetFieldRequest.FromString, _field.Field.SerializeToString),
        ("UpdateField", _admin.UpdateFieldRequest.FromString, _operations.Operation.SerializeToString),
        ("ListFields", _admin.ListFieldsRequest.FromString, _admin.ListFieldsResponse.SerializeToString),
        ("ExportDocuments", _admin.ExportDocumentsRequest.FromString, _operations.Operation.SerializeToString),
        ("ImportDocuments", _admin.ImportDocumentsRequest.FromString, _operations.Operation.SerializeToString),
    )
    rpc_method_handlers = {
        name: grpc.unary_unary_rpc_method_handler(
            getattr(servicer, name),
            request_deserializer=deserializer,
            response_serializer=serializer,
        )
        for name, deserializer, serializer in method_specs
    }
    generic_handler = grpc.method_handlers_generic_handler(
        "google.firestore.admin.v1.FirestoreAdmin", rpc_method_handlers
    )
    server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class FirestoreAdmin(object):
    """Operations are created by service `FirestoreAdmin`, but are accessed via
    service `google.longrunning.Operations`.
    """

    # Common prefix of every fully-qualified RPC method path of this service.
    _SERVICE_PREFIX = "/google.firestore.admin.v1.FirestoreAdmin/"

    @staticmethod
    def _unary_unary(
        rpc_name,
        request_serializer,
        response_deserializer,
        request,
        target,
        options,
        channel_credentials,
        call_credentials,
        compression,
        wait_for_ready,
        timeout,
        metadata,
    ):
        """Shared dispatch: issue one unary-unary call via grpc.experimental."""
        return grpc.experimental.unary_unary(
            request,
            target,
            FirestoreAdmin._SERVICE_PREFIX + rpc_name,
            request_serializer,
            response_deserializer,
            options,
            channel_credentials,
            call_credentials,
            compression,
            wait_for_ready,
            timeout,
            metadata,
        )

    @staticmethod
    def CreateIndex(
        request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None,
    ):
        return FirestoreAdmin._unary_unary(
            "CreateIndex",
            google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.CreateIndexRequest.SerializeToString,
            google_dot_longrunning_dot_operations__pb2.Operation.FromString,
            request,
            target,
            options,
            channel_credentials,
            call_credentials,
            compression,
            wait_for_ready,
            timeout,
            metadata,
        )

    @staticmethod
    def ListIndexes(
        request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None,
    ):
        return FirestoreAdmin._unary_unary(
            "ListIndexes",
            google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.ListIndexesRequest.SerializeToString,
            google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.ListIndexesResponse.FromString,
            request,
            target,
            options,
            channel_credentials,
            call_credentials,
            compression,
            wait_for_ready,
            timeout,
            metadata,
        )

    @staticmethod
    def GetIndex(
        request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None,
    ):
        return FirestoreAdmin._unary_unary(
            "GetIndex",
            google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.GetIndexRequest.SerializeToString,
            google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_index__pb2.Index.FromString,
            request,
            target,
            options,
            channel_credentials,
            call_credentials,
            compression,
            wait_for_ready,
            timeout,
            metadata,
        )

    @staticmethod
    def DeleteIndex(
        request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None,
    ):
        return FirestoreAdmin._unary_unary(
            "DeleteIndex",
            google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.DeleteIndexRequest.SerializeToString,
            google_dot_protobuf_dot_empty__pb2.Empty.FromString,
            request,
            target,
            options,
            channel_credentials,
            call_credentials,
            compression,
            wait_for_ready,
            timeout,
            metadata,
        )

    @staticmethod
    def GetField(
        request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None,
    ):
        return FirestoreAdmin._unary_unary(
            "GetField",
            google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.GetFieldRequest.SerializeToString,
            google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_field__pb2.Field.FromString,
            request,
            target,
            options,
            channel_credentials,
            call_credentials,
            compression,
            wait_for_ready,
            timeout,
            metadata,
        )

    @staticmethod
    def UpdateField(
        request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None,
    ):
        return FirestoreAdmin._unary_unary(
            "UpdateField",
            google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.UpdateFieldRequest.SerializeToString,
            google_dot_longrunning_dot_operations__pb2.Operation.FromString,
            request,
            target,
            options,
            channel_credentials,
            call_credentials,
            compression,
            wait_for_ready,
            timeout,
            metadata,
        )

    @staticmethod
    def ListFields(
        request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None,
    ):
        return FirestoreAdmin._unary_unary(
            "ListFields",
            google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.ListFieldsRequest.SerializeToString,
            google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.ListFieldsResponse.FromString,
            request,
            target,
            options,
            channel_credentials,
            call_credentials,
            compression,
            wait_for_ready,
            timeout,
            metadata,
        )

    @staticmethod
    def ExportDocuments(
        request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None,
    ):
        return FirestoreAdmin._unary_unary(
            "ExportDocuments",
            google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.ExportDocumentsRequest.SerializeToString,
            google_dot_longrunning_dot_operations__pb2.Operation.FromString,
            request,
            target,
            options,
            channel_credentials,
            call_credentials,
            compression,
            wait_for_ready,
            timeout,
            metadata,
        )

    @staticmethod
    def ImportDocuments(
        request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None,
    ):
        return FirestoreAdmin._unary_unary(
            "ImportDocuments",
            google_dot_cloud_dot_firestore__admin__v1_dot_proto_dot_firestore__admin__pb2.ImportDocumentsRequest.SerializeToString,
            google_dot_longrunning_dot_operations__pb2.Operation.FromString,
            request,
            target,
            options,
            channel_credentials,
            call_credentials,
            compression,
            wait_for_ready,
            timeout,
            metadata,
        )

View file

@ -0,0 +1,158 @@
// Copyright 2019 Google LLC.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
syntax = "proto3";
package google.firestore.admin.v1;
import "google/api/resource.proto";
import "google/api/annotations.proto";
option csharp_namespace = "Google.Cloud.Firestore.Admin.V1";
option go_package = "google.golang.org/genproto/googleapis/firestore/admin/v1;admin";
option java_multiple_files = true;
option java_outer_classname = "IndexProto";
option java_package = "com.google.firestore.admin.v1";
option objc_class_prefix = "GCFS";
option php_namespace = "Google\\Cloud\\Firestore\\Admin\\V1";
option ruby_package = "Google::Cloud::Firestore::Admin::V1";
// Cloud Firestore indexes enable simple and complex queries against
// documents in a database.
// Cloud Firestore indexes enable simple and complex queries against
// documents in a database.
message Index {
  option (google.api.resource) = {
    type: "firestore.googleapis.com/Index"
    pattern: "projects/{project}/databases/{database}/collectionGroups/{collection}/indexes/{index}"
  };

  // A field in an index.
  // The field_path describes which field is indexed, the value_mode describes
  // how the field value is indexed.
  message IndexField {
    // The supported orderings.
    enum Order {
      // The ordering is unspecified. Not a valid option.
      ORDER_UNSPECIFIED = 0;

      // The field is ordered by ascending field value.
      ASCENDING = 1;

      // The field is ordered by descending field value.
      DESCENDING = 2;
    }

    // The supported array value configurations.
    enum ArrayConfig {
      // The index does not support additional array queries.
      ARRAY_CONFIG_UNSPECIFIED = 0;

      // The index supports array containment queries.
      CONTAINS = 1;
    }

    // Can be __name__.
    // For single field indexes, this must match the name of the field or may
    // be omitted.
    string field_path = 1;

    // How the field value is indexed.
    // NOTE: as a oneof, at most one of `order` / `array_config` is set.
    oneof value_mode {
      // Indicates that this field supports ordering by the specified order or
      // comparing using =, <, <=, >, >=.
      Order order = 2;

      // Indicates that this field supports operations on `array_value`s.
      ArrayConfig array_config = 3;
    }
  }

  // Query Scope defines the scope at which a query is run. This is specified on
  // a StructuredQuery's `from` field.
  enum QueryScope {
    // The query scope is unspecified. Not a valid option.
    QUERY_SCOPE_UNSPECIFIED = 0;

    // Indexes with a collection query scope specified allow queries
    // against a collection that is the child of a specific document, specified
    // at query time, and that has the collection id specified by the index.
    COLLECTION = 1;

    // Indexes with a collection group query scope specified allow queries
    // against all collections that has the collection id specified by the
    // index.
    COLLECTION_GROUP = 2;
  }

  // The state of an index. During index creation, an index will be in the
  // `CREATING` state. If the index is created successfully, it will transition
  // to the `READY` state. If the index creation encounters a problem, the index
  // will transition to the `NEEDS_REPAIR` state.
  enum State {
    // The state is unspecified.
    STATE_UNSPECIFIED = 0;

    // The index is being created.
    // There is an active long-running operation for the index.
    // The index is updated when writing a document.
    // Some index data may exist.
    CREATING = 1;

    // The index is ready to be used.
    // The index is updated when writing a document.
    // The index is fully populated from all stored documents it applies to.
    READY = 2;

    // The index was being created, but something went wrong.
    // There is no active long-running operation for the index,
    // and the most recently finished long-running operation failed.
    // The index is not updated when writing a document.
    // Some index data may exist.
    // Use the google.longrunning.Operations API to determine why the operation
    // that last attempted to create this index failed, then re-create the
    // index.
    NEEDS_REPAIR = 3;
  }

  // Output only. A server defined name for this index.
  // The form of this name for composite indexes will be:
  // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{composite_index_id}`
  // For single field indexes, this field will be empty.
  string name = 1;

  // Indexes with a collection query scope specified allow queries
  // against a collection that is the child of a specific document, specified at
  // query time, and that has the same collection id.
  //
  // Indexes with a collection group query scope specified allow queries against
  // all collections descended from a specific document, specified at query
  // time, and that have the same collection id as this index.
  QueryScope query_scope = 2;

  // The fields supported by this index.
  //
  // For composite indexes, this is always 2 or more fields.
  // The last field entry is always for the field path `__name__`. If, on
  // creation, `__name__` was not specified as the last field, it will be added
  // automatically with the same direction as that of the last field defined. If
  // the final field in a composite index is not directional, the `__name__`
  // will be ordered ASCENDING (unless explicitly specified).
  //
  // For single field indexes, this will always be exactly one entry with a
  // field path equal to the field path of the associated field.
  repeated IndexField fields = 3;

  // Output only. The serving state of the index.
  State state = 4;
}

View file

@ -0,0 +1,473 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/firestore_admin_v1/proto/index.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.api import resource_pb2 as google_dot_api_dot_resource__pb2
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
# File descriptor for index.proto. `serialized_pb` is the protoc-compiled
# FileDescriptorProto; do not edit it by hand.
DESCRIPTOR = _descriptor.FileDescriptor(
    name="google/cloud/firestore_admin_v1/proto/index.proto",
    package="google.firestore.admin.v1",
    syntax="proto3",
    serialized_options=b"\n\035com.google.firestore.admin.v1B\nIndexProtoP\001Z>google.golang.org/genproto/googleapis/firestore/admin/v1;admin\242\002\004GCFS\252\002\037Google.Cloud.Firestore.Admin.V1\312\002\037Google\\Cloud\\Firestore\\Admin\\V1\352\002#Google::Cloud::Firestore::Admin::V1",
    create_key=_descriptor._internal_create_key,
    serialized_pb=b'\n1google/cloud/firestore_admin_v1/proto/index.proto\x12\x19google.firestore.admin.v1\x1a\x19google/api/resource.proto\x1a\x1cgoogle/api/annotations.proto"\xa3\x06\n\x05Index\x12\x0c\n\x04name\x18\x01 \x01(\t\x12@\n\x0bquery_scope\x18\x02 \x01(\x0e\x32+.google.firestore.admin.v1.Index.QueryScope\x12;\n\x06\x66ields\x18\x03 \x03(\x0b\x32+.google.firestore.admin.v1.Index.IndexField\x12\x35\n\x05state\x18\x04 \x01(\x0e\x32&.google.firestore.admin.v1.Index.State\x1a\xbd\x02\n\nIndexField\x12\x12\n\nfield_path\x18\x01 \x01(\t\x12\x42\n\x05order\x18\x02 \x01(\x0e\x32\x31.google.firestore.admin.v1.Index.IndexField.OrderH\x00\x12O\n\x0c\x61rray_config\x18\x03 \x01(\x0e\x32\x37.google.firestore.admin.v1.Index.IndexField.ArrayConfigH\x00"=\n\x05Order\x12\x15\n\x11ORDER_UNSPECIFIED\x10\x00\x12\r\n\tASCENDING\x10\x01\x12\x0e\n\nDESCENDING\x10\x02"9\n\x0b\x41rrayConfig\x12\x1c\n\x18\x41RRAY_CONFIG_UNSPECIFIED\x10\x00\x12\x0c\n\x08\x43ONTAINS\x10\x01\x42\x0c\n\nvalue_mode"O\n\nQueryScope\x12\x1b\n\x17QUERY_SCOPE_UNSPECIFIED\x10\x00\x12\x0e\n\nCOLLECTION\x10\x01\x12\x14\n\x10\x43OLLECTION_GROUP\x10\x02"I\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0c\n\x08\x43REATING\x10\x01\x12\t\n\x05READY\x10\x02\x12\x10\n\x0cNEEDS_REPAIR\x10\x03:z\xea\x41w\n\x1e\x66irestore.googleapis.com/Index\x12Uprojects/{project}/databases/{database}/collectionGroups/{collection}/indexes/{index}B\xde\x01\n\x1d\x63om.google.firestore.admin.v1B\nIndexProtoP\x01Z>google.golang.org/genproto/googleapis/firestore/admin/v1;admin\xa2\x02\x04GCFS\xaa\x02\x1fGoogle.Cloud.Firestore.Admin.V1\xca\x02\x1fGoogle\\Cloud\\Firestore\\Admin\\V1\xea\x02#Google::Cloud::Firestore::Admin::V1b\x06proto3',
    dependencies=[
        google_dot_api_dot_resource__pb2.DESCRIPTOR,
        google_dot_api_dot_annotations__pb2.DESCRIPTOR,
    ],
)
# Enum descriptor: Index.IndexField.Order
# (ORDER_UNSPECIFIED=0, ASCENDING=1, DESCENDING=2).
_INDEX_INDEXFIELD_ORDER = _descriptor.EnumDescriptor(
    name="Order",
    full_name="google.firestore.admin.v1.Index.IndexField.Order",
    filename=None,
    file=DESCRIPTOR,
    create_key=_descriptor._internal_create_key,
    values=[
        _descriptor.EnumValueDescriptor(
            name="ORDER_UNSPECIFIED",
            index=0,
            number=0,
            serialized_options=None,
            type=None,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.EnumValueDescriptor(
            name="ASCENDING",
            index=1,
            number=1,
            serialized_options=None,
            type=None,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.EnumValueDescriptor(
            name="DESCENDING",
            index=2,
            number=2,
            serialized_options=None,
            type=None,
            create_key=_descriptor._internal_create_key,
        ),
    ],
    containing_type=None,
    serialized_options=None,
    serialized_start=527,
    serialized_end=588,
)
_sym_db.RegisterEnumDescriptor(_INDEX_INDEXFIELD_ORDER)

# Enum descriptor: Index.IndexField.ArrayConfig
# (ARRAY_CONFIG_UNSPECIFIED=0, CONTAINS=1).
_INDEX_INDEXFIELD_ARRAYCONFIG = _descriptor.EnumDescriptor(
    name="ArrayConfig",
    full_name="google.firestore.admin.v1.Index.IndexField.ArrayConfig",
    filename=None,
    file=DESCRIPTOR,
    create_key=_descriptor._internal_create_key,
    values=[
        _descriptor.EnumValueDescriptor(
            name="ARRAY_CONFIG_UNSPECIFIED",
            index=0,
            number=0,
            serialized_options=None,
            type=None,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.EnumValueDescriptor(
            name="CONTAINS",
            index=1,
            number=1,
            serialized_options=None,
            type=None,
            create_key=_descriptor._internal_create_key,
        ),
    ],
    containing_type=None,
    serialized_options=None,
    serialized_start=590,
    serialized_end=647,
)
_sym_db.RegisterEnumDescriptor(_INDEX_INDEXFIELD_ARRAYCONFIG)

# Enum descriptor: Index.QueryScope
# (QUERY_SCOPE_UNSPECIFIED=0, COLLECTION=1, COLLECTION_GROUP=2).
_INDEX_QUERYSCOPE = _descriptor.EnumDescriptor(
    name="QueryScope",
    full_name="google.firestore.admin.v1.Index.QueryScope",
    filename=None,
    file=DESCRIPTOR,
    create_key=_descriptor._internal_create_key,
    values=[
        _descriptor.EnumValueDescriptor(
            name="QUERY_SCOPE_UNSPECIFIED",
            index=0,
            number=0,
            serialized_options=None,
            type=None,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.EnumValueDescriptor(
            name="COLLECTION",
            index=1,
            number=1,
            serialized_options=None,
            type=None,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.EnumValueDescriptor(
            name="COLLECTION_GROUP",
            index=2,
            number=2,
            serialized_options=None,
            type=None,
            create_key=_descriptor._internal_create_key,
        ),
    ],
    containing_type=None,
    serialized_options=None,
    serialized_start=663,
    serialized_end=742,
)
_sym_db.RegisterEnumDescriptor(_INDEX_QUERYSCOPE)

# Enum descriptor: Index.State
# (STATE_UNSPECIFIED=0, CREATING=1, READY=2, NEEDS_REPAIR=3).
_INDEX_STATE = _descriptor.EnumDescriptor(
    name="State",
    full_name="google.firestore.admin.v1.Index.State",
    filename=None,
    file=DESCRIPTOR,
    create_key=_descriptor._internal_create_key,
    values=[
        _descriptor.EnumValueDescriptor(
            name="STATE_UNSPECIFIED",
            index=0,
            number=0,
            serialized_options=None,
            type=None,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.EnumValueDescriptor(
            name="CREATING",
            index=1,
            number=1,
            serialized_options=None,
            type=None,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.EnumValueDescriptor(
            name="READY",
            index=2,
            number=2,
            serialized_options=None,
            type=None,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.EnumValueDescriptor(
            name="NEEDS_REPAIR",
            index=3,
            number=3,
            serialized_options=None,
            type=None,
            create_key=_descriptor._internal_create_key,
        ),
    ],
    containing_type=None,
    serialized_options=None,
    serialized_start=744,
    serialized_end=817,
)
_sym_db.RegisterEnumDescriptor(_INDEX_STATE)
# Message descriptor: Index.IndexField
# (fields: field_path=1, order=2, array_config=3; oneof value_mode).
_INDEX_INDEXFIELD = _descriptor.Descriptor(
    name="IndexField",
    full_name="google.firestore.admin.v1.Index.IndexField",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    create_key=_descriptor._internal_create_key,
    fields=[
        _descriptor.FieldDescriptor(
            name="field_path",
            full_name="google.firestore.admin.v1.Index.IndexField.field_path",
            index=0,
            number=1,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=b"".decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.FieldDescriptor(
            name="order",
            full_name="google.firestore.admin.v1.Index.IndexField.order",
            index=1,
            number=2,
            type=14,
            cpp_type=8,
            label=1,
            has_default_value=False,
            default_value=0,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.FieldDescriptor(
            name="array_config",
            full_name="google.firestore.admin.v1.Index.IndexField.array_config",
            index=2,
            number=3,
            type=14,
            cpp_type=8,
            label=1,
            has_default_value=False,
            default_value=0,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[_INDEX_INDEXFIELD_ORDER, _INDEX_INDEXFIELD_ARRAYCONFIG,],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[
        _descriptor.OneofDescriptor(
            name="value_mode",
            full_name="google.firestore.admin.v1.Index.IndexField.value_mode",
            index=0,
            containing_type=None,
            create_key=_descriptor._internal_create_key,
            fields=[],
        ),
    ],
    serialized_start=344,
    serialized_end=661,
)

# Message descriptor: Index
# (fields: name=1, query_scope=2, fields=3, state=4; carries the
# google.api.resource option in `serialized_options`).
_INDEX = _descriptor.Descriptor(
    name="Index",
    full_name="google.firestore.admin.v1.Index",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    create_key=_descriptor._internal_create_key,
    fields=[
        _descriptor.FieldDescriptor(
            name="name",
            full_name="google.firestore.admin.v1.Index.name",
            index=0,
            number=1,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=b"".decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.FieldDescriptor(
            name="query_scope",
            full_name="google.firestore.admin.v1.Index.query_scope",
            index=1,
            number=2,
            type=14,
            cpp_type=8,
            label=1,
            has_default_value=False,
            default_value=0,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.FieldDescriptor(
            name="fields",
            full_name="google.firestore.admin.v1.Index.fields",
            index=2,
            number=3,
            type=11,
            cpp_type=10,
            label=3,
            has_default_value=False,
            default_value=[],
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.FieldDescriptor(
            name="state",
            full_name="google.firestore.admin.v1.Index.state",
            index=3,
            number=4,
            type=14,
            cpp_type=8,
            label=1,
            has_default_value=False,
            default_value=0,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
    ],
    extensions=[],
    nested_types=[_INDEX_INDEXFIELD,],
    enum_types=[_INDEX_QUERYSCOPE, _INDEX_STATE,],
    serialized_options=b"\352Aw\n\036firestore.googleapis.com/Index\022Uprojects/{project}/databases/{database}/collectionGroups/{collection}/indexes/{index}",
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=138,
    serialized_end=941,
)
_INDEX_INDEXFIELD.fields_by_name["order"].enum_type = _INDEX_INDEXFIELD_ORDER
_INDEX_INDEXFIELD.fields_by_name[
"array_config"
].enum_type = _INDEX_INDEXFIELD_ARRAYCONFIG
_INDEX_INDEXFIELD.containing_type = _INDEX
_INDEX_INDEXFIELD_ORDER.containing_type = _INDEX_INDEXFIELD
_INDEX_INDEXFIELD_ARRAYCONFIG.containing_type = _INDEX_INDEXFIELD
_INDEX_INDEXFIELD.oneofs_by_name["value_mode"].fields.append(
_INDEX_INDEXFIELD.fields_by_name["order"]
)
_INDEX_INDEXFIELD.fields_by_name[
"order"
].containing_oneof = _INDEX_INDEXFIELD.oneofs_by_name["value_mode"]
_INDEX_INDEXFIELD.oneofs_by_name["value_mode"].fields.append(
_INDEX_INDEXFIELD.fields_by_name["array_config"]
)
_INDEX_INDEXFIELD.fields_by_name[
"array_config"
].containing_oneof = _INDEX_INDEXFIELD.oneofs_by_name["value_mode"]
_INDEX.fields_by_name["query_scope"].enum_type = _INDEX_QUERYSCOPE
_INDEX.fields_by_name["fields"].message_type = _INDEX_INDEXFIELD
_INDEX.fields_by_name["state"].enum_type = _INDEX_STATE
_INDEX_QUERYSCOPE.containing_type = _INDEX
_INDEX_STATE.containing_type = _INDEX
DESCRIPTOR.message_types_by_name["Index"] = _INDEX
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Index = _reflection.GeneratedProtocolMessageType(
"Index",
(_message.Message,),
{
"IndexField": _reflection.GeneratedProtocolMessageType(
"IndexField",
(_message.Message,),
{
"DESCRIPTOR": _INDEX_INDEXFIELD,
"__module__": "google.cloud.firestore_admin_v1.proto.index_pb2",
"__doc__": """A field in an index. The field_path describes which field is indexed,
the value_mode describes how the field value is indexed.
Attributes:
field_path:
Can be **name**. For single field indexes, this must match the
name of the field or may be omitted.
value_mode:
How the field value is indexed.
order:
Indicates that this field supports ordering by the specified
order or comparing using =, <, <=, >, >=.
array_config:
Indicates that this field supports operations on
``array_value``\ s.
""",
# @@protoc_insertion_point(class_scope:google.firestore.admin.v1.Index.IndexField)
},
),
"DESCRIPTOR": _INDEX,
"__module__": "google.cloud.firestore_admin_v1.proto.index_pb2",
"__doc__": """Cloud Firestore indexes enable simple and complex queries against
documents in a database.
Attributes:
name:
Output only. A server defined name for this index. The form of
this name for composite indexes will be: ``projects/{project_i
d}/databases/{database_id}/collectionGroups/{collection_id}/in
dexes/{composite_index_id}`` For single field indexes, this
field will be empty.
query_scope:
Indexes with a collection query scope specified allow queries
against a collection that is the child of a specific document,
specified at query time, and that has the same collection id.
Indexes with a collection group query scope specified allow
queries against all collections descended from a specific
document, specified at query time, and that have the same
collection id as this index.
fields:
The fields supported by this index. For composite indexes,
this is always 2 or more fields. The last field entry is
always for the field path ``__name__``. If, on creation,
``__name__`` was not specified as the last field, it will be
added automatically with the same direction as that of the
last field defined. If the final field in a composite index is
not directional, the ``__name__`` will be ordered ASCENDING
(unless explicitly specified). For single field indexes, this
will always be exactly one entry with a field path equal to
the field path of the associated field.
state:
Output only. The serving state of the index.
""",
# @@protoc_insertion_point(class_scope:google.firestore.admin.v1.Index)
},
)
_sym_db.RegisterMessage(Index)
_sym_db.RegisterMessage(Index.IndexField)
DESCRIPTOR._options = None
_INDEX._options = None
# @@protoc_insertion_point(module_scope)

View file

@ -0,0 +1,3 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc

View file

@ -0,0 +1,35 @@
// Copyright 2019 Google LLC.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
syntax = "proto3";
package google.firestore.admin.v1;
import "google/type/latlng.proto";
import "google/api/annotations.proto";
option csharp_namespace = "Google.Cloud.Firestore.Admin.V1";
option go_package = "google.golang.org/genproto/googleapis/firestore/admin/v1;admin";
option java_multiple_files = true;
option java_outer_classname = "LocationProto";
option java_package = "com.google.firestore.admin.v1";
option objc_class_prefix = "GCFS";
option php_namespace = "Google\\Cloud\\Firestore\\Admin\\V1";
option ruby_package = "Google::Cloud::Firestore::Admin::V1";
// The metadata message for [google.cloud.location.Location.metadata][google.cloud.location.Location.metadata].
message LocationMetadata {
  // No fields are currently defined.
}

View file

@ -0,0 +1,71 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/firestore_admin_v1/proto/location.proto
# NOTE(review): machine-generated module — regenerate with protoc instead of
# editing; manual changes will be overwritten.
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database

# @@protoc_insertion_point(imports)

# Default symbol database in which the generated types are registered.
_sym_db = _symbol_database.Default()


from google.type import latlng_pb2 as google_dot_type_dot_latlng__pb2
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2

# File-level descriptor wrapping the serialized contents of location.proto.
DESCRIPTOR = _descriptor.FileDescriptor(
    name="google/cloud/firestore_admin_v1/proto/location.proto",
    package="google.firestore.admin.v1",
    syntax="proto3",
    serialized_options=b"\n\035com.google.firestore.admin.v1B\rLocationProtoP\001Z>google.golang.org/genproto/googleapis/firestore/admin/v1;admin\242\002\004GCFS\252\002\037Google.Cloud.Firestore.Admin.V1\312\002\037Google\\Cloud\\Firestore\\Admin\\V1\352\002#Google::Cloud::Firestore::Admin::V1",
    create_key=_descriptor._internal_create_key,
    serialized_pb=b'\n4google/cloud/firestore_admin_v1/proto/location.proto\x12\x19google.firestore.admin.v1\x1a\x18google/type/latlng.proto\x1a\x1cgoogle/api/annotations.proto"\x12\n\x10LocationMetadataB\xe1\x01\n\x1d\x63om.google.firestore.admin.v1B\rLocationProtoP\x01Z>google.golang.org/genproto/googleapis/firestore/admin/v1;admin\xa2\x02\x04GCFS\xaa\x02\x1fGoogle.Cloud.Firestore.Admin.V1\xca\x02\x1fGoogle\\Cloud\\Firestore\\Admin\\V1\xea\x02#Google::Cloud::Firestore::Admin::V1b\x06proto3',
    dependencies=[
        google_dot_type_dot_latlng__pb2.DESCRIPTOR,
        google_dot_api_dot_annotations__pb2.DESCRIPTOR,
    ],
)

# Message descriptor for the (field-less) LocationMetadata message.
_LOCATIONMETADATA = _descriptor.Descriptor(
    name="LocationMetadata",
    full_name="google.firestore.admin.v1.LocationMetadata",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    create_key=_descriptor._internal_create_key,
    fields=[],
    extensions=[],
    nested_types=[],
    enum_types=[],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=139,
    serialized_end=157,
)

DESCRIPTOR.message_types_by_name["LocationMetadata"] = _LOCATIONMETADATA
_sym_db.RegisterFileDescriptor(DESCRIPTOR)

# Concrete message class, synthesized from the descriptor via reflection.
LocationMetadata = _reflection.GeneratedProtocolMessageType(
    "LocationMetadata",
    (_message.Message,),
    {
        "DESCRIPTOR": _LOCATIONMETADATA,
        "__module__": "google.cloud.firestore_admin_v1.proto.location_pb2",
        "__doc__": """The metadata message for [google.cloud.location.Location.metadata][goo
gle.cloud.location.Location.metadata].""",
        # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.LocationMetadata)
    },
)
_sym_db.RegisterMessage(LocationMetadata)

DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)

View file

@ -0,0 +1,3 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc

View file

@ -0,0 +1,204 @@
// Copyright 2019 Google LLC.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
syntax = "proto3";
package google.firestore.admin.v1;
import "google/firestore/admin/v1/index.proto";
import "google/protobuf/timestamp.proto";
import "google/api/annotations.proto";
option csharp_namespace = "Google.Cloud.Firestore.Admin.V1";
option go_package = "google.golang.org/genproto/googleapis/firestore/admin/v1;admin";
option java_multiple_files = true;
option java_outer_classname = "OperationProto";
option java_package = "com.google.firestore.admin.v1";
option objc_class_prefix = "GCFS";
option php_namespace = "Google\\Cloud\\Firestore\\Admin\\V1";
option ruby_package = "Google::Cloud::Firestore::Admin::V1";
// Metadata for [google.longrunning.Operation][google.longrunning.Operation] results from
// [FirestoreAdmin.CreateIndex][google.firestore.admin.v1.FirestoreAdmin.CreateIndex].
message IndexOperationMetadata {
  // The time this operation started.
  google.protobuf.Timestamp start_time = 1;

  // The time this operation completed. Will be unset if operation still in
  // progress.
  google.protobuf.Timestamp end_time = 2;

  // The index resource that this operation is acting on. For example:
  // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}`
  string index = 3;

  // The state of the operation.
  OperationState state = 4;

  // The progress, in documents, of this operation.
  Progress progress_documents = 5;

  // The progress, in bytes, of this operation.
  Progress progress_bytes = 6;
}

// Metadata for [google.longrunning.Operation][google.longrunning.Operation] results from
// [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField].
message FieldOperationMetadata {
  // Information about an index configuration change.
  message IndexConfigDelta {
    // Specifies how the index is changing.
    enum ChangeType {
      // The type of change is not specified or known.
      CHANGE_TYPE_UNSPECIFIED = 0;

      // The single field index is being added.
      ADD = 1;

      // The single field index is being removed.
      REMOVE = 2;
    }

    // Specifies how the index is changing.
    ChangeType change_type = 1;

    // The index being changed.
    Index index = 2;
  }

  // The time this operation started.
  google.protobuf.Timestamp start_time = 1;

  // The time this operation completed. Will be unset if operation still in
  // progress.
  google.protobuf.Timestamp end_time = 2;

  // The field resource that this operation is acting on. For example:
  // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_path}`
  string field = 3;

  // A list of [IndexConfigDelta][google.firestore.admin.v1.FieldOperationMetadata.IndexConfigDelta], which describe the intent of this
  // operation.
  repeated IndexConfigDelta index_config_deltas = 4;

  // The state of the operation.
  OperationState state = 5;

  // The progress, in documents, of this operation.
  Progress progress_documents = 6;

  // The progress, in bytes, of this operation.
  Progress progress_bytes = 7;
}

// Metadata for [google.longrunning.Operation][google.longrunning.Operation] results from
// [FirestoreAdmin.ExportDocuments][google.firestore.admin.v1.FirestoreAdmin.ExportDocuments].
message ExportDocumentsMetadata {
  // The time this operation started.
  google.protobuf.Timestamp start_time = 1;

  // The time this operation completed. Will be unset if operation still in
  // progress.
  google.protobuf.Timestamp end_time = 2;

  // The state of the export operation.
  OperationState operation_state = 3;

  // The progress, in documents, of this operation.
  Progress progress_documents = 4;

  // The progress, in bytes, of this operation.
  Progress progress_bytes = 5;

  // Which collection ids are being exported.
  repeated string collection_ids = 6;

  // Where the entities are being exported to.
  // (See also `ExportDocumentsResponse.output_uri_prefix`.)
  string output_uri_prefix = 7;
}

// Metadata for [google.longrunning.Operation][google.longrunning.Operation] results from
// [FirestoreAdmin.ImportDocuments][google.firestore.admin.v1.FirestoreAdmin.ImportDocuments].
message ImportDocumentsMetadata {
  // The time this operation started.
  google.protobuf.Timestamp start_time = 1;

  // The time this operation completed. Will be unset if operation still in
  // progress.
  google.protobuf.Timestamp end_time = 2;

  // The state of the import operation.
  OperationState operation_state = 3;

  // The progress, in documents, of this operation.
  Progress progress_documents = 4;

  // The progress, in bytes, of this operation.
  Progress progress_bytes = 5;

  // Which collection ids are being imported.
  repeated string collection_ids = 6;

  // The location of the documents being imported.
  string input_uri_prefix = 7;
}

// Returned in the [google.longrunning.Operation][google.longrunning.Operation] response field.
message ExportDocumentsResponse {
  // Location of the output files. This can be used to begin an import
  // into Cloud Firestore (this project or another project) after the operation
  // completes successfully.
  // (See also `ExportDocumentsMetadata.output_uri_prefix`.)
  string output_uri_prefix = 1;
}

// Describes the progress of the operation.
// Unit of work is generic and must be interpreted based on where [Progress][google.firestore.admin.v1.Progress]
// is used.
message Progress {
  // The amount of work estimated.
  int64 estimated_work = 1;

  // The amount of work completed.
  int64 completed_work = 2;
}

// Describes the state of the operation.
enum OperationState {
  // Unspecified.
  OPERATION_STATE_UNSPECIFIED = 0;

  // Request is being prepared for processing.
  INITIALIZING = 1;

  // Request is actively being processed.
  PROCESSING = 2;

  // Request is in the process of being cancelled after user called
  // google.longrunning.Operations.CancelOperation on the operation.
  CANCELLING = 3;

  // Request has been processed and is in its finalization stage.
  FINALIZING = 4;

  // Request has completed successfully.
  SUCCESSFUL = 5;

  // Request has finished being processed, but encountered an error.
  FAILED = 6;

  // Request has finished being cancelled after user called
  // google.longrunning.Operations.CancelOperation.
  CANCELLED = 7;
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,3 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc

View file

@ -0,0 +1,66 @@
# -*- coding: utf-8 -*-
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import sys
from google.api_core.protobuf_helpers import get_messages
from google.cloud.firestore_admin_v1.proto import field_pb2
from google.cloud.firestore_admin_v1.proto import firestore_admin_pb2
from google.cloud.firestore_admin_v1.proto import index_pb2
from google.cloud.firestore_admin_v1.proto import location_pb2
from google.cloud.firestore_admin_v1.proto import operation_pb2
from google.longrunning import operations_pb2
from google.protobuf import any_pb2
from google.protobuf import empty_pb2
from google.protobuf import field_mask_pb2
from google.protobuf import timestamp_pb2
from google.rpc import status_pb2
# Message modules shared across Google Cloud APIs (long-running operations,
# protobuf well-known types, RPC status).
_shared_modules = [
    operations_pb2,
    any_pb2,
    empty_pb2,
    field_mask_pb2,
    timestamp_pb2,
    status_pb2,
]
# Message modules generated from the firestore_admin_v1 protos themselves.
_local_modules = [
    field_pb2,
    firestore_admin_pb2,
    index_pb2,
    location_pb2,
    operation_pb2,
]

# Accumulates every re-exported name; becomes __all__ below.
names = []

# Re-export each message class from the shared modules at this module's top
# level, so users can reference e.g. ``firestore_admin_v1.types.Empty``.
for module in _shared_modules:  # pragma: NO COVER
    for name, message in get_messages(module).items():
        setattr(sys.modules[__name__], name, message)
        names.append(name)

# Local messages are additionally re-homed (``__module__`` rewritten) so
# their repr and docs point at this public ``types`` module rather than the
# private ``proto`` package.
for module in _local_modules:
    for name, message in get_messages(module).items():
        message.__module__ = "google.cloud.firestore_admin_v1.types"
        setattr(sys.modules[__name__], name, message)
        names.append(name)

# Sorted tuple gives a deterministic public surface.
__all__ = tuple(sorted(names))

View file

@ -0,0 +1,71 @@
# Copyright 2019 Google LLC All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Python idiomatic client for Google Cloud Firestore."""
from pkg_resources import get_distribution
__version__ = get_distribution("google-cloud-firestore").version
from google.cloud.firestore_v1 import types
from google.cloud.firestore_v1._helpers import GeoPoint
from google.cloud.firestore_v1._helpers import ExistsOption
from google.cloud.firestore_v1._helpers import LastUpdateOption
from google.cloud.firestore_v1._helpers import ReadAfterWriteError
from google.cloud.firestore_v1._helpers import WriteOption
from google.cloud.firestore_v1.batch import WriteBatch
from google.cloud.firestore_v1.client import Client
from google.cloud.firestore_v1.collection import CollectionReference
from google.cloud.firestore_v1.transforms import ArrayRemove
from google.cloud.firestore_v1.transforms import ArrayUnion
from google.cloud.firestore_v1.transforms import DELETE_FIELD
from google.cloud.firestore_v1.transforms import Increment
from google.cloud.firestore_v1.transforms import Maximum
from google.cloud.firestore_v1.transforms import Minimum
from google.cloud.firestore_v1.transforms import SERVER_TIMESTAMP
from google.cloud.firestore_v1.document import DocumentReference
from google.cloud.firestore_v1.document import DocumentSnapshot
from google.cloud.firestore_v1.gapic import enums
from google.cloud.firestore_v1.query import Query
from google.cloud.firestore_v1.transaction import Transaction
from google.cloud.firestore_v1.transaction import transactional
from google.cloud.firestore_v1.watch import Watch
__all__ = [
"__version__",
"ArrayRemove",
"ArrayUnion",
"Client",
"CollectionReference",
"DELETE_FIELD",
"DocumentReference",
"DocumentSnapshot",
"enums",
"ExistsOption",
"GeoPoint",
"Increment",
"LastUpdateOption",
"Maximum",
"Minimum",
"Query",
"ReadAfterWriteError",
"SERVER_TIMESTAMP",
"Transaction",
"transactional",
"types",
"Watch",
"WriteBatch",
"WriteOption",
]

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,160 @@
# Copyright 2017 Google LLC All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helpers for batch requests to the Google Cloud Firestore API."""
from google.cloud.firestore_v1 import _helpers
class WriteBatch(object):
    """Accumulate write operations to be sent in a batch.

    This has the same set of methods for write operations that
    :class:`~google.cloud.firestore_v1.document.DocumentReference` does,
    e.g. :meth:`~google.cloud.firestore_v1.document.DocumentReference.create`.

    Args:
        client (:class:`~google.cloud.firestore_v1.client.Client`):
            The client that created this batch.
    """

    def __init__(self, client):
        self._client = client
        self._write_pbs = []
        # Both populated by a successful commit(); None until then.
        self.write_results = None
        self.commit_time = None

    def _add_write_pbs(self, write_pbs):
        """Extend the queue of pending ``Write`` protobufs.

        This method is intended to be over-ridden by subclasses.

        Args:
            write_pbs (List[google.cloud.proto.firestore.v1.\
                write_pb2.Write]): A list of write protobufs to be added.
        """
        self._write_pbs.extend(write_pbs)

    def create(self, reference, document_data):
        """Add a "change" to this batch to create a document.

        If the document given by ``reference`` already exists, then this
        batch will fail when :meth:`commit`-ed.

        Args:
            reference (:class:`~google.cloud.firestore_v1.document.DocumentReference`):
                A document reference to be created in this batch.
            document_data (dict): Property names and values to use for
                creating a document.
        """
        self._add_write_pbs(
            _helpers.pbs_for_create(reference._document_path, document_data)
        )

    def set(self, reference, document_data, merge=False):
        """Add a "change" to replace a document.

        See
        :meth:`google.cloud.firestore_v1.document.DocumentReference.set` for
        more information on how ``option`` determines how the change is
        applied.

        Args:
            reference (:class:`~google.cloud.firestore_v1.document.DocumentReference`):
                A document reference that will have values set in this batch.
            document_data (dict):
                Property names and values to use for replacing a document.
            merge (Optional[bool] or Optional[List<apispec>]):
                If True, apply merging instead of overwriting the state
                of the document.
        """
        # ``merge`` may be a bool or a list of field paths; only the exact
        # value False selects the full-overwrite code path.
        if merge is False:
            pending = _helpers.pbs_for_set_no_merge(
                reference._document_path, document_data
            )
        else:
            pending = _helpers.pbs_for_set_with_merge(
                reference._document_path, document_data, merge
            )
        self._add_write_pbs(pending)

    def update(self, reference, field_updates, option=None):
        """Add a "change" to update a document.

        See
        :meth:`google.cloud.firestore_v1.document.DocumentReference.update`
        for more information on ``field_updates`` and ``option``.

        Args:
            reference (:class:`~google.cloud.firestore_v1.document.DocumentReference`):
                A document reference that will be updated in this batch.
            field_updates (dict):
                Field names or paths to update and values to update with.
            option (Optional[:class:`~google.cloud.firestore_v1.client.WriteOption`]):
                A write option to make assertions / preconditions on the server
                state of the document before applying changes.

        Raises:
            ValueError: If ``option`` is an ``ExistsOption`` (updates imply
                the document exists, so an explicit exists-check is invalid).
        """
        # Name-based check (rather than isinstance) avoids importing the
        # option class here; preserved from the original implementation.
        if option.__class__.__name__ == "ExistsOption":
            raise ValueError("you must not pass an explicit write option to update.")
        self._add_write_pbs(
            _helpers.pbs_for_update(reference._document_path, field_updates, option)
        )

    def delete(self, reference, option=None):
        """Add a "change" to delete a document.

        See
        :meth:`google.cloud.firestore_v1.document.DocumentReference.delete`
        for more information on how ``option`` determines how the change is
        applied.

        Args:
            reference (:class:`~google.cloud.firestore_v1.document.DocumentReference`):
                A document reference that will be deleted in this batch.
            option (Optional[:class:`~google.cloud.firestore_v1.client.WriteOption`]):
                A write option to make assertions / preconditions on the server
                state of the document before applying changes.
        """
        delete_pb = _helpers.pb_for_delete(reference._document_path, option)
        self._add_write_pbs([delete_pb])

    def commit(self):
        """Commit the changes accumulated in this batch.

        Returns:
            List[:class:`google.cloud.proto.firestore.v1.write_pb2.WriteResult`, ...]:
            The write results corresponding to the changes committed, returned
            in the same order as the changes were applied to this batch. A
            write result contains an ``update_time`` field.
        """
        response = self._client._firestore_api.commit(
            self._client._database_string,
            self._write_pbs,
            transaction=None,
            metadata=self._client._rpc_metadata,
        )
        # Reset the queue only once the RPC has returned successfully.
        self._write_pbs = []
        results = list(response.write_results)
        self.write_results = results
        self.commit_time = response.commit_time
        return results

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Flush the batch only when the ``with`` body exited cleanly;
        # exceptions propagate with the batch left un-committed.
        if exc_type is not None:
            return
        self.commit()
View file

@ -0,0 +1,622 @@
# Copyright 2017 Google LLC All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Client for interacting with the Google Cloud Firestore API.
This is the base from which all interactions with the API occur.
In the hierarchy of API concepts
* a :class:`~google.cloud.firestore_v1.client.Client` owns a
:class:`~google.cloud.firestore_v1.collection.CollectionReference`
* a :class:`~google.cloud.firestore_v1.client.Client` owns a
:class:`~google.cloud.firestore_v1.document.DocumentReference`
"""
import os
import google.api_core.client_options
from google.api_core.gapic_v1 import client_info
from google.cloud.client import ClientWithProject
from google.cloud.firestore_v1 import _helpers
from google.cloud.firestore_v1 import __version__
from google.cloud.firestore_v1 import query
from google.cloud.firestore_v1 import types
from google.cloud.firestore_v1.batch import WriteBatch
from google.cloud.firestore_v1.collection import CollectionReference
from google.cloud.firestore_v1.document import DocumentReference
from google.cloud.firestore_v1.document import DocumentSnapshot
from google.cloud.firestore_v1.field_path import render_field_path
from google.cloud.firestore_v1.gapic import firestore_client
from google.cloud.firestore_v1.gapic.transports import firestore_grpc_transport
from google.cloud.firestore_v1.transaction import Transaction
DEFAULT_DATABASE = "(default)"
"""str: The default database used in a :class:`~google.cloud.firestore_v1.client.Client`."""
_BAD_OPTION_ERR = (
"Exactly one of ``last_update_time`` or ``exists`` " "must be provided."
)
_BAD_DOC_TEMPLATE = (
"Document {!r} appeared in response but was not present among references"
)
_ACTIVE_TXN = "There is already an active transaction."
_INACTIVE_TXN = "There is no active transaction."
_CLIENT_INFO = client_info.ClientInfo(client_library_version=__version__)
_FIRESTORE_EMULATOR_HOST = "FIRESTORE_EMULATOR_HOST"
class Client(ClientWithProject):
    """Client for interacting with Google Cloud Firestore API.

    .. note::

        Since the Cloud Firestore API requires the gRPC transport, no
        ``_http`` argument is accepted by this class.

    Args:
        project (Optional[str]): The project which the client acts on behalf
            of. If not passed, falls back to the default inferred
            from the environment.
        credentials (Optional[~google.auth.credentials.Credentials]): The
            OAuth2 Credentials to use for this client. If not passed, falls
            back to the default inferred from the environment.
        database (Optional[str]): The database name that the client targets.
            For now, :attr:`DEFAULT_DATABASE` (the default value) is the
            only valid database.
        client_info (Optional[google.api_core.gapic_v1.client_info.ClientInfo]):
            The client info used to send a user-agent string along with API
            requests. If ``None``, then default info will be used. Generally,
            you only need to set this if you're developing your own library
            or partner tool.
        client_options (Union[dict, google.api_core.client_options.ClientOptions]):
            Client options used to set user options on the client. API Endpoint
            should be set through client_options.
    """

    SCOPE = (
        "https://www.googleapis.com/auth/cloud-platform",
        "https://www.googleapis.com/auth/datastore",
    )
    """The scopes required for authenticating with the Firestore service."""

    # Lazily-populated caches backing the _firestore_api, _database_string
    # and _rpc_metadata properties below.
    _firestore_api_internal = None
    _database_string_internal = None
    _rpc_metadata_internal = None

    def __init__(
        self,
        project=None,
        credentials=None,
        database=DEFAULT_DATABASE,
        client_info=_CLIENT_INFO,
        client_options=None,
    ):
        # NOTE: This API has no use for the _http argument, but sending it
        # will have no impact since the _http() @property only lazily
        # creates a working HTTP object.
        super(Client, self).__init__(
            project=project,
            credentials=credentials,
            client_options=client_options,
            _http=None,
        )
        self._client_info = client_info
        if client_options:
            # Accept a plain dict in place of a ClientOptions instance.
            # NOTE(review): ``type(...) == dict`` rejects dict subclasses;
            # ``isinstance`` would be the conventional check — confirm intent.
            if type(client_options) == dict:
                client_options = google.api_core.client_options.from_dict(
                    client_options
                )
        self._client_options = client_options
        self._database = database
        # When set, traffic is routed to a local emulator instead of the
        # production service (see _target and _firestore_api).
        self._emulator_host = os.getenv(_FIRESTORE_EMULATOR_HOST)

    @property
    def _firestore_api(self):
        """Lazy-loading getter GAPIC Firestore API.

        Returns:
            :class:`~google.cloud.gapic.firestore.v1`.firestore_client.FirestoreClient:
            The GAPIC client with the credentials of the current client.
        """
        if self._firestore_api_internal is None:
            # Use a custom channel.
            # We need this in order to set appropriate keepalive options.
            if self._emulator_host is not None:
                # Emulator traffic is plain-text gRPC: no TLS credentials.
                channel = firestore_grpc_transport.firestore_pb2_grpc.grpc.insecure_channel(
                    self._emulator_host
                )
            else:
                channel = firestore_grpc_transport.FirestoreGrpcTransport.create_channel(
                    self._target,
                    credentials=self._credentials,
                    options={"grpc.keepalive_time_ms": 30000}.items(),
                )
            self._transport = firestore_grpc_transport.FirestoreGrpcTransport(
                address=self._target, channel=channel
            )
            self._firestore_api_internal = firestore_client.FirestoreClient(
                transport=self._transport, client_info=self._client_info
            )
        return self._firestore_api_internal

    @property
    def _target(self):
        """Return the target (where the API is).

        Precedence: emulator host, then a user-supplied api_endpoint, then
        the library's default service address.

        Returns:
            str: The location of the API.
        """
        if self._emulator_host is not None:
            return self._emulator_host
        elif self._client_options and self._client_options.api_endpoint:
            return self._client_options.api_endpoint
        else:
            return firestore_client.FirestoreClient.SERVICE_ADDRESS

    @property
    def _database_string(self):
        """The database string corresponding to this client's project.

        This value is lazy-loaded and cached.

        Will be of the form

            ``projects/{project_id}/databases/{database_id}``

        but ``database_id == '(default)'`` for the time being.

        Returns:
            str: The fully-qualified database string for the current
            project. (The default database is also in this string.)
        """
        if self._database_string_internal is None:
            # NOTE: database_root_path() is a classmethod, so we don't use
            # self._firestore_api (it isn't necessary).
            db_str = firestore_client.FirestoreClient.database_root_path(
                self.project, self._database
            )
            self._database_string_internal = db_str
        return self._database_string_internal

    @property
    def _rpc_metadata(self):
        """The RPC metadata for this client's associated database.

        Lazy-loaded and cached on first access.

        Returns:
            Sequence[Tuple(str, str)]: RPC metadata with resource prefix
            for the database associated with this client.
        """
        if self._rpc_metadata_internal is None:
            self._rpc_metadata_internal = _helpers.metadata_with_prefix(
                self._database_string
            )
            if self._emulator_host is not None:
                # The emulator requires additional metadata to be set.
                self._rpc_metadata_internal.append(("authorization", "Bearer owner"))
        return self._rpc_metadata_internal

    def collection(self, *collection_path):
        """Get a reference to a collection.

        For a top-level collection:

        .. code-block:: python

            >>> client.collection('top')

        For a sub-collection:

        .. code-block:: python

            >>> client.collection('mydocs/doc/subcol')
            >>> # is the same as
            >>> client.collection('mydocs', 'doc', 'subcol')

        Sub-collections can be nested deeper in a similar fashion.

        Args:
            collection_path (Tuple[str, ...]): Can either be

                * A single ``/``-delimited path to a collection
                * A tuple of collection path segments

        Returns:
            :class:`~google.cloud.firestore_v1.collection.CollectionReference`:
            A reference to a collection in the Firestore database.
        """
        if len(collection_path) == 1:
            # A single argument is a full slash-delimited path.
            path = collection_path[0].split(_helpers.DOCUMENT_PATH_DELIMITER)
        else:
            path = collection_path
        return CollectionReference(*path, client=self)

    def collection_group(self, collection_id):
        """
        Creates and returns a new Query that includes all documents in the
        database that are contained in a collection or subcollection with the
        given collection_id.

        .. code-block:: python

            >>> query = client.collection_group('mygroup')

        @param {string} collectionId Identifies the collections to query over.
        Every collection or subcollection with this ID as the last segment of its
        path will be included. Cannot contain a slash.
        @returns {Query} The created Query.
        """
        if "/" in collection_id:
            raise ValueError(
                "Invalid collection_id "
                + collection_id
                + ". Collection IDs must not contain '/'."
            )
        collection = self.collection(collection_id)
        # all_descendants=True widens the query across every collection or
        # subcollection sharing this ID.
        return query.Query(collection, all_descendants=True)

    def document(self, *document_path):
        """Get a reference to a document in a collection.

        For a top-level document:

        .. code-block:: python

            >>> client.document('collek/shun')
            >>> # is the same as
            >>> client.document('collek', 'shun')

        For a document in a sub-collection:

        .. code-block:: python

            >>> client.document('mydocs/doc/subcol/child')
            >>> # is the same as
            >>> client.document('mydocs', 'doc', 'subcol', 'child')

        Documents in sub-collections can be nested deeper in a similar fashion.

        Args:
            document_path (Tuple[str, ...]): Can either be

                * A single ``/``-delimited path to a document
                * A tuple of document path segments

        Returns:
            :class:`~google.cloud.firestore_v1.document.DocumentReference`:
            A reference to a document in a collection.
        """
        if len(document_path) == 1:
            path = document_path[0].split(_helpers.DOCUMENT_PATH_DELIMITER)
        else:
            path = document_path
        # DocumentReference takes a relative path. Strip the database string if present.
        base_path = self._database_string + "/documents/"
        joined_path = _helpers.DOCUMENT_PATH_DELIMITER.join(path)
        if joined_path.startswith(base_path):
            joined_path = joined_path[len(base_path) :]
        path = joined_path.split(_helpers.DOCUMENT_PATH_DELIMITER)
        return DocumentReference(*path, client=self)

    @staticmethod
    def field_path(*field_names):
        """Create a **field path** from a list of nested field names.

        A **field path** is a ``.``-delimited concatenation of the field
        names. It is used to represent a nested field. For example,
        in the data

        .. code-block:: python

            data = {
                'aa': {
                    'bb': {
                        'cc': 10,
                    },
                },
            }

        the field path ``'aa.bb.cc'`` represents the data stored in
        ``data['aa']['bb']['cc']``.

        Args:
            field_names (Tuple[str, ...]): The list of field names.

        Returns:
            str: The ``.``-delimited field path.
        """
        return render_field_path(field_names)

    @staticmethod
    def write_option(**kwargs):
        """Create a write option for write operations.

        Write operations include :meth:`~google.cloud.DocumentReference.set`,
        :meth:`~google.cloud.DocumentReference.update` and
        :meth:`~google.cloud.DocumentReference.delete`.

        One of the following keyword arguments must be provided:

        * ``last_update_time`` (:class:`google.protobuf.timestamp_pb2.\
          Timestamp`): A timestamp. When set, the target document must
          exist and have been last updated at that time. Protobuf
          ``update_time`` timestamps are typically returned from methods
          that perform write operations as part of a "write result"
          protobuf or directly.
        * ``exists`` (:class:`bool`): Indicates if the document being modified
          should already exist.

        Providing no argument would make the option have no effect (so
        it is not allowed). Providing multiple would be an apparent
        contradiction, since ``last_update_time`` assumes that the
        document **was** updated (it can't have been updated if it
        doesn't exist) and ``exists`` indicate that it is unknown if the
        document exists or not.

        Args:
            kwargs (Dict[str, Any]): The keyword arguments described above.

        Raises:
            TypeError: If anything other than exactly one argument is
                provided by the caller.

        Returns:
            :class:`~google.cloud.firestore_v1.client.WriteOption`:
            The option to be used to configure a write message.
        """
        if len(kwargs) != 1:
            raise TypeError(_BAD_OPTION_ERR)
        name, value = kwargs.popitem()
        if name == "last_update_time":
            return _helpers.LastUpdateOption(value)
        elif name == "exists":
            return _helpers.ExistsOption(value)
        else:
            extra = "{!r} was provided".format(name)
            raise TypeError(_BAD_OPTION_ERR, extra)

    def get_all(self, references, field_paths=None, transaction=None):
        """Retrieve a batch of documents.

        .. note::

            Documents returned by this method are not guaranteed to be
            returned in the same order that they are given in ``references``.

        .. note::

            If multiple ``references`` refer to the same document, the server
            will only return one result.

        See :meth:`~google.cloud.firestore_v1.client.Client.field_path` for
        more information on **field paths**.

        If a ``transaction`` is used and it already has write operations
        added, this method cannot be used (i.e. read-after-write is not
        allowed).

        Args:
            references (List[.DocumentReference, ...]): Iterable of document
                references to be retrieved.
            field_paths (Optional[Iterable[str, ...]]): An iterable of field
                paths (``.``-delimited list of field names) to use as a
                projection of document fields in the returned results. If
                no value is provided, all fields will be returned.
            transaction (Optional[:class:`~google.cloud.firestore_v1.transaction.Transaction`]):
                An existing transaction that these ``references`` will be
                retrieved in.

        Yields:
            .DocumentSnapshot: The next document snapshot that fulfills the
            query, or :data:`None` if the document does not exist.
        """
        document_paths, reference_map = _reference_info(references)
        mask = _get_doc_mask(field_paths)
        response_iterator = self._firestore_api.batch_get_documents(
            self._database_string,
            document_paths,
            mask,
            transaction=_helpers.get_transaction_id(transaction),
            metadata=self._rpc_metadata,
        )
        # Responses stream back in server order; map each one back to the
        # caller's reference via reference_map.
        for get_doc_response in response_iterator:
            yield _parse_batch_get(get_doc_response, reference_map, self)

    def collections(self):
        """List top-level collections of the client's database.

        Returns:
            Sequence[:class:`~google.cloud.firestore_v1.collection.CollectionReference`]:
            iterator of subcollections of the current document.
        """
        iterator = self._firestore_api.list_collection_ids(
            "{}/documents".format(self._database_string), metadata=self._rpc_metadata
        )
        # Attach this client and a converter so iteration yields
        # CollectionReference objects instead of raw ids.
        iterator.client = self
        iterator.item_to_value = _item_to_collection_ref
        return iterator

    def batch(self):
        """Get a batch instance from this client.

        Returns:
            :class:`~google.cloud.firestore_v1.batch.WriteBatch`:
            A "write" batch to be used for accumulating document changes and
            sending the changes all at once.
        """
        return WriteBatch(self)

    def transaction(self, **kwargs):
        """Get a transaction that uses this client.

        See :class:`~google.cloud.firestore_v1.transaction.Transaction` for
        more information on transactions and the constructor arguments.

        Args:
            kwargs (Dict[str, Any]): The keyword arguments (other than
                ``client``) to pass along to the
                :class:`~google.cloud.firestore_v1.transaction.Transaction`
                constructor.

        Returns:
            :class:`~google.cloud.firestore_v1.transaction.Transaction`:
            A transaction attached to this client.
        """
        return Transaction(self, **kwargs)
def _reference_info(references):
"""Get information about document references.
Helper for :meth:`~google.cloud.firestore_v1.client.Client.get_all`.
Args:
references (List[.DocumentReference, ...]): Iterable of document
references.
Returns:
Tuple[List[str, ...], Dict[str, .DocumentReference]]: A two-tuple of
* fully-qualified documents paths for each reference in ``references``
* a mapping from the paths to the original reference. (If multiple
``references`` contains multiple references to the same document,
that key will be overwritten in the result.)
"""
document_paths = []
reference_map = {}
for reference in references:
doc_path = reference._document_path
document_paths.append(doc_path)
reference_map[doc_path] = reference
return document_paths, reference_map
def _get_reference(document_path, reference_map):
"""Get a document reference from a dictionary.
This just wraps a simple dictionary look-up with a helpful error that is
specific to :meth:`~google.cloud.firestore.client.Client.get_all`, the
**public** caller of this function.
Args:
document_path (str): A fully-qualified document path.
reference_map (Dict[str, .DocumentReference]): A mapping (produced
by :func:`_reference_info`) of fully-qualified document paths to
document references.
Returns:
.DocumentReference: The matching reference.
Raises:
ValueError: If ``document_path`` has not been encountered.
"""
try:
return reference_map[document_path]
except KeyError:
msg = _BAD_DOC_TEMPLATE.format(document_path)
raise ValueError(msg)
def _parse_batch_get(get_doc_response, reference_map, client):
    """Parse a `BatchGetDocumentsResponse` protobuf.

    Args:
        get_doc_response (~google.cloud.proto.firestore.v1.\
            firestore_pb2.BatchGetDocumentsResponse): A single response (from
            a stream) containing the "get" response for a document.
        reference_map (Dict[str, .DocumentReference]): A mapping (produced
            by :func:`_reference_info`) of fully-qualified document paths to
            document references.
        client (:class:`~google.cloud.firestore_v1.client.Client`):
            A client that has a document factory.

    Returns:
        [.DocumentSnapshot]: The retrieved snapshot.

    Raises:
        ValueError: If the response has a ``result`` field (a oneof) other
            than ``found`` or ``missing``.
    """
    # ``result`` is a protobuf oneof: exactly one of ``found`` / ``missing``
    # is populated (or neither, which is an error).
    result_type = get_doc_response.WhichOneof("result")
    if result_type == "found":
        # ``found`` carries a full Document message; decode its fields into
        # native Python values before building the snapshot.
        reference = _get_reference(get_doc_response.found.name, reference_map)
        data = _helpers.decode_dict(get_doc_response.found.fields, client)
        snapshot = DocumentSnapshot(
            reference,
            data,
            exists=True,
            read_time=get_doc_response.read_time,
            create_time=get_doc_response.found.create_time,
            update_time=get_doc_response.found.update_time,
        )
    elif result_type == "missing":
        # ``missing`` is just the document path; represent non-existence
        # with a snapshot whose data and timestamps are ``None``.
        reference = _get_reference(get_doc_response.missing, reference_map)
        snapshot = DocumentSnapshot(
            reference,
            None,
            exists=False,
            read_time=get_doc_response.read_time,
            create_time=None,
            update_time=None,
        )
    else:
        raise ValueError(
            "`BatchGetDocumentsResponse.result` (a oneof) had a field other "
            "than `found` or `missing` set, or was unset"
        )
    return snapshot
def _get_doc_mask(field_paths):
"""Get a document mask if field paths are provided.
Args:
field_paths (Optional[Iterable[str, ...]]): An iterable of field
paths (``.``-delimited list of field names) to use as a
projection of document fields in the returned results.
Returns:
Optional[google.cloud.firestore_v1.types.DocumentMask]: A mask
to project documents to a restricted set of field paths.
"""
if field_paths is None:
return None
else:
return types.DocumentMask(field_paths=field_paths)
def _item_to_collection_ref(iterator, item):
"""Convert collection ID to collection ref.
Args:
iterator (google.api_core.page_iterator.GRPCIterator):
iterator response
item (str): ID of the collection
"""
return iterator.client.collection(item)

View file

@ -0,0 +1,509 @@
# Copyright 2017 Google LLC All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes for representing collections for the Google Cloud Firestore API."""
import random
import six
from google.cloud.firestore_v1 import _helpers
from google.cloud.firestore_v1 import query as query_mod
from google.cloud.firestore_v1.watch import Watch
from google.cloud.firestore_v1 import document
_AUTO_ID_CHARS = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"
class CollectionReference(object):
    """A reference to a collection in a Firestore database.

    The collection may already exist or this class can facilitate creation
    of documents within the collection.

    Args:
        path (Tuple[str, ...]): The components in the collection path.
            This is a series of strings representing each collection and
            sub-collection ID, as well as the document IDs for any documents
            that contain a sub-collection.
        kwargs (dict): The keyword arguments for the constructor. The only
            supported keyword is ``client`` and it must be a
            :class:`~google.cloud.firestore_v1.client.Client` if provided. It
            represents the client that created this collection reference.

    Raises:
        ValueError: if

            * the ``path`` is empty
            * there are an even number of elements
            * a collection ID in ``path`` is not a string
            * a document ID in ``path`` is not a string
        TypeError: If a keyword other than ``client`` is used.
    """

    def __init__(self, *path, **kwargs):
        _helpers.verify_path(path, is_collection=True)
        self._path = path
        # ``client`` may be ``None``; methods that need it will fail later.
        self._client = kwargs.pop("client", None)
        if kwargs:
            raise TypeError(
                "Received unexpected arguments", kwargs, "Only `client` is supported"
            )

    def __eq__(self, other):
        """Equality: same path and same client."""
        if not isinstance(other, self.__class__):
            return NotImplemented
        return self._path == other._path and self._client == other._client

    @property
    def id(self):
        """The collection identifier.

        Returns:
            str: The last component of the path.
        """
        return self._path[-1]

    @property
    def parent(self):
        """Document that owns the current collection.

        Returns:
            Optional[:class:`~google.cloud.firestore_v1.document.DocumentReference`]:
                The parent document, if the current collection is not a
                top-level collection.
        """
        if len(self._path) == 1:
            # A single path element means a top-level collection: no parent.
            return None
        else:
            parent_path = self._path[:-1]
            return self._client.document(*parent_path)

    def document(self, document_id=None):
        """Create a sub-document underneath the current collection.

        Args:
            document_id (Optional[str]): The document identifier
                within the current collection. If not provided, will default
                to a random 20 character string composed of digits,
                uppercase and lowercase and letters.

        Returns:
            :class:`~google.cloud.firestore_v1.document.DocumentReference`:
                The child document.
        """
        if document_id is None:
            document_id = _auto_id()
        child_path = self._path + (document_id,)
        return self._client.document(*child_path)

    def _parent_info(self):
        """Get fully-qualified parent path and prefix for this collection.

        Returns:
            Tuple[str, str]: Pair of

            * the fully-qualified (with database and project) path to the
              parent of this collection (will either be the database path
              or a document path).
            * the prefix to a document in this collection.
        """
        parent_doc = self.parent
        if parent_doc is None:
            # Top-level collection: the "parent" is the database itself.
            parent_path = _helpers.DOCUMENT_PATH_DELIMITER.join(
                (self._client._database_string, "documents")
            )
        else:
            parent_path = parent_doc._document_path
        expected_prefix = _helpers.DOCUMENT_PATH_DELIMITER.join((parent_path, self.id))
        return parent_path, expected_prefix

    def add(self, document_data, document_id=None):
        """Create a document in the Firestore database with the provided data.

        Args:
            document_data (dict): Property names and values to use for
                creating the document.
            document_id (Optional[str]): The document identifier within the
                current collection. If not provided, an ID will be
                automatically assigned by the server (the assigned ID will be
                a random 20 character string composed of digits,
                uppercase and lowercase letters).

        Returns:
            Tuple[:class:`google.protobuf.timestamp_pb2.Timestamp`, \
                :class:`~google.cloud.firestore_v1.document.DocumentReference`]:
                Pair of

                * The ``update_time`` when the document was created/overwritten.
                * A document reference for the created document.

        Raises:
            ~google.cloud.exceptions.Conflict: If ``document_id`` is provided
                and the document already exists.
        """
        if document_id is None:
            document_id = _auto_id()
        document_ref = self.document(document_id)
        # ``create`` (rather than ``set``) so an existing doc raises Conflict.
        write_result = document_ref.create(document_data)
        return write_result.update_time, document_ref

    def list_documents(self, page_size=None):
        """List all subdocuments of the current collection.

        Args:
            page_size (Optional[int]]): The maximum number of documents
                in each page of results from this request. Non-positive values
                are ignored. Defaults to a sensible value set by the API.

        Returns:
            Sequence[:class:`~google.cloud.firestore_v1.collection.DocumentReference`]:
                iterator of subdocuments of the current collection. If the
                collection does not exist at the time of `snapshot`, the
                iterator will be empty
        """
        parent, _ = self._parent_info()
        iterator = self._client._firestore_api.list_documents(
            parent,
            self.id,
            page_size=page_size,
            # show_missing=True also lists documents that only exist as
            # parents of sub-collections.
            show_missing=True,
            metadata=self._client._rpc_metadata,
        )
        iterator.collection = self
        iterator.item_to_value = _item_to_document_ref
        return iterator

    def select(self, field_paths):
        """Create a "select" query with this collection as parent.

        See :meth:`~google.cloud.firestore_v1.query.Query.select` for
        more information on this method.

        Args:
            field_paths (Iterable[str, ...]): An iterable of field paths
                (``.``-delimited list of field names) to use as a projection
                of document fields in the query results.

        Returns:
            :class:`~google.cloud.firestore_v1.query.Query`:
                A "projected" query.
        """
        query = query_mod.Query(self)
        return query.select(field_paths)

    def where(self, field_path, op_string, value):
        """Create a "where" query with this collection as parent.

        See :meth:`~google.cloud.firestore_v1.query.Query.where` for
        more information on this method.

        Args:
            field_path (str): A field path (``.``-delimited list of
                field names) for the field to filter on.
            op_string (str): A comparison operation in the form of a string.
                Acceptable values are ``<``, ``<=``, ``==``, ``>=``
                and ``>``.
            value (Any): The value to compare the field against in the filter.
                If ``value`` is :data:`None` or a NaN, then ``==`` is the only
                allowed operation.

        Returns:
            :class:`~google.cloud.firestore_v1.query.Query`:
                A filtered query.
        """
        query = query_mod.Query(self)
        return query.where(field_path, op_string, value)

    def order_by(self, field_path, **kwargs):
        """Create an "order by" query with this collection as parent.

        See :meth:`~google.cloud.firestore_v1.query.Query.order_by` for
        more information on this method.

        Args:
            field_path (str): A field path (``.``-delimited list of
                field names) on which to order the query results.
            kwargs (Dict[str, Any]): The keyword arguments to pass along
                to the query. The only supported keyword is ``direction``,
                see :meth:`~google.cloud.firestore_v1.query.Query.order_by`
                for more information.

        Returns:
            :class:`~google.cloud.firestore_v1.query.Query`:
                An "order by" query.
        """
        query = query_mod.Query(self)
        return query.order_by(field_path, **kwargs)

    def limit(self, count):
        """Create a limited query with this collection as parent.

        .. note::
            `limit` and `limit_to_last` are mutually exclusive.
            Setting `limit` will drop previously set `limit_to_last`.

        See :meth:`~google.cloud.firestore_v1.query.Query.limit` for
        more information on this method.

        Args:
            count (int): Maximum number of documents to return that match
                the query.

        Returns:
            :class:`~google.cloud.firestore_v1.query.Query`:
                A limited query.
        """
        query = query_mod.Query(self)
        return query.limit(count)

    def limit_to_last(self, count):
        """Create a limited to last query with this collection as parent.

        .. note::
            `limit` and `limit_to_last` are mutually exclusive.
            Setting `limit_to_last` will drop previously set `limit`.

        See :meth:`~google.cloud.firestore_v1.query.Query.limit_to_last`
        for more information on this method.

        Args:
            count (int): Maximum number of documents to return that
                match the query.

        Returns:
            :class:`~google.cloud.firestore_v1.query.Query`:
                A limited to last query.
        """
        query = query_mod.Query(self)
        return query.limit_to_last(count)

    def offset(self, num_to_skip):
        """Skip to an offset in a query with this collection as parent.

        See :meth:`~google.cloud.firestore_v1.query.Query.offset` for
        more information on this method.

        Args:
            num_to_skip (int): The number of results to skip at the beginning
                of query results. (Must be non-negative.)

        Returns:
            :class:`~google.cloud.firestore_v1.query.Query`:
                An offset query.
        """
        query = query_mod.Query(self)
        return query.offset(num_to_skip)

    def start_at(self, document_fields):
        """Start query at a cursor with this collection as parent.

        See :meth:`~google.cloud.firestore_v1.query.Query.start_at` for
        more information on this method.

        Args:
            document_fields (Union[:class:`~google.cloud.firestore_v1.\
                document.DocumentSnapshot`, dict, list, tuple]):
                A document snapshot or a dictionary/list/tuple of fields
                representing a query results cursor. A cursor is a collection
                of values that represent a position in a query result set.

        Returns:
            :class:`~google.cloud.firestore_v1.query.Query`:
                A query with cursor.
        """
        query = query_mod.Query(self)
        return query.start_at(document_fields)

    def start_after(self, document_fields):
        """Start query after a cursor with this collection as parent.

        See :meth:`~google.cloud.firestore_v1.query.Query.start_after` for
        more information on this method.

        Args:
            document_fields (Union[:class:`~google.cloud.firestore_v1.\
                document.DocumentSnapshot`, dict, list, tuple]):
                A document snapshot or a dictionary/list/tuple of fields
                representing a query results cursor. A cursor is a collection
                of values that represent a position in a query result set.

        Returns:
            :class:`~google.cloud.firestore_v1.query.Query`:
                A query with cursor.
        """
        query = query_mod.Query(self)
        return query.start_after(document_fields)

    def end_before(self, document_fields):
        """End query before a cursor with this collection as parent.

        See :meth:`~google.cloud.firestore_v1.query.Query.end_before` for
        more information on this method.

        Args:
            document_fields (Union[:class:`~google.cloud.firestore_v1.\
                document.DocumentSnapshot`, dict, list, tuple]):
                A document snapshot or a dictionary/list/tuple of fields
                representing a query results cursor. A cursor is a collection
                of values that represent a position in a query result set.

        Returns:
            :class:`~google.cloud.firestore_v1.query.Query`:
                A query with cursor.
        """
        query = query_mod.Query(self)
        return query.end_before(document_fields)

    def end_at(self, document_fields):
        """End query at a cursor with this collection as parent.

        See :meth:`~google.cloud.firestore_v1.query.Query.end_at` for
        more information on this method.

        Args:
            document_fields (Union[:class:`~google.cloud.firestore_v1.\
                document.DocumentSnapshot`, dict, list, tuple]):
                A document snapshot or a dictionary/list/tuple of fields
                representing a query results cursor. A cursor is a collection
                of values that represent a position in a query result set.

        Returns:
            :class:`~google.cloud.firestore_v1.query.Query`:
                A query with cursor.
        """
        query = query_mod.Query(self)
        return query.end_at(document_fields)

    def get(self, transaction=None):
        """Read the documents in this collection.

        This sends a ``RunQuery`` RPC and returns a list of documents
        returned in the stream of ``RunQueryResponse`` messages.

        Args:
            transaction
                (Optional[:class:`~google.cloud.firestore_v1.transaction.Transaction`]):
                An existing transaction that this query will run in.

        If a ``transaction`` is used and it already has write operations
        added, this method cannot be used (i.e. read-after-write is not
        allowed).

        Returns:
            list: The documents in this collection that match the query.
        """
        query = query_mod.Query(self)
        return query.get(transaction=transaction)

    def stream(self, transaction=None):
        """Read the documents in this collection.

        This sends a ``RunQuery`` RPC and then returns an iterator which
        consumes each document returned in the stream of ``RunQueryResponse``
        messages.

        .. note::
            The underlying stream of responses will time out after
            the ``max_rpc_timeout_millis`` value set in the GAPIC
            client configuration for the ``RunQuery`` API. Snapshots
            not consumed from the iterator before that point will be lost.

        If a ``transaction`` is used and it already has write operations
        added, this method cannot be used (i.e. read-after-write is not
        allowed).

        Args:
            transaction (Optional[:class:`~google.cloud.firestore_v1.transaction.\
                Transaction`]):
                An existing transaction that the query will run in.

        Yields:
            :class:`~google.cloud.firestore_v1.document.DocumentSnapshot`:
                The next document that fulfills the query.
        """
        query = query_mod.Query(self)
        return query.stream(transaction=transaction)

    def on_snapshot(self, callback):
        """Monitor the documents in this collection.

        This starts a watch on this collection using a background thread. The
        provided callback is run on the snapshot of the documents.

        Args:
            callback (Callable[List[:class:`~google.cloud.firestore_v1.collection.CollectionSnapshot`], \
                List[:class:`~google.cloud.firestore_v1.watch.DocumentChange`], datetime.datetime], NoneType):
                a callback to run when a change occurs.

        Example:
            from google.cloud import firestore_v1

            db = firestore_v1.Client()
            collection_ref = db.collection(u'users')

            def on_snapshot(docs, changes, read_time):
                for doc in docs:
                    print(u'{} => {}'.format(doc.id, doc.to_dict()))

            # Watch this collection
            collection_watch = collection_ref.on_snapshot(on_snapshot)

            # Terminate this watch
            collection_watch.unsubscribe()
        """
        return Watch.for_query(
            query_mod.Query(self),
            callback,
            document.DocumentSnapshot,
            document.DocumentReference,
        )
def _auto_id():
    """Generate a "random" automatically generated ID.

    Returns:
        str: A 20 character string composed of digits, uppercase and
        lowercase and letters.
    """
    picks = (random.choice(_AUTO_ID_CHARS) for _ in six.moves.xrange(20))
    return "".join(picks)
def _item_to_document_ref(iterator, item):
    """Convert a Document resource into a document reference.

    Args:
        iterator (google.api_core.page_iterator.GRPCIterator):
            iterator response
        item (dict): document resource

    Returns:
        The document reference for the resource's trailing path segment.
    """
    path_parts = item.name.split(_helpers.DOCUMENT_PATH_DELIMITER)
    return iterator.collection.document(path_parts[-1])

View file

@ -0,0 +1,787 @@
# Copyright 2017 Google LLC All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes for representing documents for the Google Cloud Firestore API."""
import copy
import six
from google.api_core import exceptions
from google.cloud.firestore_v1 import _helpers
from google.cloud.firestore_v1 import field_path as field_path_module
from google.cloud.firestore_v1.proto import common_pb2
from google.cloud.firestore_v1.watch import Watch
class DocumentReference(object):
"""A reference to a document in a Firestore database.
The document may already exist or can be created by this class.
Args:
path (Tuple[str, ...]): The components in the document path.
This is a series of strings representing each collection and
sub-collection ID, as well as the document IDs for any documents
that contain a sub-collection (as well as the base document).
kwargs (dict): The keyword arguments for the constructor. The only
supported keyword is ``client`` and it must be a
:class:`~google.cloud.firestore_v1.client.Client`. It represents
the client that created this document reference.
Raises:
ValueError: if
* the ``path`` is empty
* there are an even number of elements
* a collection ID in ``path`` is not a string
* a document ID in ``path`` is not a string
TypeError: If a keyword other than ``client`` is used.
"""
_document_path_internal = None
def __init__(self, *path, **kwargs):
_helpers.verify_path(path, is_collection=False)
self._path = path
self._client = kwargs.pop("client", None)
if kwargs:
raise TypeError(
"Received unexpected arguments", kwargs, "Only `client` is supported"
)
def __copy__(self):
"""Shallow copy the instance.
We leave the client "as-is" but tuple-unpack the path.
Returns:
.DocumentReference: A copy of the current document.
"""
result = self.__class__(*self._path, client=self._client)
result._document_path_internal = self._document_path_internal
return result
def __deepcopy__(self, unused_memo):
"""Deep copy the instance.
This isn't a true deep copy, wee leave the client "as-is" but
tuple-unpack the path.
Returns:
.DocumentReference: A copy of the current document.
"""
return self.__copy__()
def __eq__(self, other):
"""Equality check against another instance.
Args:
other (Any): A value to compare against.
Returns:
Union[bool, NotImplementedType]: Indicating if the values are
equal.
"""
if isinstance(other, DocumentReference):
return self._client == other._client and self._path == other._path
else:
return NotImplemented
def __hash__(self):
return hash(self._path) + hash(self._client)
def __ne__(self, other):
"""Inequality check against another instance.
Args:
other (Any): A value to compare against.
Returns:
Union[bool, NotImplementedType]: Indicating if the values are
not equal.
"""
if isinstance(other, DocumentReference):
return self._client != other._client or self._path != other._path
else:
return NotImplemented
@property
def path(self):
"""Database-relative for this document.
Returns:
str: The document's relative path.
"""
return "/".join(self._path)
@property
def _document_path(self):
"""Create and cache the full path for this document.
Of the form:
``projects/{project_id}/databases/{database_id}/...
documents/{document_path}``
Returns:
str: The full document path.
Raises:
ValueError: If the current document reference has no ``client``.
"""
if self._document_path_internal is None:
if self._client is None:
raise ValueError("A document reference requires a `client`.")
self._document_path_internal = _get_document_path(self._client, self._path)
return self._document_path_internal
@property
def id(self):
"""The document identifier (within its collection).
Returns:
str: The last component of the path.
"""
return self._path[-1]
@property
def parent(self):
"""Collection that owns the current document.
Returns:
:class:`~google.cloud.firestore_v1.collection.CollectionReference`:
The parent collection.
"""
parent_path = self._path[:-1]
return self._client.collection(*parent_path)
def collection(self, collection_id):
"""Create a sub-collection underneath the current document.
Args:
collection_id (str): The sub-collection identifier (sometimes
referred to as the "kind").
Returns:
:class:`~google.cloud.firestore_v1.collection.CollectionReference`:
The child collection.
"""
child_path = self._path + (collection_id,)
return self._client.collection(*child_path)
def create(self, document_data):
"""Create the current document in the Firestore database.
Args:
document_data (dict): Property names and values to use for
creating a document.
Returns:
:class:`~google.cloud.firestore_v1.types.WriteResult`:
The write result corresponding to the committed document.
A write result contains an ``update_time`` field.
Raises:
:class:`~google.cloud.exceptions.Conflict`:
If the document already exists.
"""
batch = self._client.batch()
batch.create(self, document_data)
write_results = batch.commit()
return _first_write_result(write_results)
def set(self, document_data, merge=False):
"""Replace the current document in the Firestore database.
A write ``option`` can be specified to indicate preconditions of
the "set" operation. If no ``option`` is specified and this document
doesn't exist yet, this method will create it.
Overwrites all content for the document with the fields in
``document_data``. This method performs almost the same functionality
as :meth:`create`. The only difference is that this method doesn't
make any requirements on the existence of the document (unless
``option`` is used), whereas as :meth:`create` will fail if the
document already exists.
Args:
document_data (dict): Property names and values to use for
replacing a document.
merge (Optional[bool] or Optional[List<apispec>]):
If True, apply merging instead of overwriting the state
of the document.
Returns:
:class:`~google.cloud.firestore_v1.types.WriteResult`:
The write result corresponding to the committed document. A write
result contains an ``update_time`` field.
"""
batch = self._client.batch()
batch.set(self, document_data, merge=merge)
write_results = batch.commit()
return _first_write_result(write_results)
def update(self, field_updates, option=None):
"""Update an existing document in the Firestore database.
By default, this method verifies that the document exists on the
server before making updates. A write ``option`` can be specified to
override these preconditions.
Each key in ``field_updates`` can either be a field name or a
**field path** (For more information on **field paths**, see
:meth:`~google.cloud.firestore_v1.client.Client.field_path`.) To
illustrate this, consider a document with
.. code-block:: python
>>> snapshot = document.get()
>>> snapshot.to_dict()
{
'foo': {
'bar': 'baz',
},
'other': True,
}
stored on the server. If the field name is used in the update:
.. code-block:: python
>>> field_updates = {
... 'foo': {
... 'quux': 800,
... },
... }
>>> document.update(field_updates)
then all of ``foo`` will be overwritten on the server and the new
value will be
.. code-block:: python
>>> snapshot = document.get()
>>> snapshot.to_dict()
{
'foo': {
'quux': 800,
},
'other': True,
}
On the other hand, if a ``.``-delimited **field path** is used in the
update:
.. code-block:: python
>>> field_updates = {
... 'foo.quux': 800,
... }
>>> document.update(field_updates)
then only ``foo.quux`` will be updated on the server and the
field ``foo.bar`` will remain intact:
.. code-block:: python
>>> snapshot = document.get()
>>> snapshot.to_dict()
{
'foo': {
'bar': 'baz',
'quux': 800,
},
'other': True,
}
.. warning::
A **field path** can only be used as a top-level key in
``field_updates``.
To delete / remove a field from an existing document, use the
:attr:`~google.cloud.firestore_v1.transforms.DELETE_FIELD` sentinel.
So with the example above, sending
.. code-block:: python
>>> field_updates = {
... 'other': firestore.DELETE_FIELD,
... }
>>> document.update(field_updates)
would update the value on the server to:
.. code-block:: python
>>> snapshot = document.get()
>>> snapshot.to_dict()
{
'foo': {
'bar': 'baz',
},
}
To set a field to the current time on the server when the
update is received, use the
:attr:`~google.cloud.firestore_v1.transforms.SERVER_TIMESTAMP`
sentinel.
Sending
.. code-block:: python
>>> field_updates = {
... 'foo.now': firestore.SERVER_TIMESTAMP,
... }
>>> document.update(field_updates)
would update the value on the server to:
.. code-block:: python
>>> snapshot = document.get()
>>> snapshot.to_dict()
{
'foo': {
'bar': 'baz',
'now': datetime.datetime(2012, ...),
},
'other': True,
}
Args:
field_updates (dict): Field names or paths to update and values
to update with.
option (Optional[:class:`~google.cloud.firestore_v1.client.WriteOption`]):
A write option to make assertions / preconditions on the server
state of the document before applying changes.
Returns:
:class:`~google.cloud.firestore_v1.types.WriteResult`:
The write result corresponding to the updated document. A write
result contains an ``update_time`` field.
Raises:
~google.cloud.exceptions.NotFound: If the document does not exist.
"""
batch = self._client.batch()
batch.update(self, field_updates, option=option)
write_results = batch.commit()
return _first_write_result(write_results)
def delete(self, option=None):
"""Delete the current document in the Firestore database.
Args:
option (Optional[:class:`~google.cloud.firestore_v1.client.WriteOption`]):
A write option to make assertions / preconditions on the server
state of the document before applying changes.
Returns:
:class:`google.protobuf.timestamp_pb2.Timestamp`:
The time that the delete request was received by the server.
If the document did not exist when the delete was sent (i.e.
nothing was deleted), this method will still succeed and will
still return the time that the request was received by the server.
"""
write_pb = _helpers.pb_for_delete(self._document_path, option)
commit_response = self._client._firestore_api.commit(
self._client._database_string,
[write_pb],
transaction=None,
metadata=self._client._rpc_metadata,
)
return commit_response.commit_time
def get(self, field_paths=None, transaction=None):
"""Retrieve a snapshot of the current document.
See :meth:`~google.cloud.firestore_v1.client.Client.field_path` for
more information on **field paths**.
If a ``transaction`` is used and it already has write operations
added, this method cannot be used (i.e. read-after-write is not
allowed).
Args:
field_paths (Optional[Iterable[str, ...]]): An iterable of field
paths (``.``-delimited list of field names) to use as a
projection of document fields in the returned results. If
no value is provided, all fields will be returned.
transaction (Optional[:class:`~google.cloud.firestore_v1.transaction.Transaction`]):
An existing transaction that this reference
will be retrieved in.
Returns:
:class:`~google.cloud.firestore_v1.document.DocumentSnapshot`:
A snapshot of the current document. If the document does not
exist at the time of the snapshot is taken, the snapshot's
:attr:`reference`, :attr:`data`, :attr:`update_time`, and
:attr:`create_time` attributes will all be ``None`` and
its :attr:`exists` attribute will be ``False``.
"""
if isinstance(field_paths, six.string_types):
raise ValueError("'field_paths' must be a sequence of paths, not a string.")
if field_paths is not None:
mask = common_pb2.DocumentMask(field_paths=sorted(field_paths))
else:
mask = None
firestore_api = self._client._firestore_api
try:
document_pb = firestore_api.get_document(
self._document_path,
mask=mask,
transaction=_helpers.get_transaction_id(transaction),
metadata=self._client._rpc_metadata,
)
except exceptions.NotFound:
data = None
exists = False
create_time = None
update_time = None
else:
data = _helpers.decode_dict(document_pb.fields, self._client)
exists = True
create_time = document_pb.create_time
update_time = document_pb.update_time
return DocumentSnapshot(
reference=self,
data=data,
exists=exists,
read_time=None, # No server read_time available
create_time=create_time,
update_time=update_time,
)
def collections(self, page_size=None):
    """Iterate over the subcollections of this document.

    Args:
        page_size (Optional[int]): Maximum number of collection IDs per
            results page. Non-positive values are ignored; the API picks
            a sensible default when omitted.

    Returns:
        Sequence[:class:`~google.cloud.firestore_v1.collection.CollectionReference`]:
            Iterator of this document's subcollections. Empty when the
            document does not exist.
    """
    result = self._client._firestore_api.list_collection_ids(
        self._document_path,
        page_size=page_size,
        metadata=self._client._rpc_metadata,
    )
    # Attach the parent document and a converter so each returned
    # collection ID can be materialized into a CollectionReference.
    result.document = self
    result.item_to_value = _item_to_collection_ref
    return result
def on_snapshot(self, callback):
    """Watch this document for changes using a background thread.

    The provided ``callback`` is invoked with three arguments --
    the list of snapshots, the list of changes, and the read time --
    each time the watched document changes.

    Args:
        callback (Callable[List[:class:`~google.cloud.firestore_v1.document.DocumentSnapshot`], \
            List[:class:`~google.cloud.firestore_v1.watch.DocumentChange`], datetime.datetime], NoneType):
            A callback to run when a change occurs.

    Returns:
        A watch object; call its ``unsubscribe()`` method to stop
        listening.

    Example:
        .. code-block:: python

            doc_watch = db.collection(u'users').document(u'alovelace').on_snapshot(callback)
            doc_watch.unsubscribe()
    """
    return Watch.for_document(self, callback, DocumentSnapshot, DocumentReference)
class DocumentSnapshot(object):
    """A point-in-time view of a Firestore document's data.

    The snapshot may not contain every field stored for the document
    (a projection may have been requested). Instances are produced by
    methods such as :meth:`~google.cloud.DocumentReference.get` rather
    than constructed by hand.

    Args:
        reference (:class:`~google.cloud.firestore_v1.document.DocumentReference`):
            Reference to the document this snapshot describes.
        data (Dict[str, Any]): The data captured in the snapshot.
        exists (bool): Whether the document existed when the snapshot
            was taken.
        read_time (:class:`google.protobuf.timestamp_pb2.Timestamp`):
            When the snapshot was read from the server.
        create_time (:class:`google.protobuf.timestamp_pb2.Timestamp`):
            When the document was created.
        update_time (:class:`google.protobuf.timestamp_pb2.Timestamp`):
            When the document was last updated.
    """

    def __init__(self, reference, data, exists, read_time, create_time, update_time):
        self._reference = reference
        # Deep-copy so callers cannot mutate the snapshot's data later.
        self._data = copy.deepcopy(data)
        self._exists = exists
        self.read_time = read_time
        self.create_time = create_time
        self.update_time = update_time

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return NotImplemented
        return other._reference == self._reference and other._data == self._data

    def __hash__(self):
        # NOTE(review): the hash is derived from update_time while __eq__
        # compares reference + data, so equal snapshots from different
        # revisions may hash differently -- preserved as-is; confirm
        # before relying on snapshots as dict/set keys.
        update = self.update_time
        return hash(self._reference) + hash(update.seconds) + hash(update.nanos)

    @property
    def _client(self):
        """:class:`~google.cloud.firestore_v1.client.Client`: Owning client."""
        return self._reference._client

    @property
    def exists(self):
        """bool: Whether the document existed when this snapshot was taken."""
        return self._exists

    @property
    def id(self):
        """str: The document identifier (last component of its path)."""
        return self._reference.id

    @property
    def reference(self):
        """:class:`~google.cloud.firestore_v1.document.DocumentReference`:
        Reference to the document that owns this data."""
        return self._reference

    def get(self, field_path):
        """Return (a copy of) the value stored at ``field_path``.

        See :meth:`~google.cloud.firestore_v1.client.Client.field_path`
        for more information on **field paths**. A copy is returned so
        the snapshot's own data stays immutable.

        Args:
            field_path (str): A ``.``-delimited list of field names.

        Returns:
            Any or None: The value for ``field_path``, or ``None`` when
            the snapshot's document does not exist.

        Raises:
            KeyError: If ``field_path`` does not match the nested data.
        """
        if not self._exists:
            return None
        value = field_path_module.get_nested_value(field_path, self._data)
        return copy.deepcopy(value)

    def to_dict(self):
        """Return (a copy of) all data in this snapshot.

        Returns:
            Dict[str, Any] or None: The snapshot data, or ``None`` when
            the referenced document does not exist.
        """
        return copy.deepcopy(self._data) if self._exists else None
def _get_document_path(client, path):
    """Build the fully-qualified document path string for ``path``.

    The result has the form
    ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.

    Args:
        client (:class:`~google.cloud.firestore_v1.client.Client`):
            Client holding configuration (including the database string).
        path (Tuple[str, ...]): The components of the document path.

    Returns:
        str: The fully-qualified document path.
    """
    components = (client._database_string, "documents") + path
    return _helpers.DOCUMENT_PATH_DELIMITER.join(components)
def _consume_single_get(response_iterator):
"""Consume a gRPC stream that should contain a single response.
The stream will correspond to a ``BatchGetDocuments`` request made
for a single document.
Args:
response_iterator (~google.cloud.exceptions.GrpcRendezvous): A
streaming iterator returned from a ``BatchGetDocuments``
request.
Returns:
~google.cloud.proto.firestore.v1.\
firestore_pb2.BatchGetDocumentsResponse: The single "get"
response in the batch.
Raises:
ValueError: If anything other than exactly one response is returned.
"""
# Calling ``list()`` consumes the entire iterator.
all_responses = list(response_iterator)
if len(all_responses) != 1:
raise ValueError(
"Unexpected response from `BatchGetDocumentsResponse`",
all_responses,
"Expected only one result",
)
return all_responses[0]
def _first_write_result(write_results):
"""Get first write result from list.
For cases where ``len(write_results) > 1``, this assumes the writes
occurred at the same time (e.g. if an update and transform are sent
at the same time).
Args:
write_results (List[google.cloud.proto.firestore.v1.\
write_pb2.WriteResult, ...]: The write results from a
``CommitResponse``.
Returns:
google.cloud.firestore_v1.types.WriteResult: The
lone write result from ``write_results``.
Raises:
ValueError: If there are zero write results. This is likely to
**never** occur, since the backend should be stable.
"""
if not write_results:
raise ValueError("Expected at least one write result")
return write_results[0]
def _item_to_collection_ref(iterator, item):
"""Convert collection ID to collection ref.
Args:
iterator (google.api_core.page_iterator.GRPCIterator):
iterator response
item (str): ID of the collection
"""
return iterator.document.collection(item)

View file

@ -0,0 +1,395 @@
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for managing / converting field paths to / from strings."""
try:
from collections import abc as collections_abc
except ImportError: # Python 2.7
import collections as collections_abc
import re
import six
# Error-message templates used by get_nested_value() below.
_FIELD_PATH_MISSING_TOP = "{!r} is not contained in the data"
_FIELD_PATH_MISSING_KEY = "{!r} is not contained in the data for the key {!r}"
_FIELD_PATH_WRONG_TYPE = (
    "The data at {!r} is not a dictionary, so it cannot contain the key {!r}"
)
# Characters used when rendering / escaping field-path elements.
_FIELD_PATH_DELIMITER = "."
_BACKSLASH = "\\"
_ESCAPED_BACKSLASH = _BACKSLASH * 2
_BACKTICK = "`"
_ESCAPED_BACKTICK = _BACKSLASH + _BACKTICK
# A "simple" field name needs no backtick quoting.
_SIMPLE_FIELD_NAME = re.compile("^[_a-zA-Z][_a-zA-Z0-9]*$")
# Matches an identifier-looking prefix followed by an invalid character.
_LEADING_ALPHA_INVALID = re.compile("^[_a-zA-Z][_a-zA-Z0-9]*[^_a-zA-Z0-9]")
# Token grammar for lexing field paths in _tokenize_field_path().
PATH_ELEMENT_TOKENS = [
    ("SIMPLE", r"[_a-zA-Z][_a-zA-Z0-9]*"),  # unquoted elements
    ("QUOTED", r"`(?:\\`|[^`])*?`"),  # quoted elements, unquoted
    ("DOT", r"\."),  # separator
]
# Combined alternation with one named group per token type.
TOKENS_PATTERN = "|".join("(?P<{}>{})".format(*pair) for pair in PATH_ELEMENT_TOKENS)
TOKENS_REGEX = re.compile(TOKENS_PATTERN)
def _tokenize_field_path(path):
    """Lex a field path, yielding its tokens (names and dots).

    Note this is a generator, not a list-returning function.

    Args:
        path (str): Field path to be lexed.

    Yields:
        str: Each token (a simple name, a backtick-quoted name, or
        a ``.`` separator) in order.

    Raises:
        ValueError: If any part of ``path`` cannot be tokenized.
    """
    position = 0
    token = TOKENS_REGEX.match(path)
    while token is not None:
        kind = token.lastgroup
        yield token.group(kind)
        position = token.end()
        token = TOKENS_REGEX.match(path, position)
    # Anything left over means the path contained invalid characters.
    if position != len(path):
        raise ValueError("Path {} not consumed, residue: {}".format(path, path[position:]))
def split_field_path(path):
    """Split a field path into its elements, dropping the dots.

    Args:
        path (str): Field path to be split.

    Returns:
        List(str): The path elements.

    Raises:
        ValueError: If the path does not match the
            elements-interspersed-with-dots pattern.
    """
    if not path:
        return []

    elements = []
    expect_separator = False
    for token in _tokenize_field_path(path):
        is_dot = token == "."
        # Tokens must strictly alternate: element, dot, element, ...
        if is_dot != expect_separator:
            raise ValueError("Invalid path: {}".format(path))
        if not is_dot:
            elements.append(token)
        expect_separator = not is_dot

    # A trailing dot (or no elements at all) is invalid.
    if not expect_separator or not elements:
        raise ValueError("Invalid path: {}".format(path))
    return elements
def parse_field_path(api_repr):
    """Parse a **field path** string into a list of nested field names.

    See :func:`field_path` for more on **field paths**.

    Args:
        api_repr (str):
            The unique Firestore api representation, consisting of
            simple or UTF-8 field names. It cannot exceed 1500 bytes
            and cannot be empty. Simple field names match
            ``'^[_a-zA-Z][_a-zA-Z0-9]*$'``; all other names are
            surrounded by backticks.

    Returns:
        List[str, ...]: The field names in the path.
    """
    # code dredged back up from
    # https://github.com/googleapis/google-cloud-python/pull/5109/files
    names = []
    for element in split_field_path(api_repr):
        if element.startswith("`") and element.endswith("`"):
            # Quoted (non-simple) name: strip the backticks, then
            # unescape embedded backticks and backslashes.
            element = element[1:-1]
            element = element.replace(_ESCAPED_BACKTICK, _BACKTICK)
            element = element.replace(_ESCAPED_BACKSLASH, _BACKSLASH)
        names.append(element)
    return names
def render_field_path(field_names):
    """Create a **field path** string from a list of field names.

    A **field path** is a ``.``-delimited concatenation of the names,
    representing a nested field: given

    .. code-block: python

        data = {'aa': {'bb': {'cc': 10}}}

    the path ``'aa.bb.cc'`` denotes ``data['aa']['bb']['cc']``.
    Names that are not simple identifiers are backtick-quoted, with
    embedded backslashes and backticks escaped.

    Args:
        field_names (Iterable[str, ...]): The list of field names.

    Returns:
        str: The ``.``-delimited field path.
    """
    rendered = []
    for name in field_names:
        simple = _SIMPLE_FIELD_NAME.match(name)
        if simple and simple.group(0) == name:
            # Identifier-like names need no quoting.
            rendered.append(name)
        else:
            escaped = name.replace(_BACKSLASH, _ESCAPED_BACKSLASH)
            escaped = escaped.replace(_BACKTICK, _ESCAPED_BACKTICK)
            rendered.append(_BACKTICK + escaped + _BACKTICK)
    return _FIELD_PATH_DELIMITER.join(rendered)
get_field_path = render_field_path # backward-compatibility
def get_nested_value(field_path, data):
    """Get a (potentially nested) value from a dictionary.

    A **field path** addresses nested data; for example with

    .. code-block:: python

        >>> data = {'top1': {'middle2': {'bottom3': 20}}}
        >>> get_nested_value('top1.middle2.bottom3', data)
        20

    See :meth:`~google.cloud.firestore_v1.client.Client.field_path` for
    more information on **field paths**.

    Args:
        field_path (str): A field path (``.``-delimited list of
            field names).
        data (Dict[str, Any]): The (possibly nested) data.

    Returns:
        Any: The value stored for ``field_path`` (not copied -- the
        caller is responsible for copying if needed).

    Raises:
        KeyError: If ``field_path`` does not match the nested data.
    """
    names = parse_field_path(field_path)
    current = data
    for depth, name in enumerate(names):
        if not isinstance(current, collections_abc.Mapping):
            # Walked into a non-dict value before the path was exhausted.
            partial = render_field_path(names[:depth])
            raise KeyError(_FIELD_PATH_WRONG_TYPE.format(partial, name))
        if name not in current:
            if depth == 0:
                raise KeyError(_FIELD_PATH_MISSING_TOP.format(name))
            partial = render_field_path(names[:depth])
            raise KeyError(_FIELD_PATH_MISSING_KEY.format(name, partial))
        current = current[name]
    return current
class FieldPath(object):
    """Field Path object for client use.

    A field path is a sequence of element keys, separated by periods.
    Each element key can be either a simple identifier or a full
    unicode string. In the string representation, non-identifier
    elements are quoted with backticks, with internal backticks and
    backslashes escaped by a backslash.

    Args:
        parts: (one or more strings)
            Indicating path of the key to be used.
    """

    def __init__(self, *parts):
        bad = any(
            not isinstance(part, six.string_types) or not part for part in parts
        )
        if bad:
            raise ValueError("One or more components is not a string or is empty.")
        self.parts = tuple(parts)

    @classmethod
    def from_api_repr(cls, api_repr):
        """Factory: parse a FieldPath from its API string form.

        Args:
            api_repr (str): a string path with non-identifier elements
                quoted. It cannot exceed 1500 characters and cannot be
                empty.

        Returns:
            (:class:`FieldPath`) An instance parsed from ``api_repr``.

        Raises:
            ValueError if the parsing fails
        """
        stripped = api_repr.strip()
        if not stripped:
            raise ValueError("Field path API representation cannot be empty.")
        return cls(*parse_field_path(stripped))

    @classmethod
    def from_string(cls, path_string):
        """Factory: parse a FieldPath from a unicode string.

        Splits on ``.`` and disallows the characters ``~*/[]`` after an
        identifier-like prefix. To build a FieldPath whose components
        contain such characters, call the constructor directly.

        Args:
            path_string (str): A unicode string which cannot contain
                `~*/[]` characters, cannot exceed 1500 bytes, and
                cannot be empty.

        Returns:
            (:class:`FieldPath`) An instance parsed from ``path_string``.
        """
        try:
            return cls.from_api_repr(path_string)
        except ValueError:
            # Fall back to a plain split-on-dot parse with validation.
            elements = path_string.split(".")
            for element in elements:
                if not element:
                    raise ValueError("Empty element")
                if _LEADING_ALPHA_INVALID.match(element):
                    raise ValueError(
                        "Non-alphanum char in element with leading alpha: {}".format(
                            element
                        )
                    )
            return FieldPath(*elements)

    def __repr__(self):
        quoted = ",".join("'" + part + "'" for part in self.parts)
        return "FieldPath({})".format(quoted)

    def __hash__(self):
        # Hash the canonical string form so equal paths hash equally.
        return hash(self.to_api_repr())

    def __eq__(self, other):
        if not isinstance(other, FieldPath):
            return NotImplemented
        return self.parts == other.parts

    def __lt__(self, other):
        if not isinstance(other, FieldPath):
            return NotImplemented
        return self.parts < other.parts

    def __add__(self, other):
        """Concatenate ``other`` onto the end of this field path.

        Args:
            other (~google.cloud.firestore_v1._helpers.FieldPath, str):
                The field path to append.
        """
        if isinstance(other, FieldPath):
            return FieldPath(*(self.parts + other.parts))
        if isinstance(other, six.string_types):
            return FieldPath(*(self.parts + FieldPath.from_string(other).parts))
        return NotImplemented

    def to_api_repr(self):
        """Render the quoted string representation of this FieldPath.

        Returns:
            (str) Quoted string representation of the stored path.
        """
        return render_field_path(self.parts)

    def eq_or_parent(self, other):
        """Check whether ``other`` is an ancestor of (or equal to) self.

        Returns:
            (bool) True IFF ``other`` is an ancestor or equal to
            ``self``, else False.
        """
        shared = min(len(self.parts), len(other.parts))
        return self.parts[:shared] == other.parts[:shared]

    def lineage(self):
        """Return field paths for all strict ancestors.

        Returns: Set[:class:`FieldPath`]
        """
        return {FieldPath(*self.parts[:i]) for i in range(1, len(self.parts))}

    @staticmethod
    def document_id():
        """A special FieldPath value referring to a document's ID, usable
        in queries to sort or filter by document ID.

        Returns: A special sentinel value to refer to the ID of a document.
        """
        return "__name__"

View file

@ -0,0 +1,180 @@
# -*- coding: utf-8 -*-
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Wrappers for protocol buffer enum types."""
import enum
class NullValue(enum.IntEnum):
    """
    ``NullValue`` is a singleton enumeration to represent the null value
    for the ``Value`` type union.
    The JSON representation for ``NullValue`` is JSON ``null``.
    Attributes:
        NULL_VALUE (int): Null value.
    """

    # Sole member of the enumeration.
    NULL_VALUE = 0
class DocumentTransform(object):
    # Plain-class namespace mirroring the nesting of the protobuf
    # message this enum wrapper corresponds to.
    class FieldTransform(object):
        class ServerValue(enum.IntEnum):
            """
            A value that is calculated by the server.
            Attributes:
                SERVER_VALUE_UNSPECIFIED (int): Unspecified. This value must not be used.
                REQUEST_TIME (int): The time at which the server processed the request, with millisecond
                precision.
            """

            SERVER_VALUE_UNSPECIFIED = 0
            REQUEST_TIME = 1
class StructuredQuery(object):
    # Plain-class namespaces below mirror the nesting of the protobuf
    # messages these enum wrappers correspond to.
    class Direction(enum.IntEnum):
        """
        A sort direction.
        Attributes:
            DIRECTION_UNSPECIFIED (int): Unspecified.
            ASCENDING (int): Ascending.
            DESCENDING (int): Descending.
        """

        DIRECTION_UNSPECIFIED = 0
        ASCENDING = 1
        DESCENDING = 2

    class CompositeFilter(object):
        class Operator(enum.IntEnum):
            """
            A composite filter operator.
            Attributes:
                OPERATOR_UNSPECIFIED (int): Unspecified. This value must not be used.
                AND (int): The results are required to satisfy each of the combined filters.
            """

            OPERATOR_UNSPECIFIED = 0
            AND = 1

    class FieldFilter(object):
        class Operator(enum.IntEnum):
            """
            A field filter operator.
            Attributes:
                OPERATOR_UNSPECIFIED (int): Unspecified. This value must not be used.
                LESS_THAN (int): The given ``field`` is less than the given ``value``.
                Requires:
                - That ``field`` come first in ``order_by``.
                LESS_THAN_OR_EQUAL (int): The given ``field`` is less than or equal to the given ``value``.
                Requires:
                - That ``field`` come first in ``order_by``.
                GREATER_THAN (int): The given ``field`` is greater than the given ``value``.
                Requires:
                - That ``field`` come first in ``order_by``.
                GREATER_THAN_OR_EQUAL (int): The given ``field`` is greater than or equal to the given ``value``.
                Requires:
                - That ``field`` come first in ``order_by``.
                EQUAL (int): The given ``field`` is equal to the given ``value``.
                ARRAY_CONTAINS (int): The given ``field`` is an array that contains the given ``value``.
                IN (int): The given ``field`` is equal to at least one value in the given
                array.
                Requires:
                - That ``value`` is a non-empty ``ArrayValue`` with at most 10 values.
                - No other ``IN``, ``ARRAY_CONTAINS_ANY``, or ``NOT_IN``.
                ARRAY_CONTAINS_ANY (int): The given ``field`` is an array that contains any of the values in
                the given array.
                Requires:
                - That ``value`` is a non-empty ``ArrayValue`` with at most 10 values.
                - No other ``IN``, ``ARRAY_CONTAINS_ANY``, or ``NOT_IN``.
            """

            OPERATOR_UNSPECIFIED = 0
            LESS_THAN = 1
            LESS_THAN_OR_EQUAL = 2
            GREATER_THAN = 3
            GREATER_THAN_OR_EQUAL = 4
            EQUAL = 5
            # NOTE(review): value 6 is skipped here -- presumably reserved
            # in the upstream proto definition; confirm before reusing.
            ARRAY_CONTAINS = 7
            IN = 8
            ARRAY_CONTAINS_ANY = 9

    class UnaryFilter(object):
        class Operator(enum.IntEnum):
            """
            A unary operator.
            Attributes:
                OPERATOR_UNSPECIFIED (int): Unspecified. This value must not be used.
                IS_NAN (int): The given ``field`` is equal to ``NaN``.
                IS_NULL (int): The given ``field`` is equal to ``NULL``.
            """

            OPERATOR_UNSPECIFIED = 0
            # NOTE(review): value 1 is skipped -- presumably reserved
            # in the upstream proto definition; confirm before reusing.
            IS_NAN = 2
            IS_NULL = 3
class TargetChange(object):
    # Plain-class namespace mirroring the nesting of the protobuf
    # message this enum wrapper corresponds to.
    class TargetChangeType(enum.IntEnum):
        """
        The type of change.
        Attributes:
            NO_CHANGE (int): No change has occurred. Used only to send an updated
            ``resume_token``.
            ADD (int): The targets have been added.
            REMOVE (int): The targets have been removed.
            CURRENT (int): The targets reflect all changes committed before the targets were
            added to the stream.
            This will be sent after or with a ``read_time`` that is greater than or
            equal to the time at which the targets were added.
            Listeners can wait for this change if read-after-write semantics are
            desired.
            RESET (int): The targets have been reset, and a new initial state for the targets
            will be returned in subsequent changes.
            After the initial state is complete, ``CURRENT`` will be returned even
            if the target was previously indicated to be ``CURRENT``.
        """

        NO_CHANGE = 0
        ADD = 1
        REMOVE = 2
        CURRENT = 3
        RESET = 4

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,109 @@
# GAPIC client configuration for the Firestore v1 service: per-method
# timeouts plus which retry-code group and backoff-parameter bundle apply.
config = {
    "interfaces": {
        "google.firestore.v1.Firestore": {
            # Named groups of gRPC status codes a method may retry on;
            # referenced by name from the "methods" section below.
            "retry_codes": {
                "idempotent": ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"],
                "aborted_unavailable": ["ABORTED", "UNAVAILABLE"],
                "non_idempotent": [],
                "idempotent2": ["DEADLINE_EXCEEDED", "UNAVAILABLE"],
            },
            # Backoff/timeout parameter bundles (delays and timeouts in
            # milliseconds), referenced by name from "methods" below.
            "retry_params": {
                "default": {
                    "initial_retry_delay_millis": 100,
                    "retry_delay_multiplier": 1.3,
                    "max_retry_delay_millis": 60000,
                    "initial_rpc_timeout_millis": 60000,
                    "rpc_timeout_multiplier": 1.0,
                    "max_rpc_timeout_millis": 60000,
                    "total_timeout_millis": 600000,
                },
                # Used by the streaming RPCs (same values as "default").
                "streaming": {
                    "initial_retry_delay_millis": 100,
                    "retry_delay_multiplier": 1.3,
                    "max_retry_delay_millis": 60000,
                    "initial_rpc_timeout_millis": 60000,
                    "rpc_timeout_multiplier": 1.0,
                    "max_rpc_timeout_millis": 60000,
                    "total_timeout_millis": 600000,
                },
            },
            # Per-RPC configuration.
            "methods": {
                "GetDocument": {
                    "timeout_millis": 60000,
                    "retry_codes_name": "idempotent2",
                    "retry_params_name": "default",
                },
                "ListDocuments": {
                    "timeout_millis": 60000,
                    "retry_codes_name": "idempotent2",
                    "retry_params_name": "default",
                },
                "CreateDocument": {
                    "timeout_millis": 60000,
                    "retry_codes_name": "non_idempotent",
                    "retry_params_name": "default",
                },
                "UpdateDocument": {
                    "timeout_millis": 60000,
                    "retry_codes_name": "non_idempotent",
                    "retry_params_name": "default",
                },
                "DeleteDocument": {
                    "timeout_millis": 60000,
                    "retry_codes_name": "idempotent",
                    "retry_params_name": "default",
                },
                "BatchGetDocuments": {
                    "timeout_millis": 60000,
                    "retry_codes_name": "idempotent",
                    "retry_params_name": "streaming",
                },
                "BatchWrite": {
                    "timeout_millis": 60000,
                    "retry_codes_name": "aborted_unavailable",
                    "retry_params_name": "default",
                },
                "BeginTransaction": {
                    "timeout_millis": 60000,
                    "retry_codes_name": "idempotent",
                    "retry_params_name": "default",
                },
                "Commit": {
                    "timeout_millis": 60000,
                    "retry_codes_name": "non_idempotent",
                    "retry_params_name": "default",
                },
                "Rollback": {
                    "timeout_millis": 60000,
                    "retry_codes_name": "idempotent",
                    "retry_params_name": "default",
                },
                "RunQuery": {
                    "timeout_millis": 60000,
                    "retry_codes_name": "idempotent",
                    "retry_params_name": "streaming",
                },
                "Write": {
                    "timeout_millis": 60000,
                    "retry_codes_name": "non_idempotent",
                    "retry_params_name": "streaming",
                },
                "Listen": {
                    "timeout_millis": 60000,
                    "retry_codes_name": "idempotent",
                    "retry_params_name": "streaming",
                },
                "ListCollectionIds": {
                    "timeout_millis": 60000,
                    "retry_codes_name": "idempotent",
                    "retry_params_name": "default",
                },
                "PartitionQuery": {
                    "timeout_millis": 60000,
                    "retry_codes_name": "non_idempotent",
                    "retry_params_name": "default",
                },
            },
        }
    }
}

View file

@ -0,0 +1,319 @@
# -*- coding: utf-8 -*-
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import google.api_core.grpc_helpers
from google.cloud.firestore_v1.proto import firestore_pb2_grpc
class FirestoreGrpcTransport(object):
"""gRPC transport class providing stubs for
google.firestore.v1 Firestore API.
The transport provides access to the raw gRPC stubs,
which can be used to take advantage of advanced
features of gRPC.
"""
# The scopes needed to make gRPC calls to all of the methods defined
# in this service.
_OAUTH_SCOPES = (
"https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/datastore",
)
def __init__(
    self, channel=None, credentials=None, address="firestore.googleapis.com:443"
):
    """Instantiate the transport.

    Args:
        channel (grpc.Channel): An existing ``Channel`` to call through.
            Mutually exclusive with ``credentials``.
        credentials (google.auth.credentials.Credentials): Credentials
            identifying this application; discovered from the
            environment when omitted.
        address (str): The address where the service is hosted.

    Raises:
        ValueError: If both ``channel`` and ``credentials`` are given.
    """
    # A channel already carries its own credentials, so accepting both
    # would be ambiguous.
    if channel is not None and credentials is not None:
        raise ValueError(
            "The `channel` and `credentials` arguments are mutually exclusive.",
        )

    if channel is None:
        # Allow unbounded message sizes in both directions.
        options = {
            "grpc.max_send_message_length": -1,
            "grpc.max_receive_message_length": -1,
        }
        channel = self.create_channel(
            address=address,
            credentials=credentials,
            options=options.items(),
        )

    self._channel = channel

    # gRPC "stubs" are bound to the channel and expose one callable per
    # RPC; the properties below surface them individually.
    self._stubs = {
        "firestore_stub": firestore_pb2_grpc.FirestoreStub(channel),
    }
@classmethod
def create_channel(
    cls, address="firestore.googleapis.com:443", credentials=None, **kwargs
):
    """Create and return a new gRPC channel for ``address``.

    Args:
        address (str): The host for the channel to use.
        credentials (~.Credentials): Credentials identifying this
            application; discovered from the environment when omitted.
        kwargs (dict): Extra keyword arguments forwarded to channel
            creation.

    Returns:
        grpc.Channel: A gRPC channel object.
    """
    return google.api_core.grpc_helpers.create_channel(
        address,
        credentials=credentials,
        scopes=cls._OAUTH_SCOPES,
        **kwargs
    )
@property
def channel(self):
    """grpc.Channel: The gRPC channel used by this transport."""
    return self._channel
@property
def get_document(self):
    """gRPC stub for :meth:`FirestoreClient.get_document`: gets a single document.

    Returns:
        Callable: Accepts the deserialized request object and returns
        the deserialized response object.
    """
    stub = self._stubs["firestore_stub"]
    return stub.GetDocument
@property
def list_documents(self):
    """gRPC stub for :meth:`FirestoreClient.list_documents`: lists documents.

    Returns:
        Callable: Accepts the deserialized request object and returns
        the deserialized response object.
    """
    stub = self._stubs["firestore_stub"]
    return stub.ListDocuments
@property
def create_document(self):
    """gRPC stub for :meth:`FirestoreClient.create_document`: creates a new document.

    Returns:
        Callable: Accepts the deserialized request object and returns
        the deserialized response object.
    """
    stub = self._stubs["firestore_stub"]
    return stub.CreateDocument
@property
def update_document(self):
    """gRPC stub for :meth:`FirestoreClient.update_document`: updates or inserts a document.

    Returns:
        Callable: Accepts the deserialized request object and returns
        the deserialized response object.
    """
    stub = self._stubs["firestore_stub"]
    return stub.UpdateDocument
@property
def delete_document(self):
    """gRPC stub for :meth:`FirestoreClient.delete_document`: deletes a document.

    Returns:
        Callable: Accepts the deserialized request object and returns
        the deserialized response object.
    """
    stub = self._stubs["firestore_stub"]
    return stub.DeleteDocument
@property
def batch_get_documents(self):
    """gRPC stub for :meth:`FirestoreClient.batch_get_documents`.

    Gets multiple documents. Documents are not guaranteed to come back
    in the order they were requested.

    Returns:
        Callable: Accepts the deserialized request object and returns
        the deserialized response object.
    """
    stub = self._stubs["firestore_stub"]
    return stub.BatchGetDocuments
@property
def batch_write(self):
    """gRPC stub for :meth:`FirestoreClient.batch_write`.

    Applies a batch of write operations. Unlike ``Commit``, the writes
    are not applied atomically and may be applied out of order; each
    write succeeds or fails independently (see ``BatchWriteResponse``),
    and at most one write per document is allowed. Use ``Commit`` for
    an atomically applied set of writes.

    Returns:
        Callable: Accepts the deserialized request object and returns
        the deserialized response object.
    """
    stub = self._stubs["firestore_stub"]
    return stub.BatchWrite
@property
def begin_transaction(self):
    """gRPC stub for :meth:`FirestoreClient.begin_transaction`: starts a new transaction.

    Returns:
        Callable: Accepts the deserialized request object and returns
        the deserialized response object.
    """
    stub = self._stubs["firestore_stub"]
    return stub.BeginTransaction
@property
def commit(self):
    """gRPC stub for :meth:`FirestoreClient.commit`.

    Commits a transaction, while optionally updating documents.

    Returns:
        Callable: Accepts the deserialized request object and returns
        the deserialized response object.
    """
    stub = self._stubs["firestore_stub"]
    return stub.Commit
@property
def rollback(self):
    """gRPC stub for :meth:`FirestoreClient.rollback`: rolls back a transaction.

    Returns:
        Callable: Accepts the deserialized request object and returns
        the deserialized response object.
    """
    stub = self._stubs["firestore_stub"]
    return stub.Rollback
@property
def run_query(self):
    """Return the gRPC stub for :meth:`FirestoreClient.run_query`.

    Runs a query.

    Returns:
        Callable: A callable which accepts the appropriate
        deserialized request object and returns a
        deserialized response object.
    """
    firestore_stub = self._stubs["firestore_stub"]
    return firestore_stub.RunQuery
@property
def write(self):
    """Return the gRPC stub for :meth:`FirestoreClient.write`.

    Streams batches of document updates and deletes, in order.

    Returns:
        Callable: A callable which accepts the appropriate
        deserialized request object and returns a
        deserialized response object.
    """
    firestore_stub = self._stubs["firestore_stub"]
    return firestore_stub.Write
@property
def listen(self):
    """Return the gRPC stub for :meth:`FirestoreClient.listen`.

    Listens to changes.

    Returns:
        Callable: A callable which accepts the appropriate
        deserialized request object and returns a
        deserialized response object.
    """
    firestore_stub = self._stubs["firestore_stub"]
    return firestore_stub.Listen
@property
def list_collection_ids(self):
    """Return the gRPC stub for :meth:`FirestoreClient.list_collection_ids`.

    Lists all the collection IDs underneath a document.

    Returns:
        Callable: A callable which accepts the appropriate
        deserialized request object and returns a
        deserialized response object.
    """
    firestore_stub = self._stubs["firestore_stub"]
    return firestore_stub.ListCollectionIds
@property
def partition_query(self):
    """Return the gRPC stub for :meth:`FirestoreClient.partition_query`.

    Partitions a query by returning partition cursors that can be used to run
    the query in parallel. The returned partition cursors are split points that
    can be used by RunQuery as starting/end points for the query results.

    Returns:
        Callable: A callable which accepts the appropriate
        deserialized request object and returns a
        deserialized response object.
    """
    firestore_stub = self._stubs["firestore_stub"]
    return firestore_stub.PartitionQuery

View file

@ -0,0 +1,207 @@
# Copyright 2017 Google LLC All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from enum import Enum
from google.cloud.firestore_v1._helpers import decode_value
import math
class TypeOrder(Enum):
    """Relative ordering rank of each Firestore value type.

    NOTE: This order is defined by the backend and cannot be changed.
    """

    NULL = 0
    BOOLEAN = 1
    NUMBER = 2
    TIMESTAMP = 3
    STRING = 4
    BLOB = 5
    REF = 6
    GEO_POINT = 7
    ARRAY = 8
    OBJECT = 9

    @staticmethod
    def from_value(value):
        """Return the :class:`TypeOrder` rank for a Firestore value message.

        Args:
            value: A protobuf ``Value`` message exposing a ``value_type``
                oneof via ``WhichOneof``.

        Returns:
            TypeOrder: The backend-defined rank of the value's set field.

        Raises:
            ValueError: If the oneof is unset or names an unknown field.
        """
        v = value.WhichOneof("value_type")

        lut = {
            "null_value": TypeOrder.NULL,
            "boolean_value": TypeOrder.BOOLEAN,
            "integer_value": TypeOrder.NUMBER,
            "double_value": TypeOrder.NUMBER,
            "timestamp_value": TypeOrder.TIMESTAMP,
            "string_value": TypeOrder.STRING,
            "bytes_value": TypeOrder.BLOB,
            "reference_value": TypeOrder.REF,
            "geo_point_value": TypeOrder.GEO_POINT,
            "array_value": TypeOrder.ARRAY,
            "map_value": TypeOrder.OBJECT,
        }

        if v not in lut:
            # str(v): WhichOneof returns None when the oneof is unset;
            # concatenating None directly raised TypeError instead of the
            # intended ValueError.
            raise ValueError("Could not detect value type for " + str(v))
        return lut[v]
class Order(object):
    """Implements the backend's ordering semantics for Firestore values.

    All comparison helpers return the conventional three-way result:
    -1, 0, or 1.
    """

    @classmethod
    def compare(cls, left, right):
        """Main comparison function for all Firestore types.

        @return -1 is left < right, 0 if left == right, otherwise 1
        """
        # Values of different types order purely by their type rank.
        left_rank = TypeOrder.from_value(left).value
        right_rank = TypeOrder.from_value(right).value
        if left_rank != right_rank:
            return -1 if left_rank < right_rank else 1

        kind = left.WhichOneof("value_type")
        if kind == "null_value":
            return 0  # nulls are all equal

        # Same-type comparison dispatches on the oneof field name.
        comparators = {
            "boolean_value": lambda lhs, rhs: cls._compare_to(
                lhs.boolean_value, rhs.boolean_value
            ),
            "integer_value": cls.compare_numbers,
            "double_value": cls.compare_numbers,
            "timestamp_value": cls.compare_timestamps,
            "string_value": lambda lhs, rhs: cls._compare_to(
                lhs.string_value, rhs.string_value
            ),
            "bytes_value": cls.compare_blobs,
            "reference_value": cls.compare_resource_paths,
            "geo_point_value": cls.compare_geo_points,
            "array_value": cls.compare_arrays,
            "map_value": cls.compare_objects,
        }
        comparator = comparators.get(kind)
        if comparator is None:
            raise ValueError("Unknown ``value_type``", str(kind))
        return comparator(left, right)

    @staticmethod
    def compare_blobs(left, right):
        """Compare two ``bytes_value`` fields lexicographically."""
        return Order._compare_to(left.bytes_value, right.bytes_value)

    @staticmethod
    def compare_timestamps(left, right):
        """Compare two timestamps: by seconds first, then by nanos."""
        lhs = left.timestamp_value
        rhs = right.timestamp_value

        by_seconds = Order._compare_to(lhs.seconds or 0, rhs.seconds or 0)
        if by_seconds != 0:
            return by_seconds
        return Order._compare_to(lhs.nanos or 0, rhs.nanos or 0)

    @staticmethod
    def compare_geo_points(left, right):
        """Compare two geo points: by latitude first, then by longitude."""
        lhs = decode_value(left, None)
        rhs = decode_value(right, None)

        by_latitude = Order._compare_to(lhs.latitude, rhs.latitude)
        if by_latitude != 0:
            return by_latitude
        return Order._compare_to(lhs.longitude, rhs.longitude)

    @staticmethod
    def compare_resource_paths(left, right):
        """Compare two document references segment-by-segment, then by length."""
        lhs = left.reference_value
        rhs = right.reference_value

        # zip stops at the shorter path, so only shared segments compare here.
        for lhs_segment, rhs_segment in zip(lhs.split("/"), rhs.split("/")):
            by_segment = Order._compare_to(lhs_segment, rhs_segment)
            if by_segment != 0:
                return by_segment

        # Equal prefixes: the shorter path orders first.
        return Order._compare_to(len(lhs), len(rhs))

    @staticmethod
    def compare_arrays(left, right):
        """Compare two arrays element-wise, then by length."""
        lhs_values = left.array_value.values
        rhs_values = right.array_value.values

        for lhs_element, rhs_element in zip(lhs_values, rhs_values):
            by_element = Order.compare(lhs_element, rhs_element)
            if by_element != 0:
                return by_element

        return Order._compare_to(len(lhs_values), len(rhs_values))

    @staticmethod
    def compare_objects(left, right):
        """Compare two maps by sorted key, then value, then field count."""
        lhs_fields = left.map_value.fields
        rhs_fields = right.map_value.fields

        for lhs_key, rhs_key in zip(sorted(lhs_fields), sorted(rhs_fields)):
            by_key = Order._compare_to(lhs_key, rhs_key)
            if by_key != 0:
                return by_key

            by_value = Order.compare(lhs_fields[lhs_key], rhs_fields[rhs_key])
            if by_value != 0:
                return by_value

        return Order._compare_to(len(lhs_fields), len(rhs_fields))

    @staticmethod
    def compare_numbers(left, right):
        """Compare two numeric values (integer or double) as doubles."""
        lhs = decode_value(left, None)
        rhs = decode_value(right, None)
        return Order.compare_doubles(lhs, rhs)

    @staticmethod
    def compare_doubles(left, right):
        """Compare two doubles; NaN sorts before every other value."""
        lhs_is_nan = math.isnan(left)
        rhs_is_nan = math.isnan(right)
        if lhs_is_nan and rhs_is_nan:
            return 0
        if lhs_is_nan:
            return -1
        if rhs_is_nan:
            return 1
        return Order._compare_to(left, right)

    @staticmethod
    def _compare_to(left, right):
        # Python 3 has no cmp() builtin, so spell out the three-way result;
        # see https://docs.python.org/3.0/whatsnew/3.0.html#ordering-comparisons
        if left < right:
            return -1
        if left > right:
            return 1
        return 0

Some files were not shown because too many files have changed in this diff Show more